diff --git a/AUTHORS b/AUTHORS index 2fdd9fd4f4..bd7ab248c2 100644 --- a/AUTHORS +++ b/AUTHORS @@ -68,6 +68,7 @@ Jose Antonio Olivera Ortega Keiichi Enomoto Kiran Thind Korniltsev Anatoly +Kyutae Lee Lennart Grahl Luke Weber Maksim Khobat @@ -79,6 +80,7 @@ Maksim Sisov Maxim Pavlov Maxim Potapov Michael Iedema +MichaƂ Zarach Michel Promonet Miguel Paris Mike Gilbert @@ -106,6 +108,7 @@ Sarah Thompson Satender Saroha Saul Kravitz Sergio Garcia Murillo +Shaofan Qi Shuhai Peng Silviu Caragea Stefan Gula @@ -137,14 +140,17 @@ Pengfei Han Agora IO <*@agora.io> ARM Holdings <*@arm.com> BroadSoft Inc. <*@broadsoft.com> +Canonical Ltd <*@canonical.com> CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io> Facebook Inc. <*@fb.com> Google Inc. <*@google.com> Highfive, Inc. <*@highfive.com> +Hopin Ltd. <*@hopin.to> HyperConnect Inc. <*@hpcnt.com> Intel Corporation <*@intel.com> LG Electronics, Inc. <*@lge.com> Life On Air Inc. <*@lifeonair.com> +Meta Platforms, Inc. <*@meta.com> Microsoft Corporation <*@microsoft.com> MIPS Technologies <*@mips.com> Mozilla Foundation <*@mozilla.com> diff --git a/BUILD.gn b/BUILD.gn index f5c1973f01..5395ad9b1b 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -47,6 +47,7 @@ if (!build_with_chromium) { } if (rtc_include_tests) { deps += [ + ":fuchsia_perf_tests", ":rtc_unittests", ":video_engine_tests", ":voip_unittests", @@ -106,6 +107,9 @@ if (!build_with_chromium) { "tools_webrtc/perf:webrtc_dashboard_upload", ] } + if ((is_linux || is_chromeos) && rtc_use_pipewire) { + deps += [ "modules/desktop_capture:shared_screencast_stream_test" ] + } } if (target_os == "android") { deps += [ "tools_webrtc:binary_version_check" ] @@ -270,6 +274,12 @@ config("common_config") { defines += [ "WEBRTC_ENABLE_PROTOBUF=0" ] } + if (rtc_strict_field_trials) { + defines += [ "WEBRTC_STRICT_FIELD_TRIALS=1" ] + } else { + defines += [ "WEBRTC_STRICT_FIELD_TRIALS=0" ] + } + if (rtc_include_internal_audio_device) { defines += [ "WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE" ] } 
@@ -719,6 +729,22 @@ if (rtc_include_tests && !build_with_chromium) { } } + rtc_test("fuchsia_perf_tests") { + testonly = true + deps = [ + #TODO(fxbug.dev/115601) - Enable when fixed + #"call:call_perf_tests", + #"video:video_pc_full_stack_tests", + "modules/audio_coding:audio_coding_perf_tests", + "modules/audio_processing:audio_processing_perf_tests", + "pc:peerconnection_perf_tests", + "test:test_main", + "video:video_full_stack_tests", + ] + + data = webrtc_perf_tests_resources + } + rtc_test("webrtc_nonparallel_tests") { testonly = true deps = [ "rtc_base:rtc_base_nonparallel_tests" ] diff --git a/DEPS b/DEPS index 0550aa83c4..e751e9dd63 100644 --- a/DEPS +++ b/DEPS @@ -10,27 +10,28 @@ vars = { # chromium waterfalls. More info at: crbug.com/570091. 'checkout_configuration': 'default', 'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"', - 'chromium_revision': '77bb30c0feae9b39c57af30e7b77a68d75727b83', + 'chromium_revision': 'd4870f767ea66ffff0f83f8267d2b61dfff0bf5d', # Keep the Chromium default of generating location tags. 'generate_location_tags': True, # ResultDB version - 'resultdb_version': 'git_revision:6cc18e2763e180929d70c786b419c1f8e6bcc66c', + 'resultdb_version': 'git_revision:39e20ee396fe4a84eaa7f7d389e5659198c12e87', # By default, download the fuchsia sdk from the public sdk directory. 'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/gn/', - 'fuchsia_version': 'version:9.20220919.2.1', + 'fuchsia_version': 'version:10.20221201.3.1', # By default, download the fuchsia images from the fuchsia GCS bucket. 'fuchsia_images_bucket': 'fuchsia', - 'checkout_fuchsia_boot_images': "qemu.x64", 'checkout_fuchsia': False, - - # By default, do not check out the re-client binaries. - 'checkout_reclient': False, + # Since the images are hundreds of MB, default to only downloading the image + # most commonly useful for developers. Bots and developers that need to use + # other images can override this with additional images. 
+ 'checkout_fuchsia_boot_images': "terminal.qemu-x64", + 'checkout_fuchsia_product_bundles': '"{checkout_fuchsia_boot_images}" != ""', # reclient CIPD package version - 'reclient_version': 're_client_version:0.81.1.0853992-gomaip', + 'reclient_version': 're_client_version:0.87.0.b6908b3-gomaip', # ninja CIPD package version # https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja @@ -40,30 +41,30 @@ vars = { deps = { # TODO(kjellander): Move this to be Android-only. 'src/base': - 'https://chromium.googlesource.com/chromium/src/base@b054aaefd4d4ec5aad7189c4e97000a06b594163', + 'https://chromium.googlesource.com/chromium/src/base@4a17a70520935f05e354de004dcb44c7b1df534f', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@1c4f38fd4f534d78b72cefc376a03b3e8b486e7c', + 'https://chromium.googlesource.com/chromium/src/build@c91a4dbdb666e9bd82b187109ad311c58a552ce6', 'src/buildtools': - 'https://chromium.googlesource.com/chromium/src/buildtools@24fa2da896a027e7202bb8886177cccfe885b67d', + 'https://chromium.googlesource.com/chromium/src/buildtools@dcbf73cdcbcd0a2948b9e40bf500de166f622261', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. 
'src/examples/androidtests/third_party/gradle': { 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@f2d1fb54a951d8b11d25748e4711bec8d128d7e3', 'condition': 'checkout_android', }, 'src/ios': { - 'url': 'https://chromium.googlesource.com/chromium/src/ios@2043810d93b43e6c40586f228b85ff65fd277067', + 'url': 'https://chromium.googlesource.com/chromium/src/ios@36316fedfa1873be15dcaf681bf1295696abafbc', 'condition': 'checkout_ios', }, 'src/testing': - 'https://chromium.googlesource.com/chromium/src/testing@6f2362298838e8789b09282cf198c8582f9c4555', + 'https://chromium.googlesource.com/chromium/src/testing@9adab94016c5e0840a235b4c0a7dd85173d3f370', 'src/third_party': - 'https://chromium.googlesource.com/chromium/src/third_party@28a4580f804c4fc4279ecf10100a409811030235', + 'https://chromium.googlesource.com/chromium/src/third_party@fc733299410a7104a0848539baab0131b8a616b8', 'src/buildtools/linux64': { 'packages': [ { 'package': 'gn/gn/linux-${{arch}}', - 'version': 'git_revision:b9c6c19be95a3863e02f00f1fe403b2502e345b6', + 'version': 'git_revision:5e19d2fb166fbd4f6f32147fbb2f497091a54ad8', } ], 'dep_type': 'cipd', @@ -73,7 +74,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/mac-${{arch}}', - 'version': 'git_revision:b9c6c19be95a3863e02f00f1fe403b2502e345b6', + 'version': 'git_revision:5e19d2fb166fbd4f6f32147fbb2f497091a54ad8', } ], 'dep_type': 'cipd', @@ -83,7 +84,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/windows-amd64', - 'version': 'git_revision:b9c6c19be95a3863e02f00f1fe403b2502e345b6', + 'version': 'git_revision:5e19d2fb166fbd4f6f32147fbb2f497091a54ad8', } ], 'dep_type': 'cipd', @@ -92,22 +93,24 @@ deps = { 'src/buildtools/reclient': { 'packages': [ { + # https://chrome-infra-packages.appspot.com/p/infra/rbe/client/ 'package': 'infra/rbe/client/${{platform}}', 'version': Var('reclient_version'), } ], 'dep_type': 'cipd', - 'condition': 'checkout_reclient', + # Reclient doesn't have linux-arm64 package. 
+ 'condition': 'not (host_os == "linux" and host_cpu == "arm64")', }, 'src/buildtools/clang_format/script': 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git@8b525d2747f2584fc35d8c7e612e66f377858df7', 'src/buildtools/third_party/libc++/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@64d36e572d3f9719c5d75011a718f33f11126851', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@2fc3d704672fbd3e85fad8492d39e02d49412891', 'src/buildtools/third_party/libc++abi/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@9572e56a12c88c011d504a707ca94952be4664f9', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@123239cdb67b3d69c5af933e364a84019a33575c', 'src/buildtools/third_party/libunwind/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@1111799723f6a003e6f52202b9bf84387c552081', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@5e22a7fe2335161ab267867c8e1be481bf6c8300', 'src/third_party/ninja': { 'packages': [ @@ -143,7 +146,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/aapt2', - 'version': 'nSnWUNu6ssPA-kPMvFQj4JjDXRWj2iubvvjfT1F6HCMC', + 'version': 'cbNG7g8Sinh-lsT8hWsU-RyXqLT_uh4jIb1fjCdhrzIC', }, ], 'condition': 'checkout_android', @@ -154,7 +157,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/bundletool', - 'version': 'IEZQhHFQzO9Ci1QxWZmssKqGmt2r_nCDMKr8t4cKY34C', + 'version': 'eYz83zbG33sGLyNdc-a64qo1K6LRcS9GwW7GmSvyWisC', }, ], 'condition': 'checkout_android', @@ -162,11 +165,11 @@ deps = { }, 'src/third_party/boringssl/src': - 'https://boringssl.googlesource.com/boringssl.git@1ee71185a2322dc354bee5e5a0abfb1810a27dc6', + 
'https://boringssl.googlesource.com/boringssl.git@28f96c2686459add7acedcd97cb841030bdda019', 'src/third_party/breakpad/breakpad': - 'https://chromium.googlesource.com/breakpad/breakpad.git@e085b3b50bde862d0cf3ce4594e3f391bcf5faec', + 'https://chromium.googlesource.com/breakpad/breakpad.git@cc7abac08b0c52e6581b9c9c4226816b17a4c26d', 'src/third_party/catapult': - 'https://chromium.googlesource.com/catapult.git@4793433248183dd073e608f655204d4acfdc7193', + 'https://chromium.googlesource.com/catapult.git@bf0782db65682f3918886ba69807c03fe515c2e8', 'src/third_party/ced/src': { 'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5', }, @@ -175,9 +178,11 @@ deps = { 'src/third_party/crc32c/src': 'https://chromium.googlesource.com/external/github.com/google/crc32c.git@fa5ade41ee480003d9c5af6f43567ba22e4e17e6', 'src/third_party/depot_tools': - 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@2c0a8c736a59044e4acc7be9e172343adc5c4310', + 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@41a2d0f1a0173723f63ca2994e17c81eaf302b65', 'src/third_party/ffmpeg': - 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@b9f01c3c54576330b2cf8918c54d5ee5be8faefe', + 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@a249b21db6516234e5456716ae074fbb00176b3f', + 'src/third_party/flatbuffers/src': + 'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@e3017029647a88eb6f509ee9744012fffeb0d371', 'src/third_party/grpc/src': { 'url': 'https://chromium.googlesource.com/external/github.com/grpc/grpc.git@dd77c67217b10ffeaf766e25eb8b46d2d59de4ff', }, @@ -187,9 +192,9 @@ deps = { 'condition': 'checkout_linux', }, 'src/third_party/freetype/src': - 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@5182264a40e70ff31be0a0ec8a0d5ffb5f65582e', + 
'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@ace97a02a4461bbdae29da4019c105eead95e277', 'src/third_party/harfbuzz-ng/src': - 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@56c467093598ec559a7148b61e112e9de52b7076', + 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@2822b589bc837fae6f66233e2cf2eef0f6ce8470', 'src/third_party/google_benchmark/src': { 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@f730846b0a3c0dc0699978846fb14ffb2fad0bdc', }, @@ -209,7 +214,7 @@ deps = { 'src/third_party/googletest/src': 'https://chromium.googlesource.com/external/github.com/google/googletest.git@af29db7ec28d6df1c7f0f745186884091e602e07', 'src/third_party/icu': { - 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@20f8ac695af59b6c830def7d4e95bfeb13dd7be5', + 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@1b7d391f0528fb3a4976b7541b387ee04f915f83', }, 'src/third_party/jdk': { 'packages': [ @@ -224,7 +229,7 @@ deps = { 'src/third_party/jsoncpp/source': 'https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git@42e892d96e47b1f6e29844cc705e148ec4856448', # from svn 248 'src/third_party/junit/src': { - 'url': 'https://chromium.googlesource.com/external/junit.git@64155f8a9babcfcf4263cf4d08253a1556e75481', + 'url': 'https://chromium.googlesource.com/external/junit.git@05fe2a64f59127c02135be22f416e91260d6ede6', 'condition': 'checkout_android', }, # Used for building libFuzzers (only supports Linux). 
@@ -237,17 +242,17 @@ deps = { 'src/third_party/dav1d/libdav1d': 'https://chromium.googlesource.com/external/github.com/videolan/dav1d.git@87f9a81cd770e49394a45deca7a3df41243de00b', 'src/third_party/libaom/source/libaom': - 'https://aomedia.googlesource.com/aom.git@7f32eb35ff2589369f095388701e3dfc4d6a9381', + 'https://aomedia.googlesource.com/aom.git@a84503456d4276348da3e80de7569adb1b389a60', 'src/third_party/libunwindstack': { - 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@8740b09bd1f8b81bdba92766afcb9df1d6a1f14e', + 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@4dbfa0e8c844c8e243b297bc185e54a99ff94f9e', 'condition': 'checkout_android', }, 'src/third_party/perfetto': - 'https://android.googlesource.com/platform/external/perfetto.git@129b11632395a84eb3307d72fde9a90945e18619', + 'https://android.googlesource.com/platform/external/perfetto.git@61ba4b9b606100828e425eb9a245dd45c5591f28', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@9d6d0624d7943a09cc0be9df1a7402522989ac1a', + 'https://chromium.googlesource.com/webm/libvpx.git@605350bd5b68ac47f595d60cc8ef346588e773c0', 'src/third_party/libyuv': - 'https://chromium.googlesource.com/libyuv/libyuv.git@00950840d1c9bcbb3eb6ebc5aac5793e71166c8b', + 'https://chromium.googlesource.com/libyuv/libyuv.git@4a3c79cb31aee310443039c37d64377ed06f1d14', 'src/third_party/lss': { 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@ce877209e11aa69dcfffbd53ef90ea1d07136521', 'condition': 'checkout_android or checkout_linux', @@ -259,16 +264,16 @@ deps = { # Used by boringssl. 
'src/third_party/nasm': { - 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@9215e8e1d0fe474ffd3e16c1a07a0f97089e6224' + 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@0873b2bae6a5388a1c55deac8456e3c60a47ca08' }, 'src/third_party/openh264/src': - 'https://chromium.googlesource.com/external/github.com/cisco/openh264@fac04ceb3e966f613ed17e98178e9d690280bba6', + 'https://chromium.googlesource.com/external/github.com/cisco/openh264@db956674bbdfbaab5acdd3fdb4117c2fef5527e9', 'src/third_party/r8': { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'szXK3tCGU7smsNs4r2mGqxme7d9KWLaOk0_ghbCJxUQC', + 'version': 'pv_BIbpK8sxEFp63muv1gKsbyWJoyv4PDw342wc9H6AC', }, ], 'condition': 'checkout_android', @@ -281,7 +286,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'tQcmB4wHWxamdPd8ix5IwMv8eBEbMBeN4vEtGjikDeQC', + 'version': 'qGtBu6TtxyR5XNy4cmsslb7c946YtkZF5_QCjVP-wc8C', }, ], 'condition': 'checkout_android', @@ -292,7 +297,7 @@ deps = { 'condition': 'checkout_android', }, 'src/tools': - 'https://chromium.googlesource.com/chromium/src/tools@d97453670f86a8cc5050802a4a49083c5db3b39a', + 'https://chromium.googlesource.com/chromium/src/tools@0c34fd995e2cfdb007209c44bb0d28e894b1d2ea', 'src/third_party/accessibility_test_framework': { 'packages': [ @@ -369,7 +374,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/androidx', - 'version': 'h7h2tZ_Dqu-O57Bk14oz6B7AaJLu1naK5jGnsQ5vaJQC', + 'version': '3ADwB26rDMIdmScjo6j4e98VQl6amFOyrvsvrVRthBMC', }, ], 'condition': 'checkout_android', @@ -380,7 +385,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/manifest_merger', - 'version': 'bUREd_PkCqlp2ww6zmyOLGf0jhqgbnf6GT4V1xkAZ10C', + 'version': 'X4l8RIBEAF108FpSEWRF7UHqq-kY8T3ibSsObGU5u3UC', }, ], 'condition': 'checkout_android', @@ -415,7 +420,7 @@ deps = { }, { 'package': 'chromium/third_party/android_sdk/public/cmdline-tools', - 'version': 
'IPzAG-uU5zVMxohpg9-7-N0tQC1TCSW1VbrBFw7Ld04C', + 'version': 'oWlET2yQhaPKQ66tYNuSPaueU78Z9VlxpyxOoUjwRuIC', }, ], 'condition': 'checkout_android', @@ -470,7 +475,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/turbine', - 'version': 'rrpgWQ-uylo8c5IPgUVP464LwcVOmt29MqwsR59O_zkC', + 'version': 'R-Qp1tMBqIuETMfXNqQU9GB00ij6dsPjVmjDuvH_194C', }, ], 'condition': 'checkout_android', @@ -481,15 +486,30 @@ deps = { 'packages': [ { 'package': 'infra/tools/luci/isolate/${{platform}}', - 'version': 'git_revision:9f65ffe719f73af390727d369b342c22fa37ea54', + 'version': 'git_revision:bac571b5399502fa16ac48a1d3820e1117505085', }, { 'package': 'infra/tools/luci/swarming/${{platform}}', - 'version': 'git_revision:9f65ffe719f73af390727d369b342c22fa37ea54', + 'version': 'git_revision:bac571b5399502fa16ac48a1d3820e1117505085', }, ], 'dep_type': 'cipd', }, + 'src/third_party/pipewire/linux-amd64': { + 'packages': [ + { + 'package': 'chromium/third_party/pipewire/linux-amd64', + 'version': 'BaVKmAmwpjdS6O0pnjSaMNSKhO1nmk5mRnyPVAJ2-HEC', + }, + { + 'package': 'chromium/third_party/pipewire-media-session/linux-amd64', + 'version': 'Y6wUeITvAA0QD1vt8_a7eQdzbp0gkI1B02qfZUMJdowC', + }, + ], + + 'condition': 'checkout_linux', + 'dep_type': 'cipd', + }, # Everything coming after this is automatically updated by the auto-roller. 
# === ANDROID_DEPS Generated Code Start === @@ -923,28 +943,6 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs', - 'version': 'version:2@1.1.5.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration', - 'version': 'version:2@1.1.5.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - 'src/third_party/android_deps/libs/com_android_tools_layoutlib_layoutlib_api': { 'packages': [ { @@ -1583,17 +1581,6 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_flatbuffers_flatbuffers_java': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_flatbuffers_flatbuffers_java', - 'version': 'version:2@2.0.3.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - 'src/third_party/android_deps/libs/com_google_googlejavaformat_google_java_format': { 'packages': [ { @@ -2416,16 +2403,14 @@ hooks = [ '--version={fuchsia_version}', ], }, - { 'name': 'Download Fuchsia system images', 'pattern': '.', - 'condition': 'checkout_fuchsia', + 'condition': 'checkout_fuchsia and checkout_fuchsia_product_bundles', 'action': [ 'python3', - 'src/build/fuchsia/update_images.py', - '--boot-images={checkout_fuchsia_boot_images}', - '--default-bucket={fuchsia_images_bucket}', + 'src/build/fuchsia/update_product_bundles.py', + '{checkout_fuchsia_boot_images}', ], }, { @@ -2571,27 +2556,51 @@ hooks = [ ], }, { - 'name': 'msan_chained_origins', + 'name': 'msan_chained_origins_focal', 'pattern': '.', 'condition': 'checkout_instrumented_libraries', 'action': [ 'python3', 
'src/third_party/depot_tools/download_from_google_storage.py', - "--no_resume", - "--no_auth", - "--bucket", "chromium-instrumented-libraries", - "-s", "src/third_party/instrumented_libraries/binaries/msan-chained-origins.tgz.sha1", + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 'src/third_party/instrumented_libraries/binaries/msan-chained-origins-focal.tgz.sha1', ], }, { - 'name': 'msan_no_origins', + 'name': 'msan_no_origins_focal', 'pattern': '.', 'condition': 'checkout_instrumented_libraries', 'action': [ 'python3', 'src/third_party/depot_tools/download_from_google_storage.py', - "--no_resume", - "--no_auth", - "--bucket", "chromium-instrumented-libraries", - "-s", "src/third_party/instrumented_libraries/binaries/msan-no-origins.tgz.sha1", + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 'src/third_party/instrumented_libraries/binaries/msan-no-origins-focal.tgz.sha1', + ], + }, + { + 'name': 'msan_chained_origins_xenial', + 'pattern': '.', + 'condition': 'checkout_instrumented_libraries', + 'action': [ 'python3', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 'src/third_party/instrumented_libraries/binaries/msan-chained-origins-xenial.tgz.sha1', + ], + }, + { + 'name': 'msan_no_origins_xenial', + 'pattern': '.', + 'condition': 'checkout_instrumented_libraries', + 'action': [ 'python3', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-instrumented-libraries', + '-s', 'src/third_party/instrumented_libraries/binaries/msan-no-origins-xenial.tgz.sha1', ], }, { @@ -2663,6 +2672,7 @@ include_rules = [ "+absl/meta/type_traits.h", "+absl/numeric/bits.h", "+absl/strings/ascii.h", + "+absl/strings/escaping.h", "+absl/strings/match.h", "+absl/strings/str_replace.h", "+absl/strings/string_view.h", diff --git a/OWNERS 
b/OWNERS index 6ae4b59a95..bfcca980eb 100644 --- a/OWNERS +++ b/OWNERS @@ -3,20 +3,4 @@ hta@webrtc.org mflodman@webrtc.org stefan@webrtc.org tommi@webrtc.org -per-file .gitignore=* -per-file .gn=mbonadei@webrtc.org -per-file BUILD.gn=mbonadei@webrtc.org -per-file .../BUILD.gn=mbonadei@webrtc.org -per-file *.gni=mbonadei@webrtc.org -per-file .../*.gni=mbonadei@webrtc.org -per-file .vpython=mbonadei@webrtc.org -per-file .vpython3=mbonadei@webrtc.org -per-file AUTHORS=* -per-file DEPS=* -per-file pylintrc=mbonadei@webrtc.org -per-file WATCHLISTS=* -per-file native-api.md=mbonadei@webrtc.org -per-file ....lua=titovartem@webrtc.org -per-file .style.yapf=jleconte@webrtc.org -per-file *.py=jansson@webrtc.org -per-file *.py=jleconte@webrtc.org +include OWNERS_INFRA #{Owners for infra and repo related files} diff --git a/OWNERS_INFRA b/OWNERS_INFRA new file mode 100644 index 0000000000..7172570152 --- /dev/null +++ b/OWNERS_INFRA @@ -0,0 +1,17 @@ +#Owners for infra and repo related files +per-file .gitignore=* +per-file .gn=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file BUILD.gn=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .../BUILD.gn=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file *.gni=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .../*.gni=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .vpython=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .vpython3=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file AUTHORS=* +per-file DEPS=* +per-file pylintrc=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file WATCHLISTS=* +per-file native-api.md=mbonadei@webrtc.org +per-file ....lua=titovartem@webrtc.org +per-file .style.yapf=jleconte@webrtc.org +per-file *.py=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org diff --git a/WATCHLISTS b/WATCHLISTS index bb1dccf89b..3c35dd4be7 100644 --- a/WATCHLISTS +++ 
b/WATCHLISTS @@ -107,9 +107,7 @@ 'yujie.mao@webrtc.org'], 'build_files': ['mbonadei@webrtc.org'], 'common_audio': ['alessiob@webrtc.org', - 'aluebs@webrtc.org', 'audio-team@agora.io', - 'minyue@webrtc.org', 'peah@webrtc.org', 'saza@webrtc.org'], 'audio': ['peah@webrtc.org'], @@ -135,23 +133,19 @@ 'audio_coding': ['alessiob@webrtc.org', 'audio-team@agora.io', 'henrik.lundin@webrtc.org', - 'minyue@webrtc.org', 'peah@webrtc.org', 'saza@webrtc.org'], 'neteq': ['alessiob@webrtc.org', 'audio-team@agora.io', 'henrik.lundin@webrtc.org', - 'minyue@webrtc.org', 'saza@webrtc.org'], 'audio_mixer': ['aleloi@webrtc.org', 'henrik.lundin@webrtc.org', 'peah@webrtc.org', 'saza@webrtc.org'], 'audio_processing': ['alessiob@webrtc.org', - 'aluebs@webrtc.org', 'audio-team@agora.io', 'henrik.lundin@webrtc.org', - 'minyue@webrtc.org', 'peah@webrtc.org', 'saza@webrtc.org'], 'video_coding': ['mflodman@webrtc.org', diff --git a/api/BUILD.gn b/api/BUILD.gn index 608c89dd33..47d9be43f5 100644 --- a/api/BUILD.gn +++ b/api/BUILD.gn @@ -193,6 +193,40 @@ rtc_library("dtls_transport_interface") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } +rtc_library("dtmf_sender_interface") { + visibility = [ "*" ] + + sources = [ "dtmf_sender_interface.h" ] + deps = [ + ":media_stream_interface", + "../rtc_base:refcount", + ] +} + +rtc_library("rtp_sender_interface") { + visibility = [ "*" ] + + sources = [ + "rtp_sender_interface.cc", + "rtp_sender_interface.h", + ] + deps = [ + ":dtls_transport_interface", + ":dtmf_sender_interface", + ":frame_transformer_interface", + ":media_stream_interface", + ":rtc_error", + ":rtp_parameters", + ":scoped_refptr", + "../rtc_base:checks", + "../rtc_base:refcount", + "../rtc_base/system:rtc_export", + "crypto:frame_encryptor_interface", + "video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] +} + rtc_library("libjingle_peerconnection_api") { visibility = [ "*" ] cflags = [] @@ -200,7 +234,6 @@ 
rtc_library("libjingle_peerconnection_api") { "crypto_params.h", "data_channel_interface.cc", "data_channel_interface.h", - "dtmf_sender_interface.h", # RingRTC change to add ICE forking "ice_gatherer_interface.h", "jsep.cc", @@ -208,21 +241,29 @@ rtc_library("libjingle_peerconnection_api") { "jsep_ice_candidate.cc", "jsep_ice_candidate.h", "jsep_session_description.h", + "legacy_stats_types.cc", + "legacy_stats_types.h", "peer_connection_interface.cc", "peer_connection_interface.h", "rtp_receiver_interface.cc", "rtp_receiver_interface.h", - "rtp_sender_interface.h", "rtp_transceiver_interface.cc", "rtp_transceiver_interface.h", "sctp_transport_interface.cc", "sctp_transport_interface.h", "set_local_description_observer_interface.h", "set_remote_description_observer_interface.h", - "stats_types.cc", - "stats_types.h", "uma_metrics.h", "video_track_source_proxy_factory.h", + + # Remove when downstream has been updated + "dtmf_sender_interface.h", + "rtp_sender_interface.h", + ] + public_deps = [ # no-presubmit-check TODO(webrtc:8603) + # Remove when downstream has been updated + ":dtmf_sender_interface", + ":rtp_sender_interface", ] deps = [ ":array_view", @@ -246,6 +287,7 @@ rtc_library("libjingle_peerconnection_api") { ":rtc_stats_api", ":rtp_packet_info", ":rtp_parameters", + ":rtp_sender_interface", ":rtp_transceiver_direction", ":scoped_refptr", ":sequence_checker", @@ -296,6 +338,7 @@ rtc_library("libjingle_peerconnection_api") { absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -509,10 +552,7 @@ rtc_source_set("peer_network_dependencies") { rtc_source_set("peer_connection_quality_test_fixture_api") { visibility = [ "*" ] testonly = true - sources = [ - "test/peerconnection_quality_test_fixture.cc", - 
"test/peerconnection_quality_test_fixture.h", - ] + sources = [ "test/peerconnection_quality_test_fixture.h" ] deps = [ ":array_view", @@ -538,10 +578,12 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { "../rtc_base:stringutils", "../rtc_base:threading", "../test:fileutils", - "../test/pc/e2e:video_dumping", "audio:audio_mixer_api", "rtc_event_log", "task_queue", + "test/pclf:media_configuration", + "test/pclf:media_quality_test_params", + "test/pclf:peer_configurer", "test/video:video_frame_writer", "transport:network_control", "units:time_delta", @@ -549,6 +591,7 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { "video_codecs:video_codecs_api", ] absl_deps = [ + "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -662,9 +705,9 @@ rtc_library("create_peer_connection_quality_test_frame_generator") { deps = [ ":create_frame_generator", ":frame_generator_api", - ":peer_connection_quality_test_fixture_api", "../rtc_base:checks", "../test:fileutils", + "test/pclf:media_configuration", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } @@ -710,6 +753,8 @@ rtc_source_set("rtc_stats_api") { "../rtc_base/system:rtc_export", "units:timestamp", ] + + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_options_api") { @@ -942,22 +987,50 @@ if (rtc_include_tests) { ] } - rtc_library("videocodec_test_fixture_api") { + rtc_library("videocodec_test_stats_api") { visibility = [ "*" ] testonly = true sources = [ - "test/videocodec_test_fixture.h", "test/videocodec_test_stats.cc", "test/videocodec_test_stats.h", ] deps = [ - "../modules/video_coding:video_codec_interface", + "../api/units:data_rate", + "../api/units:frequency", "../rtc_base:stringutils", "video:video_frame_type", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + 
rtc_library("videocodec_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/videocodec_test_fixture.h" ] + deps = [ + ":videocodec_test_stats_api", + "../modules/video_coding:video_codec_interface", "video_codecs:video_codecs_api", ] } + rtc_library("video_codec_tester_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/video_codec_tester.h" ] + deps = [ + ":videocodec_test_stats_api", + "../modules/video_coding/svc:scalability_mode_util", + "video:encoded_image", + "video:resolution", + "video:video_frame", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + rtc_library("create_videocodec_test_fixture_api") { visibility = [ "*" ] testonly = true @@ -973,6 +1046,19 @@ if (rtc_include_tests) { ] } + rtc_library("create_video_codec_tester_api") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_video_codec_tester.cc", + "test/create_video_codec_tester.h", + ] + deps = [ + ":video_codec_tester_api", + "../modules/video_coding:videocodec_test_impl", + ] + } + rtc_source_set("mock_audio_mixer") { visibility = [ "*" ] testonly = true @@ -1013,6 +1099,7 @@ if (rtc_include_tests) { sources = [ "test/mock_dtmf_sender.h" ] deps = [ + ":dtmf_sender_interface", ":libjingle_peerconnection_api", "../test:test_support", ] @@ -1143,6 +1230,16 @@ if (rtc_include_tests) { ] } + rtc_source_set("mock_session_description_interface") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_session_description_interface.h" ] + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + ] + } + rtc_source_set("mock_async_dns_resolver") { visibility = [ "*" ] testonly = true @@ -1164,6 +1261,8 @@ if (rtc_include_tests) { deps = [ ":libjingle_peerconnection_api", + ":rtp_sender_interface", + "../api/crypto:frame_decryptor_interface", "../test:test_support", ] } @@ -1308,12 +1407,16 @@ if (rtc_include_tests) { 
"../rtc_base:rtc_event", "../rtc_base:rtc_task_queue", "../rtc_base:task_queue_for_test", + "../rtc_base/containers:flat_set", "../rtc_base/task_utils:repeating_task", "../system_wrappers:field_trial", + "../test:field_trial", "../test:fileutils", "../test:rtc_expect_death", "../test:test_support", "task_queue:task_queue_default_factory_unittests", + "test/pclf:media_configuration", + "test/video:video_frame_writer", "transport:field_trial_based_config", "units:time_delta", "units:timestamp", @@ -1322,7 +1425,10 @@ if (rtc_include_tests) { "video:rtp_video_frame_assembler_unittests", "video:video_unittests", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] } rtc_library("compile_all_headers") { @@ -1345,6 +1451,7 @@ if (rtc_include_tests) { ":mock_peer_connection_factory_interface", ":mock_peerconnectioninterface", ":mock_rtp", + ":mock_session_description_interface", ":mock_transformable_video_frame", ":mock_video_bitrate_allocator", ":mock_video_bitrate_allocator_factory", @@ -1358,6 +1465,25 @@ if (rtc_include_tests) { } } +rtc_source_set("field_trials_registry") { + visibility = [ "*" ] + sources = [ + "field_trials_registry.cc", + "field_trials_registry.h", + ] + deps = [ + ":field_trials_view", + "../experiments:registered_field_trials", + "../rtc_base:checks", + "../rtc_base/containers:flat_set", + "../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + ] +} + rtc_source_set("field_trials_view") { visibility = [ "*" ] sources = [ "field_trials_view.h" ] @@ -1378,10 +1504,26 @@ rtc_library("field_trials") { "field_trials.h", ] deps = [ - ":field_trials_view", + ":field_trials_registry", "../rtc_base:checks", "../rtc_base/containers:flat_map", "../system_wrappers:field_trial", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } 
+ +rtc_library("frame_transformer_factory") { + visibility = [ "*" ] + sources = [ + "frame_transformer_factory.cc", + "frame_transformer_factory.h", + ] + deps = [ + ":frame_transformer_interface", + ":scoped_refptr", + "../modules/rtp_rtcp", + "../rtc_base:refcount", + "video:encoded_frame", + "video:video_frame_metadata", + ] +} diff --git a/api/DEPS b/api/DEPS index 66b6ca0a41..bc26b56660 100644 --- a/api/DEPS +++ b/api/DEPS @@ -11,6 +11,7 @@ include_rules = [ "-common_video", "-data", "-examples", + "-experiments", "-g3doc", "-ios", "-infra", @@ -184,7 +185,7 @@ specific_include_rules = { "+rtc_base/ref_count.h", ], - "stats_types\.h": [ + "legacy_stats_types\.h": [ "+rtc_base/ref_count.h", "+rtc_base/thread_checker.h", ], @@ -314,6 +315,10 @@ specific_include_rules = { "+rtc_base/thread.h", ], + "field_trials_registry\.h": [ + "+rtc_base/containers/flat_set.h", + ], + # .cc files in api/ should not be restricted in what they can #include, # so we re-add all the top-level directories here. 
(That's because .h # files leak their #includes to whoever's #including them, but .cc files @@ -324,6 +329,7 @@ specific_include_rules = { "+common_audio", "+common_video", "+examples", + "+experiments", "+logging", "+media", "+modules", diff --git a/api/audio/echo_canceller3_config.h b/api/audio/echo_canceller3_config.h index 96a204a981..4b1c7fbc47 100644 --- a/api/audio/echo_canceller3_config.h +++ b/api/audio/echo_canceller3_config.h @@ -59,7 +59,7 @@ struct RTC_EXPORT EchoCanceller3Config { }; AlignmentMixing render_alignment_mixing = {false, true, 10000.f, true}; AlignmentMixing capture_alignment_mixing = {false, true, 10000.f, false}; - bool detect_pre_echo = false; + bool detect_pre_echo = true; } delay; struct Filter { diff --git a/api/audio_codecs/BUILD.gn b/api/audio_codecs/BUILD.gn index b4b06fb32b..82ed31a5da 100644 --- a/api/audio_codecs/BUILD.gn +++ b/api/audio_codecs/BUILD.gn @@ -62,7 +62,6 @@ rtc_library("builtin_audio_decoder_factory") { "L16:audio_decoder_L16", "g711:audio_decoder_g711", "g722:audio_decoder_g722", - "isac:audio_decoder_isac", ] defines = [] if (rtc_include_ilbc) { @@ -95,7 +94,6 @@ rtc_library("builtin_audio_encoder_factory") { "L16:audio_encoder_L16", "g711:audio_encoder_g711", "g722:audio_encoder_g722", - "isac:audio_encoder_isac", ] defines = [] if (rtc_include_ilbc) { diff --git a/api/audio_codecs/OWNERS b/api/audio_codecs/OWNERS index 77e9d0022a..77b414abc3 100644 --- a/api/audio_codecs/OWNERS +++ b/api/audio_codecs/OWNERS @@ -1,2 +1,3 @@ -minyue@webrtc.org +alessiob@webrtc.org henrik.lundin@webrtc.org +jakobi@webrtc.org diff --git a/api/audio_codecs/builtin_audio_decoder_factory.cc b/api/audio_codecs/builtin_audio_decoder_factory.cc index 963cfe5cb9..881113d985 100644 --- a/api/audio_codecs/builtin_audio_decoder_factory.cc +++ b/api/audio_codecs/builtin_audio_decoder_factory.cc @@ -20,7 +20,6 @@ #if WEBRTC_USE_BUILTIN_ILBC #include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" // nogncheck #endif -#include 
"api/audio_codecs/isac/audio_decoder_isac.h" #if WEBRTC_USE_BUILTIN_OPUS #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" // nogncheck @@ -57,7 +56,7 @@ rtc::scoped_refptr CreateBuiltinAudioDecoderFactory() { AudioDecoderOpus, NotAdvertised, #endif - AudioDecoderIsac, AudioDecoderG722, + AudioDecoderG722, #if WEBRTC_USE_BUILTIN_ILBC AudioDecoderIlbc, diff --git a/api/audio_codecs/builtin_audio_encoder_factory.cc b/api/audio_codecs/builtin_audio_encoder_factory.cc index 530d64b2ba..4546a2eaee 100644 --- a/api/audio_codecs/builtin_audio_encoder_factory.cc +++ b/api/audio_codecs/builtin_audio_encoder_factory.cc @@ -20,7 +20,6 @@ #if WEBRTC_USE_BUILTIN_ILBC #include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" // nogncheck #endif -#include "api/audio_codecs/isac/audio_encoder_isac.h" #if WEBRTC_USE_BUILTIN_OPUS #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" // nogncheck @@ -63,7 +62,7 @@ rtc::scoped_refptr CreateBuiltinAudioEncoderFactory() { AudioEncoderOpus, NotAdvertised, #endif - AudioEncoderIsac, AudioEncoderG722, + AudioEncoderG722, #if WEBRTC_USE_BUILTIN_ILBC AudioEncoderIlbc, diff --git a/api/audio_codecs/isac/BUILD.gn b/api/audio_codecs/isac/BUILD.gn deleted file mode 100644 index 96a0ed5013..0000000000 --- a/api/audio_codecs/isac/BUILD.gn +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. 
- -import("../../../webrtc.gni") -if (is_android) { - import("//build/config/android/config.gni") - import("//build/config/android/rules.gni") -} - -# The targets with _fix and _float suffixes unconditionally use the -# fixed-point and floating-point iSAC implementations, respectively. -# The targets without suffixes pick one of the implementations based -# on cleverly chosen criteria. - -rtc_source_set("audio_encoder_isac") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - public = [ "audio_encoder_isac.h" ] - public_configs = [ ":isac_config" ] - if (current_cpu == "arm") { - deps = [ ":audio_encoder_isac_fix" ] - } else { - deps = [ ":audio_encoder_isac_float" ] - } -} - -rtc_source_set("audio_decoder_isac") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - public = [ "audio_decoder_isac.h" ] - public_configs = [ ":isac_config" ] - if (current_cpu == "arm") { - deps = [ ":audio_decoder_isac_fix" ] - } else { - deps = [ ":audio_decoder_isac_float" ] - } -} - -config("isac_config") { - visibility = [ ":*" ] - if (current_cpu == "arm") { - defines = [ - "WEBRTC_USE_BUILTIN_ISAC_FIX=1", - "WEBRTC_USE_BUILTIN_ISAC_FLOAT=0", - ] - } else { - defines = [ - "WEBRTC_USE_BUILTIN_ISAC_FIX=0", - "WEBRTC_USE_BUILTIN_ISAC_FLOAT=1", - ] - } -} - -rtc_library("audio_encoder_isac_fix") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_encoder_isac_fix.cc", - "audio_encoder_isac_fix.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:isac_fix", - "../../../rtc_base:stringutils", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_decoder_isac_fix") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_decoder_isac_fix.cc", - "audio_decoder_isac_fix.h", - ] - deps = [ - "..:audio_codecs_api", - 
"../../../api:field_trials_view", - "../../../modules/audio_coding:isac_fix", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_encoder_isac_float") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_encoder_isac_float.cc", - "audio_encoder_isac_float.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:isac", - "../../../rtc_base:stringutils", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_decoder_isac_float") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_decoder_isac_float.cc", - "audio_decoder_isac_float.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:isac", - "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} diff --git a/api/audio_codecs/isac/audio_decoder_isac.h b/api/audio_codecs/isac/audio_decoder_isac.h deleted file mode 100644 index f4e9331282..0000000000 --- a/api/audio_codecs/isac/audio_decoder_isac.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ - -#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" // nogncheck -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" // nogncheck -#else -#error "Must choose either fix or float" -#endif - -namespace webrtc { - -#if WEBRTC_USE_BUILTIN_ISAC_FIX -using AudioDecoderIsac = AudioDecoderIsacFix; -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT -using AudioDecoderIsac = AudioDecoderIsacFloat; -#endif - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ diff --git a/api/audio_codecs/isac/audio_decoder_isac_fix.cc b/api/audio_codecs/isac/audio_decoder_isac_fix.cc deleted file mode 100644 index 3dea97c7f8..0000000000 --- a/api/audio_codecs/isac/audio_decoder_isac_fix.cc +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" - -namespace webrtc { - -absl::optional AudioDecoderIsacFix::SdpToConfig( - const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - format.clockrate_hz == 16000 && format.num_channels == 1) { - return Config(); - } - return absl::nullopt; -} - -void AudioDecoderIsacFix::AppendSupportedDecoders( - std::vector* specs) { - // RingRTC change to unused audio codecs - // specs->push_back({{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}); -} - -std::unique_ptr AudioDecoderIsacFix::MakeAudioDecoder( - Config config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioDecoderIsacFixImpl::Config c; - c.sample_rate_hz = 16000; - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_decoder_isac_fix.h b/api/audio_codecs/isac/audio_decoder_isac_fix.h deleted file mode 100644 index 8f61d9ab0e..0000000000 --- a/api/audio_codecs/isac/audio_decoder_isac_fix.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC decoder API (fixed-point implementation) for use as a template -// parameter to CreateAudioDecoderFactory<...>(). -struct RTC_EXPORT AudioDecoderIsacFix { - struct Config {}; // Empty---no config values needed! - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedDecoders(std::vector* specs); - static std::unique_ptr MakeAudioDecoder( - Config config, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ diff --git a/api/audio_codecs/isac/audio_decoder_isac_float.cc b/api/audio_codecs/isac/audio_decoder_isac_float.cc deleted file mode 100644 index 55a799c755..0000000000 --- a/api/audio_codecs/isac/audio_decoder_isac_float.cc +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" - -namespace webrtc { - -absl::optional -AudioDecoderIsacFloat::SdpToConfig(const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) && - format.num_channels == 1) { - Config config; - config.sample_rate_hz = format.clockrate_hz; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; - } - return config; - } else { - return absl::nullopt; - } -} - -void AudioDecoderIsacFloat::AppendSupportedDecoders( - std::vector* specs) { - // RingRTC change to unused audio codecs - // specs->push_back({{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}); - // specs->push_back({{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}}); -} - -std::unique_ptr AudioDecoderIsacFloat::MakeAudioDecoder( - Config config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioDecoderIsacFloatImpl::Config c; - c.sample_rate_hz = config.sample_rate_hz; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_decoder_isac_float.h b/api/audio_codecs/isac/audio_decoder_isac_float.h deleted file mode 100644 index 864c6b999f..0000000000 --- a/api/audio_codecs/isac/audio_decoder_isac_float.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC decoder API (floating-point implementation) for use as a template -// parameter to CreateAudioDecoderFactory<...>(). -struct RTC_EXPORT AudioDecoderIsacFloat { - struct Config { - bool IsOk() const { - return sample_rate_hz == 16000 || sample_rate_hz == 32000; - } - int sample_rate_hz = 16000; - }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedDecoders(std::vector* specs); - static std::unique_ptr MakeAudioDecoder( - Config config, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ diff --git a/api/audio_codecs/isac/audio_encoder_isac.h b/api/audio_codecs/isac/audio_encoder_isac.h deleted file mode 100644 index 3cb0a1f053..0000000000 --- a/api/audio_codecs/isac/audio_encoder_isac.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ - -#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" // nogncheck -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" // nogncheck -#else -#error "Must choose either fix or float" -#endif - -namespace webrtc { - -#if WEBRTC_USE_BUILTIN_ISAC_FIX -using AudioEncoderIsac = AudioEncoderIsacFix; -#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT -using AudioEncoderIsac = AudioEncoderIsacFloat; -#endif - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ diff --git a/api/audio_codecs/isac/audio_encoder_isac_fix.cc b/api/audio_codecs/isac/audio_encoder_isac_fix.cc deleted file mode 100644 index bd0e6efe41..0000000000 --- a/api/audio_codecs/isac/audio_encoder_isac_fix.cc +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h" -#include "rtc_base/string_to_number.h" - -namespace webrtc { - -absl::optional AudioEncoderIsacFix::SdpToConfig( - const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - format.clockrate_hz == 16000 && format.num_channels == 1) { - Config config; - const auto ptime_iter = format.parameters.find("ptime"); - if (ptime_iter != format.parameters.end()) { - const auto ptime = rtc::StringToNumber(ptime_iter->second); - if (ptime && *ptime >= 60) { - config.frame_size_ms = 60; - } - } - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; - } - return config; - } else { - return absl::nullopt; - } -} - -void AudioEncoderIsacFix::AppendSupportedEncoders( - std::vector* specs) { - // RingRTC change to unused audio codecs - // const SdpAudioFormat fmt = {"ISAC", 16000, 1}; - // const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); - // specs->push_back({fmt, info}); -} - -AudioCodecInfo AudioEncoderIsacFix::QueryAudioEncoder( - AudioEncoderIsacFix::Config config) { - RTC_DCHECK(config.IsOk()); - return {16000, 1, 32000, 10000, 32000}; -} - -std::unique_ptr AudioEncoderIsacFix::MakeAudioEncoder( - AudioEncoderIsacFix::Config config, - int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioEncoderIsacFixImpl::Config c; - c.frame_size_ms = config.frame_size_ms; - c.bit_rate = config.bit_rate; - c.payload_type = payload_type; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_encoder_isac_fix.h b/api/audio_codecs/isac/audio_encoder_isac_fix.h deleted file mode 100644 index de0f1d1308..0000000000 --- a/api/audio_codecs/isac/audio_encoder_isac_fix.h +++ 
/dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC encoder API (fixed-point implementation) for use as a template -// parameter to CreateAudioEncoderFactory<...>(). -struct RTC_EXPORT AudioEncoderIsacFix { - struct Config { - bool IsOk() const { - if (frame_size_ms != 30 && frame_size_ms != 60) { - return false; - } - if (bit_rate < 10000 || bit_rate > 32000) { - return false; - } - return true; - } - int frame_size_ms = 30; - int bit_rate = 32000; // Limit on short-term average bit rate, in bits/s. 
- }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedEncoders(std::vector* specs); - static AudioCodecInfo QueryAudioEncoder(Config config); - static std::unique_ptr MakeAudioEncoder( - Config config, - int payload_type, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ diff --git a/api/audio_codecs/isac/audio_encoder_isac_float.cc b/api/audio_codecs/isac/audio_encoder_isac_float.cc deleted file mode 100644 index ab09ac3ebf..0000000000 --- a/api/audio_codecs/isac/audio_encoder_isac_float.cc +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" - -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" -#include "rtc_base/string_to_number.h" - -namespace webrtc { - -absl::optional -AudioEncoderIsacFloat::SdpToConfig(const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ISAC") && - (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) && - format.num_channels == 1) { - Config config; - config.sample_rate_hz = format.clockrate_hz; - config.bit_rate = format.clockrate_hz == 16000 ? 32000 : 56000; - if (config.sample_rate_hz == 16000) { - // For sample rate 16 kHz, optionally use 60 ms frames, instead of the - // default 30 ms. 
- const auto ptime_iter = format.parameters.find("ptime"); - if (ptime_iter != format.parameters.end()) { - const auto ptime = rtc::StringToNumber(ptime_iter->second); - if (ptime && *ptime >= 60) { - config.frame_size_ms = 60; - } - } - } - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; - } - return config; - } else { - return absl::nullopt; - } -} - -void AudioEncoderIsacFloat::AppendSupportedEncoders( - std::vector* specs) { - // RingRTC change to unused audio codecs - // for (int sample_rate_hz : {16000, 32000}) { - // const SdpAudioFormat fmt = {"ISAC", sample_rate_hz, 1}; - // const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); - // specs->push_back({fmt, info}); - // } -} - -AudioCodecInfo AudioEncoderIsacFloat::QueryAudioEncoder( - const AudioEncoderIsacFloat::Config& config) { - RTC_DCHECK(config.IsOk()); - constexpr int min_bitrate = 10000; - const int max_bitrate = config.sample_rate_hz == 16000 ? 32000 : 56000; - const int default_bitrate = max_bitrate; - return {config.sample_rate_hz, 1, default_bitrate, min_bitrate, max_bitrate}; -} - -std::unique_ptr AudioEncoderIsacFloat::MakeAudioEncoder( - const AudioEncoderIsacFloat::Config& config, - int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - AudioEncoderIsacFloatImpl::Config c; - c.payload_type = payload_type; - c.sample_rate_hz = config.sample_rate_hz; - c.frame_size_ms = config.frame_size_ms; - c.bit_rate = config.bit_rate; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(c); -} - -} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_encoder_isac_float.h b/api/audio_codecs/isac/audio_encoder_isac_float.h deleted file mode 100644 index d031d76db1..0000000000 --- a/api/audio_codecs/isac/audio_encoder_isac_float.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ -#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// iSAC encoder API (floating-point implementation) for use as a template -// parameter to CreateAudioEncoderFactory<...>(). -struct RTC_EXPORT AudioEncoderIsacFloat { - struct Config { - bool IsOk() const { - switch (sample_rate_hz) { - case 16000: - if (frame_size_ms != 30 && frame_size_ms != 60) { - return false; - } - if (bit_rate < 10000 || bit_rate > 32000) { - return false; - } - return true; - case 32000: - if (frame_size_ms != 30) { - return false; - } - if (bit_rate < 10000 || bit_rate > 56000) { - return false; - } - return true; - default: - return false; - } - } - int sample_rate_hz = 16000; - int frame_size_ms = 30; - int bit_rate = 32000; // Limit on short-term average bit rate, in bits/s. 
- }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedEncoders(std::vector* specs); - static AudioCodecInfo QueryAudioEncoder(const Config& config); - static std::unique_ptr MakeAudioEncoder( - const Config& config, - int payload_type, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ diff --git a/api/audio_codecs/test/BUILD.gn b/api/audio_codecs/test/BUILD.gn index 12df649feb..89f5fef1ea 100644 --- a/api/audio_codecs/test/BUILD.gn +++ b/api/audio_codecs/test/BUILD.gn @@ -32,10 +32,6 @@ if (rtc_include_tests) { "../g722:audio_encoder_g722", "../ilbc:audio_decoder_ilbc", "../ilbc:audio_encoder_ilbc", - "../isac:audio_decoder_isac_fix", - "../isac:audio_decoder_isac_float", - "../isac:audio_encoder_isac_fix", - "../isac:audio_encoder_isac_float", "../opus:audio_decoder_opus", "../opus:audio_encoder_opus", ] diff --git a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc index 3662f3b76d..0b18cf934a 100644 --- a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc +++ b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc @@ -16,8 +16,6 @@ #include "api/audio_codecs/g711/audio_decoder_g711.h" #include "api/audio_codecs/g722/audio_decoder_g722.h" #include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "test/gmock.h" #include "test/gtest.h" @@ -182,41 +180,6 @@ TEST(AudioDecoderFactoryTemplateTest, Ilbc) { EXPECT_EQ(8000, dec->SampleRateHz()); } -TEST(AudioDecoderFactoryTemplateTest, IsacFix) { - auto factory = CreateAudioDecoderFactory(); - EXPECT_THAT(factory->GetSupportedDecoders(), - 
::testing::ElementsAre(AudioCodecSpec{ - {"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}})); - EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 16000, 2})); - EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 16000, 1})); - EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"isac", 8000, 1}, absl::nullopt)); - auto dec = factory->MakeAudioDecoder({"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, dec); - EXPECT_EQ(16000, dec->SampleRateHz()); -} - -TEST(AudioDecoderFactoryTemplateTest, IsacFloat) { - auto factory = CreateAudioDecoderFactory(); - EXPECT_THAT( - factory->GetSupportedDecoders(), - ::testing::ElementsAre( - AudioCodecSpec{{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}, - AudioCodecSpec{{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}})); - EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 16000, 2})); - EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 16000, 1})); - EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"isac", 8000, 1}, absl::nullopt)); - auto dec1 = factory->MakeAudioDecoder({"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, dec1); - EXPECT_EQ(16000, dec1->SampleRateHz()); - auto dec2 = factory->MakeAudioDecoder({"isac", 32000, 1}, absl::nullopt); - ASSERT_NE(nullptr, dec2); - EXPECT_EQ(32000, dec2->SampleRateHz()); -} - TEST(AudioDecoderFactoryTemplateTest, L16) { auto factory = CreateAudioDecoderFactory(); EXPECT_THAT( diff --git a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc index 67b6883583..dbba387724 100644 --- a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc +++ b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc @@ -16,8 +16,6 @@ #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" #include 
"api/audio_codecs/ilbc/audio_encoder_ilbc.h" -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "test/gmock.h" #include "test/gtest.h" @@ -180,49 +178,6 @@ TEST(AudioEncoderFactoryTemplateTest, Ilbc) { EXPECT_EQ(8000, enc->SampleRateHz()); } -TEST(AudioEncoderFactoryTemplateTest, IsacFix) { - auto factory = CreateAudioEncoderFactory(); - EXPECT_THAT(factory->GetSupportedEncoders(), - ::testing::ElementsAre(AudioCodecSpec{ - {"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 16000, 2})); - EXPECT_EQ(AudioCodecInfo(16000, 1, 32000, 10000, 32000), - factory->QueryAudioEncoder({"isac", 16000, 1})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"isac", 8000, 1}, absl::nullopt)); - auto enc1 = factory->MakeAudioEncoder(17, {"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc1); - EXPECT_EQ(16000, enc1->SampleRateHz()); - EXPECT_EQ(3u, enc1->Num10MsFramesInNextPacket()); - auto enc2 = factory->MakeAudioEncoder( - 17, {"isac", 16000, 1, {{"ptime", "60"}}}, absl::nullopt); - ASSERT_NE(nullptr, enc2); - EXPECT_EQ(6u, enc2->Num10MsFramesInNextPacket()); -} - -TEST(AudioEncoderFactoryTemplateTest, IsacFloat) { - auto factory = CreateAudioEncoderFactory(); - EXPECT_THAT( - factory->GetSupportedEncoders(), - ::testing::ElementsAre( - AudioCodecSpec{{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}, - AudioCodecSpec{{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 16000, 2})); - EXPECT_EQ(AudioCodecInfo(16000, 1, 32000, 10000, 32000), - factory->QueryAudioEncoder({"isac", 16000, 1})); - EXPECT_EQ(AudioCodecInfo(32000, 1, 56000, 10000, 56000), - factory->QueryAudioEncoder({"isac", 32000, 1})); - EXPECT_EQ(nullptr, - 
factory->MakeAudioEncoder(17, {"isac", 8000, 1}, absl::nullopt)); - auto enc1 = factory->MakeAudioEncoder(17, {"isac", 16000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc1); - EXPECT_EQ(16000, enc1->SampleRateHz()); - auto enc2 = factory->MakeAudioEncoder(17, {"isac", 32000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc2); - EXPECT_EQ(32000, enc2->SampleRateHz()); -} - TEST(AudioEncoderFactoryTemplateTest, L16) { auto factory = CreateAudioEncoderFactory(); EXPECT_THAT( diff --git a/api/candidate.cc b/api/candidate.cc index 4d17256c2e..a14dda350c 100644 --- a/api/candidate.cc +++ b/api/candidate.cc @@ -22,6 +22,7 @@ Candidate::Candidate() component_(0), priority_(0), network_type_(rtc::ADAPTER_TYPE_UNKNOWN), + underlying_type_for_vpn_(rtc::ADAPTER_TYPE_UNKNOWN), generation_(0), network_id_(0), network_cost_(0) {} @@ -46,6 +47,7 @@ Candidate::Candidate(int component, password_(password), type_(type), network_type_(rtc::ADAPTER_TYPE_UNKNOWN), + underlying_type_for_vpn_(rtc::ADAPTER_TYPE_UNKNOWN), generation_(generation), foundation_(foundation), network_id_(network_id), diff --git a/api/candidate.h b/api/candidate.h index b8aaebc14a..281f2f01a5 100644 --- a/api/candidate.h +++ b/api/candidate.h @@ -25,6 +25,10 @@ namespace cricket { +// TURN servers are limited to 32 in accordance with +// https://w3c.github.io/webrtc-pc/#dom-rtcconfiguration-iceservers +static constexpr size_t kMaxTurnServers = 32; + // Candidate for ICE based connection discovery. // TODO(phoglund): remove things in here that are not needed in the public API. 
diff --git a/api/field_trials.cc b/api/field_trials.cc index d6b53acafb..4bd11271dc 100644 --- a/api/field_trials.cc +++ b/api/field_trials.cc @@ -90,7 +90,7 @@ FieldTrials::~FieldTrials() { } } -std::string FieldTrials::Lookup(absl::string_view key) const { +std::string FieldTrials::GetValue(absl::string_view key) const { auto it = key_value_map_.find(std::string(key)); if (it != key_value_map_.end()) return it->second; diff --git a/api/field_trials.h b/api/field_trials.h index 0bfa4b7871..bf7a7cc625 100644 --- a/api/field_trials.h +++ b/api/field_trials.h @@ -15,7 +15,7 @@ #include #include "absl/strings/string_view.h" -#include "api/field_trials_view.h" +#include "api/field_trials_registry.h" #include "rtc_base/containers/flat_map.h" namespace webrtc { @@ -34,7 +34,7 @@ namespace webrtc { // NOTE: Creating multiple FieldTrials-object is currently prohibited // until we remove the global string (TODO(bugs.webrtc.org/10335)) // (unless using CreateNoGlobal): -class FieldTrials : public FieldTrialsView { +class FieldTrials : public FieldTrialsRegistry { public: explicit FieldTrials(const std::string& s); ~FieldTrials(); @@ -43,10 +43,11 @@ class FieldTrials : public FieldTrialsView { // global variable (i.e can not be used for all parts of webrtc). static std::unique_ptr CreateNoGlobal(const std::string& s); - std::string Lookup(absl::string_view key) const override; - private: explicit FieldTrials(const std::string& s, bool); + + std::string GetValue(absl::string_view key) const override; + const bool uses_global_; const std::string field_trial_string_; const char* const previous_field_trial_string_; diff --git a/api/field_trials_registry.cc b/api/field_trials_registry.cc new file mode 100644 index 0000000000..f97e8a85a9 --- /dev/null +++ b/api/field_trials_registry.cc @@ -0,0 +1,31 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/field_trials_registry.h" + +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "experiments/registered_field_trials.h" +#include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" + +namespace webrtc { + +std::string FieldTrialsRegistry::Lookup(absl::string_view key) const { +#if WEBRTC_STRICT_FIELD_TRIALS + RTC_DCHECK(absl::c_linear_search(kRegisteredFieldTrials, key) || + test_keys_.contains(key)) + << key << " is not registered."; +#endif + return GetValue(key); +} + +} // namespace webrtc diff --git a/api/field_trials_registry.h b/api/field_trials_registry.h new file mode 100644 index 0000000000..dc7e8445b1 --- /dev/null +++ b/api/field_trials_registry.h @@ -0,0 +1,54 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_FIELD_TRIALS_REGISTRY_H_ +#define API_FIELD_TRIALS_REGISTRY_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "rtc_base/containers/flat_set.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Abstract base class for a field trial registry that verifies that any looked +// up key has been pre-registered in accordance with `g3doc/field-trials.md`. 
+class RTC_EXPORT FieldTrialsRegistry : public FieldTrialsView { + public: + FieldTrialsRegistry() = default; + + FieldTrialsRegistry(const FieldTrialsRegistry&) = default; + FieldTrialsRegistry& operator=(const FieldTrialsRegistry&) = default; + + ~FieldTrialsRegistry() override = default; + + // Verifies that `key` is a registered field trial and then returns the + // configured value for `key` or an empty string if the field trial isn't + // configured. + std::string Lookup(absl::string_view key) const override; + + // Register additional `keys` for testing. This should only be used for + // imaginary keys that are never used outside test code. + void RegisterKeysForTesting(flat_set keys) { + test_keys_ = std::move(keys); + } + + private: + virtual std::string GetValue(absl::string_view key) const = 0; + + // Imaginary keys only used for testing. + flat_set test_keys_; +}; + +} // namespace webrtc + +#endif // API_FIELD_TRIALS_REGISTRY_H_ diff --git a/api/field_trials_unittest.cc b/api/field_trials_unittest.cc index dc8289881b..804b52a818 100644 --- a/api/field_trials_unittest.cc +++ b/api/field_trials_unittest.cc @@ -11,9 +11,13 @@ #include "api/field_trials.h" #include +#include +#include "absl/strings/string_view.h" #include "api/transport/field_trial_based_config.h" +#include "rtc_base/containers/flat_set.h" #include "system_wrappers/include/field_trial.h" +#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -25,49 +29,50 @@ namespace webrtc { namespace { using ::testing::NotNull; -using ::webrtc::field_trial::InitFieldTrialsFromString; +using ::webrtc::field_trial::FieldTrialsAllowedInScopeForTesting; +using ::webrtc::test::ScopedFieldTrials; -class FieldTrialsTest : public testing::Test { - protected: - FieldTrialsTest() { - // Make sure global state is consistent between test runs. 
- InitFieldTrialsFromString(nullptr); - } -}; - -TEST_F(FieldTrialsTest, EmptyStringHasNoEffect) { +TEST(FieldTrialsTest, EmptyStringHasNoEffect) { + FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial"}); FieldTrials f(""); + f.RegisterKeysForTesting({"MyCoolTrial"}); + EXPECT_FALSE(f.IsEnabled("MyCoolTrial")); EXPECT_FALSE(f.IsDisabled("MyCoolTrial")); } -TEST_F(FieldTrialsTest, EnabledDisabledMustBeFirstInValue) { +TEST(FieldTrialsTest, EnabledDisabledMustBeFirstInValue) { FieldTrials f( "MyCoolTrial/EnabledFoo/" "MyUncoolTrial/DisabledBar/" "AnotherTrial/BazEnabled/"); + f.RegisterKeysForTesting({"MyCoolTrial", "MyUncoolTrial", "AnotherTrial"}); + EXPECT_TRUE(f.IsEnabled("MyCoolTrial")); EXPECT_TRUE(f.IsDisabled("MyUncoolTrial")); EXPECT_FALSE(f.IsEnabled("AnotherTrial")); } -TEST_F(FieldTrialsTest, FieldTrialsDoesNotReadGlobalString) { - static constexpr char s[] = "MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"; - InitFieldTrialsFromString(s); +TEST(FieldTrialsTest, FieldTrialsDoesNotReadGlobalString) { + FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial", "MyUncoolTrial"}); + ScopedFieldTrials g("MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"); FieldTrials f(""); + f.RegisterKeysForTesting({"MyCoolTrial", "MyUncoolTrial"}); + EXPECT_FALSE(f.IsEnabled("MyCoolTrial")); EXPECT_FALSE(f.IsDisabled("MyUncoolTrial")); } -TEST_F(FieldTrialsTest, FieldTrialsWritesGlobalString) { +TEST(FieldTrialsTest, FieldTrialsWritesGlobalString) { + FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial", "MyUncoolTrial"}); FieldTrials f("MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"); EXPECT_TRUE(webrtc::field_trial::IsEnabled("MyCoolTrial")); EXPECT_TRUE(webrtc::field_trial::IsDisabled("MyUncoolTrial")); } -TEST_F(FieldTrialsTest, FieldTrialsRestoresGlobalStringAfterDestruction) { +TEST(FieldTrialsTest, FieldTrialsRestoresGlobalStringAfterDestruction) { static constexpr char s[] = "SomeString/Enabled/"; - InitFieldTrialsFromString(s); + ScopedFieldTrials g(s); { FieldTrials 
f("SomeOtherString/Enabled/"); EXPECT_STREQ(webrtc::field_trial::GetFieldTrialString(), @@ -77,33 +82,38 @@ TEST_F(FieldTrialsTest, FieldTrialsRestoresGlobalStringAfterDestruction) { } #if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(FieldTrialsTest, FieldTrialsDoesNotSupportSimultaneousInstances) { +TEST(FieldTrialsTest, FieldTrialsDoesNotSupportSimultaneousInstances) { FieldTrials f("SomeString/Enabled/"); RTC_EXPECT_DEATH(FieldTrials("SomeOtherString/Enabled/").Lookup("Whatever"), "Only one instance"); } #endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(FieldTrialsTest, FieldTrialsSupportsSeparateInstances) { +TEST(FieldTrialsTest, FieldTrialsSupportsSeparateInstances) { { FieldTrials f("SomeString/Enabled/"); } { FieldTrials f("SomeOtherString/Enabled/"); } } -TEST_F(FieldTrialsTest, NonGlobalFieldTrialsInstanceDoesNotModifyGlobalString) { +TEST(FieldTrialsTest, NonGlobalFieldTrialsInstanceDoesNotModifyGlobalString) { + FieldTrialsAllowedInScopeForTesting k({"SomeString"}); std::unique_ptr f = FieldTrials::CreateNoGlobal("SomeString/Enabled/"); ASSERT_THAT(f, NotNull()); + f->RegisterKeysForTesting({"SomeString"}); + EXPECT_TRUE(f->IsEnabled("SomeString")); EXPECT_FALSE(webrtc::field_trial::IsEnabled("SomeString")); } -TEST_F(FieldTrialsTest, NonGlobalFieldTrialsSupportSimultaneousInstances) { +TEST(FieldTrialsTest, NonGlobalFieldTrialsSupportSimultaneousInstances) { std::unique_ptr f1 = FieldTrials::CreateNoGlobal("SomeString/Enabled/"); std::unique_ptr f2 = FieldTrials::CreateNoGlobal("SomeOtherString/Enabled/"); ASSERT_THAT(f1, NotNull()); ASSERT_THAT(f2, NotNull()); + f1->RegisterKeysForTesting({"SomeString", "SomeOtherString"}); + f2->RegisterKeysForTesting({"SomeString", "SomeOtherString"}); EXPECT_TRUE(f1->IsEnabled("SomeString")); EXPECT_FALSE(f1->IsEnabled("SomeOtherString")); @@ -112,11 +122,14 @@ TEST_F(FieldTrialsTest, NonGlobalFieldTrialsSupportSimultaneousInstances) { EXPECT_TRUE(f2->IsEnabled("SomeOtherString")); } 
-TEST_F(FieldTrialsTest, GlobalAndNonGlobalFieldTrialsAreDisjoint) { +TEST(FieldTrialsTest, GlobalAndNonGlobalFieldTrialsAreDisjoint) { + FieldTrialsAllowedInScopeForTesting k({"SomeString", "SomeOtherString"}); FieldTrials f1("SomeString/Enabled/"); std::unique_ptr f2 = FieldTrials::CreateNoGlobal("SomeOtherString/Enabled/"); ASSERT_THAT(f2, NotNull()); + f1.RegisterKeysForTesting({"SomeString", "SomeOtherString"}); + f2->RegisterKeysForTesting({"SomeString", "SomeOtherString"}); EXPECT_TRUE(f1.IsEnabled("SomeString")); EXPECT_FALSE(f1.IsEnabled("SomeOtherString")); @@ -125,10 +138,12 @@ TEST_F(FieldTrialsTest, GlobalAndNonGlobalFieldTrialsAreDisjoint) { EXPECT_TRUE(f2->IsEnabled("SomeOtherString")); } -TEST_F(FieldTrialsTest, FieldTrialBasedConfigReadsGlobalString) { - static constexpr char s[] = "MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"; - InitFieldTrialsFromString(s); +TEST(FieldTrialsTest, FieldTrialBasedConfigReadsGlobalString) { + FieldTrialsAllowedInScopeForTesting k({"MyCoolTrial", "MyUncoolTrial"}); + ScopedFieldTrials g("MyCoolTrial/Enabled/MyUncoolTrial/Disabled/"); FieldTrialBasedConfig f; + f.RegisterKeysForTesting({"MyCoolTrial", "MyUncoolTrial"}); + EXPECT_TRUE(f.IsEnabled("MyCoolTrial")); EXPECT_TRUE(f.IsDisabled("MyUncoolTrial")); } diff --git a/api/field_trials_view.h b/api/field_trials_view.h index 299205d1d3..45e6f7899b 100644 --- a/api/field_trials_view.h +++ b/api/field_trials_view.h @@ -17,15 +17,18 @@ namespace webrtc { -// An interface that provides a key-value mapping for configuring internal -// details of WebRTC. Note that there's no guarantess that the meaning of a -// particular key value mapping will be preserved over time and no announcements -// will be made if they are changed. It's up to the library user to ensure that -// the behavior does not break. +// An interface that provides the means to access field trials. 
+// +// Note that there are no guarantees that the meaning of a particular key-value +// mapping will be preserved over time and no announcements will be made if they +// are changed. It's up to the library user to ensure that the behavior does not +// break. class RTC_EXPORT FieldTrialsView { public: virtual ~FieldTrialsView() = default; - // The configured value for the given key. Defaults to an empty string. + + // Returns the configured value for `key` or an empty string if the field + // trial isn't configured. virtual std::string Lookup(absl::string_view key) const = 0; bool IsEnabled(absl::string_view key) const { diff --git a/api/frame_transformer_factory.cc b/api/frame_transformer_factory.cc new file mode 100644 index 0000000000..af08372e37 --- /dev/null +++ b/api/frame_transformer_factory.cc @@ -0,0 +1,33 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/frame_transformer_factory.h" + +#include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" + +namespace webrtc { + +std::unique_ptr CreateVideoSenderFrame() { + RTC_CHECK_NOTREACHED(); + return nullptr; +} + +std::unique_ptr CreateVideoReceiverFrame() { + RTC_CHECK_NOTREACHED(); + return nullptr; +} + +std::unique_ptr CloneVideoFrame( + TransformableVideoFrameInterface* original) { + // At the moment, only making sender frames from receiver frames is supported.
+ return CloneSenderVideoFrame(original); +} + +} // namespace webrtc diff --git a/api/frame_transformer_factory.h b/api/frame_transformer_factory.h new file mode 100644 index 0000000000..8ba9c292d5 --- /dev/null +++ b/api/frame_transformer_factory.h @@ -0,0 +1,39 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FRAME_TRANSFORMER_FACTORY_H_ +#define API_FRAME_TRANSFORMER_FACTORY_H_ + +#include +#include + +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_frame.h" +#include "api/video/video_frame_metadata.h" + +// This file contains EXPERIMENTAL functions to create video frames from +// either an old video frame or directly from parameters. +// These functions will be used in Chrome functionality to manipulate +// encoded frames from Javascript. +namespace webrtc { + +// TODO(bugs.webrtc.org/14708): Add the required parameters to these APIs. +std::unique_ptr CreateVideoSenderFrame(); +// TODO(bugs.webrtc.org/14708): Consider whether Receiver frames ever make sense +// to create. +std::unique_ptr CreateVideoReceiverFrame(); +// Creates a new frame with the same metadata as the original. +// The original can be a sender or receiver frame. 
+RTC_EXPORT std::unique_ptr CloneVideoFrame( + TransformableVideoFrameInterface* original); +} // namespace webrtc + +#endif // API_FRAME_TRANSFORMER_FACTORY_H_ diff --git a/api/stats_types.cc b/api/legacy_stats_types.cc similarity index 99% rename from api/stats_types.cc rename to api/legacy_stats_types.cc index 61a0b8499d..e3b2144edd 100644 --- a/api/stats_types.cc +++ b/api/legacy_stats_types.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/stats_types.h" +#include "api/legacy_stats_types.h" #include diff --git a/api/stats_types.h b/api/legacy_stats_types.h similarity index 99% rename from api/stats_types.h rename to api/legacy_stats_types.h index d75da46439..a62e014834 100644 --- a/api/stats_types.h +++ b/api/legacy_stats_types.h @@ -11,8 +11,8 @@ // This file contains structures used for retrieving statistics from an ongoing // libjingle session. -#ifndef API_STATS_TYPES_H_ -#define API_STATS_TYPES_H_ +#ifndef API_LEGACY_STATS_TYPES_H_ +#define API_LEGACY_STATS_TYPES_H_ #include #include @@ -452,4 +452,4 @@ class StatsCollection { } // namespace webrtc -#endif // API_STATS_TYPES_H_ +#endif // API_LEGACY_STATS_TYPES_H_ diff --git a/modules/audio_coding/codecs/isac/fix/source/audio_decoder_isacfix.cc b/api/metronome/metronome.cc similarity index 57% rename from modules/audio_coding/codecs/isac/fix/source/audio_decoder_isacfix.cc rename to api/metronome/metronome.cc index 21259ee2e2..8d74f928a0 100644 --- a/modules/audio_coding/codecs/isac/fix/source/audio_decoder_isacfix.cc +++ b/api/metronome/metronome.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h" +#include "api/metronome/metronome.h" namespace webrtc { -// Explicit instantiation: -template class AudioDecoderIsacT; +// TODO(crbug.com/1381982): Remove outdated methods. +void Metronome::AddListener(TickListener* listener) {} +void Metronome::RemoveListener(TickListener* listener) {} } // namespace webrtc diff --git a/api/metronome/metronome.h b/api/metronome/metronome.h index fc5f350db2..a312b1c862 100644 --- a/api/metronome/metronome.h +++ b/api/metronome/metronome.h @@ -17,44 +17,26 @@ namespace webrtc { -// The Metronome posts OnTick() on task queues provided by its listeners' task -// queue periodically. The metronome can be used as an alternative to using -// PostDelayedTask on a thread or task queue for coalescing work and reducing -// the number of idle-wakeups. -// -// Listeners can be added and removed from any sequence, but it is illegal to -// remove a listener from an OnTick invocation. +// The Metronome posts OnTick() calls requested with RequestCallOnNextTick. +// The API is designed to be fully used from a single task queue. Scheduled +// callbacks are executed on the same sequence as they were requested on. There +// are no features implemented for cancellation. When that's needed, use e.g. +// ScopedTaskSafety from the client. // // The metronome concept is still under experimentation, and may not be availble // in all platforms or applications. See https://crbug.com/1253787 for more // details. // -// Metronome implementations must be thread-safe. +// Metronome implementations must be thread-compatible. class RTC_EXPORT Metronome { public: - class RTC_EXPORT TickListener { - public: - virtual ~TickListener() = default; - - // OnTick is run on the task queue provided by OnTickTaskQueue each time the - // metronome ticks. 
- virtual void OnTick() = 0; - - // The task queue that OnTick will run on. Must not be null. - virtual TaskQueueBase* OnTickTaskQueue() = 0; - }; - virtual ~Metronome() = default; - // Adds a tick listener to the metronome. Once this method has returned - // OnTick will be invoked on each metronome tick. A listener may - // only be added to the metronome once. - virtual void AddListener(TickListener* listener) = 0; - - // Removes the tick listener from the metronome. Once this method has returned - // OnTick will never be called again. This method must not be called from - // within OnTick. - virtual void RemoveListener(TickListener* listener) = 0; + // Requests a call to `callback` on the next tick. Scheduled callbacks are + // executed on the same sequence as they were requested on. There are no + // features for cancellation. When that's needed, use e.g. ScopedTaskSafety + // from the client. + virtual void RequestCallOnNextTick(absl::AnyInvocable callback) {} // Returns the current tick period of the metronome. 
virtual TimeDelta TickPeriod() const = 0; diff --git a/api/metronome/test/BUILD.gn b/api/metronome/test/BUILD.gn index 0ea13b3de5..f415d98a0b 100644 --- a/api/metronome/test/BUILD.gn +++ b/api/metronome/test/BUILD.gn @@ -23,6 +23,7 @@ rtc_library("fake_metronome") { "../../../rtc_base:rtc_task_queue", "../../../rtc_base/synchronization:mutex", "../../../rtc_base/task_utils:repeating_task", + "../../../test:test_support", "../../task_queue", "../../units:time_delta", ] diff --git a/api/metronome/test/fake_metronome.cc b/api/metronome/test/fake_metronome.cc index cb471b9ba9..025f7ce5a6 100644 --- a/api/metronome/test/fake_metronome.cc +++ b/api/metronome/test/fake_metronome.cc @@ -10,8 +10,12 @@ #include "api/metronome/test/fake_metronome.h" +#include +#include + #include "api/priority.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" #include "rtc_base/event.h" @@ -22,12 +26,9 @@ namespace webrtc::test { ForcedTickMetronome::ForcedTickMetronome(TimeDelta tick_period) : tick_period_(tick_period) {} -void ForcedTickMetronome::AddListener(TickListener* listener) { - listeners_.insert(listener); -} - -void ForcedTickMetronome::RemoveListener(TickListener* listener) { - listeners_.erase(listener); +void ForcedTickMetronome::RequestCallOnNextTick( + absl::AnyInvocable callback) { + callbacks_.push_back(std::move(callback)); } TimeDelta ForcedTickMetronome::TickPeriod() const { @@ -35,55 +36,35 @@ TimeDelta ForcedTickMetronome::TickPeriod() const { } size_t ForcedTickMetronome::NumListeners() { - return listeners_.size(); + return callbacks_.size(); } void ForcedTickMetronome::Tick() { - for (auto* listener : listeners_) { - listener->OnTickTaskQueue()->PostTask([listener] { listener->OnTick(); }); + std::vector> callbacks; + callbacks_.swap(callbacks); + for (auto& callback : callbacks) + std::move(callback)(); +} + +FakeMetronome::FakeMetronome(TimeDelta 
tick_period) + : tick_period_(tick_period) {} + +void FakeMetronome::RequestCallOnNextTick( + absl::AnyInvocable callback) { + TaskQueueBase* current = TaskQueueBase::Current(); + callbacks_.push_back(std::move(callback)); + if (callbacks_.size() == 1) { + current->PostDelayedTask( + [this] { + std::vector> callbacks; + callbacks_.swap(callbacks); + for (auto& callback : callbacks) + std::move(callback)(); + }, + tick_period_); } } -FakeMetronome::FakeMetronome(TaskQueueFactory* factory, TimeDelta tick_period) - : tick_period_(tick_period), - queue_(factory->CreateTaskQueue("MetronomeQueue", - TaskQueueFactory::Priority::HIGH)) {} - -FakeMetronome::~FakeMetronome() { - RTC_DCHECK(listeners_.empty()); -} - -void FakeMetronome::AddListener(TickListener* listener) { - MutexLock lock(&mutex_); - listeners_.insert(listener); - if (!started_) { - tick_task_ = RepeatingTaskHandle::Start(queue_.Get(), [this] { - MutexLock lock(&mutex_); - // Stop if empty. - if (listeners_.empty()) - return TimeDelta::PlusInfinity(); - for (auto* listener : listeners_) { - listener->OnTickTaskQueue()->PostTask( - [listener] { listener->OnTick(); }); - } - return tick_period_; - }); - started_ = true; - } -} - -void FakeMetronome::RemoveListener(TickListener* listener) { - MutexLock lock(&mutex_); - listeners_.erase(listener); -} - -void FakeMetronome::Stop() { - MutexLock lock(&mutex_); - RTC_DCHECK(listeners_.empty()); - if (started_) - queue_.PostTask([this] { tick_task_.Stop(); }); -} - TimeDelta FakeMetronome::TickPeriod() const { return tick_period_; } diff --git a/api/metronome/test/fake_metronome.h b/api/metronome/test/fake_metronome.h index 28a79e06ff..73c938e9cd 100644 --- a/api/metronome/test/fake_metronome.h +++ b/api/metronome/test/fake_metronome.h @@ -13,6 +13,7 @@ #include #include +#include #include "api/metronome/metronome.h" #include "api/task_queue/task_queue_base.h" @@ -36,13 +37,12 @@ class ForcedTickMetronome : public Metronome { size_t NumListeners(); // Metronome 
implementation. - void AddListener(TickListener* listener) override; - void RemoveListener(TickListener* listener) override; + void RequestCallOnNextTick(absl::AnyInvocable callback) override; TimeDelta TickPeriod() const override; private: const TimeDelta tick_period_; - std::set listeners_; + std::vector> callbacks_; }; // FakeMetronome is a metronome that ticks based on a repeating task at the @@ -53,23 +53,15 @@ class ForcedTickMetronome : public Metronome { // on the proper task queue. class FakeMetronome : public Metronome { public: - FakeMetronome(TaskQueueFactory* factory, TimeDelta tick_period); - ~FakeMetronome() override; + explicit FakeMetronome(TimeDelta tick_period); // Metronome implementation. - void AddListener(TickListener* listener) override; - void RemoveListener(TickListener* listener) override; + void RequestCallOnNextTick(absl::AnyInvocable callback) override; TimeDelta TickPeriod() const override; - void Stop(); - private: const TimeDelta tick_period_; - RepeatingTaskHandle tick_task_; - bool started_ RTC_GUARDED_BY(mutex_) = false; - std::set listeners_ RTC_GUARDED_BY(mutex_); - Mutex mutex_; - rtc::TaskQueue queue_; + std::vector> callbacks_; }; } // namespace webrtc::test diff --git a/api/neteq/neteq.h b/api/neteq/neteq.h index ffc3958345..5300c5601e 100644 --- a/api/neteq/neteq.h +++ b/api/neteq/neteq.h @@ -128,7 +128,7 @@ class NetEq { std::string ToString() const; - int sample_rate_hz = 16000; // Initial value. Will change with input data. + int sample_rate_hz = 48000; // Initial value. Will change with input data. 
bool enable_post_decode_vad = false; size_t max_packets_in_buffer = 200; int max_delay_ms = 0; diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h index 5addbe3bef..c15ac34929 100644 --- a/api/peer_connection_interface.h +++ b/api/peer_connection_interface.h @@ -95,6 +95,7 @@ #include "api/ice_gatherer_interface.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/metronome/metronome.h" @@ -113,7 +114,6 @@ #include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" #include "api/stats/rtc_stats_collector_callback.h" -#include "api/stats_types.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" #include "api/transport/enums.h" @@ -434,11 +434,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // default will be used. ////////////////////////////////////////////////////////////////////////// - // If set to true, don't gather IPv6 ICE candidates. - // TODO(https://crbug.com/1315576): Remove the ability to set it in Chromium - // and delete this flag. - bool disable_ipv6 = false; - // If set to true, don't gather IPv6 ICE candidates on Wi-Fi. // Only intended to be used on specific devices. Certain phones disable IPv6 // when the screen is turned off and it would be better to just disable the @@ -702,6 +697,9 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { PortAllocatorConfig port_allocator_config; + // The burst interval of the pacer, see TaskQueuePacedSender constructor. + absl::optional pacer_burst_interval; + // // Don't forget to update operator== if adding something. 
// @@ -813,6 +811,16 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { rtc::scoped_refptr track, const std::vector& stream_ids) = 0; + // Add a new MediaStreamTrack as above, but with an additional parameter, + // `init_send_encodings` : initial RtpEncodingParameters for RtpSender, + // similar to init_send_encodings in RtpTransceiverInit. + // Note that a new transceiver will always be created. + // + virtual RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) = 0; + // Removes the connection between a MediaStreamTrack and the PeerConnection. // Stops sending on the RtpSender and marks the // corresponding RtpTransceiver direction as no longer sending. diff --git a/modules/audio_coding/codecs/isac/fix/source/audio_encoder_isacfix.cc b/api/rtp_sender_interface.cc similarity index 52% rename from modules/audio_coding/codecs/isac/fix/source/audio_encoder_isacfix.cc rename to api/rtp_sender_interface.cc index 0190ab91b6..f1ca5c2203 100644 --- a/modules/audio_coding/codecs/isac/fix/source/audio_encoder_isacfix.cc +++ b/api/rtp_sender_interface.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * Copyright 2022 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h" +#include "api/rtp_sender_interface.h" -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h" +#include "rtc_base/checks.h" namespace webrtc { -// Explicit instantiation: -template class AudioEncoderIsacT; +void RtpSenderInterface::SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback) { + RTC_DCHECK_NOTREACHED() << "Default implementation called"; +} } // namespace webrtc diff --git a/api/rtp_sender_interface.h b/api/rtp_sender_interface.h index 500bd252b8..2786a2ac19 100644 --- a/api/rtp_sender_interface.h +++ b/api/rtp_sender_interface.h @@ -18,6 +18,7 @@ #include #include +#include "absl/functional/any_invocable.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/dtls_transport_interface.h" #include "api/dtmf_sender_interface.h" @@ -33,6 +34,8 @@ namespace webrtc { +using SetParametersCallback = absl::AnyInvocable; + class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { public: // Returns true if successful in setting the track. @@ -79,6 +82,8 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { // rtpparameters.h // The encodings are in increasing quality order for simulcast. virtual RTCError SetParameters(const RtpParameters& parameters) = 0; + virtual void SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback); // Returns null for a video sender. virtual rtc::scoped_refptr GetDtmfSender() const = 0; @@ -104,6 +109,11 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { std::unique_ptr encoder_selector) = 0; + // TODO(crbug.com/1354101): make pure virtual again after Chrome roll. 
+ virtual RTCError GenerateKeyFrame(const std::vector& rids) { + return RTCError::OK(); + } + protected: ~RtpSenderInterface() override = default; }; diff --git a/api/stats/rtc_stats.h b/api/stats/rtc_stats.h index e22043df5c..273ea316cb 100644 --- a/api/stats/rtc_stats.h +++ b/api/stats/rtc_stats.h @@ -20,6 +20,7 @@ #include #include +#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/system/rtc_export_template.h" @@ -260,7 +261,7 @@ class RTCStatsMemberInterface { virtual Type type() const = 0; virtual bool is_sequence() const = 0; virtual bool is_string() const = 0; - bool is_defined() const { return is_defined_; } + virtual bool is_defined() const = 0; // Is this part of the stats spec? Used so that chromium can easily filter // out anything unstandardized. virtual bool is_standardized() const = 0; @@ -296,13 +297,11 @@ class RTCStatsMemberInterface { } protected: - RTCStatsMemberInterface(const char* name, bool is_defined) - : name_(name), is_defined_(is_defined) {} + explicit RTCStatsMemberInterface(const char* name) : name_(name) {} virtual bool IsEqual(const RTCStatsMemberInterface& other) const = 0; const char* const name_; - bool is_defined_; }; // Template implementation of `RTCStatsMemberInterface`. 
@@ -312,70 +311,58 @@ template class RTCStatsMember : public RTCStatsMemberInterface { public: explicit RTCStatsMember(const char* name) - : RTCStatsMemberInterface(name, - /*is_defined=*/false), - value_() {} + : RTCStatsMemberInterface(name), value_() {} RTCStatsMember(const char* name, const T& value) - : RTCStatsMemberInterface(name, - /*is_defined=*/true), - value_(value) {} + : RTCStatsMemberInterface(name), value_(value) {} RTCStatsMember(const char* name, T&& value) - : RTCStatsMemberInterface(name, - /*is_defined=*/true), - value_(std::move(value)) {} - RTCStatsMember(const RTCStatsMember& other) - : RTCStatsMemberInterface(other.name_, other.is_defined_), - value_(other.value_) {} - RTCStatsMember(RTCStatsMember&& other) - : RTCStatsMemberInterface(other.name_, other.is_defined_), - value_(std::move(other.value_)) {} + : RTCStatsMemberInterface(name), value_(std::move(value)) {} + explicit RTCStatsMember(const RTCStatsMember& other) + : RTCStatsMemberInterface(other.name_), value_(other.value_) {} + explicit RTCStatsMember(RTCStatsMember&& other) + : RTCStatsMemberInterface(other.name_), value_(std::move(other.value_)) {} static Type StaticType(); Type type() const override { return StaticType(); } bool is_sequence() const override; bool is_string() const override; + bool is_defined() const override { return value_.has_value(); } bool is_standardized() const override { return true; } std::string ValueToString() const override; std::string ValueToJson() const override; template inline T ValueOrDefault(U default_value) const { - if (is_defined()) { - return *(*this); - } - return default_value; + return value_.value_or(default_value); } // Assignment operators. T& operator=(const T& value) { value_ = value; - is_defined_ = true; - return value_; + return value_.value(); } T& operator=(const T&& value) { value_ = std::move(value); - is_defined_ = true; - return value_; + return value_.value(); } // Value getters. 
T& operator*() { - RTC_DCHECK(is_defined_); - return value_; + RTC_DCHECK(value_); + return *value_; } const T& operator*() const { - RTC_DCHECK(is_defined_); - return value_; + RTC_DCHECK(value_); + return *value_; } // Value getters, arrow operator. T* operator->() { - RTC_DCHECK(is_defined_); - return &value_; + RTC_DCHECK(value_); + return &(*value_); } const T* operator->() const { - RTC_DCHECK(is_defined_); - return &value_; + RTC_DCHECK(value_); + return &(*value_); } protected: @@ -386,15 +373,11 @@ class RTCStatsMember : public RTCStatsMemberInterface { return false; const RTCStatsMember& other_t = static_cast&>(other); - if (!is_defined_) - return !other_t.is_defined(); - if (!other.is_defined()) - return false; return value_ == other_t.value_; } private: - T value_; + absl::optional value_; }; namespace rtc_stats_internal { diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h index 749895072d..c4efd1c84a 100644 --- a/api/stats/rtcstats_objects.h +++ b/api/stats/rtcstats_objects.h @@ -222,6 +222,8 @@ class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats { RTCStatsMember consent_requests_sent; RTCStatsMember packets_discarded_on_send; RTCStatsMember bytes_discarded_on_send; + RTCStatsMember last_packet_received_timestamp; + RTCStatsMember last_packet_sent_timestamp; }; // https://w3c.github.io/webrtc-stats/#icecandidate-dict* @@ -286,35 +288,36 @@ class RTC_EXPORT RTCRemoteIceCandidateStats final const char* type() const override; }; -// https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamstats -// TODO(https://crbug.com/webrtc/14172): Deprecate and remove. -class RTC_EXPORT RTCMediaStreamStats final : public RTCStats { +// TODO(https://crbug.com/webrtc/14419): Delete this class, it's deprecated. 
+class RTC_EXPORT DEPRECATED_RTCMediaStreamStats final : public RTCStats { public: WEBRTC_RTCSTATS_DECL(); - RTCMediaStreamStats(const std::string& id, int64_t timestamp_us); - RTCMediaStreamStats(std::string&& id, int64_t timestamp_us); - RTCMediaStreamStats(const RTCMediaStreamStats& other); - ~RTCMediaStreamStats() override; + DEPRECATED_RTCMediaStreamStats(const std::string& id, int64_t timestamp_us); + DEPRECATED_RTCMediaStreamStats(std::string&& id, int64_t timestamp_us); + DEPRECATED_RTCMediaStreamStats(const DEPRECATED_RTCMediaStreamStats& other); + ~DEPRECATED_RTCMediaStreamStats() override; RTCStatsMember stream_identifier; RTCStatsMember> track_ids; }; +using RTCMediaStreamStats [[deprecated("bugs.webrtc.org/14419")]] = + DEPRECATED_RTCMediaStreamStats; -// TODO(https://crbug.com/webrtc/14175): Deprecate and remove in favor of -// RTCMediaSourceStats/RTCOutboundRtpStreamStats and RTCInboundRtpStreamStats. -class RTC_EXPORT RTCMediaStreamTrackStats final : public RTCStats { +// TODO(https://crbug.com/webrtc/14175): Delete this class, it's deprecated. 
+class RTC_EXPORT DEPRECATED_RTCMediaStreamTrackStats final : public RTCStats { public: WEBRTC_RTCSTATS_DECL(); - RTCMediaStreamTrackStats(const std::string& id, - int64_t timestamp_us, - const char* kind); - RTCMediaStreamTrackStats(std::string&& id, - int64_t timestamp_us, - const char* kind); - RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other); - ~RTCMediaStreamTrackStats() override; + DEPRECATED_RTCMediaStreamTrackStats(const std::string& id, + int64_t timestamp_us, + const char* kind); + DEPRECATED_RTCMediaStreamTrackStats(std::string&& id, + int64_t timestamp_us, + const char* kind); + DEPRECATED_RTCMediaStreamTrackStats( + const DEPRECATED_RTCMediaStreamTrackStats& other); + ~DEPRECATED_RTCMediaStreamTrackStats() override; RTCStatsMember track_identifier; RTCStatsMember media_source_id; @@ -347,22 +350,9 @@ class RTC_EXPORT RTCMediaStreamTrackStats final : public RTCStats { RTCStatsMember concealment_events; RTCStatsMember inserted_samples_for_deceleration; RTCStatsMember removed_samples_for_acceleration; - // TODO(crbug.com/webrtc/14524): These metrics have been moved, delete them. - RTCNonStandardStatsMember jitter_buffer_flushes; - RTCNonStandardStatsMember delayed_packet_outage_samples; - RTCNonStandardStatsMember relative_packet_arrival_delay; - RTCNonStandardStatsMember interruption_count; - RTCNonStandardStatsMember total_interruption_duration; - // Non-standard video-only members. - // https://w3c.github.io/webrtc-provisional-stats/#dom-rtcvideoreceiverstats - RTCNonStandardStatsMember total_frames_duration; - RTCNonStandardStatsMember sum_squared_frame_durations; - // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete them. 
- RTCNonStandardStatsMember freeze_count; - RTCNonStandardStatsMember pause_count; - RTCNonStandardStatsMember total_freezes_duration; - RTCNonStandardStatsMember total_pauses_duration; }; +using RTCMediaStreamTrackStats [[deprecated("bugs.webrtc.org/14175")]] = + DEPRECATED_RTCMediaStreamTrackStats; // https://w3c.github.io/webrtc-stats/#pcstats-dict* class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats { @@ -479,9 +469,8 @@ class RTC_EXPORT RTCInboundRTPStreamStats final RTCStatsMember frames_dropped; RTCStatsMember total_decode_time; RTCStatsMember total_processing_delay; - // TODO(https://crbug.com/webrtc/13986): standardize - RTCNonStandardStatsMember total_assembly_time; - RTCNonStandardStatsMember frames_assembled_from_multiple_packets; + RTCStatsMember total_assembly_time; + RTCStatsMember frames_assembled_from_multiple_packets; RTCStatsMember total_inter_frame_delay; RTCStatsMember total_squared_inter_frame_delay; RTCStatsMember pause_count; @@ -495,7 +484,9 @@ class RTC_EXPORT RTCInboundRTPStreamStats final RTCStatsMember estimated_playout_timestamp; // Only implemented for video. // TODO(https://crbug.com/webrtc/14178): Also implement for audio. - RTCStatsMember decoder_implementation; + RTCRestrictedStatsMember + decoder_implementation; // FIR and PLI counts are only defined for |kind == "video"|. RTCStatsMember fir_count; RTCStatsMember pli_count; @@ -508,6 +499,8 @@ class RTC_EXPORT RTCInboundRTPStreamStats final // TimingFrameInfo::ToString(). // TODO(https://crbug.com/webrtc/14586): Unship or standardize this metric. RTCStatsMember goog_timing_frame_info; + RTCRestrictedStatsMember + power_efficient_decoder; // Non-standard audio metrics. 
RTCNonStandardStatsMember jitter_buffer_flushes; RTCNonStandardStatsMember delayed_packet_outage_samples; @@ -548,8 +541,6 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember frames_per_second; RTCStatsMember frames_sent; RTCStatsMember huge_frames_sent; - // TODO(https://crbug.com/webrtc/10635): This is only implemented for video; - // implement it for audio as well. RTCStatsMember total_packet_send_delay; // Enum type RTCQualityLimitationReason RTCStatsMember quality_limitation_reason; @@ -560,13 +551,18 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember content_type; // Only implemented for video. // TODO(https://crbug.com/webrtc/14178): Implement for audio as well. - RTCStatsMember encoder_implementation; + RTCRestrictedStatsMember + encoder_implementation; // FIR and PLI counts are only defined for |kind == "video"|. RTCStatsMember fir_count; RTCStatsMember pli_count; RTCStatsMember nack_count; RTCStatsMember qp_sum; RTCStatsMember active; + RTCRestrictedStatsMember + power_efficient_encoder; + RTCStatsMember scalability_mode; }; // https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict* diff --git a/api/test/compile_all_headers.cc b/api/test/compile_all_headers.cc index 6e0db4fede..1fcf63e97b 100644 --- a/api/test/compile_all_headers.cc +++ b/api/test/compile_all_headers.cc @@ -42,6 +42,7 @@ #include "api/test/mock_rtp_transceiver.h" #include "api/test/mock_rtpreceiver.h" #include "api/test/mock_rtpsender.h" +#include "api/test/mock_session_description_interface.h" #include "api/test/mock_transformable_video_frame.h" #include "api/test/mock_video_bitrate_allocator.h" #include "api/test/mock_video_bitrate_allocator_factory.h" diff --git a/api/test/create_network_emulation_manager.cc b/api/test/create_network_emulation_manager.cc index 089a2f8a86..f5d5a1bc88 100644 --- a/api/test/create_network_emulation_manager.cc +++ b/api/test/create_network_emulation_manager.cc 
@@ -18,8 +18,10 @@ namespace webrtc { std::unique_ptr CreateNetworkEmulationManager( - TimeMode mode) { - return std::make_unique(mode); + TimeMode time_mode, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) { + return std::make_unique( + time_mode, stats_gathering_mode); } } // namespace webrtc diff --git a/api/test/create_network_emulation_manager.h b/api/test/create_network_emulation_manager.h index f444743786..941b2b1c52 100644 --- a/api/test/create_network_emulation_manager.h +++ b/api/test/create_network_emulation_manager.h @@ -19,7 +19,9 @@ namespace webrtc { // Returns a non-null NetworkEmulationManager instance. std::unique_ptr CreateNetworkEmulationManager( - TimeMode mode = TimeMode::kRealTime); + TimeMode time_mode = TimeMode::kRealTime, + EmulatedNetworkStatsGatheringMode stats_gathering_mode = + EmulatedNetworkStatsGatheringMode::kDefault); } // namespace webrtc diff --git a/api/test/create_peer_connection_quality_test_frame_generator.cc b/api/test/create_peer_connection_quality_test_frame_generator.cc index 29eb41ca42..a1c53635f9 100644 --- a/api/test/create_peer_connection_quality_test_frame_generator.cc +++ b/api/test/create_peer_connection_quality_test_frame_generator.cc @@ -14,18 +14,13 @@ #include #include "api/test/create_frame_generator.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" #include "rtc_base/checks.h" #include "test/testsupport/file_utils.h" namespace webrtc { namespace webrtc_pc_e2e { -using VideoConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; -using ScreenShareConfig = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::ScreenShareConfig; - void ValidateScreenShareConfig(const VideoConfig& video_config, const ScreenShareConfig& screen_share_config) { if (screen_share_config.slides_yuv_file_names.empty()) { diff --git a/api/test/create_peer_connection_quality_test_frame_generator.h 
b/api/test/create_peer_connection_quality_test_frame_generator.h index ab3f65aa57..62043d140a 100644 --- a/api/test/create_peer_connection_quality_test_frame_generator.h +++ b/api/test/create_peer_connection_quality_test_frame_generator.h @@ -15,7 +15,7 @@ #include "absl/types/optional.h" #include "api/test/frame_generator_interface.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -25,19 +25,18 @@ namespace webrtc_pc_e2e { // FrameGeneratorInterface::OutputType::I420. video_config specifies frame // weight and height. std::unique_ptr CreateSquareFrameGenerator( - const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + const VideoConfig& video_config, absl::optional type); // Creates a frame generator that plays frames from the yuv file. std::unique_ptr CreateFromYuvFileFrameGenerator( - const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + const VideoConfig& video_config, std::string filename); // Creates a proper frame generator for testing screen sharing. std::unique_ptr CreateScreenShareFrameGenerator( - const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, - const PeerConnectionE2EQualityTestFixture::ScreenShareConfig& - screen_share_config); + const VideoConfig& video_config, + const ScreenShareConfig& screen_share_config); } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/api/test/create_video_codec_tester.cc b/api/test/create_video_codec_tester.cc new file mode 100644 index 0000000000..a1efefdb48 --- /dev/null +++ b/api/test/create_video_codec_tester.cc @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/create_video_codec_tester.h" + +#include +#include + +#include "api/test/video_codec_tester.h" +#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" + +namespace webrtc { +namespace test { + +std::unique_ptr CreateVideoCodecTester() { + return std::make_unique(); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc b/api/test/create_video_codec_tester.h similarity index 52% rename from modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc rename to api/test/create_video_codec_tester.h index b671002e1e..c68864ce85 100644 --- a/modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc +++ b/api/test/create_video_codec_tester.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" +#ifndef API_TEST_CREATE_VIDEO_CODEC_TESTER_H_ +#define API_TEST_CREATE_VIDEO_CODEC_TESTER_H_ -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h" +#include + +#include "api/test/video_codec_tester.h" namespace webrtc { +namespace test { -// Explicit instantiation: -template class AudioDecoderIsacT; +std::unique_ptr CreateVideoCodecTester(); +} // namespace test } // namespace webrtc + +#endif // API_TEST_CREATE_VIDEO_CODEC_TESTER_H_ diff --git a/api/test/metrics/BUILD.gn b/api/test/metrics/BUILD.gn index b635cf2943..309b699329 100644 --- a/api/test/metrics/BUILD.gn +++ b/api/test/metrics/BUILD.gn @@ -18,7 +18,6 @@ group("metrics") { ":metrics_accumulator", ":metrics_exporter", ":metrics_logger", - ":metrics_logger_and_exporter", ":stdout_metrics_exporter", ] } @@ -30,7 +29,6 @@ if (rtc_include_tests) { deps = [ ":global_metrics_logger_and_exporter_test", ":metrics_accumulator_test", - ":metrics_logger_and_exporter_test", ":metrics_logger_test", ":print_result_proxy_metrics_exporter_test", ":stdout_metrics_exporter_test", @@ -176,29 +174,6 @@ rtc_library("print_result_proxy_metrics_exporter") { ] } -rtc_library("metrics_logger_and_exporter") { - visibility = [ "*" ] - sources = [ - "metrics_logger_and_exporter.cc", - "metrics_logger_and_exporter.h", - ] - deps = [ - ":metric", - ":metrics_exporter", - ":metrics_logger", - "../../../rtc_base:checks", - "../../../rtc_base:logging", - "../../../rtc_base/synchronization:mutex", - "../../../system_wrappers", - "../../numerics", - ] - - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - rtc_library("global_metrics_logger_and_exporter") { visibility = [ "*" ] sources = [ @@ -208,7 +183,6 @@ rtc_library("global_metrics_logger_and_exporter") { deps = [ ":metrics_exporter", ":metrics_logger", - ":metrics_logger_and_exporter", "../../../rtc_base:checks", 
"../../../system_wrappers", ] @@ -262,20 +236,6 @@ if (rtc_include_tests) { ] } - rtc_library("metrics_logger_and_exporter_test") { - testonly = true - sources = [ "metrics_logger_and_exporter_test.cc" ] - deps = [ - ":metric", - ":metrics_exporter", - ":metrics_logger_and_exporter", - "../../../system_wrappers", - "../../../test:test_support", - "../../numerics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - rtc_library("global_metrics_logger_and_exporter_test") { testonly = true sources = [ "global_metrics_logger_and_exporter_test.cc" ] @@ -284,7 +244,6 @@ if (rtc_include_tests) { ":metric", ":metrics_exporter", ":metrics_logger", - ":metrics_logger_and_exporter", "../../../system_wrappers", "../../../test:test_support", ] diff --git a/api/test/metrics/global_metrics_logger_and_exporter.cc b/api/test/metrics/global_metrics_logger_and_exporter.cc index 9c3c8978f5..2d42a976aa 100644 --- a/api/test/metrics/global_metrics_logger_and_exporter.cc +++ b/api/test/metrics/global_metrics_logger_and_exporter.cc @@ -15,7 +15,6 @@ #include "api/test/metrics/metrics_exporter.h" #include "api/test/metrics/metrics_logger.h" -#include "api/test/metrics/metrics_logger_and_exporter.h" #include "rtc_base/checks.h" #include "system_wrappers/include/clock.h" diff --git a/api/test/metrics/global_metrics_logger_and_exporter.h b/api/test/metrics/global_metrics_logger_and_exporter.h index 42bdf93c12..f77ff1c737 100644 --- a/api/test/metrics/global_metrics_logger_and_exporter.h +++ b/api/test/metrics/global_metrics_logger_and_exporter.h @@ -15,7 +15,7 @@ #include #include "api/test/metrics/metrics_exporter.h" -#include "api/test/metrics/metrics_logger_and_exporter.h" +#include "api/test/metrics/metrics_logger.h" namespace webrtc { namespace test { diff --git a/api/test/metrics/metrics_logger_and_exporter.cc b/api/test/metrics/metrics_logger_and_exporter.cc deleted file mode 100644 index 9f91eac334..0000000000 --- 
a/api/test/metrics/metrics_logger_and_exporter.cc +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "api/test/metrics/metrics_logger_and_exporter.h" - -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/numerics/samples_stats_counter.h" -#include "api/test/metrics/metric.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/synchronization/mutex.h" - -namespace webrtc { -namespace test { -namespace { - -Metric::Stats ToStats(const SamplesStatsCounter& values) { - if (values.IsEmpty()) { - return Metric::Stats(); - } - return Metric::Stats{.mean = values.GetAverage(), - .stddev = values.GetStandardDeviation(), - .min = values.GetMin(), - .max = values.GetMax()}; -} - -} // namespace - -MetricsLoggerAndExporter::~MetricsLoggerAndExporter() { - bool export_result = Export(); - if (crash_on_export_failure_) { - RTC_CHECK(export_result); - } else { - RTC_LOG(LS_ERROR) << "One of exporters failed to export collected metrics"; - } -} - -void MetricsLoggerAndExporter::LogSingleValueMetric( - absl::string_view name, - absl::string_view test_case_name, - double value, - Unit unit, - ImprovementDirection improvement_direction, - std::map metadata) { - MutexLock lock(&mutex_); - metrics_.push_back(Metric{ - .name = std::string(name), - .unit = unit, - .improvement_direction = improvement_direction, - .test_case = std::string(test_case_name), - .metric_metadata = std::move(metadata), - .time_series = - Metric::TimeSeries{.samples = std::vector{Metric::TimeSeries::Sample{ - 
.timestamp = Now(), .value = value}}}, - .stats = Metric::Stats{ - .mean = value, .stddev = absl::nullopt, .min = value, .max = value}}); -} - -void MetricsLoggerAndExporter::LogMetric( - absl::string_view name, - absl::string_view test_case_name, - const SamplesStatsCounter& values, - Unit unit, - ImprovementDirection improvement_direction, - std::map metadata) { - MutexLock lock(&mutex_); - Metric::TimeSeries time_series; - for (const SamplesStatsCounter::StatsSample& sample : - values.GetTimedSamples()) { - time_series.samples.push_back( - Metric::TimeSeries::Sample{.timestamp = sample.time, - .value = sample.value, - .sample_metadata = sample.metadata}); - } - - metrics_.push_back(Metric{.name = std::string(name), - .unit = unit, - .improvement_direction = improvement_direction, - .test_case = std::string(test_case_name), - .metric_metadata = std::move(metadata), - .time_series = std::move(time_series), - .stats = ToStats(values)}); -} - -void MetricsLoggerAndExporter::LogMetric( - absl::string_view name, - absl::string_view test_case_name, - const Metric::Stats& metric_stats, - Unit unit, - ImprovementDirection improvement_direction, - std::map metadata) { - MutexLock lock(&mutex_); - metrics_.push_back(Metric{.name = std::string(name), - .unit = unit, - .improvement_direction = improvement_direction, - .test_case = std::string(test_case_name), - .metric_metadata = std::move(metadata), - .time_series = Metric::TimeSeries{.samples = {}}, - .stats = std::move(metric_stats)}); -} - -Timestamp MetricsLoggerAndExporter::Now() { - return clock_->CurrentTime(); -} - -bool MetricsLoggerAndExporter::Export() { - MutexLock lock(&mutex_); - bool success = true; - for (auto& exporter : exporters_) { - bool export_result = exporter->Export(metrics_); - success = success && export_result; - } - return success; -} - -} // namespace test -} // namespace webrtc diff --git a/api/test/metrics/metrics_logger_and_exporter.h b/api/test/metrics/metrics_logger_and_exporter.h deleted 
file mode 100644 index 562aa6e264..0000000000 --- a/api/test/metrics/metrics_logger_and_exporter.h +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_METRICS_METRICS_LOGGER_AND_EXPORTER_H_ -#define API_TEST_METRICS_METRICS_LOGGER_AND_EXPORTER_H_ - -#include -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/numerics/samples_stats_counter.h" -#include "api/test/metrics/metric.h" -#include "api/test/metrics/metrics_exporter.h" -#include "api/test/metrics/metrics_logger.h" -#include "rtc_base/synchronization/mutex.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { -namespace test { - -// Combines metrics logging and exporting to provide simple API to automatically -// export metrics at the end of the scope. -class MetricsLoggerAndExporter : public MetricsLogger { - public: - // `crash_on_export_failure` - makes MetricsLoggerAndExporter to crash if - // any of exporters failed to export data. - MetricsLoggerAndExporter( - webrtc::Clock* clock, - std::vector> exporters, - bool crash_on_export_failure = true) - : clock_(clock), - crash_on_export_failure_(crash_on_export_failure), - exporters_(std::move(exporters)) {} - ~MetricsLoggerAndExporter() override; - - // Adds a metric with a single value. - // `metadata` - metric's level metadata to add. 
- void LogSingleValueMetric( - absl::string_view name, - absl::string_view test_case_name, - double value, - Unit unit, - ImprovementDirection improvement_direction, - std::map metadata = {}) override; - - // Adds metrics with a time series created based on the provided `values`. - // `metadata` - metric's level metadata to add. - void LogMetric(absl::string_view name, - absl::string_view test_case_name, - const SamplesStatsCounter& values, - Unit unit, - ImprovementDirection improvement_direction, - std::map metadata = {}) override; - - // Adds metric with a time series with only stats object and without actual - // collected values. - // `metadata` - metric's level metadata to add. - void LogMetric(absl::string_view name, - absl::string_view test_case_name, - const Metric::Stats& metric_stats, - Unit unit, - ImprovementDirection improvement_direction, - std::map metadata = {}) override; - - // Returns all metrics collected by this logger. - std::vector GetCollectedMetrics() const override { - MutexLock lock(&mutex_); - return metrics_; - } - - private: - webrtc::Timestamp Now(); - bool Export(); - - webrtc::Clock* const clock_; - const bool crash_on_export_failure_; - - mutable Mutex mutex_; - std::vector metrics_ RTC_GUARDED_BY(mutex_); - std::vector> exporters_; -}; - -} // namespace test -} // namespace webrtc - -#endif // API_TEST_METRICS_METRICS_LOGGER_AND_EXPORTER_H_ diff --git a/api/test/metrics/metrics_logger_and_exporter_test.cc b/api/test/metrics/metrics_logger_and_exporter_test.cc deleted file mode 100644 index 65b1d8f68d..0000000000 --- a/api/test/metrics/metrics_logger_and_exporter_test.cc +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "api/test/metrics/metrics_logger_and_exporter.h" - -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/numerics/samples_stats_counter.h" -#include "api/test/metrics/metric.h" -#include "api/test/metrics/metrics_exporter.h" -#include "system_wrappers/include/clock.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { -namespace test { -namespace { - -using ::testing::Eq; -using ::testing::IsEmpty; - -std::map DefaultMetadata() { - return std::map{{"key", "value"}}; -} - -struct TestMetricsExporterFactory { - public: - std::unique_ptr CreateExporter() { - return std::make_unique(this, /*export_result=*/true); - } - - std::unique_ptr CreateFailureExporter() { - return std::make_unique(this, /*export_result=*/false); - } - - std::vector exported_metrics; - - private: - class TestMetricsExporter : public MetricsExporter { - public: - TestMetricsExporter(TestMetricsExporterFactory* factory, bool export_result) - : factory_(factory), export_result_(export_result) {} - ~TestMetricsExporter() override = default; - - bool Export(rtc::ArrayView metrics) override { - factory_->exported_metrics = - std::vector(metrics.begin(), metrics.end()); - return export_result_; - } - - TestMetricsExporterFactory* factory_; - bool export_result_; - }; -}; - -TEST(MetricsLoggerAndExporterTest, LogSingleValueMetricRecordsMetric) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters)); - logger.LogSingleValueMetric( - "metric_name", "test_case_name", - /*value=*/10, Unit::kMilliseconds, - ImprovementDirection::kBiggerIsBetter, - std::map{{"key", "value"}}); - } - - std::vector metrics = exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), 
Eq(1lu)); - const Metric& metric = metrics[0]; - EXPECT_THAT(metric.name, Eq("metric_name")); - EXPECT_THAT(metric.test_case, Eq("test_case_name")); - EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); - EXPECT_THAT(metric.improvement_direction, - Eq(ImprovementDirection::kBiggerIsBetter)); - EXPECT_THAT(metric.metric_metadata, - Eq(std::map{{"key", "value"}})); - ASSERT_THAT(metric.time_series.samples.size(), Eq(1lu)); - EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); - EXPECT_THAT(metric.time_series.samples[0].sample_metadata, - Eq(std::map{})); - ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); - ASSERT_THAT(metric.stats.stddev, absl::nullopt); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(10.0)); -} - -TEST(MetricsLoggerAndExporterTest, - LogMetricWithSamplesStatsCounterRecordsMetric) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters)); - - SamplesStatsCounter values; - values.AddSample(SamplesStatsCounter::StatsSample{ - .value = 10, - .time = Clock::GetRealTimeClock()->CurrentTime(), - .metadata = - std::map{{"point_key1", "value1"}}}); - values.AddSample(SamplesStatsCounter::StatsSample{ - .value = 20, - .time = Clock::GetRealTimeClock()->CurrentTime(), - .metadata = - std::map{{"point_key2", "value2"}}}); - logger.LogMetric("metric_name", "test_case_name", values, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - std::map{{"key", "value"}}); - } - - std::vector metrics = exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), Eq(1lu)); - const Metric& metric = metrics[0]; - EXPECT_THAT(metric.name, Eq("metric_name")); - EXPECT_THAT(metric.test_case, Eq("test_case_name")); - EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); - EXPECT_THAT(metric.improvement_direction, - 
Eq(ImprovementDirection::kBiggerIsBetter)); - EXPECT_THAT(metric.metric_metadata, - Eq(std::map{{"key", "value"}})); - ASSERT_THAT(metric.time_series.samples.size(), Eq(2lu)); - EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); - EXPECT_THAT(metric.time_series.samples[0].sample_metadata, - Eq(std::map{{"point_key1", "value1"}})); - EXPECT_THAT(metric.time_series.samples[1].value, Eq(20.0)); - EXPECT_THAT(metric.time_series.samples[1].sample_metadata, - Eq(std::map{{"point_key2", "value2"}})); - ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(20.0)); -} - -TEST(MetricsLoggerAndExporterTest, - LogMetricWithEmptySamplesStatsCounterRecordsEmptyMetric) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters)); - SamplesStatsCounter values; - logger.LogMetric("metric_name", "test_case_name", values, Unit::kUnitless, - ImprovementDirection::kBiggerIsBetter, DefaultMetadata()); - } - - std::vector metrics = exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), Eq(1lu)); - EXPECT_THAT(metrics[0].name, Eq("metric_name")); - EXPECT_THAT(metrics[0].test_case, Eq("test_case_name")); - EXPECT_THAT(metrics[0].time_series.samples, IsEmpty()); - ASSERT_THAT(metrics[0].stats.mean, Eq(absl::nullopt)); - ASSERT_THAT(metrics[0].stats.stddev, Eq(absl::nullopt)); - ASSERT_THAT(metrics[0].stats.min, Eq(absl::nullopt)); - ASSERT_THAT(metrics[0].stats.max, Eq(absl::nullopt)); -} - -TEST(MetricsLoggerAndExporterTest, LogMetricWithStatsRecordsMetric) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - 
std::move(exporters)); - Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; - logger.LogMetric("metric_name", "test_case_name", metric_stats, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - std::map{{"key", "value"}}); - } - - std::vector metrics = exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), Eq(1lu)); - const Metric& metric = metrics[0]; - EXPECT_THAT(metric.name, Eq("metric_name")); - EXPECT_THAT(metric.test_case, Eq("test_case_name")); - EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); - EXPECT_THAT(metric.improvement_direction, - Eq(ImprovementDirection::kBiggerIsBetter)); - EXPECT_THAT(metric.metric_metadata, - Eq(std::map{{"key", "value"}})); - ASSERT_THAT(metric.time_series.samples.size(), Eq(0lu)); - ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(20.0)); -} - -TEST(MetricsLoggerAndExporterTest, LogSingleValueMetricRecordsMultipleMetrics) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters)); - - logger.LogSingleValueMetric("metric_name1", "test_case_name1", - /*value=*/10, Unit::kMilliseconds, - ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - logger.LogSingleValueMetric("metric_name2", "test_case_name2", - /*value=*/10, Unit::kMilliseconds, - ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - } - - std::vector metrics = exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), Eq(2lu)); - EXPECT_THAT(metrics[0].name, Eq("metric_name1")); - EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); - EXPECT_THAT(metrics[1].name, Eq("metric_name2")); - EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); -} - 
-TEST(MetricsLoggerAndExporterTest, - LogMetricWithSamplesStatsCounterRecordsMultipleMetrics) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters)); - SamplesStatsCounter values; - values.AddSample(SamplesStatsCounter::StatsSample{ - .value = 10, - .time = Clock::GetRealTimeClock()->CurrentTime(), - .metadata = DefaultMetadata()}); - values.AddSample(SamplesStatsCounter::StatsSample{ - .value = 20, - .time = Clock::GetRealTimeClock()->CurrentTime(), - .metadata = DefaultMetadata()}); - - logger.LogMetric("metric_name1", "test_case_name1", values, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - logger.LogMetric("metric_name2", "test_case_name2", values, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - } - - std::vector metrics = exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), Eq(2lu)); - EXPECT_THAT(metrics[0].name, Eq("metric_name1")); - EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); - EXPECT_THAT(metrics[1].name, Eq("metric_name2")); - EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); -} - -TEST(MetricsLoggerAndExporterTest, LogMetricWithStatsRecordsMultipleMetrics) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters)); - Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; - - logger.LogMetric("metric_name1", "test_case_name1", metric_stats, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - logger.LogMetric("metric_name2", "test_case_name2", metric_stats, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - } - - std::vector metrics = 
exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), Eq(2lu)); - EXPECT_THAT(metrics[0].name, Eq("metric_name1")); - EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); - EXPECT_THAT(metrics[1].name, Eq("metric_name2")); - EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); -} - -TEST(MetricsLoggerAndExporterTest, - LogMetricThroughtAllMethodsAccumulateAllMetrics) { - TestMetricsExporterFactory exporter_factory; - { - std::vector> exporters; - exporters.push_back(exporter_factory.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters)); - SamplesStatsCounter values; - values.AddSample(SamplesStatsCounter::StatsSample{ - .value = 10, - .time = Clock::GetRealTimeClock()->CurrentTime(), - .metadata = DefaultMetadata()}); - values.AddSample(SamplesStatsCounter::StatsSample{ - .value = 20, - .time = Clock::GetRealTimeClock()->CurrentTime(), - .metadata = DefaultMetadata()}); - Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; - - logger.LogSingleValueMetric("metric_name1", "test_case_name1", - /*value=*/10, Unit::kMilliseconds, - ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - logger.LogMetric("metric_name2", "test_case_name2", values, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - logger.LogMetric("metric_name3", "test_case_name3", metric_stats, - Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - } - - std::vector metrics = exporter_factory.exported_metrics; - ASSERT_THAT(metrics.size(), Eq(3lu)); - EXPECT_THAT(metrics[0].name, Eq("metric_name1")); - EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); - EXPECT_THAT(metrics[1].name, Eq("metric_name2")); - EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); - EXPECT_THAT(metrics[2].name, Eq("metric_name3")); - EXPECT_THAT(metrics[2].test_case, Eq("test_case_name3")); -} - -TEST(MetricsLoggerAndExporterTest, - 
OneFailedExporterDoesNotPreventExportToOthers) { - TestMetricsExporterFactory exporter_factory1; - TestMetricsExporterFactory exporter_factory2; - TestMetricsExporterFactory exporter_factory3; - { - std::vector> exporters; - exporters.push_back(exporter_factory1.CreateExporter()); - exporters.push_back(exporter_factory2.CreateFailureExporter()); - exporters.push_back(exporter_factory3.CreateExporter()); - MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), - std::move(exporters), - /*crash_on_export_failure=*/false); - - logger.LogSingleValueMetric("metric_name", "test_case_name", - /*value=*/10, Unit::kMilliseconds, - ImprovementDirection::kBiggerIsBetter, - DefaultMetadata()); - } - - std::vector metrics1 = exporter_factory1.exported_metrics; - std::vector metrics2 = exporter_factory2.exported_metrics; - std::vector metrics3 = exporter_factory3.exported_metrics; - ASSERT_THAT(metrics1.size(), Eq(1lu)); - EXPECT_THAT(metrics1[0].name, Eq("metric_name")); - ASSERT_THAT(metrics2.size(), Eq(1lu)); - EXPECT_THAT(metrics2[0].name, Eq("metric_name")); - ASSERT_THAT(metrics3.size(), Eq(1lu)); - EXPECT_THAT(metrics3[0].name, Eq("metric_name")); -} - -} // namespace -} // namespace test -} // namespace webrtc diff --git a/api/test/mock_audio_sink.h b/api/test/mock_audio_sink.h index 0c17dc45ca..88f38a3c57 100644 --- a/api/test/mock_audio_sink.h +++ b/api/test/mock_audio_sink.h @@ -17,7 +17,7 @@ namespace webrtc { -class MockAudioSink final : public webrtc::AudioTrackSinkInterface { +class MockAudioSink : public webrtc::AudioTrackSinkInterface { public: MOCK_METHOD(void, OnData, diff --git a/api/test/mock_data_channel.h b/api/test/mock_data_channel.h index 40f7edb08a..38730eaa51 100644 --- a/api/test/mock_data_channel.h +++ b/api/test/mock_data_channel.h @@ -18,7 +18,7 @@ namespace webrtc { -class MockDataChannelInterface final +class MockDataChannelInterface : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { diff --git 
a/api/test/mock_media_stream_interface.h b/api/test/mock_media_stream_interface.h index 209962358d..dfdbab35e9 100644 --- a/api/test/mock_media_stream_interface.h +++ b/api/test/mock_media_stream_interface.h @@ -18,8 +18,7 @@ namespace webrtc { -class MockAudioSource final - : public rtc::RefCountedObject { +class MockAudioSource : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { return rtc::scoped_refptr(new MockAudioSource()); @@ -52,7 +51,7 @@ class MockAudioSource final MockAudioSource() = default; }; -class MockAudioTrack final : public rtc::RefCountedObject { +class MockAudioTrack : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { return rtc::scoped_refptr(new MockAudioTrack()); diff --git a/api/test/mock_peer_connection_factory_interface.h b/api/test/mock_peer_connection_factory_interface.h index 6bab595b5a..ae1fbfbbb7 100644 --- a/api/test/mock_peer_connection_factory_interface.h +++ b/api/test/mock_peer_connection_factory_interface.h @@ -19,7 +19,7 @@ namespace webrtc { -class MockPeerConnectionFactoryInterface final +class MockPeerConnectionFactoryInterface : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { diff --git a/api/test/mock_peerconnectioninterface.h b/api/test/mock_peerconnectioninterface.h index 6e1f9c7efc..504f034486 100644 --- a/api/test/mock_peerconnectioninterface.h +++ b/api/test/mock_peerconnectioninterface.h @@ -47,6 +47,12 @@ class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { (rtc::scoped_refptr, const std::vector&), (override)); + MOCK_METHOD(RTCErrorOr>, + AddTrack, + (rtc::scoped_refptr, + const std::vector&, + const std::vector&), + (override)); MOCK_METHOD(RTCError, RemoveTrackOrError, (rtc::scoped_refptr), diff --git a/api/test/mock_rtpreceiver.h b/api/test/mock_rtpreceiver.h index 4bcf064b2a..63318dc32d 100644 --- a/api/test/mock_rtpreceiver.h +++ b/api/test/mock_rtpreceiver.h @@ -14,6 +14,7 @@ #include #include +#include 
"api/crypto/frame_decryptor_interface.h" #include "api/rtp_receiver_interface.h" #include "test/gmock.h" @@ -32,12 +33,24 @@ class MockRtpReceiver : public rtc::RefCountedObject { MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); + MOCK_METHOD(bool, + SetParameters, + (const webrtc::RtpParameters& parameters), + (override)); MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); MOCK_METHOD(void, SetJitterBufferMinimumDelay, (absl::optional), (override)); MOCK_METHOD(std::vector, GetSources, (), (const, override)); + MOCK_METHOD(void, + SetFrameDecryptor, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetFrameDecryptor, + (), + (const, override)); }; } // namespace webrtc diff --git a/api/test/mock_rtpsender.h b/api/test/mock_rtpsender.h index e2351f87fe..22113678b9 100644 --- a/api/test/mock_rtpsender.h +++ b/api/test/mock_rtpsender.h @@ -46,6 +46,10 @@ class MockRtpSender : public RtpSenderInterface { (const, override)); MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); + MOCK_METHOD(void, + SetParametersAsync, + (const RtpParameters&, SetParametersCallback), + (override)); MOCK_METHOD(rtc::scoped_refptr, GetDtmfSender, (), diff --git a/api/test/mock_session_description_interface.h b/api/test/mock_session_description_interface.h new file mode 100644 index 0000000000..f0346ceb11 --- /dev/null +++ b/api/test/mock_session_description_interface.h @@ -0,0 +1,56 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ +#define API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ + +#include +#include +#include +#include + +#include "api/jsep.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockSessionDescriptionInterface : public SessionDescriptionInterface { + public: + MOCK_METHOD(std::unique_ptr, + Clone, + (), + (const, override)); + MOCK_METHOD(cricket::SessionDescription*, description, (), (override)); + MOCK_METHOD(const cricket::SessionDescription*, + description, + (), + (const, override)); + MOCK_METHOD(std::string, session_id, (), (const, override)); + MOCK_METHOD(std::string, session_version, (), (const, override)); + MOCK_METHOD(SdpType, GetType, (), (const, override)); + MOCK_METHOD(std::string, type, (), (const, override)); + MOCK_METHOD(bool, AddCandidate, (const IceCandidateInterface*), (override)); + MOCK_METHOD(size_t, + RemoveCandidates, + (const std::vector&), + (override)); + MOCK_METHOD(size_t, number_of_mediasections, (), (const, override)); + MOCK_METHOD(const IceCandidateCollection*, + candidates, + (size_t), + (const, override)); + MOCK_METHOD(bool, ToString, (std::string*), (const, override)); +}; + +static_assert(!std::is_abstract_v); + +} // namespace webrtc + +#endif // API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ diff --git a/api/test/mock_video_track.h b/api/test/mock_video_track.h index 705d13509b..1212a32527 100644 --- a/api/test/mock_video_track.h +++ b/api/test/mock_video_track.h @@ -20,7 +20,7 @@ namespace webrtc { -class MockVideoTrack final +class MockVideoTrack : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { diff --git a/api/test/network_emulation/network_emulation_interfaces.cc b/api/test/network_emulation/network_emulation_interfaces.cc index ac2eb1d971..0f3a7f8ffd 100644 --- a/api/test/network_emulation/network_emulation_interfaces.cc +++ 
b/api/test/network_emulation/network_emulation_interfaces.cc @@ -12,6 +12,7 @@ #include "rtc_base/net_helper.h" namespace webrtc { + EmulatedIpPacket::EmulatedIpPacket(const rtc::SocketAddress& from, const rtc::SocketAddress& to, rtc::CopyOnWriteBuffer data, @@ -26,4 +27,20 @@ EmulatedIpPacket::EmulatedIpPacket(const rtc::SocketAddress& from, RTC_DCHECK(to.family() == AF_INET || to.family() == AF_INET6); } +DataRate EmulatedNetworkOutgoingStats::AverageSendRate() const { + RTC_DCHECK_GE(packets_sent, 2); + RTC_DCHECK(first_packet_sent_time.IsFinite()); + RTC_DCHECK(last_packet_sent_time.IsFinite()); + return (bytes_sent - first_sent_packet_size) / + (last_packet_sent_time - first_packet_sent_time); +} + +DataRate EmulatedNetworkIncomingStats::AverageReceiveRate() const { + RTC_DCHECK_GE(packets_received, 2); + RTC_DCHECK(first_packet_received_time.IsFinite()); + RTC_DCHECK(last_packet_received_time.IsFinite()); + return (bytes_received - first_received_packet_size) / + (last_packet_received_time - first_packet_received_time); +} + } // namespace webrtc diff --git a/api/test/network_emulation/network_emulation_interfaces.h b/api/test/network_emulation/network_emulation_interfaces.h index 735689c734..7cab07b75d 100644 --- a/api/test/network_emulation/network_emulation_interfaces.h +++ b/api/test/network_emulation/network_emulation_interfaces.h @@ -62,140 +62,182 @@ class EmulatedNetworkReceiverInterface { virtual void OnPacketReceived(EmulatedIpPacket packet) = 0; }; -class EmulatedNetworkOutgoingStats { - public: - virtual ~EmulatedNetworkOutgoingStats() = default; +struct EmulatedNetworkOutgoingStats { + int64_t packets_sent = 0; - virtual int64_t PacketsSent() const = 0; + DataSize bytes_sent = DataSize::Zero(); - virtual DataSize BytesSent() const = 0; + // Sizes of all sent packets. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. 
+ SamplesStatsCounter sent_packets_size; - // Returns the timestamped sizes of all sent packets if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + DataSize first_sent_packet_size = DataSize::Zero(); + + // Time of the first packet sent or infinite value if no packets were sent. + Timestamp first_packet_sent_time = Timestamp::PlusInfinity(); + + // Time of the last packet sent or infinite value if no packets were sent. + Timestamp last_packet_sent_time = Timestamp::MinusInfinity(); + + // Returns average send rate. Requires that at least 2 packets were sent. + DataRate AverageSendRate() const; +}; + +struct EmulatedNetworkIncomingStats { + // Total amount of packets received with or without destination. + int64_t packets_received = 0; + + // Total amount of bytes in received packets. + DataSize bytes_received = DataSize::Zero(); + + // Sizes of all received packets. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + SamplesStatsCounter received_packets_size; + + // Total amount of packets that were received, but no destination was found. + int64_t packets_discarded_no_receiver = 0; + + // Total amount of bytes in discarded packets. + DataSize bytes_discarded_no_receiver = DataSize::Zero(); + + // Sizes of all packets that were received, but no destination was found. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + SamplesStatsCounter packets_discarded_no_receiver_size; + + DataSize first_received_packet_size = DataSize::Zero(); + + // Time of the first packet received or infinite value if no packets were + // received. + Timestamp first_packet_received_time = Timestamp::PlusInfinity(); + + // Time of the last packet received or infinite value if no packets were + // received. 
+ Timestamp last_packet_received_time = Timestamp::MinusInfinity(); + + DataRate AverageReceiveRate() const; +}; + +struct EmulatedNetworkStats { + int64_t PacketsSent() const { return overall_outgoing_stats.packets_sent; } + + DataSize BytesSent() const { return overall_outgoing_stats.bytes_sent; } + + // Returns the timestamped sizes of all sent packets. // Returned reference is valid until the next call to a non-const method. - virtual const SamplesStatsCounter& SentPacketsSizeCounter() const = 0; + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + const SamplesStatsCounter& SentPacketsSizeCounter() const { + return overall_outgoing_stats.sent_packets_size; + } - virtual DataSize FirstSentPacketSize() const = 0; + DataSize FirstSentPacketSize() const { + return overall_outgoing_stats.first_sent_packet_size; + } // Returns time of the first packet sent or infinite value if no packets were // sent. - virtual Timestamp FirstPacketSentTime() const = 0; + Timestamp FirstPacketSentTime() const { + return overall_outgoing_stats.first_packet_sent_time; + } // Returns time of the last packet sent or infinite value if no packets were // sent. - virtual Timestamp LastPacketSentTime() const = 0; + Timestamp LastPacketSentTime() const { + return overall_outgoing_stats.last_packet_sent_time; + } - // Returns average send rate. Requires that at least 2 packets were sent. - virtual DataRate AverageSendRate() const = 0; -}; + DataRate AverageSendRate() const { + return overall_outgoing_stats.AverageSendRate(); + } -class EmulatedNetworkIncomingStats { - public: - virtual ~EmulatedNetworkIncomingStats() = default; + // Total amount of packets received regardless of the destination address. + int64_t PacketsReceived() const { + return overall_incoming_stats.packets_received; + } - // Total amount of packets received with or without destination. - virtual int64_t PacketsReceived() const = 0; // Total amount of bytes in received packets. 
- virtual DataSize BytesReceived() const = 0; - // Returns the timestamped sizes of all received packets if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. - // Returned reference is valid until the next call to a non-const method. - virtual const SamplesStatsCounter& ReceivedPacketsSizeCounter() const = 0; - // Total amount of packets that were received, but no destination was found. - virtual int64_t PacketsDropped() const = 0; - // Total amount of bytes in dropped packets. - virtual DataSize BytesDropped() const = 0; - // Returns the timestamped sizes of all packets that were received, - // but no destination was found if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. - // Returned reference is valid until the next call to a non-const method. - virtual const SamplesStatsCounter& DroppedPacketsSizeCounter() const = 0; + DataSize BytesReceived() const { + return overall_incoming_stats.bytes_received; + } - virtual DataSize FirstReceivedPacketSize() const = 0; + // Returns the timestamped sizes of all received packets. + // Returned reference is valid until the next call to a non-const method. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + const SamplesStatsCounter& ReceivedPacketsSizeCounter() const { + return overall_incoming_stats.received_packets_size; + } + + // Total amount of packets that were received, but no destination was found. + int64_t PacketsDiscardedNoReceiver() const { + return overall_incoming_stats.packets_discarded_no_receiver; + } + + // Total amount of bytes in dropped packets. + DataSize BytesDiscardedNoReceiver() const { + return overall_incoming_stats.bytes_discarded_no_receiver; + } + + // Returns counter with timestamped sizes of all packets that were received, + // but no destination was found. 
+ // Returned reference is valid until the next call to a non-const method. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + const SamplesStatsCounter& PacketsDiscardedNoReceiverSizeCounter() const { + return overall_incoming_stats.packets_discarded_no_receiver_size; + } + + DataSize FirstReceivedPacketSize() const { + return overall_incoming_stats.first_received_packet_size; + } // Returns time of the first packet received or infinite value if no packets // were received. - virtual Timestamp FirstPacketReceivedTime() const = 0; + Timestamp FirstPacketReceivedTime() const { + return overall_incoming_stats.first_packet_received_time; + } // Returns time of the last packet received or infinite value if no packets // were received. - virtual Timestamp LastPacketReceivedTime() const = 0; + Timestamp LastPacketReceivedTime() const { + return overall_incoming_stats.last_packet_received_time; + } - virtual DataRate AverageReceiveRate() const = 0; -}; - -class EmulatedNetworkStats { - public: - virtual ~EmulatedNetworkStats() = default; + DataRate AverageReceiveRate() const { + return overall_incoming_stats.AverageReceiveRate(); + } // List of IP addresses that were used to send data considered in this stats // object. - virtual std::vector LocalAddresses() const = 0; + std::vector local_addresses; - virtual int64_t PacketsSent() const = 0; + // Overall outgoing stats for all IP addresses which were requested. + EmulatedNetworkOutgoingStats overall_outgoing_stats; - virtual DataSize BytesSent() const = 0; - // Returns the timestamped sizes of all sent packets if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. - // Returned reference is valid until the next call to a non-const method. 
- virtual const SamplesStatsCounter& SentPacketsSizeCounter() const = 0; - // Returns the timestamped duration between packet was received on - // network interface and was dispatched to the network in microseconds if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. - // Returned reference is valid until the next call to a non-const method. - virtual const SamplesStatsCounter& SentPacketsQueueWaitTimeUs() const = 0; + // Overall incoming stats for all IP addresses from which data was received + // on requested interfaces. + EmulatedNetworkIncomingStats overall_incoming_stats; - virtual DataSize FirstSentPacketSize() const = 0; - // Returns time of the first packet sent or infinite value if no packets were - // sent. - virtual Timestamp FirstPacketSentTime() const = 0; - // Returns time of the last packet sent or infinite value if no packets were - // sent. - virtual Timestamp LastPacketSentTime() const = 0; + std::map + outgoing_stats_per_destination; + std::map + incoming_stats_per_source; - virtual DataRate AverageSendRate() const = 0; - // Total amount of packets received regardless of the destination address. - virtual int64_t PacketsReceived() const = 0; - // Total amount of bytes in received packets. - virtual DataSize BytesReceived() const = 0; - // Returns the timestamped sizes of all received packets if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. - // Returned reference is valid until the next call to a non-const method. - virtual const SamplesStatsCounter& ReceivedPacketsSizeCounter() const = 0; - // Total amount of packets that were received, but no destination was found. - virtual int64_t PacketsDropped() const = 0; - // Total amount of bytes in dropped packets. 
- virtual DataSize BytesDropped() const = 0; - // Returns counter with timestamped sizes of all packets that were received, - // but no destination was found if - // EmulatedEndpointConfig::stats_gatherming_mode was set to - // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. - // Returned reference is valid until the next call to a non-const method. - virtual const SamplesStatsCounter& DroppedPacketsSizeCounter() const = 0; + // Duration between packet was received on network interface and was + // dispatched to the network in microseconds. + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + SamplesStatsCounter sent_packets_queue_wait_time_us; +}; - virtual DataSize FirstReceivedPacketSize() const = 0; - // Returns time of the first packet received or infinite value if no packets - // were received. - virtual Timestamp FirstPacketReceivedTime() const = 0; - // Returns time of the last packet received or infinite value if no packets - // were received. - virtual Timestamp LastPacketReceivedTime() const = 0; +struct EmulatedNetworkNodeStats { + // Amount of time each packet spent in the emulated network node for which + // stats were collected. + // + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + SamplesStatsCounter packet_transport_time; - virtual DataRate AverageReceiveRate() const = 0; - - virtual std::map> - OutgoingStatsPerDestination() const = 0; - - virtual std::map> - IncomingStatsPerSource() const = 0; + // For each packet contains its size divided on the amount of time which it + // spent in the emulated network node for which stats were collected. + // + // Collected iff EmulatedNetworkStatsGatheringMode::kDebug is enabled. + SamplesStatsCounter size_to_packet_transport_time; }; // EmulatedEndpoint is an abstraction for network interface on device. 
Instances diff --git a/api/test/network_emulation_manager.h b/api/test/network_emulation_manager.h index 427ad6d4d4..bc9279d306 100644 --- a/api/test/network_emulation_manager.h +++ b/api/test/network_emulation_manager.h @@ -49,15 +49,18 @@ class EmulatedNetworkNode; // peer device to another network interface on another peer device. class EmulatedRoute; +enum class EmulatedNetworkStatsGatheringMode { + // Gather main network stats counters. See more details on which particular + // metrics are collected in the `EmulatedNetworkStats` and + // `EmulatedNetworkNodeStats` documentation. + kDefault, + // kDefault + also gather per packet statistics. In this mode more memory + // will be used. + kDebug +}; + struct EmulatedEndpointConfig { enum class IpAddressFamily { kIpv4, kIpv6 }; - enum class StatsGatheringMode { - // Gather main network stats counters. - kDefault, - // kDefault + also gather per packet statistics. In this mode more memory - // will be used. - kDebug - }; // If specified will be used to name endpoint for logging purposes. absl::optional name = absl::nullopt; @@ -70,7 +73,6 @@ struct EmulatedEndpointConfig { bool start_as_enabled = true; // Network type which will be used to represent endpoint to WebRTC. rtc::AdapterType type = rtc::AdapterType::ADAPTER_TYPE_UNKNOWN; - StatsGatheringMode stats_gathering_mode = StatsGatheringMode::kDefault; // Allow endpoint to send packets specifying source IP address different to // the current endpoint IP address. If false endpoint will crash if attempt // to send such packet will be done. @@ -143,8 +145,7 @@ class EmulatedNetworkManagerInterface { // specified `stats_callback`. Callback will be executed on network emulation // internal task queue. 
virtual void GetStats( - std::function)> stats_callback) - const = 0; + std::function stats_callback) const = 0; }; enum class TimeMode { kRealTime, kSimulated }; @@ -324,13 +325,19 @@ class NetworkEmulationManager { CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) = 0; - // Passes summarized network stats for specified `endpoints` into specified + // Passes combined network stats for all specified `endpoints` into specified // `stats_callback`. Callback will be executed on network emulation // internal task queue. virtual void GetStats( rtc::ArrayView endpoints, - std::function)> - stats_callback) = 0; + std::function stats_callback) = 0; + + // Passes combined network stats for all specified `nodes` into specified + // `stats_callback`. Callback will be executed on network emulation + // internal task queue. + virtual void GetStats( + rtc::ArrayView nodes, + std::function stats_callback) = 0; // Create a EmulatedTURNServer. // The TURN server has 2 endpoints that need to be connected with routes, diff --git a/api/test/pclf/BUILD.gn b/api/test/pclf/BUILD.gn new file mode 100644 index 0000000000..a50744e92b --- /dev/null +++ b/api/test/pclf/BUILD.gn @@ -0,0 +1,108 @@ +# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") + +rtc_source_set("media_configuration") { + visibility = [ "*" ] + testonly = true + sources = [ + "media_configuration.cc", + "media_configuration.h", + ] + + deps = [ + "../..:array_view", + "../..:audio_options_api", + "../..:audio_quality_analyzer_api", + "../..:callfactory_api", + "../..:fec_controller_api", + "../..:frame_generator_api", + "../..:function_view", + "../..:libjingle_peerconnection_api", + "../..:media_stream_interface", + "../..:packet_socket_factory", + "../..:peer_network_dependencies", + "../..:rtp_parameters", + "../..:simulated_network_api", + "../..:stats_observer_interface", + "../..:track_id_stream_info_map", + "../..:video_quality_analyzer_api", + "../../../modules/audio_processing:api", + "../../../rtc_base:checks", + "../../../rtc_base:rtc_base", + "../../../rtc_base:stringutils", + "../../../rtc_base:threading", + "../../../test:fileutils", + "../../../test:video_test_support", + "../../../test/pc/e2e/analyzer/video:video_dumping", + "../../audio:audio_mixer_api", + "../../rtc_event_log", + "../../task_queue", + "../../transport:network_control", + "../../units:time_delta", + "../../video_codecs:video_codecs_api", + "../video:video_frame_writer", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("media_quality_test_params") { + visibility = [ "*" ] + testonly = true + sources = [ "media_quality_test_params.h" ] + + deps = [ + ":media_configuration", + "../../../api:callfactory_api", + "../../../api:fec_controller_api", + "../../../api:field_trials_view", + "../../../api:libjingle_peerconnection_api", + "../../../api:packet_socket_factory", + "../../../api/audio:audio_mixer_api", + "../../../api/rtc_event_log", + "../../../api/task_queue", + "../../../api/transport:network_control", + "../../../api/video_codecs:video_codecs_api", + "../../../modules/audio_processing:api", + 
"../../../p2p:rtc_p2p", + "../../../rtc_base", + "../../../rtc_base:threading", + ] +} + +rtc_library("peer_configurer") { + visibility = [ "*" ] + testonly = true + sources = [ + "peer_configurer.cc", + "peer_configurer.h", + ] + deps = [ + ":media_configuration", + ":media_quality_test_params", + "../../../api:callfactory_api", + "../../../api:create_peer_connection_quality_test_frame_generator", + "../../../api:fec_controller_api", + "../../../api:packet_socket_factory", + "../../../api:peer_network_dependencies", + "../../../api/audio:audio_mixer_api", + "../../../api/rtc_event_log", + "../../../api/task_queue", + "../../../api/transport:network_control", + "../../../api/video_codecs:video_codecs_api", + "../../../modules/audio_processing:api", + "../../../rtc_base", + "../../../rtc_base:threading", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} diff --git a/api/test/pclf/DEPS b/api/test/pclf/DEPS new file mode 100644 index 0000000000..60cc0aeeb3 --- /dev/null +++ b/api/test/pclf/DEPS @@ -0,0 +1,13 @@ +specific_include_rules = { + ".*": [ + "+modules/audio_processing/include/audio_processing.h", + "+rtc_base/checks.h", + "+rtc_base/network.h", + "+rtc_base/rtc_certificate_generator.h", + "+rtc_base/ssl_certificate.h", + "+rtc_base/thread.h", + ], + "media_quality_test_params\.h": [ + "+p2p/base/port_allocator.h", + ], +} diff --git a/api/test/pclf/media_configuration.cc b/api/test/pclf/media_configuration.cc new file mode 100644 index 0000000000..56b9e52e01 --- /dev/null +++ b/api/test/pclf/media_configuration.cc @@ -0,0 +1,314 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/pclf/media_configuration.h" + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/test/video/video_frame_writer.h" +#include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" +#include "test/pc/e2e/analyzer/video/video_dumping.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/video_frame_writer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +std::string SpecToString(VideoResolution::Spec spec) { + switch (spec) { + case VideoResolution::Spec::kNone: + return "None"; + case VideoResolution::Spec::kMaxFromSender: + return "MaxFromSender"; + } +} + +void AppendResolution(const VideoResolution& resolution, + rtc::StringBuilder& builder) { + builder << "_" << resolution.width() << "x" << resolution.height() << "_" + << resolution.fps(); +} + +} // namespace + +ScreenShareConfig::ScreenShareConfig(TimeDelta slide_change_interval) + : slide_change_interval(slide_change_interval) { + RTC_CHECK_GT(slide_change_interval.ms(), 0); +} +VideoSimulcastConfig::VideoSimulcastConfig(int simulcast_streams_count) + : simulcast_streams_count(simulcast_streams_count) { + RTC_CHECK_GT(simulcast_streams_count, 1); +} +EmulatedSFUConfig::EmulatedSFUConfig(int target_layer_index) + : target_layer_index(target_layer_index) { + RTC_CHECK_GE(target_layer_index, 0); +} + +EmulatedSFUConfig::EmulatedSFUConfig(absl::optional target_layer_index, + absl::optional target_temporal_index) + : target_layer_index(target_layer_index), + target_temporal_index(target_temporal_index) { + RTC_CHECK_GE(target_temporal_index.value_or(0), 0); + if (target_temporal_index) + RTC_CHECK_GE(*target_temporal_index, 0); +} + +VideoResolution::VideoResolution(size_t width, size_t height, int32_t fps) + : width_(width), height_(height), fps_(fps), spec_(Spec::kNone) {} +VideoResolution::VideoResolution(Spec spec) + : width_(0), height_(0), fps_(0), 
spec_(spec) {} + +bool VideoResolution::operator==(const VideoResolution& other) const { + if (spec_ != Spec::kNone && spec_ == other.spec_) { + // If there is some particular spec set, then it doesn't matter what + // values we have in other fields. + return true; + } + return width_ == other.width_ && height_ == other.height_ && + fps_ == other.fps_ && spec_ == other.spec_; +} +bool VideoResolution::operator!=(const VideoResolution& other) const { + return !(*this == other); +} + +bool VideoResolution::IsRegular() const { + return spec_ == Spec::kNone; +} +std::string VideoResolution::ToString() const { + rtc::StringBuilder out; + out << "{ width=" << width_ << ", height=" << height_ << ", fps=" << fps_ + << ", spec=" << SpecToString(spec_) << " }"; + return out.Release(); +} + +VideoDumpOptions::VideoDumpOptions( + absl::string_view output_directory, + int sampling_modulo, + bool export_frame_ids, + std::function( + absl::string_view file_name_prefix, + const VideoResolution& resolution)> video_frame_writer_factory) + : output_directory_(output_directory), + sampling_modulo_(sampling_modulo), + export_frame_ids_(export_frame_ids), + video_frame_writer_factory_(video_frame_writer_factory) { + RTC_CHECK_GT(sampling_modulo, 0); +} + +VideoDumpOptions::VideoDumpOptions(absl::string_view output_directory, + bool export_frame_ids) + : VideoDumpOptions(output_directory, + kDefaultSamplingModulo, + export_frame_ids) {} + +std::unique_ptr +VideoDumpOptions::CreateInputDumpVideoFrameWriter( + absl::string_view stream_label, + const VideoResolution& resolution) const { + std::unique_ptr writer = video_frame_writer_factory_( + GetInputDumpFileName(stream_label, resolution), resolution); + absl::optional frame_ids_file = + GetInputFrameIdsDumpFileName(stream_label, resolution); + if (frame_ids_file.has_value()) { + writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); + } + return writer; +} + +std::unique_ptr 
+VideoDumpOptions::CreateOutputDumpVideoFrameWriter( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const { + std::unique_ptr writer = video_frame_writer_factory_( + GetOutputDumpFileName(stream_label, receiver, resolution), resolution); + absl::optional frame_ids_file = + GetOutputFrameIdsDumpFileName(stream_label, receiver, resolution); + if (frame_ids_file.has_value()) { + writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); + } + return writer; +} + +std::unique_ptr +VideoDumpOptions::Y4mVideoFrameWriterFactory( + absl::string_view file_name_prefix, + const VideoResolution& resolution) { + return std::make_unique( + std::string(file_name_prefix) + ".y4m", resolution.width(), + resolution.height(), resolution.fps()); +} + +std::string VideoDumpOptions::GetInputDumpFileName( + absl::string_view stream_label, + const VideoResolution& resolution) const { + rtc::StringBuilder file_name; + file_name << stream_label; + AppendResolution(resolution, file_name); + return test::JoinFilename(output_directory_, file_name.Release()); +} + +absl::optional VideoDumpOptions::GetInputFrameIdsDumpFileName( + absl::string_view stream_label, + const VideoResolution& resolution) const { + if (!export_frame_ids_) { + return absl::nullopt; + } + return GetInputDumpFileName(stream_label, resolution) + ".frame_ids.txt"; +} + +std::string VideoDumpOptions::GetOutputDumpFileName( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const { + rtc::StringBuilder file_name; + file_name << stream_label << "_" << receiver; + AppendResolution(resolution, file_name); + return test::JoinFilename(output_directory_, file_name.Release()); +} + +absl::optional VideoDumpOptions::GetOutputFrameIdsDumpFileName( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const { + if (!export_frame_ids_) { + return absl::nullopt; + } + 
return GetOutputDumpFileName(stream_label, receiver, resolution) + + ".frame_ids.txt"; +} + +std::string VideoDumpOptions::ToString() const { + rtc::StringBuilder out; + out << "{ output_directory_=" << output_directory_ + << ", sampling_modulo_=" << sampling_modulo_ + << ", export_frame_ids_=" << export_frame_ids_ << " }"; + return out.Release(); +} + +VideoConfig::VideoConfig(const VideoResolution& resolution) + : width(resolution.width()), + height(resolution.height()), + fps(resolution.fps()) { + RTC_CHECK(resolution.IsRegular()); +} +VideoConfig::VideoConfig(size_t width, size_t height, int32_t fps) + : width(width), height(height), fps(fps) {} +VideoConfig::VideoConfig(std::string stream_label, + size_t width, + size_t height, + int32_t fps) + : width(width), + height(height), + fps(fps), + stream_label(std::move(stream_label)) {} + +AudioConfig::AudioConfig(std::string stream_label) + : stream_label(std::move(stream_label)) {} + +VideoCodecConfig::VideoCodecConfig(std::string name) + : name(std::move(name)), required_params() {} +VideoCodecConfig::VideoCodecConfig( + std::string name, + std::map required_params) + : name(std::move(name)), required_params(std::move(required_params)) {} + +absl::optional VideoSubscription::GetMaxResolution( + rtc::ArrayView video_configs) { + std::vector resolutions; + for (const auto& video_config : video_configs) { + resolutions.push_back(video_config.GetResolution()); + } + return GetMaxResolution(resolutions); +} + +absl::optional VideoSubscription::GetMaxResolution( + rtc::ArrayView resolutions) { + if (resolutions.empty()) { + return absl::nullopt; + } + + VideoResolution max_resolution; + for (const VideoResolution& resolution : resolutions) { + if (max_resolution.width() < resolution.width()) { + max_resolution.set_width(resolution.width()); + } + if (max_resolution.height() < resolution.height()) { + max_resolution.set_height(resolution.height()); + } + if (max_resolution.fps() < resolution.fps()) { + 
max_resolution.set_fps(resolution.fps()); + } + } + return max_resolution; +} + +bool VideoSubscription::operator==(const VideoSubscription& other) const { + return default_resolution_ == other.default_resolution_ && + peers_resolution_ == other.peers_resolution_; +} +bool VideoSubscription::operator!=(const VideoSubscription& other) const { + return !(*this == other); +} + +VideoSubscription& VideoSubscription::SubscribeToPeer( + absl::string_view peer_name, + VideoResolution resolution) { + peers_resolution_[std::string(peer_name)] = resolution; + return *this; +} + +VideoSubscription& VideoSubscription::SubscribeToAllPeers( + VideoResolution resolution) { + default_resolution_ = resolution; + return *this; +} + +absl::optional VideoSubscription::GetResolutionForPeer( + absl::string_view peer_name) const { + auto it = peers_resolution_.find(std::string(peer_name)); + if (it == peers_resolution_.end()) { + return default_resolution_; + } + return it->second; +} + +std::vector VideoSubscription::GetSubscribedPeers() const { + std::vector subscribed_streams; + subscribed_streams.reserve(peers_resolution_.size()); + for (const auto& entry : peers_resolution_) { + subscribed_streams.push_back(entry.first); + } + return subscribed_streams; +} + +std::string VideoSubscription::ToString() const { + rtc::StringBuilder out; + out << "{ default_resolution_=["; + if (default_resolution_.has_value()) { + out << default_resolution_->ToString(); + } else { + out << "undefined"; + } + out << "], {"; + for (const auto& [peer_name, resolution] : peers_resolution_) { + out << "[" << peer_name << ": " << resolution.ToString() << "], "; + } + out << "} }"; + return out.Release(); +} +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/api/test/pclf/media_configuration.h b/api/test/pclf/media_configuration.h new file mode 100644 index 0000000000..8e841a265b --- /dev/null +++ b/api/test/pclf/media_configuration.h @@ -0,0 +1,484 @@ +/* + * Copyright (c) 2022 The WebRTC 
project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TEST_PCLF_MEDIA_CONFIGURATION_H_ +#define API_TEST_PCLF_MEDIA_CONFIGURATION_H_ + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/async_resolver_factory.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_options.h" +#include "api/call/call_factory_interface.h" +#include "api/fec_controller.h" +#include "api/function_view.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/rtp_parameters.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/audio_quality_analyzer_interface.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/peer_network_dependencies.h" +#include "api/test/simulated_network.h" +#include "api/test/stats_observer_interface.h" +#include "api/test/track_id_stream_info_map.h" +#include "api/test/video/video_frame_writer.h" +#include "api/test/video_quality_analyzer_interface.h" +#include "api/transport/network_control.h" +#include "api/units/time_delta.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/checks.h" +#include "rtc_base/network.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/ssl_certificate.h" +#include 
"rtc_base/thread.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +constexpr size_t kDefaultSlidesWidth = 1850; +constexpr size_t kDefaultSlidesHeight = 1110; + +// The index of required capturing device in OS provided list of video +// devices. On Linux and Windows the list will be obtained via +// webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via +// [RTCCameraVideoCapturer captureDevices]. +enum class CapturingDeviceIndex : size_t {}; + +// Contains parameters for screen share scrolling. +// +// If scrolling is enabled, then it will be done by putting sliding window +// on source video and moving this window from top left corner to the +// bottom right corner of the picture. +// +// In such case source dimensions must be greater or equal to the sliding +// window dimensions. So `source_width` and `source_height` are the dimensions +// of the source frame, while `VideoConfig::width` and `VideoConfig::height` +// are the dimensions of the sliding window. +// +// Because `source_width` and `source_height` are dimensions of the source +// frame, they have to be width and height of videos from +// `ScreenShareConfig::slides_yuv_file_names`. +// +// Because scrolling have to be done on single slide it also requires, that +// `duration` must be less or equal to +// `ScreenShareConfig::slide_change_interval`. +struct ScrollingParams { + // Duration of scrolling. + TimeDelta duration; + // Width of source slides video. + size_t source_width = kDefaultSlidesWidth; + // Height of source slides video. + size_t source_height = kDefaultSlidesHeight; +}; + +// Contains screen share video stream properties. +struct ScreenShareConfig { + explicit ScreenShareConfig(TimeDelta slide_change_interval); + + // Shows how long one slide should be presented on the screen during + // slide generation. + TimeDelta slide_change_interval; + // If true, slides will be generated programmatically. No scrolling params + // will be applied in such case. 
+ bool generate_slides = false; + // If present scrolling will be applied. Please read extra requirement on + // `slides_yuv_file_names` for scrolling. + absl::optional scrolling_params; + // Contains list of yuv files with slides. + // + // If empty, default set of slides will be used. In such case + // `VideoConfig::width` must be equal to `kDefaultSlidesWidth` and + // `VideoConfig::height` must be equal to `kDefaultSlidesHeight` or if + // `scrolling_params` are specified, then `ScrollingParams::source_width` + // must be equal to `kDefaultSlidesWidth` and + // `ScrollingParams::source_height` must be equal to `kDefaultSlidesHeight`. + std::vector slides_yuv_file_names; +}; + +// Config for Vp8 simulcast or non-standard Vp9 SVC testing. +// +// To configure standard SVC setting, use `scalability_mode` in the +// `encoding_params` array. +// This configures Vp9 SVC by requesting simulcast layers, the request is +// internally converted to a request for SVC layers. +// +// SVC support is limited: +// During SVC testing there is no SFU, so framework will try to emulate SFU +// behavior in regular p2p call. Because of it there are such limitations: +// * if `target_spatial_index` is not equal to the highest spatial layer +// then no packet/frame drops are allowed. +// +// If there will be any drops, that will affect requested layer, then +// WebRTC SVC implementation will continue decoding only the highest +// available layer and won't restore lower layers, so analyzer won't +// receive required data which will cause wrong results or test failures. +struct VideoSimulcastConfig { + explicit VideoSimulcastConfig(int simulcast_streams_count); + + // Specified amount of simulcast streams/SVC layers, depending on which + // encoder is used. + int simulcast_streams_count; +}; + +// Configuration for the emulated Selective Forward Unit (SFU) +// +// The framework can optionally filter out frames that are decoded +// using an emulated SFU. 
+// When using simulcast or SVC, it's not always desirable to receive +// all frames. In a real world call, a SFU will only forward a subset +// of the frames. +// The emulated SFU is not able to change its configuration dynamically, +// if adaptation happens during the call, layers may be dropped and the +// analyzer won't receive the required data which will cause wrong results or +// test failures. +struct EmulatedSFUConfig { + EmulatedSFUConfig() = default; + explicit EmulatedSFUConfig(int target_layer_index); + EmulatedSFUConfig(absl::optional target_layer_index, + absl::optional target_temporal_index); + + // Specifies simulcast or spatial index of the video stream to analyze. + // There are 2 cases: + // 1. simulcast encoding is used: + // in such case `target_layer_index` will specify the index of + // simulcast stream, that should be analyzed. Other streams will be + // dropped. + // 2. SVC encoding is used: + // in such case `target_layer_index` will specify the top interesting + // spatial layer and all layers below, including target one will be + // processed. All layers above target one will be dropped. + // If not specified then all streams will be received and analyzed. + // When set, it instructs the framework to create an emulated Selective + // Forwarding Unit (SFU) that will propagate only the requested layers. + absl::optional target_layer_index; + // Specifies the index of the maximum temporal unit to keep. + // If not specified then all temporal layers will be received and analyzed. + // When set, it instructs the framework to create an emulated Selective + // Forwarding Unit (SFU) that will propagate only up to the requested layer. + absl::optional target_temporal_index; +}; + +class VideoResolution { + public: + // Determines special resolutions, which can't be expressed in terms of + // width, height and fps. + enum class Spec { + // No extra spec set. It describes a regular resolution described by + // width, height and fps. 
+ kNone, + // Describes resolution which contains max value among all sender's + // video streams in each dimension (width, height, fps). + kMaxFromSender + }; + + VideoResolution(size_t width, size_t height, int32_t fps); + explicit VideoResolution(Spec spec = Spec::kNone); + + bool operator==(const VideoResolution& other) const; + bool operator!=(const VideoResolution& other) const; + + size_t width() const { return width_; } + void set_width(size_t width) { width_ = width; } + size_t height() const { return height_; } + void set_height(size_t height) { height_ = height; } + int32_t fps() const { return fps_; } + void set_fps(int32_t fps) { fps_ = fps; } + + // Returns if it is a regular resolution or not. The resolution is regular + // if it's spec is `Spec::kNone`. + bool IsRegular() const; + + std::string ToString() const; + + private: + size_t width_ = 0; + size_t height_ = 0; + int32_t fps_ = 0; + Spec spec_ = Spec::kNone; +}; + +class VideoDumpOptions { + public: + static constexpr int kDefaultSamplingModulo = 1; + + // output_directory - the output directory where stream will be dumped. The + // output files' names will be constructed as + // __. for output dumps + // and _. for input dumps. + // By default is "y4m". Resolution is in the format + // x_. + // sampling_modulo - the module for the video frames to be dumped. Modulo + // equals X means every Xth frame will be written to the dump file. The + // value must be greater than 0. (Default: 1) + // export_frame_ids - specifies if frame ids should be exported together + // with content of the stream. If true, an output file with the same name as + // video dump and suffix ".frame_ids.txt" will be created. It will contain + // the frame ids in the same order as original frames in the output + // file with stream content. File will contain one frame id per line. + // (Default: false) + // `video_frame_writer_factory` - factory function to create a video frame + // writer for input and output video files. 
(Default: Y4M video writer + // factory). + explicit VideoDumpOptions( + absl::string_view output_directory, + int sampling_modulo = kDefaultSamplingModulo, + bool export_frame_ids = false, + std::function( + absl::string_view file_name_prefix, + const VideoResolution& resolution)> video_frame_writer_factory = + Y4mVideoFrameWriterFactory); + VideoDumpOptions(absl::string_view output_directory, bool export_frame_ids); + + VideoDumpOptions(const VideoDumpOptions&) = default; + VideoDumpOptions& operator=(const VideoDumpOptions&) = default; + VideoDumpOptions(VideoDumpOptions&&) = default; + VideoDumpOptions& operator=(VideoDumpOptions&&) = default; + + std::string output_directory() const { return output_directory_; } + int sampling_modulo() const { return sampling_modulo_; } + bool export_frame_ids() const { return export_frame_ids_; } + + std::unique_ptr CreateInputDumpVideoFrameWriter( + absl::string_view stream_label, + const VideoResolution& resolution) const; + + std::unique_ptr CreateOutputDumpVideoFrameWriter( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const; + + std::string ToString() const; + + private: + static std::unique_ptr Y4mVideoFrameWriterFactory( + absl::string_view file_name_prefix, + const VideoResolution& resolution); + std::string GetInputDumpFileName(absl::string_view stream_label, + const VideoResolution& resolution) const; + // Returns file name for input frame ids dump if `export_frame_ids()` is + // true, absl::nullopt otherwise. + absl::optional GetInputFrameIdsDumpFileName( + absl::string_view stream_label, + const VideoResolution& resolution) const; + std::string GetOutputDumpFileName(absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const; + // Returns file name for output frame ids dump if `export_frame_ids()` is + // true, absl::nullopt otherwise. 
+ absl::optional GetOutputFrameIdsDumpFileName( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const; + + std::string output_directory_; + int sampling_modulo_ = 1; + bool export_frame_ids_ = false; + std::function( + absl::string_view file_name_prefix, + const VideoResolution& resolution)> + video_frame_writer_factory_; +}; + +// Contains properties of single video stream. +struct VideoConfig { + explicit VideoConfig(const VideoResolution& resolution); + VideoConfig(size_t width, size_t height, int32_t fps); + VideoConfig(std::string stream_label, + size_t width, + size_t height, + int32_t fps); + + // Video stream width. + size_t width; + // Video stream height. + size_t height; + int32_t fps; + VideoResolution GetResolution() const { + return VideoResolution(width, height, fps); + } + + // Have to be unique among all specified configs for all peers in the call. + // Will be auto generated if omitted. + absl::optional stream_label; + // Will be set for current video track. If equals to kText or kDetailed - + // screencast in on. + absl::optional content_hint; + // If presented video will be transfered in simulcast/SVC mode depending on + // which encoder is used. + // + // Simulcast is supported only from 1st added peer. For VP8 simulcast only + // without RTX is supported so it will be automatically disabled for all + // simulcast tracks. For VP9 simulcast enables VP9 SVC mode and support RTX, + // but only on non-lossy networks. See more in documentation to + // VideoSimulcastConfig. + absl::optional simulcast_config; + // Configuration for the emulated Selective Forward Unit (SFU). + absl::optional emulated_sfu_config; + // Encoding parameters for both singlecast and per simulcast layer. + // If singlecast is used, if not empty, a single value can be provided. + // If simulcast is used, if not empty, `encoding_params` size have to be + // equal to `simulcast_config.simulcast_streams_count`. 
Will be used to set + // transceiver send encoding params for each layer. + // RtpEncodingParameters::rid may be changed by fixture implementation to + // ensure signaling correctness. + std::vector encoding_params; + // Count of temporal layers for video stream. This value will be set into + // each RtpEncodingParameters of RtpParameters of corresponding + // RtpSenderInterface for this video stream. + absl::optional temporal_layers_count; + // If specified defines how input should be dumped. It is actually one of + // the test's output file, which contains copy of what was captured during + // the test for this video stream on sender side. It is useful when + // generator is used as input. + absl::optional input_dump_options; + // If specified defines how output should be dumped on the receiver side for + // this stream. The produced files contain what was rendered for this video + // stream on receiver side per each receiver. + absl::optional output_dump_options; + // If set to true uses fixed frame rate while dumping output video to the + // file. Requested `VideoSubscription::fps()` will be used as frame rate. + bool output_dump_use_fixed_framerate = false; + // If true will display input and output video on the user's screen. + bool show_on_screen = false; + // If specified, determines a sync group to which this video stream belongs. + // According to bugs.webrtc.org/4762 WebRTC supports synchronization only + // for pair of single audio and single video stream. + absl::optional sync_group; + // If specified, it will be set into RtpParameters of corresponding + // RtpSenderInterface for this video stream. + // Note that this setting takes precedence over `content_hint`. + absl::optional degradation_preference; +}; + +// Contains properties for audio in the call. 
+struct AudioConfig { + enum Mode { + kGenerated, + kFile, + }; + + AudioConfig() = default; + explicit AudioConfig(std::string stream_label); + + // Have to be unique among all specified configs for all peers in the call. + // Will be auto generated if omitted. + absl::optional stream_label; + Mode mode = kGenerated; + // Have to be specified only if mode = kFile + absl::optional input_file_name; + // If specified the input stream will be also copied to specified file. + absl::optional input_dump_file_name; + // If specified the output stream will be copied to specified file. + absl::optional output_dump_file_name; + + // Audio options to use. + cricket::AudioOptions audio_options; + // Sampling frequency of input audio data (from file or generated). + int sampling_frequency_in_hz = 48000; + // If specified, determines a sync group to which this audio stream belongs. + // According to bugs.webrtc.org/4762 WebRTC supports synchronization only + // for pair of single audio and single video stream. + absl::optional sync_group; +}; + +struct VideoCodecConfig { + explicit VideoCodecConfig(std::string name); + VideoCodecConfig(std::string name, + std::map required_params); + // Next two fields are used to specify concrete video codec, that should be + // used in the test. Video code will be negotiated in SDP during offer/ + // answer exchange. + // Video codec name. You can find valid names in + // media/base/media_constants.h + std::string name; + // Map of parameters, that have to be specified on SDP codec. Each parameter + // is described by key and value. Codec parameters will match the specified + // map if and only if for each key from `required_params` there will be + // a parameter with name equal to this key and parameter value will be equal + // to the value from `required_params` for this key. + // If empty then only name will be used to match the codec. + std::map required_params; +}; + +// Subscription to the remote video streams. 
It declares which remote stream +// peer should receive and in which resolution (width x height x fps). +class VideoSubscription { + public: + // Returns the resolution constructed as maximum from all resolution + // dimensions: width, height and fps. + static absl::optional GetMaxResolution( + rtc::ArrayView video_configs); + static absl::optional GetMaxResolution( + rtc::ArrayView resolutions); + + bool operator==(const VideoSubscription& other) const; + bool operator!=(const VideoSubscription& other) const; + + // Subscribes receiver to all streams sent by the specified peer with + // specified resolution. It will override any resolution that was used in + // `SubscribeToAll` independently from methods call order. + VideoSubscription& SubscribeToPeer( + absl::string_view peer_name, + VideoResolution resolution = + VideoResolution(VideoResolution::Spec::kMaxFromSender)); + + // Subscribes receiver to the all sent streams with specified resolution. + // If any stream was subscribed to with `SubscribeTo` method that will + // override resolution passed to this function independently from methods + // call order. + VideoSubscription& SubscribeToAllPeers( + VideoResolution resolution = + VideoResolution(VideoResolution::Spec::kMaxFromSender)); + + // Returns resolution for specific sender. If no specific resolution was + // set for this sender, then will return resolution used for all streams. + // If subscription doesn't subscribe to all streams, `absl::nullopt` will be + // returned. + absl::optional GetResolutionForPeer( + absl::string_view peer_name) const; + + // Returns a maybe empty list of senders for which peer explicitly + // subscribed to with specific resolution. + std::vector GetSubscribedPeers() const; + + std::string ToString() const; + + private: + absl::optional default_resolution_ = absl::nullopt; + std::map peers_resolution_; +}; + +// Contains configuration for echo emulator. 
+struct EchoEmulationConfig { + // Delay which represents the echo path delay, i.e. how soon rendered signal + // should reach capturer. + TimeDelta echo_delay = TimeDelta::Millis(50); +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_PCLF_MEDIA_CONFIGURATION_H_ diff --git a/test/pc/e2e/peer_connection_quality_test_params.h b/api/test/pclf/media_quality_test_params.h similarity index 81% rename from test/pc/e2e/peer_connection_quality_test_params.h rename to api/test/pclf/media_quality_test_params.h index 221f6b3f7f..65ca1c5cb7 100644 --- a/test/pc/e2e/peer_connection_quality_test_params.h +++ b/api/test/pclf/media_quality_test_params.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -7,8 +7,8 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#ifndef TEST_PC_E2E_PEER_CONNECTION_QUALITY_TEST_PARAMS_H_ -#define TEST_PC_E2E_PEER_CONNECTION_QUALITY_TEST_PARAMS_H_ +#ifndef API_TEST_PCLF_MEDIA_QUALITY_TEST_PARAMS_H_ +#define API_TEST_PCLF_MEDIA_QUALITY_TEST_PARAMS_H_ #include #include @@ -22,7 +22,7 @@ #include "api/field_trials_view.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/task_queue/task_queue_factory.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" #include "api/transport/network_control.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" @@ -118,7 +118,7 @@ struct Params { // Peer name. If empty - default one will be set by the fixture. 
absl::optional name; // If `audio_config` is set audio stream will be configured - absl::optional audio_config; + absl::optional audio_config; // Flags to set on `cricket::PortAllocator`. These flags will be added // to the default ones that are presented on the port allocator. uint32_t port_allocator_extra_flags = cricket::kDefaultPortAllocatorFlags; @@ -142,21 +142,41 @@ struct Params { PeerConnectionInterface::RTCConfiguration rtc_configuration; PeerConnectionInterface::RTCOfferAnswerOptions rtc_offer_answer_options; BitrateSettings bitrate_settings; - std::vector - video_codecs; + std::vector video_codecs; }; // Contains parameters that maybe changed by test writer during the test call. struct ConfigurableParams { // If `video_configs` is empty - no video should be added to the test call. - std::vector video_configs; + std::vector video_configs; - PeerConnectionE2EQualityTestFixture::VideoSubscription video_subscription = - PeerConnectionE2EQualityTestFixture::VideoSubscription() - .SubscribeToAllPeers(); + VideoSubscription video_subscription = + VideoSubscription().SubscribeToAllPeers(); +}; + +// Contains parameters, that describe how long framework should run quality +// test. +struct RunParams { + explicit RunParams(TimeDelta run_duration) : run_duration(run_duration) {} + + // Specifies how long the test should be run. This time shows how long + // the media should flow after connection was established and before + // it will be shut downed. + TimeDelta run_duration; + + // If set to true peers will be able to use Flex FEC, otherwise they won't + // be able to negotiate it even if it's enabled on per peer level. + bool enable_flex_fec_support = false; + // If true will set conference mode in SDP media section for all video + // tracks for all peers. + bool use_conference_mode = false; + // If specified echo emulation will be done, by mixing the render audio into + // the capture signal. 
In such case input signal will be reduced by half to + // avoid saturation or compression in the echo path simulation. + absl::optional echo_emulation_config; }; } // namespace webrtc_pc_e2e } // namespace webrtc -#endif // TEST_PC_E2E_PEER_CONNECTION_QUALITY_TEST_PARAMS_H_ +#endif // API_TEST_PCLF_MEDIA_QUALITY_TEST_PARAMS_H_ diff --git a/api/test/pclf/peer_configurer.cc b/api/test/pclf/peer_configurer.cc new file mode 100644 index 0000000000..ead1b5db06 --- /dev/null +++ b/api/test/pclf/peer_configurer.cc @@ -0,0 +1,235 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/pclf/peer_configurer.h" + +#include + +#include "absl/strings/string_view.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/peer_network_dependencies.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +PeerConfigurer::PeerConfigurer( + const PeerNetworkDependencies& network_dependencies) + : components_(std::make_unique( + network_dependencies.network_thread, + network_dependencies.network_manager, + network_dependencies.packet_socket_factory)), + params_(std::make_unique()), + configurable_params_(std::make_unique()) {} + +PeerConfigurer* PeerConfigurer::SetName(absl::string_view name) { + params_->name = std::string(name); + return this; +} + +PeerConfigurer* PeerConfigurer::SetTaskQueueFactory( + std::unique_ptr task_queue_factory) { + components_->pcf_dependencies->task_queue_factory = + std::move(task_queue_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetCallFactory( + std::unique_ptr call_factory) { + 
components_->pcf_dependencies->call_factory = std::move(call_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetEventLogFactory( + std::unique_ptr event_log_factory) { + components_->pcf_dependencies->event_log_factory = + std::move(event_log_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetFecControllerFactory( + std::unique_ptr fec_controller_factory) { + components_->pcf_dependencies->fec_controller_factory = + std::move(fec_controller_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetNetworkControllerFactory( + std::unique_ptr + network_controller_factory) { + components_->pcf_dependencies->network_controller_factory = + std::move(network_controller_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetVideoEncoderFactory( + std::unique_ptr video_encoder_factory) { + components_->pcf_dependencies->video_encoder_factory = + std::move(video_encoder_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetVideoDecoderFactory( + std::unique_ptr video_decoder_factory) { + components_->pcf_dependencies->video_decoder_factory = + std::move(video_decoder_factory); + return this; +} + +PeerConfigurer* PeerConfigurer::SetAsyncResolverFactory( + std::unique_ptr async_resolver_factory) { + components_->pc_dependencies->async_resolver_factory = + std::move(async_resolver_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetRTCCertificateGenerator( + std::unique_ptr cert_generator) { + components_->pc_dependencies->cert_generator = std::move(cert_generator); + return this; +} +PeerConfigurer* PeerConfigurer::SetSSLCertificateVerifier( + std::unique_ptr tls_cert_verifier) { + components_->pc_dependencies->tls_cert_verifier = + std::move(tls_cert_verifier); + return this; +} + +PeerConfigurer* PeerConfigurer::AddVideoConfig(VideoConfig config) { + video_sources_.push_back( + CreateSquareFrameGenerator(config, /*type=*/absl::nullopt)); + configurable_params_->video_configs.push_back(std::move(config)); + 
return this; +} +PeerConfigurer* PeerConfigurer::AddVideoConfig( + VideoConfig config, + std::unique_ptr generator) { + configurable_params_->video_configs.push_back(std::move(config)); + video_sources_.push_back(std::move(generator)); + return this; +} +PeerConfigurer* PeerConfigurer::AddVideoConfig(VideoConfig config, + CapturingDeviceIndex index) { + configurable_params_->video_configs.push_back(std::move(config)); + video_sources_.push_back(index); + return this; +} +PeerConfigurer* PeerConfigurer::SetVideoSubscription( + VideoSubscription subscription) { + configurable_params_->video_subscription = std::move(subscription); + return this; +} +PeerConfigurer* PeerConfigurer::SetAudioConfig(AudioConfig config) { + params_->audio_config = std::move(config); + return this; +} +PeerConfigurer* PeerConfigurer::SetUseUlpFEC(bool value) { + params_->use_ulp_fec = value; + return this; +} +PeerConfigurer* PeerConfigurer::SetUseFlexFEC(bool value) { + params_->use_flex_fec = value; + return this; +} +PeerConfigurer* PeerConfigurer::SetVideoEncoderBitrateMultiplier( + double multiplier) { + params_->video_encoder_bitrate_multiplier = multiplier; + return this; +} +PeerConfigurer* PeerConfigurer::SetNetEqFactory( + std::unique_ptr neteq_factory) { + components_->pcf_dependencies->neteq_factory = std::move(neteq_factory); + return this; +} +PeerConfigurer* PeerConfigurer::SetAudioProcessing( + rtc::scoped_refptr audio_processing) { + components_->pcf_dependencies->audio_processing = audio_processing; + return this; +} +PeerConfigurer* PeerConfigurer::SetAudioMixer( + rtc::scoped_refptr audio_mixer) { + components_->pcf_dependencies->audio_mixer = audio_mixer; + return this; +} + +PeerConfigurer* PeerConfigurer::SetUseNetworkThreadAsWorkerThread() { + components_->worker_thread = components_->network_thread; + return this; +} + +PeerConfigurer* PeerConfigurer::SetRtcEventLogPath(std::string path) { + params_->rtc_event_log_path = std::move(path); + return this; +} 
+PeerConfigurer* PeerConfigurer::SetAecDumpPath(std::string path) { + params_->aec_dump_path = std::move(path); + return this; +} +PeerConfigurer* PeerConfigurer::SetRTCConfiguration( + PeerConnectionInterface::RTCConfiguration configuration) { + params_->rtc_configuration = std::move(configuration); + return this; +} +PeerConfigurer* PeerConfigurer::SetRTCOfferAnswerOptions( + PeerConnectionInterface::RTCOfferAnswerOptions options) { + params_->rtc_offer_answer_options = std::move(options); + return this; +} +PeerConfigurer* PeerConfigurer::SetBitrateSettings( + BitrateSettings bitrate_settings) { + params_->bitrate_settings = bitrate_settings; + return this; +} +PeerConfigurer* PeerConfigurer::SetVideoCodecs( + std::vector video_codecs) { + params_->video_codecs = std::move(video_codecs); + return this; +} + +PeerConfigurer* PeerConfigurer::SetIceTransportFactory( + std::unique_ptr factory) { + components_->pc_dependencies->ice_transport_factory = std::move(factory); + return this; +} + +PeerConfigurer* PeerConfigurer::SetPortAllocatorExtraFlags( + uint32_t extra_flags) { + params_->port_allocator_extra_flags = extra_flags; + return this; +} +std::unique_ptr PeerConfigurer::ReleaseComponents() { + RTC_CHECK(components_); + auto components = std::move(components_); + components_ = nullptr; + return components; +} + +// Returns Params and transfer ownership to the caller. +// Can be called once. +std::unique_ptr PeerConfigurer::ReleaseParams() { + RTC_CHECK(params_); + auto params = std::move(params_); + params_ = nullptr; + return params; +} + +// Returns ConfigurableParams and transfer ownership to the caller. +// Can be called once. +std::unique_ptr +PeerConfigurer::ReleaseConfigurableParams() { + RTC_CHECK(configurable_params_); + auto configurable_params = std::move(configurable_params_); + configurable_params_ = nullptr; + return configurable_params; +} + +// Returns video sources and transfer frame generators ownership to the +// caller. Can be called once. 
+std::vector PeerConfigurer::ReleaseVideoSources() { + auto video_sources = std::move(video_sources_); + video_sources_.clear(); + return video_sources; +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/api/test/pclf/peer_configurer.h b/api/test/pclf/peer_configurer.h new file mode 100644 index 0000000000..7841a261b3 --- /dev/null +++ b/api/test/pclf/peer_configurer.h @@ -0,0 +1,192 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TEST_PCLF_PEER_CONFIGURER_H_ +#define API_TEST_PCLF_PEER_CONFIGURER_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/async_resolver_factory.h" +#include "api/audio/audio_mixer.h" +#include "api/call/call_factory_interface.h" +#include "api/fec_controller.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/create_peer_connection_quality_test_frame_generator.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/peer_network_dependencies.h" +#include "api/transport/network_control.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/network.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/thread.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// This class is used to fully configure one peer inside a call. 
+class PeerConfigurer { + public: + using VideoSource = + absl::variant, + CapturingDeviceIndex>; + + explicit PeerConfigurer(const PeerNetworkDependencies& network_dependencies); + + // Sets peer name that will be used to report metrics related to this peer. + // If not set, some default name will be assigned. All names have to be + // unique. + PeerConfigurer* SetName(absl::string_view name); + + // The parameters of the following 9 methods will be passed to the + // PeerConnectionFactoryInterface implementation that will be created for + // this peer. + PeerConfigurer* SetTaskQueueFactory( + std::unique_ptr task_queue_factory); + PeerConfigurer* SetCallFactory( + std::unique_ptr call_factory); + PeerConfigurer* SetEventLogFactory( + std::unique_ptr event_log_factory); + PeerConfigurer* SetFecControllerFactory( + std::unique_ptr fec_controller_factory); + PeerConfigurer* SetNetworkControllerFactory( + std::unique_ptr + network_controller_factory); + PeerConfigurer* SetVideoEncoderFactory( + std::unique_ptr video_encoder_factory); + PeerConfigurer* SetVideoDecoderFactory( + std::unique_ptr video_decoder_factory); + // Set a custom NetEqFactory to be used in the call. + PeerConfigurer* SetNetEqFactory(std::unique_ptr neteq_factory); + PeerConfigurer* SetAudioProcessing( + rtc::scoped_refptr audio_processing); + PeerConfigurer* SetAudioMixer( + rtc::scoped_refptr audio_mixer); + + // Forces the Peerconnection to use the network thread as the worker thread. + // Ie, worker thread and the network thread is the same thread. + PeerConfigurer* SetUseNetworkThreadAsWorkerThread(); + + // The parameters of the following 4 methods will be passed to the + // PeerConnectionInterface implementation that will be created for this + // peer. 
+ PeerConfigurer* SetAsyncResolverFactory( + std::unique_ptr async_resolver_factory); + PeerConfigurer* SetRTCCertificateGenerator( + std::unique_ptr cert_generator); + PeerConfigurer* SetSSLCertificateVerifier( + std::unique_ptr tls_cert_verifier); + PeerConfigurer* SetIceTransportFactory( + std::unique_ptr factory); + // Flags to set on `cricket::PortAllocator`. These flags will be added + // to the default ones that are presented on the port allocator. + // For possible values check p2p/base/port_allocator.h. + PeerConfigurer* SetPortAllocatorExtraFlags(uint32_t extra_flags); + + // Add new video stream to the call that will be sent from this peer. + // Default implementation of video frames generator will be used. + PeerConfigurer* AddVideoConfig(VideoConfig config); + // Add new video stream to the call that will be sent from this peer with + // provided own implementation of video frames generator. + PeerConfigurer* AddVideoConfig( + VideoConfig config, + std::unique_ptr generator); + // Add new video stream to the call that will be sent from this peer. + // Capturing device with specified index will be used to get input video. + PeerConfigurer* AddVideoConfig(VideoConfig config, + CapturingDeviceIndex capturing_device_index); + // Sets video subscription for the peer. By default subscription will + // include all streams with `VideoSubscription::kSameAsSendStream` + // resolution. To this behavior use this method. + PeerConfigurer* SetVideoSubscription(VideoSubscription subscription); + // Set the list of video codecs used by the peer during the test. These + // codecs will be negotiated in SDP during offer/answer exchange. The order + // of these codecs during negotiation will be the same as in `video_codecs`. + // Codecs have to be available in codecs list provided by peer connection to + // be negotiated. If some of specified codecs won't be found, the test will + // crash. 
+ PeerConfigurer* SetVideoCodecs(std::vector video_codecs); + // Set the audio stream for the call from this peer. If this method won't + // be invoked, this peer will send no audio. + PeerConfigurer* SetAudioConfig(AudioConfig config); + + // Set if ULP FEC should be used or not. False by default. + PeerConfigurer* SetUseUlpFEC(bool value); + // Set if Flex FEC should be used or not. False by default. + // Client also must enable `enable_flex_fec_support` in the `RunParams` to + // be able to use this feature. + PeerConfigurer* SetUseFlexFEC(bool value); + // Specifies how much video encoder target bitrate should be different than + // target bitrate, provided by WebRTC stack. Must be greater than 0. Can be + // used to emulate overshooting of video encoders. This multiplier will + // be applied for all video encoder on both sides for all layers. Bitrate + // estimated by WebRTC stack will be multiplied by this multiplier and then + // provided into VideoEncoder::SetRates(...). 1.0 by default. + PeerConfigurer* SetVideoEncoderBitrateMultiplier(double multiplier); + + // If is set, an RTCEventLog will be saved in that location and it will be + // available for further analysis. + PeerConfigurer* SetRtcEventLogPath(std::string path); + // If is set, an AEC dump will be saved in that location and it will be + // available for further analysis. + PeerConfigurer* SetAecDumpPath(std::string path); + PeerConfigurer* SetRTCConfiguration( + PeerConnectionInterface::RTCConfiguration configuration); + PeerConfigurer* SetRTCOfferAnswerOptions( + PeerConnectionInterface::RTCOfferAnswerOptions options); + // Set bitrate parameters on PeerConnection. This constraints will be + // applied to all summed RTP streams for this peer. + PeerConfigurer* SetBitrateSettings(BitrateSettings bitrate_settings); + + // Returns InjectableComponents and transfer ownership to the caller. + // Can be called once. 
+ std::unique_ptr ReleaseComponents(); + + // Returns Params and transfer ownership to the caller. + // Can be called once. + std::unique_ptr ReleaseParams(); + + // Returns ConfigurableParams and transfer ownership to the caller. + // Can be called once. + std::unique_ptr ReleaseConfigurableParams(); + + // Returns video sources and transfer frame generators ownership to the + // caller. Can be called once. + std::vector ReleaseVideoSources(); + + InjectableComponents* components() { return components_.get(); } + Params* params() { return params_.get(); } + ConfigurableParams* configurable_params() { + return configurable_params_.get(); + } + const Params& params() const { return *params_; } + const ConfigurableParams& configurable_params() const { + return *configurable_params_; + } + std::vector* video_sources() { return &video_sources_; } + + private: + std::unique_ptr components_; + std::unique_ptr params_; + std::unique_ptr configurable_params_; + std::vector video_sources_; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_PCLF_PEER_CONFIGURER_H_ diff --git a/api/test/peerconnection_quality_test_fixture.cc b/api/test/peerconnection_quality_test_fixture.cc deleted file mode 100644 index c452e8152c..0000000000 --- a/api/test/peerconnection_quality_test_fixture.cc +++ /dev/null @@ -1,240 +0,0 @@ -/* - * Copyright 2022 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/test/peerconnection_quality_test_fixture.h" - -#include - -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/test/video/video_frame_writer.h" -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" -#include "test/pc/e2e/analyzer/video/video_dumping.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { -namespace webrtc_pc_e2e { -namespace { - -using VideoCodecConfig = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::VideoCodecConfig; -using VideoSubscription = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::VideoSubscription; - -std::string SpecToString( - PeerConnectionE2EQualityTestFixture::VideoResolution::VideoResolution::Spec - spec) { - switch (spec) { - case PeerConnectionE2EQualityTestFixture::VideoResolution::Spec::kNone: - return "None"; - case PeerConnectionE2EQualityTestFixture::VideoResolution::Spec:: - kMaxFromSender: - return "MaxFromSender"; - } -} - -} // namespace - -PeerConnectionE2EQualityTestFixture::VideoResolution::VideoResolution( - size_t width, - size_t height, - int32_t fps) - : width_(width), height_(height), fps_(fps), spec_(Spec::kNone) {} -PeerConnectionE2EQualityTestFixture::VideoResolution::VideoResolution(Spec spec) - : width_(0), height_(0), fps_(0), spec_(spec) {} - -bool PeerConnectionE2EQualityTestFixture::VideoResolution::operator==( - const VideoResolution& other) const { - if (spec_ != Spec::kNone && spec_ == other.spec_) { - // If there is some particular spec set, then it doesn't matter what - // values we have in other fields. 
- return true; - } - return width_ == other.width_ && height_ == other.height_ && - fps_ == other.fps_ && spec_ == other.spec_; -} - -std::string PeerConnectionE2EQualityTestFixture::VideoResolution::ToString() - const { - rtc::StringBuilder out; - out << "{ width=" << width_ << ", height=" << height_ << ", fps=" << fps_ - << ", spec=" << SpecToString(spec_) << " }"; - return out.Release(); -} - -bool PeerConnectionE2EQualityTestFixture::VideoSubscription::operator==( - const VideoSubscription& other) const { - return default_resolution_ == other.default_resolution_ && - peers_resolution_ == other.peers_resolution_; -} - -absl::optional -PeerConnectionE2EQualityTestFixture::VideoSubscription::GetMaxResolution( - rtc::ArrayView video_configs) { - std::vector resolutions; - for (const auto& video_config : video_configs) { - resolutions.push_back(video_config.GetResolution()); - } - return GetMaxResolution(resolutions); -} - -absl::optional -PeerConnectionE2EQualityTestFixture::VideoSubscription::GetMaxResolution( - rtc::ArrayView resolutions) { - if (resolutions.empty()) { - return absl::nullopt; - } - - VideoResolution max_resolution; - for (const VideoResolution& resolution : resolutions) { - if (max_resolution.width() < resolution.width()) { - max_resolution.set_width(resolution.width()); - } - if (max_resolution.height() < resolution.height()) { - max_resolution.set_height(resolution.height()); - } - if (max_resolution.fps() < resolution.fps()) { - max_resolution.set_fps(resolution.fps()); - } - } - return max_resolution; -} - -std::string PeerConnectionE2EQualityTestFixture::VideoSubscription::ToString() - const { - rtc::StringBuilder out; - out << "{ default_resolution_=["; - if (default_resolution_.has_value()) { - out << default_resolution_->ToString(); - } else { - out << "undefined"; - } - out << "], {"; - for (const auto& [peer_name, resolution] : peers_resolution_) { - out << "[" << peer_name << ": " << resolution.ToString() << "], "; - } - out << "} }"; 
- return out.Release(); -} - -PeerConnectionE2EQualityTestFixture::VideoDumpOptions::VideoDumpOptions( - absl::string_view output_directory, - int sampling_modulo, - bool export_frame_ids, - std::function( - absl::string_view file_name_prefix, - const VideoResolution& resolution)> video_frame_writer_factory) - : output_directory_(output_directory), - sampling_modulo_(sampling_modulo), - export_frame_ids_(export_frame_ids), - video_frame_writer_factory_(video_frame_writer_factory) { - RTC_CHECK_GT(sampling_modulo, 0); -} - -PeerConnectionE2EQualityTestFixture::VideoDumpOptions::VideoDumpOptions( - absl::string_view output_directory, - bool export_frame_ids) - : VideoDumpOptions(output_directory, - kDefaultSamplingModulo, - export_frame_ids) {} - -std::unique_ptr PeerConnectionE2EQualityTestFixture:: - VideoDumpOptions::CreateInputDumpVideoFrameWriter( - absl::string_view stream_label, - const VideoResolution& resolution) const { - std::unique_ptr writer = video_frame_writer_factory_( - GetInputDumpFileName(stream_label), resolution); - absl::optional frame_ids_file = - GetInputFrameIdsDumpFileName(stream_label); - if (frame_ids_file.has_value()) { - writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); - } - return writer; -} - -std::unique_ptr PeerConnectionE2EQualityTestFixture:: - VideoDumpOptions::CreateOutputDumpVideoFrameWriter( - absl::string_view stream_label, - absl::string_view receiver, - const VideoResolution& resolution) const { - std::unique_ptr writer = video_frame_writer_factory_( - GetOutputDumpFileName(stream_label, receiver), resolution); - absl::optional frame_ids_file = - GetOutputFrameIdsDumpFileName(stream_label, receiver); - if (frame_ids_file.has_value()) { - writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); - } - return writer; -} - -std::unique_ptr PeerConnectionE2EQualityTestFixture:: - VideoDumpOptions::Y4mVideoFrameWriterFactory( - absl::string_view file_name_prefix, - const 
VideoResolution& resolution) { - return std::make_unique( - std::string(file_name_prefix) + ".y4m", resolution.width(), - resolution.height(), resolution.fps()); -} - -std::string -PeerConnectionE2EQualityTestFixture::VideoDumpOptions::GetInputDumpFileName( - absl::string_view stream_label) const { - return test::JoinFilename(output_directory_, stream_label); -} - -absl::optional PeerConnectionE2EQualityTestFixture:: - VideoDumpOptions::GetInputFrameIdsDumpFileName( - absl::string_view stream_label) const { - if (!export_frame_ids_) { - return absl::nullopt; - } - return GetInputDumpFileName(stream_label) + ".frame_ids.txt"; -} - -std::string -PeerConnectionE2EQualityTestFixture::VideoDumpOptions::GetOutputDumpFileName( - absl::string_view stream_label, - absl::string_view receiver) const { - rtc::StringBuilder file_name; - file_name << stream_label << "_" << receiver; - return test::JoinFilename(output_directory_, file_name.Release()); -} - -absl::optional PeerConnectionE2EQualityTestFixture:: - VideoDumpOptions::GetOutputFrameIdsDumpFileName( - absl::string_view stream_label, - absl::string_view receiver) const { - if (!export_frame_ids_) { - return absl::nullopt; - } - return GetOutputDumpFileName(stream_label, receiver) + ".frame_ids.txt"; -} - -std::string PeerConnectionE2EQualityTestFixture::VideoDumpOptions::ToString() - const { - rtc::StringBuilder out; - out << "{ output_directory_=" << output_directory_ - << ", sampling_modulo_=" << sampling_modulo_ - << ", export_frame_ids_=" << export_frame_ids_ << " }"; - return out.Release(); -} - -PeerConnectionE2EQualityTestFixture::VideoConfig::VideoConfig( - const VideoResolution& resolution) - : width(resolution.width()), - height(resolution.height()), - fps(resolution.fps()) { - RTC_CHECK(resolution.IsRegular()); -} - -} // namespace webrtc_pc_e2e -} // namespace webrtc diff --git a/api/test/peerconnection_quality_test_fixture.h b/api/test/peerconnection_quality_test_fixture.h index ff44e9f9aa..74470cdf86 100644 
--- a/api/test/peerconnection_quality_test_fixture.h +++ b/api/test/peerconnection_quality_test_fixture.h @@ -10,12 +10,17 @@ #ifndef API_TEST_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ #define API_TEST_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ +#include +#include + +#include #include #include #include #include #include +#include "absl/base/macros.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" @@ -32,6 +37,9 @@ #include "api/task_queue/task_queue_factory.h" #include "api/test/audio_quality_analyzer_interface.h" #include "api/test/frame_generator_interface.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peer_network_dependencies.h" #include "api/test/simulated_network.h" #include "api/test/stats_observer_interface.h" @@ -45,6 +53,7 @@ #include "api/video_codecs/video_encoder_factory.h" #include "media/base/media_constants.h" #include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/checks.h" #include "rtc_base/network.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_certificate.h" @@ -53,623 +62,9 @@ namespace webrtc { namespace webrtc_pc_e2e { -constexpr size_t kDefaultSlidesWidth = 1850; -constexpr size_t kDefaultSlidesHeight = 1110; - // API is in development. Can be changed/removed without notice. class PeerConnectionE2EQualityTestFixture { public: - // The index of required capturing device in OS provided list of video - // devices. On Linux and Windows the list will be obtained via - // webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via - // [RTCCameraVideoCapturer captureDevices]. - enum class CapturingDeviceIndex : size_t {}; - - // Contains parameters for screen share scrolling. 
- // - // If scrolling is enabled, then it will be done by putting sliding window - // on source video and moving this window from top left corner to the - // bottom right corner of the picture. - // - // In such case source dimensions must be greater or equal to the sliding - // window dimensions. So `source_width` and `source_height` are the dimensions - // of the source frame, while `VideoConfig::width` and `VideoConfig::height` - // are the dimensions of the sliding window. - // - // Because `source_width` and `source_height` are dimensions of the source - // frame, they have to be width and height of videos from - // `ScreenShareConfig::slides_yuv_file_names`. - // - // Because scrolling have to be done on single slide it also requires, that - // `duration` must be less or equal to - // `ScreenShareConfig::slide_change_interval`. - struct ScrollingParams { - ScrollingParams(TimeDelta duration, - size_t source_width, - size_t source_height) - : duration(duration), - source_width(source_width), - source_height(source_height) { - RTC_CHECK_GT(duration.ms(), 0); - } - - // Duration of scrolling. - TimeDelta duration; - // Width of source slides video. - size_t source_width; - // Height of source slides video. - size_t source_height; - }; - - // Contains screen share video stream properties. - struct ScreenShareConfig { - explicit ScreenShareConfig(TimeDelta slide_change_interval) - : slide_change_interval(slide_change_interval) { - RTC_CHECK_GT(slide_change_interval.ms(), 0); - } - - // Shows how long one slide should be presented on the screen during - // slide generation. - TimeDelta slide_change_interval; - // If true, slides will be generated programmatically. No scrolling params - // will be applied in such case. - bool generate_slides = false; - // If present scrolling will be applied. Please read extra requirement on - // `slides_yuv_file_names` for scrolling. - absl::optional scrolling_params; - // Contains list of yuv files with slides. 
- // - // If empty, default set of slides will be used. In such case - // `VideoConfig::width` must be equal to `kDefaultSlidesWidth` and - // `VideoConfig::height` must be equal to `kDefaultSlidesHeight` or if - // `scrolling_params` are specified, then `ScrollingParams::source_width` - // must be equal to `kDefaultSlidesWidth` and - // `ScrollingParams::source_height` must be equal to `kDefaultSlidesHeight`. - std::vector slides_yuv_file_names; - }; - - // Config for Vp8 simulcast or non-standard Vp9 SVC testing. - // - // To configure standard SVC setting, use `scalability_mode` in the - // `encoding_params` array. - // This configures Vp9 SVC by requesting simulcast layers, the request is - // internally converted to a request for SVC layers. - // - // SVC support is limited: - // During SVC testing there is no SFU, so framework will try to emulate SFU - // behavior in regular p2p call. Because of it there are such limitations: - // * if `target_spatial_index` is not equal to the highest spatial layer - // then no packet/frame drops are allowed. - // - // If there will be any drops, that will affect requested layer, then - // WebRTC SVC implementation will continue decoding only the highest - // available layer and won't restore lower layers, so analyzer won't - // receive required data which will cause wrong results or test failures. - struct VideoSimulcastConfig { - explicit VideoSimulcastConfig(int simulcast_streams_count) - : simulcast_streams_count(simulcast_streams_count) { - RTC_CHECK_GT(simulcast_streams_count, 1); - } - - // Specified amount of simulcast streams/SVC layers, depending on which - // encoder is used. - int simulcast_streams_count; - }; - - // Configuration for the emulated Selective Forward Unit (SFU) - // - // The framework can optionally filter out frames that are decoded - // using an emulated SFU. - // When using simulcast or SVC, it's not always desirable to receive - // all frames. 
In a real world call, a SFU will only forward a subset - // of the frames. - // The emulated SFU is not able to change its configuration dynamically, - // if adaptation happens during the call, layers may be dropped and the - // analyzer won't receive the required data which will cause wrong results or - // test failures. - struct EmulatedSFUConfig { - EmulatedSFUConfig() {} - explicit EmulatedSFUConfig(int target_layer_index) - : target_layer_index(target_layer_index) { - RTC_CHECK_GE(target_layer_index, 0); - } - - EmulatedSFUConfig(absl::optional target_layer_index, - absl::optional target_temporal_index) - : target_layer_index(target_layer_index), - target_temporal_index(target_temporal_index) { - RTC_CHECK_GE(target_temporal_index.value_or(0), 0); - if (target_temporal_index) - RTC_CHECK_GE(*target_temporal_index, 0); - } - - // Specifies simulcast or spatial index of the video stream to analyze. - // There are 2 cases: - // 1. simulcast encoding is used: - // in such case `target_layer_index` will specify the index of - // simulcast stream, that should be analyzed. Other streams will be - // dropped. - // 2. SVC encoding is used: - // in such case `target_layer_index` will specify the top interesting - // spatial layer and all layers below, including target one will be - // processed. All layers above target one will be dropped. - // If not specified then all streams will be received and analyzed. - // When set, it instructs the framework to create an emulated Selective - // Forwarding Unit (SFU) that will propagate only the requested layers. - absl::optional target_layer_index; - // Specifies the index of the maximum temporal unit to keep. - // If not specified then all temporal layers will be received and analyzed. - // When set, it instructs the framework to create an emulated Selective - // Forwarding Unit (SFU) that will propagate only up to the requested layer. 
- absl::optional target_temporal_index; - }; - - class VideoResolution { - public: - // Determines special resolutions, which can't be expressed in terms of - // width, height and fps. - enum class Spec { - // No extra spec set. It describes a regular resolution described by - // width, height and fps. - kNone, - // Describes resolution which contains max value among all sender's - // video streams in each dimension (width, height, fps). - kMaxFromSender - }; - - VideoResolution(size_t width, size_t height, int32_t fps); - explicit VideoResolution(Spec spec = Spec::kNone); - - bool operator==(const VideoResolution& other) const; - bool operator!=(const VideoResolution& other) const { - return !(*this == other); - } - - size_t width() const { return width_; } - void set_width(size_t width) { width_ = width; } - size_t height() const { return height_; } - void set_height(size_t height) { height_ = height; } - int32_t fps() const { return fps_; } - void set_fps(int32_t fps) { fps_ = fps; } - - // Returns if it is a regular resolution or not. The resolution is regular - // if it's spec is `Spec::kNone`. - bool IsRegular() const { return spec_ == Spec::kNone; } - - std::string ToString() const; - - private: - size_t width_ = 0; - size_t height_ = 0; - int32_t fps_ = 0; - Spec spec_ = Spec::kNone; - }; - - class VideoDumpOptions { - public: - static constexpr int kDefaultSamplingModulo = 1; - - // output_directory - the output directory where stream will be dumped. The - // output files' names will be constructed as - // _. for output dumps and - // . for input dumps. By default is - // "y4m". - // sampling_modulo - the module for the video frames to be dumped. Modulo - // equals X means every Xth frame will be written to the dump file. The - // value must be greater than 0. (Default: 1) - // export_frame_ids - specifies if frame ids should be exported together - // with content of the stream. 
If true, an output file with the same name as - // video dump and suffix ".frame_ids.txt" will be created. It will contain - // the frame ids in the same order as original frames in the output - // file with stream content. File will contain one frame id per line. - // (Default: false) - // `video_frame_writer_factory` - factory function to create a video frame - // writer for input and output video files. (Default: Y4M video writer - // factory). - explicit VideoDumpOptions( - absl::string_view output_directory, - int sampling_modulo = kDefaultSamplingModulo, - bool export_frame_ids = false, - std::function( - absl::string_view file_name_prefix, - const VideoResolution& resolution)> video_frame_writer_factory = - Y4mVideoFrameWriterFactory); - VideoDumpOptions(absl::string_view output_directory, bool export_frame_ids); - - VideoDumpOptions(const VideoDumpOptions&) = default; - VideoDumpOptions& operator=(const VideoDumpOptions&) = default; - VideoDumpOptions(VideoDumpOptions&&) = default; - VideoDumpOptions& operator=(VideoDumpOptions&&) = default; - - std::string output_directory() const { return output_directory_; } - int sampling_modulo() const { return sampling_modulo_; } - bool export_frame_ids() const { return export_frame_ids_; } - - std::unique_ptr CreateInputDumpVideoFrameWriter( - absl::string_view stream_label, - const VideoResolution& resolution) const; - - std::unique_ptr CreateOutputDumpVideoFrameWriter( - absl::string_view stream_label, - absl::string_view receiver, - const VideoResolution& resolution) const; - - std::string ToString() const; - - private: - static std::unique_ptr Y4mVideoFrameWriterFactory( - absl::string_view file_name_prefix, - const VideoResolution& resolution); - std::string GetInputDumpFileName(absl::string_view stream_label) const; - // Returns file name for input frame ids dump if `export_frame_ids()` is - // true, absl::nullopt otherwise. 
- absl::optional GetInputFrameIdsDumpFileName( - absl::string_view stream_label) const; - std::string GetOutputDumpFileName(absl::string_view stream_label, - absl::string_view receiver) const; - // Returns file name for output frame ids dump if `export_frame_ids()` is - // true, absl::nullopt otherwise. - absl::optional GetOutputFrameIdsDumpFileName( - absl::string_view stream_label, - absl::string_view receiver) const; - - std::string output_directory_; - int sampling_modulo_ = 1; - bool export_frame_ids_ = false; - std::function( - absl::string_view file_name_prefix, - const VideoResolution& resolution)> - video_frame_writer_factory_; - }; - - // Contains properties of single video stream. - struct VideoConfig { - explicit VideoConfig(const VideoResolution& resolution); - VideoConfig(size_t width, size_t height, int32_t fps) - : width(width), height(height), fps(fps) {} - VideoConfig(std::string stream_label, - size_t width, - size_t height, - int32_t fps) - : width(width), - height(height), - fps(fps), - stream_label(std::move(stream_label)) {} - - // Video stream width. - size_t width; - // Video stream height. - size_t height; - int32_t fps; - VideoResolution GetResolution() const { - return VideoResolution(width, height, fps); - } - - // Have to be unique among all specified configs for all peers in the call. - // Will be auto generated if omitted. - absl::optional stream_label; - // Will be set for current video track. If equals to kText or kDetailed - - // screencast in on. - absl::optional content_hint; - // If presented video will be transfered in simulcast/SVC mode depending on - // which encoder is used. - // - // Simulcast is supported only from 1st added peer. For VP8 simulcast only - // without RTX is supported so it will be automatically disabled for all - // simulcast tracks. For VP9 simulcast enables VP9 SVC mode and support RTX, - // but only on non-lossy networks. See more in documentation to - // VideoSimulcastConfig. 
- absl::optional simulcast_config; - // Configuration for the emulated Selective Forward Unit (SFU). - absl::optional emulated_sfu_config; - // Encoding parameters for both singlecast and per simulcast layer. - // If singlecast is used, if not empty, a single value can be provided. - // If simulcast is used, if not empty, `encoding_params` size have to be - // equal to `simulcast_config.simulcast_streams_count`. Will be used to set - // transceiver send encoding params for each layer. - // RtpEncodingParameters::rid may be changed by fixture implementation to - // ensure signaling correctness. - std::vector encoding_params; - // Count of temporal layers for video stream. This value will be set into - // each RtpEncodingParameters of RtpParameters of corresponding - // RtpSenderInterface for this video stream. - absl::optional temporal_layers_count; - // If specified defines how input should be dumped. It is actually one of - // the test's output file, which contains copy of what was captured during - // the test for this video stream on sender side. It is useful when - // generator is used as input. - absl::optional input_dump_options; - // If specified defines how output should be dumped on the receiver side for - // this stream. The produced files contain what was rendered for this video - // stream on receiver side per each receiver. - absl::optional output_dump_options; - // If set to true uses fixed frame rate while dumping output video to the - // file. `fps` will be used as frame rate. - bool output_dump_use_fixed_framerate = false; - // If true will display input and output video on the user's screen. - bool show_on_screen = false; - // If specified, determines a sync group to which this video stream belongs. - // According to bugs.webrtc.org/4762 WebRTC supports synchronization only - // for pair of single audio and single video stream. 
- absl::optional sync_group; - // If specified, it will be set into RtpParameters of corresponding - // RtpSenderInterface for this video stream. - // Note that this setting takes precedence over `content_hint`. - absl::optional degradation_preference; - }; - - // Contains properties for audio in the call. - struct AudioConfig { - enum Mode { - kGenerated, - kFile, - }; - - AudioConfig() = default; - explicit AudioConfig(std::string stream_label) - : stream_label(std::move(stream_label)) {} - - // Have to be unique among all specified configs for all peers in the call. - // Will be auto generated if omitted. - absl::optional stream_label; - Mode mode = kGenerated; - // Have to be specified only if mode = kFile - absl::optional input_file_name; - // If specified the input stream will be also copied to specified file. - absl::optional input_dump_file_name; - // If specified the output stream will be copied to specified file. - absl::optional output_dump_file_name; - - // Audio options to use. - cricket::AudioOptions audio_options; - // Sampling frequency of input audio data (from file or generated). - int sampling_frequency_in_hz = 48000; - // If specified, determines a sync group to which this audio stream belongs. - // According to bugs.webrtc.org/4762 WebRTC supports synchronization only - // for pair of single audio and single video stream. - absl::optional sync_group; - }; - - struct VideoCodecConfig { - explicit VideoCodecConfig(std::string name) - : name(std::move(name)), required_params() {} - VideoCodecConfig(std::string name, - std::map required_params) - : name(std::move(name)), required_params(std::move(required_params)) {} - // Next two fields are used to specify concrete video codec, that should be - // used in the test. Video code will be negotiated in SDP during offer/ - // answer exchange. - // Video codec name. 
You can find valid names in - // media/base/media_constants.h - std::string name = cricket::kVp8CodecName; - // Map of parameters, that have to be specified on SDP codec. Each parameter - // is described by key and value. Codec parameters will match the specified - // map if and only if for each key from `required_params` there will be - // a parameter with name equal to this key and parameter value will be equal - // to the value from `required_params` for this key. - // If empty then only name will be used to match the codec. - std::map required_params; - }; - - // Subscription to the remote video streams. It declares which remote stream - // peer should receive and in which resolution (width x height x fps). - class VideoSubscription { - public: - // Returns the resolution constructed as maximum from all resolution - // dimensions: width, height and fps. - static absl::optional GetMaxResolution( - rtc::ArrayView video_configs); - static absl::optional GetMaxResolution( - rtc::ArrayView resolutions); - - bool operator==(const VideoSubscription& other) const; - bool operator!=(const VideoSubscription& other) const { - return !(*this == other); - } - - // Subscribes receiver to all streams sent by the specified peer with - // specified resolution. It will override any resolution that was used in - // `SubscribeToAll` independently from methods call order. - VideoSubscription& SubscribeToPeer( - absl::string_view peer_name, - VideoResolution resolution = - VideoResolution(VideoResolution::Spec::kMaxFromSender)) { - peers_resolution_[std::string(peer_name)] = resolution; - return *this; - } - - // Subscribes receiver to the all sent streams with specified resolution. - // If any stream was subscribed to with `SubscribeTo` method that will - // override resolution passed to this function independently from methods - // call order. 
- VideoSubscription& SubscribeToAllPeers( - VideoResolution resolution = - VideoResolution(VideoResolution::Spec::kMaxFromSender)) { - default_resolution_ = resolution; - return *this; - } - - // Returns resolution for specific sender. If no specific resolution was - // set for this sender, then will return resolution used for all streams. - // If subscription doesn't subscribe to all streams, `absl::nullopt` will be - // returned. - absl::optional GetResolutionForPeer( - absl::string_view peer_name) const { - auto it = peers_resolution_.find(std::string(peer_name)); - if (it == peers_resolution_.end()) { - return default_resolution_; - } - return it->second; - } - - // Returns a maybe empty list of senders for which peer explicitly - // subscribed to with specific resolution. - std::vector GetSubscribedPeers() const { - std::vector subscribed_streams; - subscribed_streams.reserve(peers_resolution_.size()); - for (const auto& entry : peers_resolution_) { - subscribed_streams.push_back(entry.first); - } - return subscribed_streams; - } - - std::string ToString() const; - - private: - absl::optional default_resolution_ = absl::nullopt; - std::map peers_resolution_; - }; - - // This class is used to fully configure one peer inside the call. - class PeerConfigurer { - public: - virtual ~PeerConfigurer() = default; - - // Sets peer name that will be used to report metrics related to this peer. - // If not set, some default name will be assigned. All names have to be - // unique. - virtual PeerConfigurer* SetName(absl::string_view name) = 0; - - // The parameters of the following 9 methods will be passed to the - // PeerConnectionFactoryInterface implementation that will be created for - // this peer. 
- virtual PeerConfigurer* SetTaskQueueFactory( - std::unique_ptr task_queue_factory) = 0; - virtual PeerConfigurer* SetCallFactory( - std::unique_ptr call_factory) = 0; - virtual PeerConfigurer* SetEventLogFactory( - std::unique_ptr event_log_factory) = 0; - virtual PeerConfigurer* SetFecControllerFactory( - std::unique_ptr - fec_controller_factory) = 0; - virtual PeerConfigurer* SetNetworkControllerFactory( - std::unique_ptr - network_controller_factory) = 0; - virtual PeerConfigurer* SetVideoEncoderFactory( - std::unique_ptr video_encoder_factory) = 0; - virtual PeerConfigurer* SetVideoDecoderFactory( - std::unique_ptr video_decoder_factory) = 0; - // Set a custom NetEqFactory to be used in the call. - virtual PeerConfigurer* SetNetEqFactory( - std::unique_ptr neteq_factory) = 0; - virtual PeerConfigurer* SetAudioProcessing( - rtc::scoped_refptr audio_processing) = 0; - virtual PeerConfigurer* SetAudioMixer( - rtc::scoped_refptr audio_mixer) = 0; - - // Forces the Peerconnection to use the network thread as the worker thread. - // Ie, worker thread and the network thread is the same thread. - virtual PeerConfigurer* SetUseNetworkThreadAsWorkerThread() = 0; - - // The parameters of the following 4 methods will be passed to the - // PeerConnectionInterface implementation that will be created for this - // peer. - virtual PeerConfigurer* SetAsyncResolverFactory( - std::unique_ptr - async_resolver_factory) = 0; - virtual PeerConfigurer* SetRTCCertificateGenerator( - std::unique_ptr - cert_generator) = 0; - virtual PeerConfigurer* SetSSLCertificateVerifier( - std::unique_ptr tls_cert_verifier) = 0; - virtual PeerConfigurer* SetIceTransportFactory( - std::unique_ptr factory) = 0; - // Flags to set on `cricket::PortAllocator`. These flags will be added - // to the default ones that are presented on the port allocator. - // For possible values check p2p/base/port_allocator.h. 
- virtual PeerConfigurer* SetPortAllocatorExtraFlags( - uint32_t extra_flags) = 0; - - // Add new video stream to the call that will be sent from this peer. - // Default implementation of video frames generator will be used. - virtual PeerConfigurer* AddVideoConfig(VideoConfig config) = 0; - // Add new video stream to the call that will be sent from this peer with - // provided own implementation of video frames generator. - virtual PeerConfigurer* AddVideoConfig( - VideoConfig config, - std::unique_ptr generator) = 0; - // Add new video stream to the call that will be sent from this peer. - // Capturing device with specified index will be used to get input video. - virtual PeerConfigurer* AddVideoConfig( - VideoConfig config, - CapturingDeviceIndex capturing_device_index) = 0; - // Sets video subscription for the peer. By default subscription will - // include all streams with `VideoSubscription::kSameAsSendStream` - // resolution. To override this behavior use this method. - virtual PeerConfigurer* SetVideoSubscription( - VideoSubscription subscription) = 0; - // Set the list of video codecs used by the peer during the test. These - // codecs will be negotiated in SDP during offer/answer exchange. The order - // of these codecs during negotiation will be the same as in `video_codecs`. - // Codecs have to be available in codecs list provided by peer connection to - // be negotiated. If some of specified codecs won't be found, the test will - // crash. - virtual PeerConfigurer* SetVideoCodecs( - std::vector video_codecs) = 0; - // Set the audio stream for the call from this peer. If this method won't - // be invoked, this peer will send no audio. - virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0; - - // Set if ULP FEC should be used or not. False by default. - virtual PeerConfigurer* SetUseUlpFEC(bool value) = 0; - // Set if Flex FEC should be used or not. False by default. 
- // Client also must enable `enable_flex_fec_support` in the `RunParams` to - // be able to use this feature. - virtual PeerConfigurer* SetUseFlexFEC(bool value) = 0; - // Specifies how much video encoder target bitrate should be different than - // target bitrate, provided by WebRTC stack. Must be greater than 0. Can be - // used to emulate overshooting of video encoders. This multiplier will - // be applied for all video encoder on both sides for all layers. Bitrate - // estimated by WebRTC stack will be multiplied by this multiplier and then - // provided into VideoEncoder::SetRates(...). 1.0 by default. - virtual PeerConfigurer* SetVideoEncoderBitrateMultiplier( - double multiplier) = 0; - - // If is set, an RTCEventLog will be saved in that location and it will be - // available for further analysis. - virtual PeerConfigurer* SetRtcEventLogPath(std::string path) = 0; - // If is set, an AEC dump will be saved in that location and it will be - // available for further analysis. - virtual PeerConfigurer* SetAecDumpPath(std::string path) = 0; - virtual PeerConfigurer* SetRTCConfiguration( - PeerConnectionInterface::RTCConfiguration configuration) = 0; - virtual PeerConfigurer* SetRTCOfferAnswerOptions( - PeerConnectionInterface::RTCOfferAnswerOptions options) = 0; - // Set bitrate parameters on PeerConnection. This constraints will be - // applied to all summed RTP streams for this peer. - virtual PeerConfigurer* SetBitrateSettings( - BitrateSettings bitrate_settings) = 0; - }; - - // Contains configuration for echo emulator. - struct EchoEmulationConfig { - // Delay which represents the echo path delay, i.e. how soon rendered signal - // should reach capturer. - TimeDelta echo_delay = TimeDelta::Millis(50); - }; - - // Contains parameters, that describe how long framework should run quality - // test. - struct RunParams { - explicit RunParams(TimeDelta run_duration) : run_duration(run_duration) {} - - // Specifies how long the test should be run. 
This time shows how long - // the media should flow after connection was established and before - // it will be shut downed. - TimeDelta run_duration; - - // If set to true peers will be able to use Flex FEC, otherwise they won't - // be able to negotiate it even if it's enabled on per peer level. - bool enable_flex_fec_support = false; - // If true will set conference mode in SDP media section for all video - // tracks for all peers. - bool use_conference_mode = false; - // If specified echo emulation will be done, by mixing the render audio into - // the capture signal. In such case input signal will be reduced by half to - // avoid saturation or compression in the echo path simulation. - absl::optional echo_emulation_config; - }; - // Represent an entity that will report quality metrics after test. class QualityMetricsReporter : public StatsObserverInterface { public: @@ -723,9 +118,7 @@ class PeerConnectionE2EQualityTestFixture { // `network_dependencies` are used to provide networking for peer's peer // connection. Members must be non-null. // `configurer` function will be used to configure peer in the call. 
- virtual PeerHandle* AddPeer( - const PeerNetworkDependencies& network_dependencies, - rtc::FunctionView configurer) = 0; + virtual PeerHandle* AddPeer(std::unique_ptr configurer) = 0; // Runs the media quality test, which includes setting up the call with // configured participants, running it according to provided `run_params` and diff --git a/api/test/peerconnection_quality_test_fixture_unittest.cc b/api/test/peerconnection_quality_test_fixture_unittest.cc index b3ec62f134..26ae8cf98f 100644 --- a/api/test/peerconnection_quality_test_fixture_unittest.cc +++ b/api/test/peerconnection_quality_test_fixture_unittest.cc @@ -13,19 +13,17 @@ #include #include "absl/types/optional.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/video/video_frame_writer.h" #include "rtc_base/gunit.h" #include "test/gmock.h" +#include "test/testsupport/file_utils.h" namespace webrtc { namespace webrtc_pc_e2e { namespace { -using VideoResolution = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::VideoResolution; -using VideoConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; -using VideoSubscription = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::VideoSubscription; +using ::testing::Eq; TEST(PclfVideoSubscriptionTest, MaxFromSenderSpecEqualIndependentOfOtherFields) { @@ -78,6 +76,67 @@ TEST(PclfVideoSubscriptionTest, GetMaxResolutionSelectMaxForEachDimention) { EXPECT_EQ(resolution->fps(), 10); } +struct TestVideoFrameWriter : public test::VideoFrameWriter { + public: + TestVideoFrameWriter(absl::string_view file_name_prefix, + const VideoResolution& resolution) + : file_name_prefix(file_name_prefix), resolution(resolution) {} + + bool WriteFrame(const VideoFrame& frame) override { return true; } + + void Close() override {} + + std::string file_name_prefix; + VideoResolution resolution; +}; + +TEST(VideoDumpOptionsTest, InputVideoWriterHasCorrectFileName) { + VideoResolution 
resolution(/*width=*/1280, /*height=*/720, /*fps=*/30); + + TestVideoFrameWriter* writer = nullptr; + VideoDumpOptions options("foo", /*sampling_modulo=*/1, + /*export_frame_ids=*/false, + /*video_frame_writer_factory=*/ + [&](absl::string_view file_name_prefix, + const VideoResolution& resolution) { + auto out = std::make_unique( + file_name_prefix, resolution); + writer = out.get(); + return out; + }); + std::unique_ptr created_writer = + options.CreateInputDumpVideoFrameWriter("alice-video", resolution); + + ASSERT_TRUE(writer != nullptr); + ASSERT_THAT(writer->file_name_prefix, + Eq(test::JoinFilename("foo", "alice-video_1280x720_30"))); + ASSERT_THAT(writer->resolution, Eq(resolution)); +} + +TEST(VideoDumpOptionsTest, OutputVideoWriterHasCorrectFileName) { + VideoResolution resolution(/*width=*/1280, /*height=*/720, /*fps=*/30); + + TestVideoFrameWriter* writer = nullptr; + VideoDumpOptions options("foo", /*sampling_modulo=*/1, + /*export_frame_ids=*/false, + /*video_frame_writer_factory=*/ + [&](absl::string_view file_name_prefix, + const VideoResolution& resolution) { + auto out = std::make_unique( + file_name_prefix, resolution); + writer = out.get(); + return out; + }); + std::unique_ptr created_writer = + options.CreateOutputDumpVideoFrameWriter("alice-video", "bob", + resolution); + + ASSERT_TRUE(writer != nullptr); + ASSERT_THAT(writer->file_name_prefix, + Eq(test::JoinFilename("foo", "alice-video_bob_1280x720_30"))); + ASSERT_THAT(writer->resolution, Eq(resolution)); +} + } // namespace } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/api/test/simulated_network.h b/api/test/simulated_network.h index fbf5c5ca29..04c5517c8d 100644 --- a/api/test/simulated_network.h +++ b/api/test/simulated_network.h @@ -38,6 +38,12 @@ struct PacketDeliveryInfo { static constexpr int kNotReceived = -1; PacketDeliveryInfo(PacketInFlightInfo source, int64_t receive_time_us) : receive_time_us(receive_time_us), packet_id(source.packet_id) {} + + bool 
operator==(const PacketDeliveryInfo& other) const { + return receive_time_us == other.receive_time_us && + packet_id == other.packet_id; + } + int64_t receive_time_us; uint64_t packet_id; }; @@ -64,14 +70,50 @@ struct BuiltInNetworkBehaviorConfig { int packet_overhead = 0; }; +// Interface that represents a Network behaviour. +// +// It is clients of this interface responsibility to enqueue and dequeue +// packets (based on the estimated delivery time expressed by +// NextDeliveryTimeUs). +// +// To enqueue packets, call EnqueuePacket: +// EXPECT_TRUE(network.EnqueuePacket( +// PacketInFlightInfo(/*size=*/1, /*send_time_us=*/0, /*packet_id=*/1))); +// +// To know when to call DequeueDeliverablePackets to pull packets out of the +// network, call NextDeliveryTimeUs and schedule a task to invoke +// DequeueDeliverablePackets (if not already scheduled). +// +// DequeueDeliverablePackets will return a vector of delivered packets, but this +// vector can be empty in case of extra delay. In such case, make sure to invoke +// NextDeliveryTimeUs and schedule a task to call DequeueDeliverablePackets for +// the next estimated delivery of packets. +// +// std::vector delivered_packets = +// network.DequeueDeliverablePackets(/*receive_time_us=*/1000000); class NetworkBehaviorInterface { public: + // Enqueues a packet in the network and returns true if the action was + // successful, false otherwise (for example, because the network capacity has + // been saturated). If the return value is false, the packet should be + // considered as dropped and it will not be returned by future calls + // to DequeueDeliverablePackets. + // Packets enqueued will exit the network when DequeueDeliverablePackets is + // called and enough time has passed (see NextDeliveryTimeUs). virtual bool EnqueuePacket(PacketInFlightInfo packet_info) = 0; // Retrieves all packets that should be delivered by the given receive time. + // Not all the packets in the returned std::vector are actually delivered. 
+ // In order to know the state of each packet it is necessary to check the + // `receive_time_us` field of each packet. If that is set to + // PacketDeliveryInfo::kNotReceived then the packet is considered lost in the + // network. virtual std::vector DequeueDeliverablePackets( int64_t receive_time_us) = 0; // Returns time in microseconds when caller should call - // DequeueDeliverablePackets to get next set of packets to deliver. + // DequeueDeliverablePackets to get the next set of delivered packets. It is + // possible that no packet will be delivered by that time (e.g. in case of + // random extra delay), in such case this method should be called again to get + // the updated estimated delivery time. virtual absl::optional NextDeliveryTimeUs() const = 0; virtual ~NetworkBehaviorInterface() = default; }; @@ -81,10 +123,14 @@ class NetworkBehaviorInterface { // capacity introduced delay. class SimulatedNetworkInterface : public NetworkBehaviorInterface { public: - // Sets a new configuration. This won't affect packets already in the pipe. + // Sets a new configuration. virtual void SetConfig(const BuiltInNetworkBehaviorConfig& config) = 0; virtual void UpdateConfig( std::function config_modifier) = 0; + // Pauses the network until `until_us`. This affects both delivery (calling + // DequeueDeliverablePackets before `until_us` results in an empty std::vector + // of packets) and capacity (the network is paused, so packets are not + // flowing and they will restart flowing at `until_us`). 
virtual void PauseTransmissionUntil(int64_t until_us) = 0; }; diff --git a/api/test/simulcast_test_fixture.h b/api/test/simulcast_test_fixture.h index cd470703c3..c7130d2909 100644 --- a/api/test/simulcast_test_fixture.h +++ b/api/test/simulcast_test_fixture.h @@ -19,6 +19,7 @@ class SimulcastTestFixture { virtual ~SimulcastTestFixture() = default; virtual void TestKeyFrameRequestsOnAllStreams() = 0; + virtual void TestKeyFrameRequestsOnSpecificStreams() = 0; virtual void TestPaddingAllStreams() = 0; virtual void TestPaddingTwoStreams() = 0; virtual void TestPaddingTwoStreamsOneMaxedOut() = 0; diff --git a/api/test/track_id_stream_info_map.h b/api/test/track_id_stream_info_map.h index 0f8e43e20e..b016de57a9 100644 --- a/api/test/track_id_stream_info_map.h +++ b/api/test/track_id_stream_info_map.h @@ -11,6 +11,8 @@ #ifndef API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ #define API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ +#include + #include "absl/strings/string_view.h" namespace webrtc { @@ -20,19 +22,19 @@ namespace webrtc_pc_e2e { // are useful to associate stats reports track_ids to the remote stream info. class TrackIdStreamInfoMap { public: + struct StreamInfo { + std::string receiver_peer; + std::string stream_label; + std::string sync_group; + }; + virtual ~TrackIdStreamInfoMap() = default; // These methods must be called on the same thread where // StatsObserverInterface::OnStatsReports is invoked. - // Returns a reference to a stream label owned by the TrackIdStreamInfoMap. - // Precondition: `track_id` must be already mapped to stream label. - virtual absl::string_view GetStreamLabelFromTrackId( - absl::string_view track_id) const = 0; - - // Returns a reference to a sync group name owned by the TrackIdStreamInfoMap. - // Precondition: `track_id` must be already mapped to sync group. - virtual absl::string_view GetSyncGroupLabelFromTrackId( + // Precondition: `track_id` must be already mapped to stream info. 
+ virtual StreamInfo GetStreamInfoFromTrackId( absl::string_view track_id) const = 0; }; diff --git a/api/test/video_codec_tester.h b/api/test/video_codec_tester.h new file mode 100644 index 0000000000..0eaaa1b895 --- /dev/null +++ b/api/test/video_codec_tester.h @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_VIDEO_CODEC_TESTER_H_ +#define API_TEST_VIDEO_CODEC_TESTER_H_ + +#include + +#include "absl/functional/any_invocable.h" +#include "api/test/videocodec_test_stats.h" +#include "api/video/encoded_image.h" +#include "api/video/resolution.h" +#include "api/video/video_frame.h" + +namespace webrtc { +namespace test { + +// Interface for a video codec tester. The interface provides minimalistic set +// of data structures that enables implementation of decode-only, encode-only +// and encode-decode tests. +class VideoCodecTester { + public: + // Pacing settings for codec input. + struct PacingSettings { + enum PacingMode { + // Pacing is not used. Frames are sent to codec back-to-back. + kNoPacing, + // Pace with the rate equal to the target video frame rate. Pacing time is + // derived from RTP timestamp. + kRealTime, + // Pace with the explicitly provided rate. + kConstantRate, + }; + PacingMode mode = PacingMode::kNoPacing; + // Pacing rate for `kConstantRate` mode. + Frequency constant_rate = Frequency::Zero(); + }; + + struct DecoderSettings { + PacingSettings pacing; + }; + + struct EncoderSettings { + PacingSettings pacing; + }; + + virtual ~VideoCodecTester() = default; + + // Interface for a raw video frames source. 
+ class RawVideoSource { + public: + virtual ~RawVideoSource() = default; + + // Returns next frame. If no more frames to pull, returns `absl::nullopt`. + // For analysis and pacing purposes, frame must have RTP timestamp set. The + // timestamp must represent the target video frame rate and be unique. + virtual absl::optional PullFrame() = 0; + + // Returns early pulled frame with RTP timestamp equal to `timestamp_rtp`. + virtual VideoFrame GetFrame(uint32_t timestamp_rtp, + Resolution resolution) = 0; + }; + + // Interface for a coded video frames source. + class CodedVideoSource { + public: + virtual ~CodedVideoSource() = default; + + // Returns next frame. If no more frames to pull, returns `absl::nullopt`. + // For analysis and pacing purposes, frame must have RTP timestamp set. The + // timestamp must represent the target video frame rate and be unique. + virtual absl::optional PullFrame() = 0; + }; + + // Interface for a video encoder. + class Encoder { + public: + using EncodeCallback = + absl::AnyInvocable; + + virtual ~Encoder() = default; + + virtual void Encode(const VideoFrame& frame, EncodeCallback callback) = 0; + }; + + // Interface for a video decoder. + class Decoder { + public: + using DecodeCallback = + absl::AnyInvocable; + + virtual ~Decoder() = default; + + virtual void Decode(const EncodedImage& frame, DecodeCallback callback) = 0; + }; + + // Pulls coded video frames from `video_source` and passes them to `decoder`. + // Returns `VideoCodecTestStats` object that contains collected per-frame + // metrics. + virtual std::unique_ptr RunDecodeTest( + std::unique_ptr video_source, + std::unique_ptr decoder, + const DecoderSettings& decoder_settings) = 0; + + // Pulls raw video frames from `video_source` and passes them to `encoder`. + // Returns `VideoCodecTestStats` object that contains collected per-frame + // metrics. 
+ virtual std::unique_ptr RunEncodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + const EncoderSettings& encoder_settings) = 0; + + // Pulls raw video frames from `video_source`, passes them to `encoder` and + // then passes encoded frames to `decoder`. Returns `VideoCodecTestStats` + // object that contains collected per-frame metrics. + virtual std::unique_ptr RunEncodeDecodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + std::unique_ptr decoder, + const EncoderSettings& encoder_settings, + const DecoderSettings& decoder_settings) = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEO_CODEC_TESTER_H_ diff --git a/api/test/video_quality_analyzer_interface.h b/api/test/video_quality_analyzer_interface.h index dc58b04967..d35be8ca1a 100644 --- a/api/test/video_quality_analyzer_interface.h +++ b/api/test/video_quality_analyzer_interface.h @@ -62,6 +62,8 @@ class VideoQualityAnalyzerInterface // https://crbug.com/webrtc/11443: improve stats API to make available // there. uint32_t target_encode_bitrate = 0; + // Encoder quantizer value. + int qp = -1; }; // Contains extra statistic provided by video decoder. 
struct DecoderStats { diff --git a/api/test/videocodec_test_stats.h b/api/test/videocodec_test_stats.h index a05985a665..12c60638db 100644 --- a/api/test/videocodec_test_stats.h +++ b/api/test/videocodec_test_stats.h @@ -18,6 +18,9 @@ #include #include +#include "absl/types/optional.h" +#include "api/units/data_rate.h" +#include "api/units/frequency.h" #include "api/video/video_frame_type.h" namespace webrtc { @@ -135,11 +138,16 @@ class VideoCodecTestStats { virtual ~VideoCodecTestStats() = default; - virtual std::vector GetFrameStatistics() = 0; + virtual std::vector GetFrameStatistics() const = 0; virtual std::vector SliceAndCalcLayerVideoStatistic( size_t first_frame_num, size_t last_frame_num) = 0; + + virtual VideoStatistics CalcVideoStatistic(size_t first_frame, + size_t last_frame, + DataRate target_bitrate, + Frequency target_framerate) = 0; }; } // namespace test diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn index 3cc3559f30..86a7c8acf8 100644 --- a/api/transport/BUILD.gn +++ b/api/transport/BUILD.gn @@ -52,7 +52,7 @@ rtc_library("field_trial_based_config") { "field_trial_based_config.h", ] deps = [ - "../../api:field_trials_view", + "../../api:field_trials_registry", "../../system_wrappers:field_trial", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] diff --git a/api/transport/field_trial_based_config.cc b/api/transport/field_trial_based_config.cc index 4a3a179240..0cef30f054 100644 --- a/api/transport/field_trial_based_config.cc +++ b/api/transport/field_trial_based_config.cc @@ -12,7 +12,7 @@ #include "system_wrappers/include/field_trial.h" namespace webrtc { -std::string FieldTrialBasedConfig::Lookup(absl::string_view key) const { +std::string FieldTrialBasedConfig::GetValue(absl::string_view key) const { return webrtc::field_trial::FindFullName(std::string(key)); } } // namespace webrtc diff --git a/api/transport/field_trial_based_config.h b/api/transport/field_trial_based_config.h index f0063ff95e..d47140e579 100644 --- 
a/api/transport/field_trial_based_config.h +++ b/api/transport/field_trial_based_config.h @@ -13,13 +13,13 @@ #include #include "absl/strings/string_view.h" -#include "api/field_trials_view.h" +#include "api/field_trials_registry.h" namespace webrtc { // Implementation using the field trial API for the key value lookup. -class FieldTrialBasedConfig : public FieldTrialsView { - public: - std::string Lookup(absl::string_view key) const override; +class FieldTrialBasedConfig : public FieldTrialsRegistry { + private: + std::string GetValue(absl::string_view key) const override; }; } // namespace webrtc diff --git a/api/units/data_rate.h b/api/units/data_rate.h index 98572123c5..d813c61156 100644 --- a/api/units/data_rate.h +++ b/api/units/data_rate.h @@ -23,7 +23,7 @@ #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "rtc_base/checks.h" -#include "rtc_base/units/unit_base.h" +#include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { // DataRate is a class that represents a given data rate. This can be used to diff --git a/api/units/data_size.h b/api/units/data_size.h index 6817e24c26..9df6434fb9 100644 --- a/api/units/data_size.h +++ b/api/units/data_size.h @@ -18,7 +18,7 @@ #include #include -#include "rtc_base/units/unit_base.h" +#include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { // DataSize is a class representing a count of bytes.
diff --git a/api/units/frequency.h b/api/units/frequency.h index 8e9cc2b5f4..06081e4c0d 100644 --- a/api/units/frequency.h +++ b/api/units/frequency.h @@ -20,7 +20,7 @@ #include #include "api/units/time_delta.h" -#include "rtc_base/units/unit_base.h" +#include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { diff --git a/api/units/time_delta.h b/api/units/time_delta.h index d5951005e3..5981e32dce 100644 --- a/api/units/time_delta.h +++ b/api/units/time_delta.h @@ -19,7 +19,7 @@ #include #include -#include "rtc_base/units/unit_base.h" +#include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { diff --git a/api/units/timestamp.h b/api/units/timestamp.h index 1e9f9d1dc5..8aabe05cad 100644 --- a/api/units/timestamp.h +++ b/api/units/timestamp.h @@ -20,6 +20,7 @@ #include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { // Timestamp represents the time that has passed since some unspecified epoch. 
diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn index db9078d8bb..d65f6412b4 100644 --- a/api/video/BUILD.gn +++ b/api/video/BUILD.gn @@ -332,8 +332,11 @@ rtc_source_set("video_frame_metadata") { "video_frame_metadata.h", ] deps = [ + ":video_frame", + ":video_frame_type", + ":video_rtp_headers", "..:array_view", - "../../modules/rtp_rtcp:rtp_video_header", + "../../rtc_base/system:rtc_export", "../transport/rtp:dependency_descriptor", ] absl_deps = [ @@ -363,6 +366,7 @@ rtc_library("builtin_video_bitrate_allocator_factory") { } rtc_library("frame_buffer") { + visibility = [ "*" ] sources = [ "frame_buffer.cc", "frame_buffer.h", @@ -399,12 +403,10 @@ rtc_library("frame_buffer_unittest") { if (rtc_include_tests) { rtc_library("video_unittests") { testonly = true - sources = [ - "video_frame_metadata_unittest.cc", - "video_stream_decoder_create_unittest.cc", - ] + sources = [ "video_stream_decoder_create_unittest.cc" ] deps = [ ":video_frame_metadata", + ":video_frame_type", ":video_stream_decoder_create", "../../modules/rtp_rtcp:rtp_video_header", "../../test:test_support", diff --git a/api/video/video_frame_metadata.cc b/api/video/video_frame_metadata.cc index df82875eb9..842aeb0524 100644 --- a/api/video/video_frame_metadata.cc +++ b/api/video/video_frame_metadata.cc @@ -10,19 +10,118 @@ #include "api/video/video_frame_metadata.h" -#include "modules/rtp_rtcp/source/rtp_video_header.h" - namespace webrtc { -VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header) - : width_(header.width), height_(header.height) { - if (header.generic) { - frame_id_ = header.generic->frame_id; - spatial_index_ = header.generic->spatial_index; - temporal_index_ = header.generic->temporal_index; - frame_dependencies_ = header.generic->dependencies; - decode_target_indications_ = header.generic->decode_target_indications; - } +VideoFrameMetadata::VideoFrameMetadata() = default; + +VideoFrameType VideoFrameMetadata::GetFrameType() const { + return frame_type_; +} + 
+void VideoFrameMetadata::SetFrameType(VideoFrameType frame_type) { + frame_type_ = frame_type; +} + +uint16_t VideoFrameMetadata::GetWidth() const { + return width_; +} + +void VideoFrameMetadata::SetWidth(uint16_t width) { + width_ = width; +} + +uint16_t VideoFrameMetadata::GetHeight() const { + return height_; +} + +void VideoFrameMetadata::SetHeight(uint16_t height) { + height_ = height; +} + +VideoRotation VideoFrameMetadata::GetRotation() const { + return rotation_; +} + +void VideoFrameMetadata::SetRotation(VideoRotation rotation) { + rotation_ = rotation; +} + +VideoContentType VideoFrameMetadata::GetContentType() const { + return content_type_; +} + +void VideoFrameMetadata::SetContentType(VideoContentType content_type) { + content_type_ = content_type; +} + +absl::optional VideoFrameMetadata::GetFrameId() const { + return frame_id_; +} + +void VideoFrameMetadata::SetFrameId(absl::optional frame_id) { + frame_id_ = frame_id; +} + +int VideoFrameMetadata::GetSpatialIndex() const { + return spatial_index_; +} + +void VideoFrameMetadata::SetSpatialIndex(int spatial_index) { + spatial_index_ = spatial_index; +} + +int VideoFrameMetadata::GetTemporalIndex() const { + return temporal_index_; +} + +void VideoFrameMetadata::SetTemporalIndex(int temporal_index) { + temporal_index_ = temporal_index; +} + +rtc::ArrayView VideoFrameMetadata::GetFrameDependencies() const { + return frame_dependencies_; +} + +void VideoFrameMetadata::SetFrameDependencies( + rtc::ArrayView frame_dependencies) { + frame_dependencies_.assign(frame_dependencies.begin(), + frame_dependencies.end()); +} + +rtc::ArrayView +VideoFrameMetadata::GetDecodeTargetIndications() const { + return decode_target_indications_; +} + +void VideoFrameMetadata::SetDecodeTargetIndications( + rtc::ArrayView decode_target_indications) { + decode_target_indications_.assign(decode_target_indications.begin(), + decode_target_indications.end()); +} + +bool VideoFrameMetadata::GetIsLastFrameInPicture() const { + 
return is_last_frame_in_picture_; +} + +void VideoFrameMetadata::SetIsLastFrameInPicture( + bool is_last_frame_in_picture) { + is_last_frame_in_picture_ = is_last_frame_in_picture; +} + +uint8_t VideoFrameMetadata::GetSimulcastIdx() const { + return simulcast_idx_; +} + +void VideoFrameMetadata::SetSimulcastIdx(uint8_t simulcast_idx) { + simulcast_idx_ = simulcast_idx; +} + +VideoCodecType VideoFrameMetadata::GetCodec() const { + return codec_; +} + +void VideoFrameMetadata::SetCodec(VideoCodecType codec) { + codec_ = codec; } } // namespace webrtc diff --git a/api/video/video_frame_metadata.h b/api/video/video_frame_metadata.h index 2e9309841b..6e3f32fdbf 100644 --- a/api/video/video_frame_metadata.h +++ b/api/video/video_frame_metadata.h @@ -17,42 +17,80 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { -struct RTPVideoHeader; - // A subset of metadata from the RTP video header, exposed in insertable streams // API. 
-class VideoFrameMetadata { +class RTC_EXPORT VideoFrameMetadata { public: - explicit VideoFrameMetadata(const RTPVideoHeader& header); + VideoFrameMetadata(); VideoFrameMetadata(const VideoFrameMetadata&) = default; VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default; - uint16_t GetWidth() const { return width_; } - uint16_t GetHeight() const { return height_; } - absl::optional GetFrameId() const { return frame_id_; } - int GetSpatialIndex() const { return spatial_index_; } - int GetTemporalIndex() const { return temporal_index_; } + VideoFrameType GetFrameType() const; + void SetFrameType(VideoFrameType frame_type); - rtc::ArrayView GetFrameDependencies() const { - return frame_dependencies_; - } + uint16_t GetWidth() const; + void SetWidth(uint16_t width); + + uint16_t GetHeight() const; + void SetHeight(uint16_t height); + + VideoRotation GetRotation() const; + void SetRotation(VideoRotation rotation); + + VideoContentType GetContentType() const; + void SetContentType(VideoContentType content_type); + + absl::optional GetFrameId() const; + void SetFrameId(absl::optional frame_id); + + int GetSpatialIndex() const; + void SetSpatialIndex(int spatial_index); + + int GetTemporalIndex() const; + void SetTemporalIndex(int temporal_index); + + rtc::ArrayView GetFrameDependencies() const; + void SetFrameDependencies(rtc::ArrayView frame_dependencies); rtc::ArrayView GetDecodeTargetIndications() - const { - return decode_target_indications_; - } + const; + void SetDecodeTargetIndications( + rtc::ArrayView decode_target_indications); + + bool GetIsLastFrameInPicture() const; + void SetIsLastFrameInPicture(bool is_last_frame_in_picture); + + uint8_t GetSimulcastIdx() const; + void SetSimulcastIdx(uint8_t simulcast_idx); + + VideoCodecType GetCodec() const; + void SetCodec(VideoCodecType codec); private: - int16_t width_; - int16_t height_; + VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame; + int16_t width_ = 0; + int16_t height_ = 0; + 
VideoRotation rotation_ = VideoRotation::kVideoRotation_0; + VideoContentType content_type_ = VideoContentType::UNSPECIFIED; + + // Corresponding to GenericDescriptorInfo. absl::optional frame_id_; int spatial_index_ = 0; int temporal_index_ = 0; absl::InlinedVector frame_dependencies_; absl::InlinedVector decode_target_indications_; + + bool is_last_frame_in_picture_ = true; + uint8_t simulcast_idx_ = 0; + VideoCodecType codec_ = VideoCodecType::kVideoCodecGeneric; }; } // namespace webrtc diff --git a/api/video/video_frame_metadata_unittest.cc b/api/video/video_frame_metadata_unittest.cc deleted file mode 100644 index 7a808e1ea9..0000000000 --- a/api/video/video_frame_metadata_unittest.cc +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/video/video_frame_metadata.h" - -#include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { - -using ::testing::ElementsAre; -using ::testing::IsEmpty; - -TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) { - RTPVideoHeader video_header; - video_header.width = 1280u; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetWidth(), video_header.width); -} - -TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) { - RTPVideoHeader video_header; - video_header.height = 720u; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetHeight(), video_header.height); -} - -TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.frame_id = 10; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetFrameId().value(), 10); -} - -TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_EQ(metadata.GetFrameId(), absl::nullopt); -} - -TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.spatial_index = 2; - VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetSpatialIndex(), 2); -} - -TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_EQ(metadata.GetSpatialIndex(), 0); -} - -TEST(VideoFrameMetadata, GetTemporalIndexReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.temporal_index = 3; - 
VideoFrameMetadata metadata(video_header); - EXPECT_EQ(metadata.GetTemporalIndex(), 3); -} - -TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_EQ(metadata.GetTemporalIndex(), 0); -} - -TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.dependencies = {5, 6, 7}; - VideoFrameMetadata metadata(video_header); - EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7)); -} - -TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty()); -} - -TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) { - RTPVideoHeader video_header; - RTPVideoHeader::GenericDescriptorInfo& generic = - video_header.generic.emplace(); - generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; - VideoFrameMetadata metadata(video_header); - EXPECT_THAT(metadata.GetDecodeTargetIndications(), - ElementsAre(DecodeTargetIndication::kSwitch)); -} - -TEST(VideoFrameMetadata, - DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) { - RTPVideoHeader video_header; - VideoFrameMetadata metadata(video_header); - ASSERT_FALSE(video_header.generic); - EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty()); -} - -} // namespace -} // namespace webrtc diff --git a/api/video_codecs/BUILD.gn b/api/video_codecs/BUILD.gn index 173d49ade2..e8f9815a45 100644 --- a/api/video_codecs/BUILD.gn +++ b/api/video_codecs/BUILD.gn @@ -75,6 +75,7 @@ rtc_library("video_codecs_api") { "../../api:array_view", "../../modules/video_coding:codec_globals_headers", "../../rtc_base:checks", + 
"../../rtc_base:logging", "../../rtc_base:macromagic", "../../rtc_base:refcount", "../../rtc_base:stringutils", diff --git a/api/video_codecs/sdp_video_format.cc b/api/video_codecs/sdp_video_format.cc index 3543806b51..cb7e98a682 100644 --- a/api/video_codecs/sdp_video_format.cc +++ b/api/video_codecs/sdp_video_format.cc @@ -11,11 +11,14 @@ #include "api/video_codecs/sdp_video_format.h" #include "absl/strings/match.h" +#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/video_codecs/av1_profile.h" #include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/vp9_profile.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -133,4 +136,36 @@ bool operator==(const SdpVideoFormat& a, const SdpVideoFormat& b) { a.scalability_modes == b.scalability_modes; } +absl::optional FuzzyMatchSdpVideoFormat( + rtc::ArrayView supported_formats, + const SdpVideoFormat& format) { + absl::optional res; + int best_parameter_match = 0; + for (const auto& supported_format : supported_formats) { + if (absl::EqualsIgnoreCase(supported_format.name, format.name)) { + int matching_parameters = 0; + for (const auto& kv : supported_format.parameters) { + auto it = format.parameters.find(kv.first); + if (it != format.parameters.end() && it->second == kv.second) { + matching_parameters += 1; + } + } + + if (!res || matching_parameters > best_parameter_match) { + res = supported_format; + best_parameter_match = matching_parameters; + } + } + } + + if (!res) { + RTC_LOG(LS_INFO) << "Failed to match SdpVideoFormat " << format.ToString(); + } else if (*res != format) { + RTC_LOG(LS_INFO) << "Matched SdpVideoFormat " << format.ToString() + << " with " << res->ToString(); + } + + return res; +} + } // namespace webrtc diff --git a/api/video_codecs/sdp_video_format.h b/api/video_codecs/sdp_video_format.h index 850632eb9e..faaa66c241 100644 --- 
a/api/video_codecs/sdp_video_format.h +++ b/api/video_codecs/sdp_video_format.h @@ -15,6 +15,7 @@ #include #include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" #include "api/array_view.h" #include "api/video_codecs/scalability_mode.h" #include "rtc_base/system/rtc_export.h" @@ -61,6 +62,14 @@ struct RTC_EXPORT SdpVideoFormat { absl::InlinedVector scalability_modes; }; +// For not so good reasons sometimes additional parameters are added to an +// SdpVideoFormat, which makes instances that should compare equal to not match +// anymore. Until we stop misusing SdpVideoFormats provide this convenience +// function to perform fuzzy matching. +absl::optional FuzzyMatchSdpVideoFormat( + rtc::ArrayView supported_formats, + const SdpVideoFormat& format); + } // namespace webrtc #endif // API_VIDEO_CODECS_SDP_VIDEO_FORMAT_H_ diff --git a/api/video_codecs/video_encoder.h b/api/video_codecs/video_encoder.h index 30ec58e807..395a87e089 100644 --- a/api/video_codecs/video_encoder.h +++ b/api/video_codecs/video_encoder.h @@ -174,7 +174,7 @@ class RTC_EXPORT VideoEncoder { // For example: With I420, this value would be a multiple of 2. // Note that this field is unrelated to any horizontal or vertical stride // requirements the encoder has on the incoming video frame buffers. - int requested_resolution_alignment; + uint32_t requested_resolution_alignment; // Same as above but if true, each simulcast layer should also be divisible // by `requested_resolution_alignment`. 
diff --git a/audio/BUILD.gn b/audio/BUILD.gn index 33cacf856e..d2ba68459d 100644 --- a/audio/BUILD.gn +++ b/audio/BUILD.gn @@ -70,6 +70,7 @@ rtc_library("audio") { "../common_audio:common_audio_c", "../logging:rtc_event_audio", "../logging:rtc_stream_config", + "../media:rtc_media_base", "../modules/async_audio_processing", "../modules/audio_coding", "../modules/audio_coding:audio_coding_module_typedefs", @@ -233,6 +234,9 @@ if (rtc_include_tests) { "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metrics_exporter", "../api/test/metrics:stdout_metrics_exporter", + "../api/test/pclf:media_configuration", + "../api/test/pclf:media_quality_test_params", + "../api/test/pclf:peer_configurer", "../call:simulated_network", "../common_audio", "../system_wrappers", @@ -287,7 +291,7 @@ if (rtc_include_tests) { data += [ "${root_out_dir}/low_bandwidth_audio_test" ] } - if (is_linux || is_chromeos || is_android) { + if (is_linux || is_chromeos || is_android || is_fuchsia) { data += [ "../tools_webrtc/audio_quality/linux/PolqaOem64", "../tools_webrtc/audio_quality/linux/pesq", diff --git a/audio/DEPS b/audio/DEPS index 9b89dc39ab..7a0c7e7ce6 100644 --- a/audio/DEPS +++ b/audio/DEPS @@ -2,6 +2,7 @@ include_rules = [ "+call", "+common_audio", "+logging/rtc_event_log", + "+media/base", "+modules/async_audio_processing", "+modules/audio_coding", "+modules/audio_device", diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index bc4aeb9ba1..0127a2017a 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -31,6 +31,7 @@ #include "common_audio/vad/include/vad.h" #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" +#include "media/base/media_channel.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h" #include "modules/audio_processing/include/audio_processing.h" 
@@ -151,8 +152,6 @@ AudioSendStream::AudioSendStream( field_trials_.IsEnabled("WebRTC-Audio-ABWENoTWCC")), enable_audio_alr_probing_( !field_trials_.IsDisabled("WebRTC-Audio-AlrProbing")), - send_side_bwe_with_overhead_( - !field_trials_.IsDisabled("WebRTC-SendSideBwe-WithOverhead")), allocation_settings_(field_trials_), config_(Config(/*send_transport=*/nullptr)), audio_state_(audio_state), @@ -174,7 +173,7 @@ AudioSendStream::AudioSendStream( RTC_DCHECK(rtp_rtcp_module_); RTC_DCHECK_RUN_ON(&worker_thread_checker_); - ConfigureStream(config, true); + ConfigureStream(config, true, nullptr); UpdateCachedTargetAudioBitrateConstraints(); } @@ -195,9 +194,10 @@ const webrtc::AudioSendStream::Config& AudioSendStream::GetConfig() const { } void AudioSendStream::Reconfigure( - const webrtc::AudioSendStream::Config& new_config) { + const webrtc::AudioSendStream::Config& new_config, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - ConfigureStream(new_config, false); + ConfigureStream(new_config, false, std::move(callback)); } AudioSendStream::ExtensionIds AudioSendStream::FindExtensionIds( @@ -229,7 +229,8 @@ int AudioSendStream::TransportSeqNumId(const AudioSendStream::Config& config) { void AudioSendStream::ConfigureStream( const webrtc::AudioSendStream::Config& new_config, - bool first_time) { + bool first_time, + SetParametersCallback callback) { RTC_LOG(LS_INFO) << "AudioSendStream::ConfigureStream: " << new_config.ToString(); UpdateEventLogStreamConfig(event_log_, new_config, @@ -327,6 +328,10 @@ void AudioSendStream::ConfigureStream( if (!ReconfigureSendCodec(new_config)) { RTC_LOG(LS_ERROR) << "Failed to set up send codec state."; + + webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR, + "Failed to set up send codec state.")); } // Set currently known overhead (used in ANA, opus only). 
@@ -352,6 +357,8 @@ void AudioSendStream::ConfigureStream( if (!first_time) { UpdateCachedTargetAudioBitrateConstraints(); } + + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } void AudioSendStream::Start() { @@ -363,8 +370,7 @@ void AudioSendStream::Start() { config_.max_bitrate_bps != -1 && (allocate_audio_without_feedback_ || TransportSeqNumId(config_) != 0)) { rtp_transport_->AccountForAudioPacketsInPacedSender(true); - if (send_side_bwe_with_overhead_) - rtp_transport_->IncludeOverheadInPacedSender(); + rtp_transport_->IncludeOverheadInPacedSender(); rtp_rtcp_module_->SetAsPartOfAllocation(true); ConfigureBitrateObserver(); } else { @@ -441,6 +447,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( call_stats.header_and_padding_bytes_sent; stats.retransmitted_bytes_sent = call_stats.retransmitted_bytes_sent; stats.packets_sent = call_stats.packetsSent; + stats.total_packet_send_delay = call_stats.total_packet_send_delay; stats.retransmitted_packets_sent = call_stats.retransmitted_packets_sent; // RTT isn't known until a RTCP report is received. Until then, VoiceEngine // returns 0 to indicate an error value. @@ -801,8 +808,7 @@ void AudioSendStream::ReconfigureBitrateObserver( if (!new_config.has_dscp && new_config.min_bitrate_bps != -1 && new_config.max_bitrate_bps != -1 && TransportSeqNumId(new_config) != 0) { rtp_transport_->AccountForAudioPacketsInPacedSender(true); - if (send_side_bwe_with_overhead_) - rtp_transport_->IncludeOverheadInPacedSender(); + rtp_transport_->IncludeOverheadInPacedSender(); // We may get a callback immediately as the observer is registered, so // make sure the bitrate limits in config_ are up-to-date. 
config_.min_bitrate_bps = new_config.min_bitrate_bps; @@ -825,22 +831,21 @@ void AudioSendStream::ConfigureBitrateObserver() { RTC_DCHECK(constraints.has_value()); DataRate priority_bitrate = allocation_settings_.priority_bitrate; - if (send_side_bwe_with_overhead_) { - if (use_legacy_overhead_calculation_) { - // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) - constexpr int kOverheadPerPacket = 20 + 8 + 10 + 12; - const TimeDelta kMinPacketDuration = TimeDelta::Millis(20); - DataRate max_overhead = - DataSize::Bytes(kOverheadPerPacket) / kMinPacketDuration; - priority_bitrate += max_overhead; - } else { - RTC_DCHECK(frame_length_range_); - const DataSize overhead_per_packet = - DataSize::Bytes(total_packet_overhead_bytes_); - DataRate min_overhead = overhead_per_packet / frame_length_range_->second; - priority_bitrate += min_overhead; - } + if (use_legacy_overhead_calculation_) { + // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) + constexpr int kOverheadPerPacket = 20 + 8 + 10 + 12; + const TimeDelta kMinPacketDuration = TimeDelta::Millis(20); + DataRate max_overhead = + DataSize::Bytes(kOverheadPerPacket) / kMinPacketDuration; + priority_bitrate += max_overhead; + } else { + RTC_DCHECK(frame_length_range_); + const DataSize overhead_per_packet = + DataSize::Bytes(total_packet_overhead_bytes_); + DataRate min_overhead = overhead_per_packet / frame_length_range_->second; + priority_bitrate += min_overhead; } + if (allocation_settings_.priority_bitrate_raw) priority_bitrate = *allocation_settings_.priority_bitrate_raw; @@ -893,25 +898,23 @@ AudioSendStream::GetMinMaxBitrateConstraints() const { << "TargetAudioBitrateConstraints::min"; return absl::nullopt; } - if (send_side_bwe_with_overhead_) { - if (use_legacy_overhead_calculation_) { - // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) - const DataSize kOverheadPerPacket = DataSize::Bytes(20 + 8 + 10 + 12); - const TimeDelta kMaxFrameLength = - TimeDelta::Millis(60); 
// Based on Opus spec - const DataRate kMinOverhead = kOverheadPerPacket / kMaxFrameLength; - constraints.min += kMinOverhead; - constraints.max += kMinOverhead; - } else { - if (!frame_length_range_.has_value()) { - RTC_LOG(LS_WARNING) << "frame_length_range_ is not set"; - return absl::nullopt; - } - const DataSize kOverheadPerPacket = - DataSize::Bytes(total_packet_overhead_bytes_); - constraints.min += kOverheadPerPacket / frame_length_range_->second; - constraints.max += kOverheadPerPacket / frame_length_range_->first; + if (use_legacy_overhead_calculation_) { + // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) + const DataSize kOverheadPerPacket = DataSize::Bytes(20 + 8 + 10 + 12); + const TimeDelta kMaxFrameLength = + TimeDelta::Millis(60); // Based on Opus spec + const DataRate kMinOverhead = kOverheadPerPacket / kMaxFrameLength; + constraints.min += kMinOverhead; + constraints.max += kMinOverhead; + } else { + if (!frame_length_range_.has_value()) { + RTC_LOG(LS_WARNING) << "frame_length_range_ is not set"; + return absl::nullopt; } + const DataSize kOverheadPerPacket = + DataSize::Bytes(total_packet_overhead_bytes_); + constraints.min += kOverheadPerPacket / frame_length_range_->second; + constraints.max += kOverheadPerPacket / frame_length_range_->first; } return constraints; } diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index 656cc2d44c..09ea3a08d2 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -88,7 +88,8 @@ class AudioSendStream final : public webrtc::AudioSendStream, // webrtc::AudioSendStream implementation. 
const webrtc::AudioSendStream::Config& GetConfig() const override; - void Reconfigure(const webrtc::AudioSendStream::Config& config) override; + void Reconfigure(const webrtc::AudioSendStream::Config& config, + SetParametersCallback callback) override; void Start() override; void Stop() override; void SendAudioData(std::unique_ptr audio_frame) override; @@ -138,7 +139,9 @@ class AudioSendStream final : public webrtc::AudioSendStream, void StoreEncoderProperties(int sample_rate_hz, size_t num_channels) RTC_RUN_ON(worker_thread_checker_); - void ConfigureStream(const Config& new_config, bool first_time) + void ConfigureStream(const Config& new_config, + bool first_time, + SetParametersCallback callback) RTC_RUN_ON(worker_thread_checker_); bool SetupSendCodec(const Config& new_config) RTC_RUN_ON(worker_thread_checker_); @@ -184,7 +187,6 @@ class AudioSendStream final : public webrtc::AudioSendStream, const bool allocate_audio_without_feedback_; const bool force_no_audio_feedback_ = allocate_audio_without_feedback_; const bool enable_audio_alr_probing_; - const bool send_side_bwe_with_overhead_; const AudioAllocationConfig allocation_settings_; webrtc::AudioSendStream::Config config_ diff --git a/audio/audio_send_stream_unittest.cc b/audio/audio_send_stream_unittest.cc index cbf24b5e72..a81b40cbe7 100644 --- a/audio/audio_send_stream_unittest.cc +++ b/audio/audio_send_stream_unittest.cc @@ -550,7 +550,7 @@ TEST(AudioSendStreamTest, SendCodecAppliesAudioNetworkAdaptor) { auto stream_config = helper.config(); stream_config.audio_network_adaptor_config = kAnaReconfigString; - send_stream->Reconfigure(stream_config); + send_stream->Reconfigure(stream_config, nullptr); } } @@ -590,7 +590,7 @@ TEST(AudioSendStreamTest, AudioNetworkAdaptorReceivesOverhead) { auto stream_config = helper.config(); stream_config.audio_network_adaptor_config = kAnaConfigString; - send_stream->Reconfigure(stream_config); + send_stream->Reconfigure(stream_config, nullptr); } } @@ -791,7 +791,7 @@ 
TEST(AudioSendStreamTest, DontRecreateEncoder) { AudioSendStream::Config::SendCodecSpec(9, kG722Format); helper.config().send_codec_spec->cng_payload_type = 105; auto send_stream = helper.CreateAudioSendStream(); - send_stream->Reconfigure(helper.config()); + send_stream->Reconfigure(helper.config(), nullptr); } } @@ -816,7 +816,7 @@ TEST(AudioSendStreamTest, ReconfigureTransportCcResetsFirst) { .Times(1); } - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); } } @@ -928,11 +928,11 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { new_config.frame_encryptor = mock_frame_encryptor_0; EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) .Times(1); - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); // Not updating the frame encryptor shouldn't force it to reconfigure. EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(_)).Times(0); - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); // Updating frame encryptor to a new object should force a call to the // proxy. 
@@ -942,7 +942,7 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { new_config.crypto_options.sframe.require_frame_encryption = true; EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) .Times(1); - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config, nullptr); } } } // namespace test diff --git a/audio/channel_receive.h b/audio/channel_receive.h index c3eca29006..b47a4b5b97 100644 --- a/audio/channel_receive.h +++ b/audio/channel_receive.h @@ -51,7 +51,7 @@ class RtpPacketReceived; class RtpRtcp; struct CallReceiveStatistics { - unsigned int cumulativeLost; + int cumulativeLost; unsigned int jitterSamples; int64_t payload_bytes_rcvd = 0; int64_t header_and_padding_bytes_rcvd = 0; diff --git a/audio/channel_send.cc b/audio/channel_send.cc index 73b3851fdb..fdc11a83fb 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -262,6 +262,11 @@ class RtpPacketSenderProxy : public RtpPacketSender { rtp_packet_pacer_->EnqueuePackets(std::move(packets)); } + void RemovePacketsForSsrc(uint32_t ssrc) override { + MutexLock lock(&mutex_); + rtp_packet_pacer_->RemovePacketsForSsrc(ssrc); + } + private: SequenceChecker thread_checker_; Mutex mutex_; @@ -565,6 +570,7 @@ void ChannelSend::StopSend() { RTC_DCHECK(packet_router_); packet_router_->RemoveSendRtpModule(rtp_rtcp_.get()); + rtp_packet_pacer_proxy_->RemovePacketsForSsrc(rtp_rtcp_->SSRC()); } void ChannelSend::SetEncoder(int payload_type, @@ -779,6 +785,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { stats.retransmitted_bytes_sent = rtp_stats.retransmitted.payload_bytes; stats.packetsSent = rtp_stats.transmitted.packets + rtx_stats.transmitted.packets; + stats.total_packet_send_delay = rtp_stats.transmitted.total_packet_delay; stats.retransmitted_packets_sent = rtp_stats.retransmitted.packets; stats.report_block_datas = rtp_rtcp_->GetLatestReportBlockData(); diff --git a/audio/channel_send.h b/audio/channel_send.h index a555b89171..cf9a273f70 
100644 --- a/audio/channel_send.h +++ b/audio/channel_send.h @@ -39,6 +39,8 @@ struct CallSendStatistics { // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedbytessent uint64_t retransmitted_bytes_sent; int packetsSent; + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + TimeDelta total_packet_send_delay = TimeDelta::Zero(); // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedpacketssent uint64_t retransmitted_packets_sent; // A snapshot of Report Blocks with additional data of interest to statistics. diff --git a/audio/test/audio_stats_test.cc b/audio/test/audio_stats_test.cc index febcb066fd..c637bff94e 100644 --- a/audio/test/audio_stats_test.cc +++ b/audio/test/audio_stats_test.cc @@ -68,7 +68,7 @@ class NoLossTest : public AudioEndToEndTest { receive_stream()->GetStats(/*get_and_clear_legacy_stats=*/true); EXPECT_PRED2(IsNear, kBytesSent, recv_stats.payload_bytes_rcvd); EXPECT_PRED2(IsNear, kPacketsSent, recv_stats.packets_rcvd); - EXPECT_EQ(0u, recv_stats.packets_lost); + EXPECT_EQ(0, recv_stats.packets_lost); EXPECT_EQ("opus", send_stats.codec_name); // recv_stats.jitter_ms // recv_stats.jitter_buffer_ms diff --git a/audio/test/pc_low_bandwidth_audio_test.cc b/audio/test/pc_low_bandwidth_audio_test.cc index 4fab15b9dd..8b733d578d 100644 --- a/audio/test/pc_low_bandwidth_audio_test.cc +++ b/audio/test/pc_low_bandwidth_audio_test.cc @@ -20,6 +20,9 @@ #include "api/test/metrics/metrics_exporter.h" #include "api/test/metrics/stdout_metrics_exporter.h" #include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" @@ -35,11 +38,9 @@ ABSL_DECLARE_FLAG(bool, quick); namespace webrtc { namespace test { 
-using PeerConfigurer = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::PeerConfigurer; -using RunParams = webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::RunParams; -using AudioConfig = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; +using ::webrtc::webrtc_pc_e2e::AudioConfig; +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; +using ::webrtc::webrtc_pc_e2e::RunParams; namespace { @@ -67,10 +68,14 @@ CreateTestFixture(absl::string_view test_case_name, std::string(test_case_name), time_controller, /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr); - fixture->AddPeer(network_links.first->network_dependencies(), - alice_configurer); - fixture->AddPeer(network_links.second->network_dependencies(), - bob_configurer); + auto alice = std::make_unique( + network_links.first->network_dependencies()); + auto bob = std::make_unique( + network_links.second->network_dependencies()); + alice_configurer(alice.get()); + bob_configurer(bob.get()); + fixture->AddPeer(std::move(alice)); + fixture->AddPeer(std::move(bob)); fixture->AddQualityMetricsReporter( std::make_unique( network_links.first, network_links.second, diff --git a/build_overrides/partition_alloc.gni b/build_overrides/partition_alloc.gni index 4173928515..044036879a 100644 --- a/build_overrides/partition_alloc.gni +++ b/build_overrides/partition_alloc.gni @@ -8,10 +8,5 @@ # Use default values for PartitionAlloc as standalone library from # base/allocator/partition_allocator/build_overrides/partition_alloc.gni -use_partition_alloc_as_malloc_default = false -use_allocator_shim_default = false -enable_backup_ref_ptr_support_default = false -enable_mte_checked_ptr_support_default = false -put_ref_count_in_previous_slot_default = false -enable_backup_ref_ptr_slow_checks_default = false -enable_dangling_raw_ptr_checks_default = false +import( + "//base/allocator/partition_allocator/build_overrides/partition_alloc.gni") diff --git a/call/BUILD.gn b/call/BUILD.gn index 
fda5f706bf..97bfdd3c4b 100644 --- a/call/BUILD.gn +++ b/call/BUILD.gn @@ -48,6 +48,7 @@ rtc_library("call_interfaces") { "../api:rtc_error", "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:transport_api", "../api/adaptation:resource_adaptation_api", @@ -76,6 +77,7 @@ rtc_library("call_interfaces") { "../rtc_base/network:sent_packet", ] absl_deps = [ + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/functional:bind_front", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -373,6 +375,7 @@ rtc_library("video_stream_api") { "../api:frame_transformer_interface", "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:transport_api", "../api/adaptation:resource_adaptation_api", @@ -382,6 +385,7 @@ rtc_library("video_stream_api") { "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../common_video", "../common_video:frame_counts", @@ -390,7 +394,10 @@ rtc_library("video_stream_api") { "../rtc_base:stringutils", "../video/config:encoder_config", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + absl_deps = [ + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/types:optional", + ] } rtc_library("simulated_network") { @@ -652,11 +659,16 @@ if (rtc_include_tests) { rtc_library("fake_network_pipe_unittests") { testonly = true - sources = [ "fake_network_pipe_unittest.cc" ] + sources = [ + "fake_network_pipe_unittest.cc", + "simulated_network_unittest.cc", + ] deps = [ ":fake_network", ":simulated_network", + "../api:simulated_network_api", "../api/units:data_rate", + "../api/units:time_delta", "../system_wrappers", "../test:test_support", "//testing/gtest", diff --git 
a/call/adaptation/test/fake_frame_rate_provider.h b/call/adaptation/test/fake_frame_rate_provider.h index 61cbd19191..b8815f592a 100644 --- a/call/adaptation/test/fake_frame_rate_provider.h +++ b/call/adaptation/test/fake_frame_rate_provider.h @@ -29,7 +29,7 @@ class MockVideoStreamEncoderObserver : public VideoStreamEncoderObserver { (override)); MOCK_METHOD(void, OnEncoderImplementationChanged, - (const std::string&), + (EncoderImplementation), (override)); MOCK_METHOD(void, OnFrameDropped, (DropReason), (override)); MOCK_METHOD(void, diff --git a/call/audio_receive_stream.h b/call/audio_receive_stream.h index 9569d7d0ba..1243a4a672 100644 --- a/call/audio_receive_stream.h +++ b/call/audio_receive_stream.h @@ -39,7 +39,7 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { uint32_t packets_rcvd = 0; uint64_t fec_packets_received = 0; uint64_t fec_packets_discarded = 0; - uint32_t packets_lost = 0; + int32_t packets_lost = 0; uint64_t packets_discarded = 0; uint32_t nacks_sent = 0; std::string codec_name; diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index f665c7ac9d..5f1e1f3245 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -25,6 +25,7 @@ #include "api/crypto/frame_encryptor_interface.h" #include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "call/audio_sender.h" #include "call/rtp_config.h" @@ -46,6 +47,8 @@ class AudioSendStream : public AudioSender { // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedbytessent uint64_t retransmitted_bytes_sent = 0; int32_t packets_sent = 0; + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + TimeDelta total_packet_send_delay = TimeDelta::Zero(); // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedpacketssent uint64_t retransmitted_packets_sent = 0; int32_t 
packets_lost = -1; @@ -171,7 +174,8 @@ class AudioSendStream : public AudioSender { virtual const webrtc::AudioSendStream::Config& GetConfig() const = 0; // Reconfigure the stream according to the Configuration. - virtual void Reconfigure(const Config& config) = 0; + virtual void Reconfigure(const Config& config, + SetParametersCallback callback) = 0; // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. diff --git a/call/call.cc b/call/call.cc index ae796cf6a0..fc9777c2f1 100644 --- a/call/call.cc +++ b/call/call.cc @@ -1037,6 +1037,8 @@ webrtc::VideoReceiveStreamInterface* Call::CreateVideoReceiveStream( // and `video_receiver_controller_` out of VideoReceiveStream2 construction // and set it up asynchronously on the network thread (the registration and // `video_receiver_controller_` need to live on the network thread). + // TODO(crbug.com/1381982): Re-enable decode synchronizer once the Chromium + // API has adapted to the new Metronome interface. VideoReceiveStream2* receive_stream = new VideoReceiveStream2( task_queue_factory_, this, num_cpu_cores_, transport_send_->packet_router(), std::move(configuration), diff --git a/call/call_config.cc b/call/call_config.cc index 23b60ce436..93f6b1aec4 100644 --- a/call/call_config.cc +++ b/call/call_config.cc @@ -31,6 +31,7 @@ RtpTransportConfig CallConfig::ExtractTransportConfig() const { network_state_predictor_factory; transportConfig.task_queue_factory = task_queue_factory; transportConfig.trials = trials; + transportConfig.pacer_burst_interval = pacer_burst_interval; return transportConfig; } diff --git a/call/call_config.h b/call/call_config.h index 3072fa452f..6df4ab7ed4 100644 --- a/call/call_config.h +++ b/call/call_config.h @@ -78,6 +78,9 @@ struct CallConfig { rtp_transport_controller_send_factory = nullptr; Metronome* metronome = nullptr; + + // The burst interval of the pacer, see TaskQueuePacedSender constructor. 
+ absl::optional pacer_burst_interval; }; } // namespace webrtc diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc index 9379dce833..d59b70418f 100644 --- a/call/call_perf_tests.cc +++ b/call/call_perf_tests.cc @@ -267,8 +267,11 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, AudioSendStream::Config audio_send_config(audio_send_transport.get()); audio_send_config.rtp.ssrc = kAudioSendSsrc; + // TODO(bugs.webrtc.org/14683): Let the tests fail with invalid config. audio_send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec( - kAudioSendPayloadType, {"ISAC", 16000, 1}); + kAudioSendPayloadType, {"OPUS", 48000, 2}); + audio_send_config.min_bitrate_bps = 6000; + audio_send_config.max_bitrate_bps = 510000; audio_send_config.encoder_factory = CreateBuiltinAudioEncoderFactory(); audio_send_stream = sender_call_->CreateAudioSendStream(audio_send_config); @@ -290,7 +293,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, audio_recv_config.sync_group = kSyncGroup; audio_recv_config.decoder_factory = audio_decoder_factory_; audio_recv_config.decoder_map = { - {kAudioSendPayloadType, {"ISAC", 16000, 1}}}; + {kAudioSendPayloadType, {"OPUS", 48000, 2}}}; if (create_first == CreateOrder::kAudioFirst) { audio_receive_stream = @@ -775,13 +778,9 @@ TEST_F(CallPerfTest, Bitrate_Kbps_NoPadWithoutMinTransmitBitrate) { #endif TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { static const uint32_t kInitialBitrateKbps = 400; + static const uint32_t kInitialBitrateOverheadKpbs = 6; static const uint32_t kReconfigureThresholdKbps = 600; - // We get lower bitrate than expected by this test if the following field - // trial is enabled. 
- test::ScopedKeyValueConfig field_trials( - field_trials_, "WebRTC-SendSideBwe-WithOverhead/Disabled/"); - class VideoStreamFactory : public VideoEncoderConfig::VideoStreamFactoryInterface { public: @@ -821,9 +820,10 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { // First time initialization. Frame size is known. // `expected_bitrate` is affected by bandwidth estimation before the // first frame arrives to the encoder. - uint32_t expected_bitrate = last_set_bitrate_kbps_ > 0 - ? last_set_bitrate_kbps_ - : kInitialBitrateKbps; + uint32_t expected_bitrate = + last_set_bitrate_kbps_ > 0 + ? last_set_bitrate_kbps_ + : kInitialBitrateKbps - kInitialBitrateOverheadKpbs; EXPECT_EQ(expected_bitrate, config->startBitrate) << "Encoder not initialized at expected bitrate."; EXPECT_EQ(kDefaultWidth, config->width); diff --git a/call/degraded_call.cc b/call/degraded_call.cc index 0090d3a081..c59a63ba69 100644 --- a/call/degraded_call.cc +++ b/call/degraded_call.cc @@ -414,6 +414,11 @@ PacketReceiver::DeliveryStatus DegradedCall::DeliverPacket( return status; } +void DegradedCall::SetClientBitratePreferences( + const webrtc::BitrateSettings& preferences) { + call_->SetClientBitratePreferences(preferences); +} + void DegradedCall::UpdateSendNetworkConfig() { send_config_index_ = (send_config_index_ + 1) % send_configs_.size(); send_simulated_network_->SetConfig(send_configs_[send_config_index_]); diff --git a/call/degraded_call.h b/call/degraded_call.h index dcdd4806e1..5906e557f1 100644 --- a/call/degraded_call.h +++ b/call/degraded_call.h @@ -191,7 +191,7 @@ class DegradedCall : public Call, private PacketReceiver { }; void SetClientBitratePreferences( - const webrtc::BitrateSettings& preferences) override {} + const webrtc::BitrateSettings& preferences) override; void UpdateSendNetworkConfig(); void UpdateReceiveNetworkConfig(); diff --git a/call/fake_network_pipe_unittest.cc b/call/fake_network_pipe_unittest.cc index b9c69c9b74..60c26e335b 100644 
--- a/call/fake_network_pipe_unittest.cc +++ b/call/fake_network_pipe_unittest.cc @@ -274,7 +274,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) { std::unique_ptr pipe( new FakeNetworkPipe(&fake_clock_, std::move(network), &receiver)); - // Add 20 packets of 1000 bytes, = 80 kb. + // Add 20 packets of 1000 bytes, = 160 kb. const int kNumPackets = 20; const int kPacketSize = 1000; SendPackets(pipe.get(), kNumPackets, kPacketSize); diff --git a/call/rtp_transport_config.h b/call/rtp_transport_config.h index f2030b3672..6c94f7d911 100644 --- a/call/rtp_transport_config.h +++ b/call/rtp_transport_config.h @@ -44,6 +44,9 @@ struct RtpTransportConfig { // Key-value mapping of internal configurations to apply, // e.g. field trials. const FieldTrialsView* trials = nullptr; + + // The burst interval of the pacer, see TaskQueuePacedSender constructor. + absl::optional pacer_burst_interval; }; } // namespace webrtc diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc index 3ecec98b80..940dff7894 100644 --- a/call/rtp_transport_controller_send.cc +++ b/call/rtp_transport_controller_send.cc @@ -65,10 +65,6 @@ bool IsEnabled(const FieldTrialsView& trials, absl::string_view key) { return absl::StartsWith(trials.Lookup(key), "Enabled"); } -bool IsDisabled(const FieldTrialsView& trials, absl::string_view key) { - return absl::StartsWith(trials.Lookup(key), "Disabled"); -} - bool IsRelayed(const rtc::NetworkRoute& route) { return route.local.uses_turn() || route.remote.uses_turn(); } @@ -84,53 +80,53 @@ RtpTransportControllerSend::PacerSettings::PacerSettings( RtpTransportControllerSend::RtpTransportControllerSend( Clock* clock, - webrtc::RtcEventLog* event_log, - NetworkStatePredictorFactoryInterface* predictor_factory, - NetworkControllerFactoryInterface* controller_factory, - const BitrateConstraints& bitrate_config, - TaskQueueFactory* task_queue_factory, - const FieldTrialsView& trials) + const RtpTransportConfig& 
config) : clock_(clock), - event_log_(event_log), - task_queue_factory_(task_queue_factory), - bitrate_configurator_(bitrate_config), + event_log_(config.event_log), + task_queue_factory_(config.task_queue_factory), + bitrate_configurator_(config.bitrate_config), pacer_started_(false), - pacer_settings_(trials), + pacer_settings_(*config.trials), pacer_(clock, &packet_router_, - trials, - task_queue_factory, + *config.trials, + config.task_queue_factory, pacer_settings_.holdback_window.Get(), - pacer_settings_.holdback_packets.Get()), + pacer_settings_.holdback_packets.Get(), + config.pacer_burst_interval), observer_(nullptr), - controller_factory_override_(controller_factory), + controller_factory_override_(config.network_controller_factory), controller_factory_fallback_( - std::make_unique(predictor_factory)), + std::make_unique( + config.network_state_predictor_factory)), process_interval_(controller_factory_fallback_->GetProcessInterval()), last_report_block_time_(Timestamp::Millis(clock_->TimeInMilliseconds())), reset_feedback_on_route_change_( - !IsEnabled(trials, "WebRTC-Bwe-NoFeedbackReset")), - send_side_bwe_with_overhead_( - !IsDisabled(trials, "WebRTC-SendSideBwe-WithOverhead")), + !IsEnabled(*config.trials, "WebRTC-Bwe-NoFeedbackReset")), add_pacing_to_cwin_( - IsEnabled(trials, "WebRTC-AddPacingToCongestionWindowPushback")), + IsEnabled(*config.trials, + "WebRTC-AddPacingToCongestionWindowPushback")), relay_bandwidth_cap_("relay_cap", DataRate::PlusInfinity()), transport_overhead_bytes_per_packet_(0), network_available_(false), congestion_window_size_(DataSize::PlusInfinity()), is_congested_(false), retransmission_rate_limiter_(clock, kRetransmitWindowSizeMs), - task_queue_(trials, "rtp_send_controller", task_queue_factory), - field_trials_(trials) { + task_queue_(*config.trials, + "rtp_send_controller", + config.task_queue_factory), + field_trials_(*config.trials) { ParseFieldTrial({&relay_bandwidth_cap_}, - 
trials.Lookup("WebRTC-Bwe-NetworkRouteConstraints")); - initial_config_.constraints = ConvertConstraints(bitrate_config, clock_); - initial_config_.event_log = event_log; - initial_config_.key_value_config = &trials; - RTC_DCHECK(bitrate_config.start_bitrate_bps > 0); + config.trials->Lookup("WebRTC-Bwe-NetworkRouteConstraints")); + initial_config_.constraints = + ConvertConstraints(config.bitrate_config, clock_); + initial_config_.event_log = config.event_log; + initial_config_.key_value_config = config.trials; + RTC_DCHECK(config.bitrate_config.start_bitrate_bps > 0); - pacer_.SetPacingRates(DataRate::BitsPerSec(bitrate_config.start_bitrate_bps), - DataRate::Zero()); + pacer_.SetPacingRates( + DataRate::BitsPerSec(config.bitrate_config.start_bitrate_bps), + DataRate::Zero()); } RtpTransportControllerSend::~RtpTransportControllerSend() { @@ -552,9 +548,7 @@ void RtpTransportControllerSend::OnAddPacket( RTC_DCHECK_RUN_ON(&task_queue_); feedback_demuxer_.AddPacket(packet_info); transport_feedback_adapter_.AddPacket( - packet_info, - send_side_bwe_with_overhead_ ? 
transport_overhead_bytes_per_packet_ : 0, - creation_time); + packet_info, transport_overhead_bytes_per_packet_, creation_time); }); } diff --git a/call/rtp_transport_controller_send.h b/call/rtp_transport_controller_send.h index 88f5b2bae4..51bda73445 100644 --- a/call/rtp_transport_controller_send.h +++ b/call/rtp_transport_controller_send.h @@ -25,6 +25,7 @@ #include "api/transport/network_control.h" #include "api/units/data_rate.h" #include "call/rtp_bitrate_configurator.h" +#include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender.h" #include "modules/congestion_controller/rtp/control_handler.h" @@ -50,14 +51,7 @@ class RtpTransportControllerSend final public TransportFeedbackObserver, public NetworkStateEstimateObserver { public: - RtpTransportControllerSend( - Clock* clock, - RtcEventLog* event_log, - NetworkStatePredictorFactoryInterface* predictor_factory, - NetworkControllerFactoryInterface* controller_factory, - const BitrateConstraints& bitrate_config, - TaskQueueFactory* task_queue_factory, - const FieldTrialsView& trials); + RtpTransportControllerSend(Clock* clock, const RtpTransportConfig& config); ~RtpTransportControllerSend() override; RtpTransportControllerSend(const RtpTransportControllerSend&) = delete; @@ -195,7 +189,6 @@ class RtpTransportControllerSend final StreamsConfig streams_config_ RTC_GUARDED_BY(task_queue_); const bool reset_feedback_on_route_change_; - const bool send_side_bwe_with_overhead_; const bool add_pacing_to_cwin_; FieldTrialParameter relay_bandwidth_cap_; diff --git a/call/rtp_transport_controller_send_factory.h b/call/rtp_transport_controller_send_factory.h index 8cdae8cfbe..6349302e45 100644 --- a/call/rtp_transport_controller_send_factory.h +++ b/call/rtp_transport_controller_send_factory.h @@ -25,10 +25,7 @@ class RtpTransportControllerSendFactory const RtpTransportConfig& config, Clock* clock) override { RTC_CHECK(config.trials); - return 
std::make_unique( - clock, config.event_log, config.network_state_predictor_factory, - config.network_controller_factory, config.bitrate_config, - config.task_queue_factory, *config.trials); + return std::make_unique(clock, config); } virtual ~RtpTransportControllerSendFactory() {} diff --git a/call/rtp_video_sender.cc b/call/rtp_video_sender.cc index 352d0f0eda..185d897a2c 100644 --- a/call/rtp_video_sender.cc +++ b/call/rtp_video_sender.cc @@ -375,9 +375,6 @@ RtpVideoSender::RtpVideoSender( const FieldTrialsView& field_trials, TaskQueueFactory* task_queue_factory) : field_trials_(field_trials), - send_side_bwe_with_overhead_(!absl::StartsWith( - field_trials_.Lookup("WebRTC-SendSideBwe-WithOverhead"), - "Disabled")), use_frame_rate_for_overhead_(absl::StartsWith( field_trials_.Lookup("WebRTC-Video-UseFrameRateForOverhead"), "Enabled")), @@ -409,7 +406,7 @@ RtpVideoSender::RtpVideoSender( frame_count_observer_(observers.frame_count_observer) { transport_checker_.Detach(); RTC_DCHECK_EQ(rtp_config_.ssrcs.size(), rtp_streams_.size()); - if (send_side_bwe_with_overhead_ && has_packet_feedback_) + if (has_packet_feedback_) transport_->IncludeOverheadInPacedSender(); // SSRCs are assumed to be sorted in the same order as `rtp_modules`. 
for (uint32_t ssrc : rtp_config_.ssrcs) { @@ -480,33 +477,24 @@ RtpVideoSender::~RtpVideoSender() { RTC_DCHECK(!registered_for_feedback_); } -void RtpVideoSender::SetActive(bool active) { +void RtpVideoSender::Stop() { RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); - if (active_ == active) + if (!active_) return; - const std::vector active_modules(rtp_streams_.size(), active); + const std::vector active_modules(rtp_streams_.size(), false); SetActiveModulesLocked(active_modules); - - auto* feedback_provider = transport_->GetStreamFeedbackProvider(); - if (active && !registered_for_feedback_) { - feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); - registered_for_feedback_ = true; - } else if (!active && registered_for_feedback_) { - feedback_provider->DeRegisterStreamFeedbackObserver(this); - registered_for_feedback_ = false; - } } -void RtpVideoSender::SetActiveModules(const std::vector active_modules) { +void RtpVideoSender::SetActiveModules(const std::vector& active_modules) { RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); return SetActiveModulesLocked(active_modules); } void RtpVideoSender::SetActiveModulesLocked( - const std::vector active_modules) { + const std::vector& active_modules) { RTC_DCHECK_RUN_ON(&transport_checker_); RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size()); active_ = false; @@ -527,6 +515,17 @@ void RtpVideoSender::SetActiveModulesLocked( // prevent any stray packets in the pacer from asynchronously arriving // to a disabled module. transport_->packet_router()->RemoveSendRtpModule(&rtp_module); + + // Clear the pacer queue of any packets pertaining to this module. 
+ transport_->packet_sender()->RemovePacketsForSsrc(rtp_module.SSRC()); + if (rtp_module.RtxSsrc().has_value()) { + transport_->packet_sender()->RemovePacketsForSsrc( + *rtp_module.RtxSsrc()); + } + if (rtp_module.FlexfecSsrc().has_value()) { + transport_->packet_sender()->RemovePacketsForSsrc( + *rtp_module.FlexfecSsrc()); + } } // If set to false this module won't send media. @@ -538,6 +537,17 @@ void RtpVideoSender::SetActiveModulesLocked( /*remb_candidate=*/true); } } + if (!active_) { + auto* feedback_provider = transport_->GetStreamFeedbackProvider(); + if (registered_for_feedback_) { + feedback_provider->DeRegisterStreamFeedbackObserver(this); + registered_for_feedback_ = false; + } + } else if (!registered_for_feedback_) { + auto* feedback_provider = transport_->GetStreamFeedbackProvider(); + feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); + registered_for_feedback_ = true; + } } bool RtpVideoSender::IsActive() { @@ -835,7 +845,7 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, DataSize max_total_packet_size = DataSize::Bytes( rtp_config_.max_packet_size + transport_overhead_bytes_per_packet_); uint32_t payload_bitrate_bps = update.target_bitrate.bps(); - if (send_side_bwe_with_overhead_ && has_packet_feedback_) { + if (has_packet_feedback_) { DataRate overhead_rate = CalculateOverheadRate(update.target_bitrate, max_total_packet_size, packet_overhead, Frequency::Hertz(framerate)); @@ -869,7 +879,7 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, loss_mask_vector_.clear(); uint32_t encoder_overhead_rate_bps = 0; - if (send_side_bwe_with_overhead_ && has_packet_feedback_) { + if (has_packet_feedback_) { // TODO(srte): The packet size should probably be the same as in the // CalculateOverheadRate call above (just max_total_packet_size), it doesn't // make sense to use different packet rates for different overhead @@ -882,12 +892,11 @@ void 
RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, encoder_overhead_rate.bps(), update.target_bitrate.bps() - encoder_target_rate_bps_); } - // When the field trial "WebRTC-SendSideBwe-WithOverhead" is enabled - // protection_bitrate includes overhead. const uint32_t media_rate = encoder_target_rate_bps_ + encoder_overhead_rate_bps + packetization_rate_bps; RTC_DCHECK_GE(update.target_bitrate, DataRate::BitsPerSec(media_rate)); + // `protection_bitrate_bps_` includes overhead. protection_bitrate_bps_ = update.target_bitrate.bps() - media_rate; } diff --git a/call/rtp_video_sender.h b/call/rtp_video_sender.h index 9804bd8630..9666b89916 100644 --- a/call/rtp_video_sender.h +++ b/call/rtp_video_sender.h @@ -95,13 +95,11 @@ class RtpVideoSender : public RtpVideoSenderInterface, RtpVideoSender(const RtpVideoSender&) = delete; RtpVideoSender& operator=(const RtpVideoSender&) = delete; - // RtpVideoSender will only route packets if being active, all packets will be - // dropped otherwise. - void SetActive(bool active) RTC_LOCKS_EXCLUDED(mutex_) override; // Sets the sending status of the rtp modules and appropriately sets the // payload router to active if any rtp modules are active. 
- void SetActiveModules(std::vector active_modules) + void SetActiveModules(const std::vector& active_modules) RTC_LOCKS_EXCLUDED(mutex_) override; + void Stop() RTC_LOCKS_EXCLUDED(mutex_) override; bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override; void OnNetworkAvailability(bool network_available) @@ -157,7 +155,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, private: bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void SetActiveModulesLocked(std::vector active_modules) + void SetActiveModulesLocked(const std::vector& active_modules) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void ConfigureProtection(); @@ -170,7 +168,6 @@ class RtpVideoSender : public RtpVideoSenderInterface, Frequency framerate) const; const FieldTrialsView& field_trials_; - const bool send_side_bwe_with_overhead_; const bool use_frame_rate_for_overhead_; const bool has_packet_feedback_; diff --git a/call/rtp_video_sender_interface.h b/call/rtp_video_sender_interface.h index acb68e3ae2..3f2877155a 100644 --- a/call/rtp_video_sender_interface.h +++ b/call/rtp_video_sender_interface.h @@ -31,12 +31,12 @@ struct FecProtectionParams; class RtpVideoSenderInterface : public EncodedImageCallback, public FecControllerOverride { public: - // RtpVideoSender will only route packets if being active, all - // packets will be dropped otherwise. - virtual void SetActive(bool active) = 0; // Sets the sending status of the rtp modules and appropriately sets the // RtpVideoSender to active if any rtp modules are active. - virtual void SetActiveModules(std::vector active_modules) = 0; + // A module will only send packet if beeing active. + virtual void SetActiveModules(const std::vector& active_modules) = 0; + // Set the sending status of all rtp modules to inactive. 
+ virtual void Stop() = 0; virtual bool IsActive() = 0; virtual void OnNetworkAvailability(bool network_available) = 0; diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc index 196e29b801..da2bed649b 100644 --- a/call/rtp_video_sender_unittest.cc +++ b/call/rtp_video_sender_unittest.cc @@ -129,13 +129,14 @@ class RtpVideoSenderTestFixture { payload_type)), send_delay_stats_(time_controller_.GetClock()), bitrate_config_(GetBitrateConfig()), - transport_controller_(time_controller_.GetClock(), - &event_log_, - nullptr, - nullptr, - bitrate_config_, - time_controller_.GetTaskQueueFactory(), - field_trials ? *field_trials : field_trials_), + transport_controller_( + time_controller_.GetClock(), + RtpTransportConfig{ + .bitrate_config = bitrate_config_, + .event_log = &event_log_, + .task_queue_factory = time_controller_.GetTaskQueueFactory(), + .trials = field_trials ? field_trials : &field_trials_, + }), stats_proxy_(time_controller_.GetClock(), config_, VideoEncoderConfig::ContentType::kRealtimeVideo, @@ -186,14 +187,14 @@ class RtpVideoSenderTestFixture { /*frame_transformer=*/nullptr, field_trials) {} - ~RtpVideoSenderTestFixture() { SetActive(false); } + ~RtpVideoSenderTestFixture() { Stop(); } RtpVideoSender* router() { return router_.get(); } MockTransport& transport() { return transport_; } void AdvanceTime(TimeDelta delta) { time_controller_.AdvanceTime(delta); } - void SetActive(bool active) { - RunOnTransportQueue([&]() { router_->SetActive(active); }); + void Stop() { + RunOnTransportQueue([&]() { router_->Stop(); }); } void SetActiveModules(const std::vector& active_modules) { @@ -248,15 +249,15 @@ TEST(RtpVideoSenderTest, SendOnOneModule) { EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActive(true); + test.SetActiveModules({true}); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - 
test.SetActive(false); + test.SetActiveModules({false}); EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActive(true); + test.SetActiveModules({true}); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); } @@ -275,7 +276,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) { CodecSpecificInfo codec_info; codec_info.codecType = kVideoCodecVP8; - test.SetActive(true); + test.SetActiveModules({true, true}); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); @@ -285,7 +286,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) { test.router()->OnEncodedImage(encoded_image_2, &codec_info).error); // Inactive. - test.SetActive(false); + test.Stop(); EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); EXPECT_NE(EncodedImageCallback::Result::OK, @@ -369,7 +370,7 @@ TEST( TEST(RtpVideoSenderTest, CreateWithNoPreviousStates) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true, true}); std::map initial_states = test.router()->GetRtpPayloadStates(); @@ -394,7 +395,7 @@ TEST(RtpVideoSenderTest, CreateWithPreviousStates) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, states); - test.SetActive(true); + test.SetActiveModules({true, true}); std::map initial_states = test.router()->GetRtpPayloadStates(); @@ -434,7 +435,7 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { test.router()->OnEncodedImage(encoded_image, nullptr).error); ::testing::Mock::VerifyAndClearExpectations(&callback); - test.SetActive(true); + test.SetActiveModules({true}); FrameCounts frame_counts; EXPECT_CALL(callback, FrameCountUpdated(_, kSsrc1)) @@ -463,7 +464,7 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { 
TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true, true}); constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image; @@ -628,7 +629,7 @@ TEST(RtpVideoSenderTest, RetransmitsOnTransportWideLossInfo) { TEST(RtpVideoSenderTest, EarlyRetransmits) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true, true}); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; @@ -723,7 +724,7 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -796,7 +797,7 @@ TEST(RtpVideoSenderTest, sent_packets.emplace_back(&extensions).Parse(packet, length)); return true; }); - test.SetActive(true); + test.SetActiveModules({true}); EncodedImage key_frame_image; key_frame_image._frameType = VideoFrameType::kVideoFrameKey; @@ -830,7 +831,7 @@ TEST(RtpVideoSenderTest, TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -886,7 +887,7 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) { TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9NotProvidedByEncoder) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -941,7 +942,7 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) { test::ScopedKeyValueConfig field_trials( 
"WebRTC-GenericCodecDependencyDescriptor/Enabled/"); RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}, &field_trials); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -987,7 +988,7 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) { TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActive(true); + test.SetActiveModules({true}); RtpHeaderExtensionMap extensions; extensions.Register( @@ -1072,7 +1073,7 @@ TEST(RtpVideoSenderTest, OverheadIsSubtractedFromTargetBitrate) { kRtpHeaderSizeBytes + kTransportPacketOverheadBytes; RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}, &field_trials); test.router()->OnTransportOverheadChanged(kTransportPacketOverheadBytes); - test.SetActive(true); + test.SetActiveModules({true}); { test.router()->OnBitrateUpdated(CreateBitrateAllocationUpdate(300000), @@ -1097,4 +1098,81 @@ TEST(RtpVideoSenderTest, OverheadIsSubtractedFromTargetBitrate) { } } +TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) { + RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {}); + test.SetActiveModules({true}); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + ON_CALL(test.transport(), SendRtp) + .WillByDefault([&](const uint8_t* packet, size_t length, + const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet, length)); + return true; + }); + + // Set a very low bitrate. + test.router()->OnBitrateUpdated( + CreateBitrateAllocationUpdate(/*rate_bps=*/30'000), + /*framerate=*/30); + + // Create and send a large keyframe. 
+  const size_t kImageSizeBytes = 10000;
+  constexpr uint8_t kPayload[kImageSizeBytes] = {'a'};
+  EncodedImage encoded_image;
+  encoded_image.SetTimestamp(1);
+  encoded_image.capture_time_ms_ = 2;
+  encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+  encoded_image.SetEncodedData(
+      EncodedImageBuffer::Create(kPayload, sizeof(kPayload)));
+  EXPECT_EQ(test.router()
+                ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr)
+                .error,
+            EncodedImageCallback::Result::OK);
+
+  // Advance time a small amount, check that sent data is only part of the
+  // image.
+  test.AdvanceTime(TimeDelta::Millis(5));
+  DataSize transmittedPayload = DataSize::Zero();
+  for (const RtpPacket& packet : sent_packets) {
+    transmittedPayload += DataSize::Bytes(packet.payload_size());
+    // Make sure we don't see the end of the frame.
+    EXPECT_FALSE(packet.Marker());
+  }
+  EXPECT_GT(transmittedPayload, DataSize::Zero());
+  EXPECT_LT(transmittedPayload, DataSize::Bytes(kImageSizeBytes / 4));
+
+  // Record the RTP timestamp of the first frame.
+  const uint32_t first_frame_timestamp = sent_packets[0].Timestamp();
+  sent_packets.clear();
+
+  // Disable the sending module and advance time slightly. No packets should be
+  // sent.
+  test.SetActiveModules({false});
+  test.AdvanceTime(TimeDelta::Millis(20));
+  EXPECT_TRUE(sent_packets.empty());
+
+  // Reactivate the send module - any packets should have been removed, so
+  // nothing should be transmitted.
+  test.SetActiveModules({true});
+  test.AdvanceTime(TimeDelta::Millis(33));
+  EXPECT_TRUE(sent_packets.empty());
+
+  // Send a new frame.
+  encoded_image.SetTimestamp(3);
+  encoded_image.capture_time_ms_ = 4;
+  EXPECT_EQ(test.router()
+                ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr)
+                .error,
+            EncodedImageCallback::Result::OK);
+  test.AdvanceTime(TimeDelta::Millis(33));
+
+  // Advance time, check we get new packets - but only for the second frame.
+ EXPECT_FALSE(sent_packets.empty()); + EXPECT_NE(sent_packets[0].Timestamp(), first_frame_timestamp); +} + } // namespace webrtc diff --git a/call/simulated_network.cc b/call/simulated_network.cc index f5d0501313..8f9d76dfe3 100644 --- a/call/simulated_network.cc +++ b/call/simulated_network.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include "api/units/data_rate.h" @@ -21,11 +22,33 @@ namespace webrtc { namespace { -constexpr TimeDelta kDefaultProcessDelay = TimeDelta::Millis(5); + +// Calculate the time (in microseconds) that takes to send N `bits` on a +// network with link capacity equal to `capacity_kbps` starting at time +// `start_time_us`. +int64_t CalculateArrivalTimeUs(int64_t start_time_us, + int64_t bits, + int capacity_kbps) { + // If capacity is 0, the link capacity is assumed to be infinite. + if (capacity_kbps == 0) { + return start_time_us; + } + // Adding `capacity - 1` to the numerator rounds the extra delay caused by + // capacity constraints up to an integral microsecond. Sending 0 bits takes 0 + // extra time, while sending 1 bit gets rounded up to 1 (the multiplication by + // 1000 is because capacity is in kbps). + // The factor 1000 comes from 10^6 / 10^3, where 10^6 is due to the time unit + // being us and 10^3 is due to the rate unit being kbps. + return start_time_us + ((1000 * bits + capacity_kbps - 1) / capacity_kbps); +} + } // namespace SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed) - : random_(random_seed), bursting_(false) { + : random_(random_seed), + bursting_(false), + last_enqueue_time_us_(0), + last_capacity_link_exit_time_(0) { SetConfig(config); } @@ -69,26 +92,52 @@ void SimulatedNetwork::PauseTransmissionUntil(int64_t until_us) { bool SimulatedNetwork::EnqueuePacket(PacketInFlightInfo packet) { RTC_DCHECK_RUNS_SERIALIZED(&process_checker_); + + // Check that old packets don't get enqueued, the SimulatedNetwork expect that + // the packets' send time is monotonically increasing. 
The tolerance for
+  // non-monotonic enqueue events is 0.5 ms because on multi core systems
+  // clock_gettime(CLOCK_MONOTONIC) can show non-monotonic behaviour between
+  // threads running on different cores.
+  // TODO(bugs.webrtc.org/14525): Open a bug on this with the goal to re-enable
+  // the DCHECK.
+  // At the moment, we see more than 130ms between non-monotonic events, which
+  // is more than expected.
+  // RTC_DCHECK_GE(packet.send_time_us - last_enqueue_time_us_, -2000);
+
   ConfigState state = GetConfigState();

-  UpdateCapacityQueue(state, packet.send_time_us);
-
+  // If the network config requires packet overhead, let's apply it as early as
+  // possible.
   packet.size += state.config.packet_overhead;

+  // If `queue_length_packets` is 0, the queue size is infinite.
   if (state.config.queue_length_packets > 0 &&
       capacity_link_.size() >= state.config.queue_length_packets) {
     // Too many packet on the link, drop this one.
     return false;
   }

-  // Set arrival time = send time for now; actual arrival time will be
-  // calculated in UpdateCapacityQueue.
-  queue_size_bytes_ += packet.size;
-  capacity_link_.push({packet, packet.send_time_us});
+  // If the packet has been sent before the previous packet in the network left
+  // the capacity queue, let's ensure the new packet will start its trip in the
+  // network after the last bit of the previous packet has left it.
+  int64_t packet_send_time_us = packet.send_time_us;
+  if (!capacity_link_.empty()) {
+    packet_send_time_us =
+        std::max(packet_send_time_us, capacity_link_.back().arrival_time_us);
+  }
+  capacity_link_.push({.packet = packet,
+                       .arrival_time_us = CalculateArrivalTimeUs(
+                           packet_send_time_us, packet.size * 8,
+                           state.config.link_capacity_kbps)});
+
+  // Only update `next_process_time_us_` if not already set (if set, there is no
+  // way that a new packet will make the `next_process_time_us_` change).
if (!next_process_time_us_) { - next_process_time_us_ = packet.send_time_us + kDefaultProcessDelay.us(); + RTC_DCHECK_EQ(capacity_link_.size(), 1); + next_process_time_us_ = capacity_link_.front().arrival_time_us; } + last_enqueue_time_us_ = packet.send_time_us; return true; } @@ -99,52 +148,40 @@ absl::optional SimulatedNetwork::NextDeliveryTimeUs() const { void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, int64_t time_now_us) { - bool needs_sort = false; + // If there is at least one packet in the `capacity_link_`, let's update its + // arrival time to take into account changes in the network configuration + // since the last call to UpdateCapacityQueue. + if (!capacity_link_.empty()) { + capacity_link_.front().arrival_time_us = CalculateArrivalTimeUs( + std::max(capacity_link_.front().packet.send_time_us, + last_capacity_link_exit_time_), + capacity_link_.front().packet.size * 8, + state.config.link_capacity_kbps); + } - // Catch for thread races. - if (time_now_us < last_capacity_link_visit_us_.value_or(time_now_us)) + // The capacity link is empty or the first packet is not expected to exit yet. + if (capacity_link_.empty() || + time_now_us < capacity_link_.front().arrival_time_us) { return; + } + bool reorder_packets = false; - int64_t time_us = last_capacity_link_visit_us_.value_or(time_now_us); - // Check the capacity link first. - while (!capacity_link_.empty()) { - int64_t time_until_front_exits_us = 0; - if (state.config.link_capacity_kbps > 0) { - int64_t remaining_bits = - capacity_link_.front().packet.size * 8 - pending_drain_bits_; - RTC_DCHECK(remaining_bits > 0); - // Division rounded up - packet not delivered until its last bit is. - time_until_front_exits_us = - (1000 * remaining_bits + state.config.link_capacity_kbps - 1) / - state.config.link_capacity_kbps; - } - - if (time_us + time_until_front_exits_us > time_now_us) { - // Packet at front will not exit yet. 
Will not enter here on infinite - // capacity(=0) so no special handling needed. - pending_drain_bits_ += - ((time_now_us - time_us) * state.config.link_capacity_kbps) / 1000; - break; - } - if (state.config.link_capacity_kbps > 0) { - pending_drain_bits_ += - (time_until_front_exits_us * state.config.link_capacity_kbps) / 1000; - } else { - // Enough to drain the whole queue. - pending_drain_bits_ = queue_size_bytes_ * 8; - } - - // Time to get this packet. + do { + // Time to get this packet (the original or just updated arrival_time_us is + // smaller or equal to time_now_us). PacketInfo packet = capacity_link_.front(); capacity_link_.pop(); - time_us += time_until_front_exits_us; - RTC_DCHECK(time_us >= packet.packet.send_time_us); - packet.arrival_time_us = - std::max(state.pause_transmission_until_us, time_us); - queue_size_bytes_ -= packet.packet.size; - pending_drain_bits_ -= packet.packet.size * 8; - RTC_DCHECK(pending_drain_bits_ >= 0); + // If the network is paused, the pause will be implemented as an extra delay + // to be spent in the `delay_link_` queue. + if (state.pause_transmission_until_us > packet.arrival_time_us) { + packet.arrival_time_us = state.pause_transmission_until_us; + } + + // Store the original arrival time, before applying packet loss or extra + // delay. This is needed to know when it is the first available time the + // next packet in the `capacity_link_` queue can start transmitting. + last_capacity_link_exit_time_ = packet.arrival_time_us; // Drop packets at an average rate of `state.config.loss_percent` with // and average loss burst length of `state.config.avg_burst_loss_length`. @@ -153,6 +190,7 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, bursting_ = true; packet.arrival_time_us = PacketDeliveryInfo::kNotReceived; } else { + // If packets are not dropped, apply extra delay as configured. 
bursting_ = false; int64_t arrival_time_jitter_us = std::max( random_.Gaussian(state.config.queue_delay_ms * 1000, @@ -169,24 +207,38 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, arrival_time_jitter_us = last_arrival_time_us - packet.arrival_time_us; } packet.arrival_time_us += arrival_time_jitter_us; - if (packet.arrival_time_us >= last_arrival_time_us) { - last_arrival_time_us = packet.arrival_time_us; - } else { - needs_sort = true; + + // Optimization: Schedule a reorder only when a packet will exit before + // the one in front. + if (last_arrival_time_us > packet.arrival_time_us) { + reorder_packets = true; } } delay_link_.emplace_back(packet); - } - last_capacity_link_visit_us_ = time_now_us; - // Cannot save unused capacity for later. - pending_drain_bits_ = std::min(pending_drain_bits_, queue_size_bytes_ * 8); - if (needs_sort) { - // Packet(s) arrived out of order, make sure list is sorted. - std::sort(delay_link_.begin(), delay_link_.end(), - [](const PacketInfo& p1, const PacketInfo& p2) { - return p1.arrival_time_us < p2.arrival_time_us; - }); + // If there are no packets in the queue, there is nothing else to do. + if (capacity_link_.empty()) { + break; + } + // If instead there is another packet in the `capacity_link_` queue, let's + // calculate its arrival_time_us based on the latest config (which might + // have been changed since it was enqueued). + int64_t next_start = std::max(last_capacity_link_exit_time_, + capacity_link_.front().packet.send_time_us); + capacity_link_.front().arrival_time_us = CalculateArrivalTimeUs( + next_start, capacity_link_.front().packet.size * 8, + state.config.link_capacity_kbps); + // And if the next packet in the queue needs to exit, let's dequeue it. 
+ } while (capacity_link_.front().arrival_time_us <= time_now_us); + + if (state.config.allow_reordering && reorder_packets) { + // Packets arrived out of order and since the network config allows + // reordering, let's sort them per arrival_time_us to make so they will also + // be delivered out of order. + std::stable_sort(delay_link_.begin(), delay_link_.end(), + [](const PacketInfo& p1, const PacketInfo& p2) { + return p1.arrival_time_us < p2.arrival_time_us; + }); } } @@ -198,8 +250,10 @@ SimulatedNetwork::ConfigState SimulatedNetwork::GetConfigState() const { std::vector SimulatedNetwork::DequeueDeliverablePackets( int64_t receive_time_us) { RTC_DCHECK_RUNS_SERIALIZED(&process_checker_); + UpdateCapacityQueue(GetConfigState(), receive_time_us); std::vector packets_to_deliver; + // Check the extra delay queue. while (!delay_link_.empty() && receive_time_us >= delay_link_.front().arrival_time_us) { @@ -212,7 +266,7 @@ std::vector SimulatedNetwork::DequeueDeliverablePackets( if (!delay_link_.empty()) { next_process_time_us_ = delay_link_.front().arrival_time_us; } else if (!capacity_link_.empty()) { - next_process_time_us_ = receive_time_us + kDefaultProcessDelay.us(); + next_process_time_us_ = capacity_link_.front().arrival_time_us; } else { next_process_time_us_.reset(); } diff --git a/call/simulated_network.h b/call/simulated_network.h index d3092aefba..8597367add 100644 --- a/call/simulated_network.h +++ b/call/simulated_network.h @@ -28,16 +28,27 @@ namespace webrtc { -// Class simulating a network link. This is a simple and naive solution just -// faking capacity and adding an extra transport delay in addition to the -// capacity introduced delay. +// Class simulating a network link. 
+// +// This is a basic implementation of NetworkBehaviorInterface that supports: +// - Packet loss +// - Capacity delay +// - Extra delay with or without packets reorder +// - Packet overhead +// - Queue max capacity class SimulatedNetwork : public SimulatedNetworkInterface { public: using Config = BuiltInNetworkBehaviorConfig; explicit SimulatedNetwork(Config config, uint64_t random_seed = 1); ~SimulatedNetwork() override; - // Sets a new configuration. This won't affect packets already in the pipe. + // Sets a new configuration. This will affect packets that will be sent with + // EnqueuePacket but also packets in the network that have not left the + // network emulation. Packets that are ready to be retrieved by + // DequeueDeliverablePackets are not affected by the new configuration. + // TODO(bugs.webrtc.org/14525): Fix SetConfig and make it apply only to the + // part of the packet that is currently being sent (instead of applying to + // all of it). void SetConfig(const Config& config) override; void UpdateConfig(std::function config_modifier) override; @@ -53,6 +64,7 @@ class SimulatedNetwork : public SimulatedNetworkInterface { private: struct PacketInfo { PacketInFlightInfo packet; + // Time when the packet has left (or will leave) the network. int64_t arrival_time_us; }; // Contains current configuration state. @@ -75,25 +87,46 @@ class SimulatedNetwork : public SimulatedNetworkInterface { mutable Mutex config_lock_; - // `process_checker_` guards the data structures involved in delay and loss - // processes, such as the packet queues. + // Guards the data structures involved in delay and loss processing, such as + // the packet queues. rtc::RaceChecker process_checker_; + // Models the capacity of the network by rejecting packets if the queue is + // full and keeping them in the queue until they are ready to exit (according + // to the link capacity, which cannot be violated, e.g. a 1 kbps link will + // only be able to deliver 1000 bits per second). 
+  //
+  // Invariant:
+  // The head of the `capacity_link_` has arrival_time_us correctly set to the
+  // time when the packet is supposed to be delivered (without accounting
+  // potential packet loss or potential extra delay and without accounting for a
+  // new configuration of the network, which requires a re-computation of the
+  // arrival_time_us).
   std::queue<PacketInfo> capacity_link_ RTC_GUARDED_BY(process_checker_);
-  Random random_;
-
+  // Models the extra delay of the network (see `queue_delay_ms`
+  // and `delay_standard_deviation_ms` in BuiltInNetworkBehaviorConfig), packets
+  // in the `delay_link_` have technically already left the network and don't
+  // use its capacity but they are not delivered yet.
   std::deque<PacketInfo> delay_link_ RTC_GUARDED_BY(process_checker_);
+  // Represents the next moment in time when the network is supposed to deliver
+  // packets to the client (either by pulling them from `delay_link_` or
+  // `capacity_link_` or both).
+  absl::optional<int64_t> next_process_time_us_
+      RTC_GUARDED_BY(process_checker_);

   ConfigState config_state_ RTC_GUARDED_BY(config_lock_);
+  Random random_ RTC_GUARDED_BY(process_checker_);

   // Are we currently dropping a burst of packets?
   bool bursting_;

-  int64_t queue_size_bytes_ RTC_GUARDED_BY(process_checker_) = 0;
-  int64_t pending_drain_bits_ RTC_GUARDED_BY(process_checker_) = 0;
-  absl::optional<int64_t> last_capacity_link_visit_us_
-      RTC_GUARDED_BY(process_checker_);
-  absl::optional<int64_t> next_process_time_us_
-      RTC_GUARDED_BY(process_checker_);
+  // The send time of the last enqueued packet, this is only used to check that
+  // the send time of enqueued packets is monotonically increasing.
+  int64_t last_enqueue_time_us_;
+
+  // The last time a packet left the capacity_link_ (used to enforce
+  // the capacity of the link and avoid packets starting to get sent before
+  // the link is free).
+ int64_t last_capacity_link_exit_time_; }; } // namespace webrtc diff --git a/call/simulated_network_unittest.cc b/call/simulated_network_unittest.cc new file mode 100644 index 0000000000..825dd6d065 --- /dev/null +++ b/call/simulated_network_unittest.cc @@ -0,0 +1,513 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "call/simulated_network.h" + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/test/simulated_network.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; + +PacketInFlightInfo PacketWithSize(size_t size) { + return PacketInFlightInfo(/*size=*/size, /*send_time_us=*/0, /*packet_id=*/1); +} + +TEST(SimulatedNetworkTest, NextDeliveryTimeIsUnknownOnEmptyNetwork) { + SimulatedNetwork network = SimulatedNetwork({}); + EXPECT_EQ(network.NextDeliveryTimeUs(), absl::nullopt); +} + +TEST(SimulatedNetworkTest, EnqueueFirstPacketOnNetworkWithInfiniteCapacity) { + // A packet of 1 kB that gets enqueued on a network with infinite capacity + // should be ready to exit the network immediately. + SimulatedNetwork network = SimulatedNetwork({}); + ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(1'000))); + + EXPECT_EQ(network.NextDeliveryTimeUs(), 0); +} + +TEST(SimulatedNetworkTest, EnqueueFirstPacketOnNetworkWithLimitedCapacity) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. 
+ SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); + + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); +} + +TEST(SimulatedNetworkTest, + EnqueuePacketsButNextDeliveryIsBasedOnFirstEnqueuedPacket) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // Enqueuing another packet after 100 us doesn't change the next delivery + // time. + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/100, /*packet_id=*/2))); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // Enqueuing another packet after 2 seconds doesn't change the next delivery + // time since the first packet has not left the network yet. + ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( + /*size=*/125, /*send_time_us=*/TimeDelta::Seconds(2).us(), + /*packet_id=*/3))); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); +} + +TEST(SimulatedNetworkTest, EnqueueFailsWhenQueueLengthIsReached) { + SimulatedNetwork network = + SimulatedNetwork({.queue_length_packets = 1, .link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); + + // Until there is 1 packet in the queue, no other packets can be enqueued, + // the only way to make space for new packets is calling + // DequeueDeliverablePackets at a time greater than or equal to + // NextDeliveryTimeUs. 
+ EXPECT_FALSE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, + /*send_time_us=*/TimeDelta::Seconds(0.5).us(), + /*packet_id=*/2))); + + // Even if the send_time_us is after NextDeliveryTimeUs, it is still not + // possible to enqueue a new packet since the client didn't deque any packet + // from the queue (in this case the client is introducing unbounded delay but + // the network cannot do anything about it). + EXPECT_FALSE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, + /*send_time_us=*/TimeDelta::Seconds(2).us(), + /*packet_id=*/3))); +} + +TEST(SimulatedNetworkTest, PacketOverhead) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second, but since there is an + // overhead per packet of 125 bytes, it will exit the network after 2 seconds. + SimulatedNetwork network = + SimulatedNetwork({.link_capacity_kbps = 1, .packet_overhead = 125}); + ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); + + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(2).us()); +} + +TEST(SimulatedNetworkTest, + DequeueDeliverablePacketsLeavesPacketsInCapacityLink) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); + // Enqueue another packet of 125 bytes (this one should exit after 2 seconds). + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, + /*send_time_us=*/TimeDelta::Seconds(1).us(), + /*packet_id=*/2))); + + // The first packet will exit after 1 second, so that is the next delivery + // time. + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // After 1 seconds, we collect the delivered packets... 
+ std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(1).us()); + ASSERT_EQ(delivered_packets.size(), 1ul); + EXPECT_EQ(delivered_packets[0].packet_id, 1ul); + EXPECT_EQ(delivered_packets[0].receive_time_us, TimeDelta::Seconds(1).us()); + + // ... And after the first enqueued packet has left the network, the next + // delivery time reflects the delivery time of the next packet. + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(2).us()); +} + +TEST(SimulatedNetworkTest, + DequeueDeliverablePacketsAppliesConfigChangesToCapacityLink) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + const PacketInFlightInfo packet_1 = + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1); + ASSERT_TRUE(network.EnqueuePacket(packet_1)); + + // Enqueue another packet of 125 bytes with send time 1 second so this should + // exit after 2 seconds. + PacketInFlightInfo packet_2 = + PacketInFlightInfo(/*size=*/125, + /*send_time_us=*/TimeDelta::Seconds(1).us(), + /*packet_id=*/2); + ASSERT_TRUE(network.EnqueuePacket(packet_2)); + + // The first packet will exit after 1 second, so that is the next delivery + // time. + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // Since the link capacity changes from 1 kbps to 10 kbps, packets will take + // 100 ms each to leave the network. + network.SetConfig({.link_capacity_kbps = 10}); + + // The next delivery time doesn't change (it will be updated, if needed at + // DequeueDeliverablePackets time). + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // Getting the first enqueued packet after 100 ms. 
+ std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Millis(100).us()); + ASSERT_EQ(delivered_packets.size(), 1ul); + EXPECT_THAT(delivered_packets, + ElementsAre(PacketDeliveryInfo( + /*source=*/packet_1, + /*receive_time_us=*/TimeDelta::Millis(100).us()))); + + // Getting the second enqueued packet that cannot be delivered before its send + // time, hence it will be delivered after 1.1 seconds. + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Millis(1100).us()); + delivered_packets = network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Millis(1100).us()); + ASSERT_EQ(delivered_packets.size(), 1ul); + EXPECT_THAT(delivered_packets, + ElementsAre(PacketDeliveryInfo( + /*source=*/packet_2, + /*receive_time_us=*/TimeDelta::Millis(1100).us()))); +} + +TEST(SimulatedNetworkTest, NetworkEmptyAfterLastPacketDequeued) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); + + // Collecting all the delivered packets ... + std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(1).us()); + EXPECT_EQ(delivered_packets.size(), 1ul); + + // ... leaves the network empty. + EXPECT_EQ(network.NextDeliveryTimeUs(), absl::nullopt); +} + +TEST(SimulatedNetworkTest, DequeueDeliverablePacketsOnLateCall) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); + + // Enqueue another packet of 125 bytes with send time 1 second so this should + // exit after 2 seconds. 
+ ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, + /*send_time_us=*/TimeDelta::Seconds(1).us(), + /*packet_id=*/2))); + + // Collecting delivered packets after 3 seconds will result in the delivery of + // both the enqueued packets. + std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(3).us()); + EXPECT_EQ(delivered_packets.size(), 2ul); +} + +TEST(SimulatedNetworkTest, + DequeueDeliverablePacketsOnEarlyCallReturnsNoPackets) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); + + // Collecting delivered packets after 0.5 seconds will result in the delivery + // of 0 packets. + std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(0.5).us()); + EXPECT_EQ(delivered_packets.size(), 0ul); + + // Since the first enqueued packet was supposed to exit after 1 second. + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); +} + +TEST(SimulatedNetworkTest, QueueDelayMsWithoutStandardDeviation) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + SimulatedNetwork network = + SimulatedNetwork({.queue_delay_ms = 100, .link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); + // The next delivery time is still 1 second even if there are 100 ms of + // extra delay but this will be applied at DequeueDeliverablePackets time. + ASSERT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // Since all packets are delayed by 100 ms, after 1 second, no packets will + // exit the network. 
+ std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(1).us()); + EXPECT_EQ(delivered_packets.size(), 0ul); + + // And the updated next delivery time takes into account the extra delay of + // 100 ms so the first packet in the network will be delivered after 1.1 + // seconds. + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Millis(1100).us()); + delivered_packets = network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Millis(1100).us()); + EXPECT_EQ(delivered_packets.size(), 1ul); +} + +TEST(SimulatedNetworkTest, + QueueDelayMsWithStandardDeviationAndReorderNotAllowed) { + SimulatedNetwork network = + SimulatedNetwork({.queue_delay_ms = 100, + .delay_standard_deviation_ms = 90, + .link_capacity_kbps = 1, + .allow_reordering = false}); + // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network in 1 second. + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); + + // But 3 more packets of size 1 byte are enqueued at the same time. + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/1, /*send_time_us=*/0, /*packet_id=*/2))); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/1, /*send_time_us=*/0, /*packet_id=*/3))); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/1, /*send_time_us=*/0, /*packet_id=*/4))); + + // After 5 seconds all of them exit the network. + std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(5).us()); + ASSERT_EQ(delivered_packets.size(), 4ul); + + // And they are still in order even if the delay was applied. 
+  EXPECT_EQ(delivered_packets[0].packet_id, 1ul);
+  EXPECT_EQ(delivered_packets[1].packet_id, 2ul);
+  EXPECT_GE(delivered_packets[1].receive_time_us,
+            delivered_packets[0].receive_time_us);
+  EXPECT_EQ(delivered_packets[2].packet_id, 3ul);
+  EXPECT_GE(delivered_packets[2].receive_time_us,
+            delivered_packets[1].receive_time_us);
+  EXPECT_EQ(delivered_packets[3].packet_id, 4ul);
+  EXPECT_GE(delivered_packets[3].receive_time_us,
+            delivered_packets[2].receive_time_us);
+}
+
+TEST(SimulatedNetworkTest, QueueDelayMsWithStandardDeviationAndReorderAllowed) {
+  SimulatedNetwork network =
+      SimulatedNetwork({.queue_delay_ms = 100,
+                        .delay_standard_deviation_ms = 90,
+                        .link_capacity_kbps = 1,
+                        .allow_reordering = true},
+                       /*random_seed=*/1);
+  // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity
+  // should be ready to exit the network in 1 second.
+  ASSERT_TRUE(network.EnqueuePacket(
+      PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1)));
+
+  // But 3 more packets of size 1 byte are enqueued at the same time.
+  ASSERT_TRUE(network.EnqueuePacket(
+      PacketInFlightInfo(/*size=*/1, /*send_time_us=*/0, /*packet_id=*/2)));
+  ASSERT_TRUE(network.EnqueuePacket(
+      PacketInFlightInfo(/*size=*/1, /*send_time_us=*/0, /*packet_id=*/3)));
+  ASSERT_TRUE(network.EnqueuePacket(
+      PacketInFlightInfo(/*size=*/1, /*send_time_us=*/0, /*packet_id=*/4)));
+
+  // After 5 seconds all of them exit the network.
+  std::vector<PacketDeliveryInfo> delivered_packets =
+      network.DequeueDeliverablePackets(
+          /*receive_time_us=*/TimeDelta::Seconds(5).us());
+  ASSERT_EQ(delivered_packets.size(), 4ul);
+
+  // And they have been reordered according to the applied extra delay.
+  EXPECT_EQ(delivered_packets[0].packet_id, 3ul);
+  EXPECT_EQ(delivered_packets[1].packet_id, 1ul);
+  EXPECT_GE(delivered_packets[1].receive_time_us,
+            delivered_packets[0].receive_time_us);
+  EXPECT_EQ(delivered_packets[2].packet_id, 2ul);
+  EXPECT_GE(delivered_packets[2].receive_time_us,
+            delivered_packets[1].receive_time_us);
+  EXPECT_EQ(delivered_packets[3].packet_id, 4ul);
+  EXPECT_GE(delivered_packets[3].receive_time_us,
+            delivered_packets[2].receive_time_us);
+}
+
+TEST(SimulatedNetworkTest, PacketLoss) {
+  // On a network with 50% probability of packet loss ...
+  SimulatedNetwork network = SimulatedNetwork({.loss_percent = 50});
+
+  // Enqueueing 8 packets ...
+  for (int i = 0; i < 8; i++) {
+    ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo(
+        /*size=*/1, /*send_time_us=*/0, /*packet_id=*/i + 1)));
+  }
+
+  std::vector<PacketDeliveryInfo> delivered_packets =
+      network.DequeueDeliverablePackets(
+          /*receive_time_us=*/TimeDelta::Seconds(5).us());
+  EXPECT_EQ(delivered_packets.size(), 8ul);
+
+  // Results in the loss of 4 of them.
+  int lost_packets = 0;
+  for (const auto& packet : delivered_packets) {
+    if (packet.receive_time_us == PacketDeliveryInfo::kNotReceived) {
+      lost_packets++;
+    }
+  }
+  EXPECT_EQ(lost_packets, 4);
+}
+
+TEST(SimulatedNetworkTest, PacketLossBurst) {
+  // On a network with 50% probability of packet loss and an average burst loss
+  // length of 100 ...
+  SimulatedNetwork network = SimulatedNetwork(
+      {.loss_percent = 50, .avg_burst_loss_length = 100}, /*random_seed=*/1);
+
+  // Enqueueing 20 packets ...
+  for (int i = 0; i < 20; i++) {
+    ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo(
+        /*size=*/1, /*send_time_us=*/0, /*packet_id=*/i + 1)));
+  }
+
+  std::vector<PacketDeliveryInfo> delivered_packets =
+      network.DequeueDeliverablePackets(
+          /*receive_time_us=*/TimeDelta::Seconds(5).us());
+  EXPECT_EQ(delivered_packets.size(), 20ul);
+
+  // Results in a burst of lost packets after the first packet lost.
+ // With the current random seed, the first 12 are not lost, while the + // last 8 are. + int current_packet = 0; + for (const auto& packet : delivered_packets) { + if (current_packet < 12) { + EXPECT_NE(packet.receive_time_us, PacketDeliveryInfo::kNotReceived); + current_packet++; + } else { + EXPECT_EQ(packet.receive_time_us, PacketDeliveryInfo::kNotReceived); + current_packet++; + } + } +} + +TEST(SimulatedNetworkTest, PauseTransmissionUntil) { + // 3 packets of 125 bytes that gets enqueued on a network with 1 kbps capacity + // should be ready to exit the network after 1, 2 and 3 seconds respectively. + SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/2))); + ASSERT_TRUE(network.EnqueuePacket( + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/3))); + ASSERT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // The network gets paused for 5 seconds, which means that the first packet + // can exit after 5 seconds instead of 1 second. + network.PauseTransmissionUntil(TimeDelta::Seconds(5).us()); + + // No packets after 1 second. + std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(1).us()); + EXPECT_EQ(delivered_packets.size(), 0ul); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(5).us()); + + // The first packet exits after 5 seconds. + delivered_packets = network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(5).us()); + EXPECT_EQ(delivered_packets.size(), 1ul); + + // After the first packet is exited, the next delivery time reflects the + // delivery time of the next packet which accounts for the network pause. 
+  EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(6).us());
+
+  // And 2 seconds after the exit of the first enqueued packet, the following 2
+  // packets are also delivered.
+  delivered_packets = network.DequeueDeliverablePackets(
+      /*receive_time_us=*/TimeDelta::Seconds(7).us());
+  EXPECT_EQ(delivered_packets.size(), 2ul);
+}
+
+TEST(SimulatedNetworkTest, CongestedNetworkRespectsLinkCapacity) {
+  SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1});
+  for (size_t i = 0; i < 1'000; ++i) {
+    ASSERT_TRUE(network.EnqueuePacket(
+        PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/i)));
+  }
+  PacketDeliveryInfo last_delivered_packet{
+      PacketInFlightInfo(/*size=*/0, /*send_time_us=*/0, /*packet_id=*/0), 0};
+  while (network.NextDeliveryTimeUs().has_value()) {
+    std::vector<PacketDeliveryInfo> delivered_packets =
+        network.DequeueDeliverablePackets(
+            /*receive_time_us=*/network.NextDeliveryTimeUs().value());
+    if (!delivered_packets.empty()) {
+      last_delivered_packet = delivered_packets.back();
+    }
+  }
+  // 1000 packets of 1000 bits each will take 1000 seconds to exit a 1 kbps
+  // network.
+  EXPECT_EQ(last_delivered_packet.receive_time_us,
+            TimeDelta::Seconds(1000).us());
+  EXPECT_EQ(last_delivered_packet.packet_id, 999ul);
+}
+
+TEST(SimulatedNetworkTest, EnqueuePacketWithSubSecondNonMonotonicBehaviour) {
+  // On multi-core systems, different threads can experience sub-millisecond
+  // non-monotonic behaviour when running on different cores. This test checks
+  // that when a packet is enqueued non-monotonically, the network continues to
+  // work and the out of order packet is sent anyway.
+ SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( + /*size=*/125, /*send_time_us=*/TimeDelta::Seconds(1).us(), + /*packet_id=*/0))); + ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( + /*size=*/125, /*send_time_us=*/TimeDelta::Seconds(1).us() - 1, + /*packet_id=*/1))); + + std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(2).us()); + ASSERT_EQ(delivered_packets.size(), 1ul); + EXPECT_EQ(delivered_packets[0].packet_id, 0ul); + EXPECT_EQ(delivered_packets[0].receive_time_us, TimeDelta::Seconds(2).us()); + + delivered_packets = network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Seconds(3).us()); + ASSERT_EQ(delivered_packets.size(), 1ul); + EXPECT_EQ(delivered_packets[0].packet_id, 1ul); + EXPECT_EQ(delivered_packets[0].receive_time_us, TimeDelta::Seconds(3).us()); +} + +// TODO(bugs.webrtc.org/14525): Re-enable when the DCHECK will be uncommented +// and the non-monotonic events on real time clock tests is solved/understood. 
+// TEST(SimulatedNetworkDeathTest, EnqueuePacketExpectMonotonicSendTime) { +// SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); +// ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( +// /*size=*/125, /*send_time_us=*/2'000'000, /*packet_id=*/0))); +// EXPECT_DEATH_IF_SUPPORTED(network.EnqueuePacket(PacketInFlightInfo( +// /*size=*/125, /*send_time_us=*/900'000, /*packet_id=*/1)), ""); +// } +} // namespace +} // namespace webrtc diff --git a/call/test/mock_audio_send_stream.h b/call/test/mock_audio_send_stream.h index 4164dd550e..1993de8de0 100644 --- a/call/test/mock_audio_send_stream.h +++ b/call/test/mock_audio_send_stream.h @@ -25,7 +25,10 @@ class MockAudioSendStream : public AudioSendStream { GetConfig, (), (const, override)); - MOCK_METHOD(void, Reconfigure, (const Config& config), (override)); + MOCK_METHOD(void, + Reconfigure, + (const Config& config, SetParametersCallback callback), + (override)); MOCK_METHOD(void, Start, (), (override)); MOCK_METHOD(void, Stop, (), (override)); // GMock doesn't like move-only types, such as std::unique_ptr. diff --git a/call/version.cc b/call/version.cc index a84e16c5b1..4b2b9cc22e 100644 --- a/call/version.cc +++ b/call/version.cc @@ -13,7 +13,7 @@ namespace webrtc { // The timestamp is always in UTC. -const char* const kSourceTimestamp = "WebRTC source stamp 2022-10-09T04:11:21"; +const char* const kSourceTimestamp = "WebRTC source stamp 2022-12-14T04:03:07"; void LoadWebRTCVersionInRegister() { // Using volatile to instruct the compiler to not optimize `p` away even diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h index 3dbf66e1b1..2e2742a814 100644 --- a/call/video_receive_stream.h +++ b/call/video_receive_stream.h @@ -87,6 +87,7 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { // Decoder stats. 
std::string decoder_implementation_name = "unknown"; + absl::optional power_efficient_decoder; FrameCounts frame_counts; int decode_ms = 0; int max_decode_ms = 0; diff --git a/call/video_send_stream.h b/call/video_send_stream.h index 50374377ba..431c267e1e 100644 --- a/call/video_send_stream.h +++ b/call/video_send_stream.h @@ -23,12 +23,14 @@ #include "api/crypto/crypto_options.h" #include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_stream_encoder_settings.h" +#include "api/video_codecs/scalability_mode.h" #include "call/rtp_config.h" #include "common_video/frame_counts.h" #include "common_video/include/quality_limitation_reason.h" @@ -92,6 +94,7 @@ class VideoSendStream { uint64_t total_encode_time_ms = 0; uint64_t total_encoded_bytes_target = 0; uint32_t huge_frames_sent = 0; + absl::optional scalability_mode; }; struct Stats { @@ -141,6 +144,7 @@ class VideoSendStream { webrtc::VideoContentType::UNSPECIFIED; uint32_t frames_sent = 0; uint32_t huge_frames_sent = 0; + absl::optional power_efficient_encoder; }; struct Config { @@ -214,11 +218,15 @@ class VideoSendStream { // Note: This starts stream activity if it is inactive and one of the layers // is active. This stops stream activity if it is active and all layers are // inactive. - virtual void UpdateActiveSimulcastLayers(std::vector active_layers) = 0; + // `active_layers` should have the same size as the number of configured + // simulcast layers or one if only one rtp stream is used. + virtual void StartPerRtpStream(std::vector active_layers) = 0; // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. + // Prefer to use StartPerRtpStream. 
virtual void Start() = 0; + // Stops stream activity. // When a stream is stopped, it can't receive, process or deliver packets. virtual void Stop() = 0; @@ -226,9 +234,9 @@ class VideoSendStream { // Accessor for determining if the stream is active. This is an inexpensive // call that must be made on the same thread as `Start()` and `Stop()` methods // are called on and will return `true` iff activity has been started either - // via `Start()` or `UpdateActiveSimulcastLayers()`. If activity is either + // via `Start()` or `StartPerRtpStream()`. If activity is either // stopped or is in the process of being stopped as a result of a call to - // either `Stop()` or `UpdateActiveSimulcastLayers()` where all layers were + // either `Stop()` or `StartPerRtpStream()` where all layers were // deactivated, the return value will be `false`. virtual bool started() = 0; @@ -250,8 +258,13 @@ class VideoSendStream { // with the VideoStream settings. virtual void ReconfigureVideoEncoder(VideoEncoderConfig config) = 0; + virtual void ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) = 0; + virtual Stats GetStats() = 0; + virtual void GenerateKeyFrame(const std::vector& rids) = 0; + protected: virtual ~VideoSendStream() {} }; diff --git a/docs/native-code/development/index.md b/docs/native-code/development/index.md index f3cfd556bc..f8c65b276b 100644 --- a/docs/native-code/development/index.md +++ b/docs/native-code/development/index.md @@ -116,15 +116,17 @@ When you have Ninja project files generated (see previous section), compile For [Ninja][ninja] project files generated in `out/Default`: ``` -$ ninja -C out/Default +$ autoninja -C out/Default ``` To build everything in the generated folder (`out/Default`): ``` -$ ninja all -C out/Default +$ autoninja all -C out/Default ``` +`autoninja` is a wrapper that automatically provides optimal values for the arguments passed to `ninja`. 
+ See [Ninja build rules][ninja-build-rules] to read more about difference between `ninja` and `ninja all`. diff --git a/docs/native-code/ios/index.md b/docs/native-code/ios/index.md index 29db854e0c..307379f17f 100644 --- a/docs/native-code/ios/index.md +++ b/docs/native-code/ios/index.md @@ -101,14 +101,14 @@ Xcode is the default and preferred IDE to develop for the iOS platform. *Generating an Xcode project* To have GN generate Xcode project files, pass the argument `--ide=xcode` -when running `gn gen`. This will result in a file named `all.xcworkspace` +when running `gn gen`. This will result in a file named `all.xcodeproj` placed in your specified output directory. Example: ``` $ gn gen out/ios --args='target_os="ios" target_cpu="arm64"' --ide=xcode -$ open -a Xcode.app out/ios/all.xcworkspace +$ open -a Xcode.app out/ios/all.xcodeproj ``` *Compile and run with Xcode* diff --git a/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md b/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md index f367adab4c..c4454d8ee1 100644 --- a/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md +++ b/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md @@ -2,7 +2,7 @@ The goal of this extension is for a video sender to provide information about the target bitrate, resolution and frame rate of each scalability layer in order -to aid a middle box to decide which layer to relay. +to aid a selective forwarding middlebox to decide which layer to relay. **Name:** "Video layers allocation version 0" @@ -18,7 +18,7 @@ layers and a middle box can choose a layer to relay for each receiver. This extension support temporal layers, multiple spatial layers sent on a single rtp stream (SVC), or independent spatial layers sent on multiple rtp streams -(Simulcast). +(simulcast). 
## RTP header extension format @@ -32,9 +32,8 @@ rtp stream (SVC), or independent spatial layers sent on multiple rtp streams // up to 2 bytes |---------------| // when sl_bm == 0 |sl2_bm |sl3_bm | // +-+-+-+-+-+-+-+-+ -// Number of temporal |#tl|#tl|#tl|#tl| -// layers per spatial layer :---------------: -// up to 4 bytes | ... | +// Number of temporal layers |#tl|#tl|#tl|#tl| +// per spatial layer | | | | | // +-+-+-+-+-+-+-+-+ // Target bitrate in kpbs | | // per temporal layer : ... : @@ -56,23 +55,24 @@ rtp stream (SVC), or independent spatial layers sent on multiple rtp streams RID: RTP stream index this allocation is sent on, numbered from 0. 2 bits. -NS: Number of RTP streams - 1. 2 bits, thus allowing up-to 4 RTP streams. +NS: Number of RTP streams minus one. 2 bits, thus allowing up-to 4 RTP streams. sl_bm: BitMask of the active Spatial Layers when same for all RTP streams or 0 -otherwise. 4 bits thus allows up to 4 spatial layers per RTP streams. +otherwise. 4 bits, thus allows up to 4 spatial layers per RTP streams. slX_bm: BitMask of the active Spatial Layers for RTP stream with index=X. -byte-aligned. When NS < 2, takes one byte, otherwise uses two bytes. +When NS < 2, takes one byte, otherwise uses two bytes. Zero-padded to byte +alignment. \#tl: 2-bit value of number of temporal layers-1, thus allowing up-to 4 temporal -layer per spatial layer. One per spatial layer per RTP stream. values are stored -in (RTP stream id, spatial id) ascending order. zero-padded to byte alignment. +layers. Values are stored in ascending order of spatial id. Zero-padded to byte +alignment. -Target bitrate in kbps. Values are stored using leb128 encoding. one value per -temporal layer. values are stored in (RTP stream id, spatial id, temporal id) +Target bitrate in kbps. Values are stored using leb128 encoding [1]. One value per +temporal layer. Values are stored in (RTP stream id, spatial id, temporal id) ascending order. 
All bitrates are total required bitrate to receive the corresponding layer, i.e. in simulcast mode they include only corresponding -spatial layer, in full-svc all lower spatial layers are included. All lower +spatial layers, in full-svc all lower spatial layers are included. All lower temporal layers are also included. Resolution and framerate. Optional. Presence is inferred from the rtp header @@ -82,3 +82,5 @@ id, spatial id) ascending order. An empty layer allocation (i.e nothing sent on ssrc) is encoded as special case with a single 0 byte. + +[1] https://aomediacodec.github.io/av1-spec/#leb128 diff --git a/examples/BUILD.gn b/examples/BUILD.gn index e683c192dc..7d87a01c77 100644 --- a/examples/BUILD.gn +++ b/examples/BUILD.gn @@ -690,6 +690,7 @@ if (is_linux || is_chromeos || is_win) { "../api:create_peerconnection_factory", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api/audio:audio_mixer_api", "../api/audio_codecs:audio_codecs_api", diff --git a/experiments/BUILD.gn b/experiments/BUILD.gn new file mode 100644 index 0000000000..82fce7f08d --- /dev/null +++ b/experiments/BUILD.gn @@ -0,0 +1,26 @@ +# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../webrtc.gni") + +action("registered_field_trials_header") { + visibility = [ ":*" ] + script = "field_trials.py" + args = [ + "header", + "--output", + rebase_path(target_gen_dir, root_build_dir) + "/registered_field_trials.h", + ] + outputs = [ "$target_gen_dir/registered_field_trials.h" ] +} + +rtc_library("registered_field_trials") { + visibility = [ "*" ] + sources = get_target_outputs(":registered_field_trials_header") + deps = [ ":registered_field_trials_header" ] +} diff --git a/experiments/field_trials.py b/experiments/field_trials.py new file mode 100755 index 0000000000..c9a73ce1f3 --- /dev/null +++ b/experiments/field_trials.py @@ -0,0 +1,114 @@ +#!/usr/bin/env vpython3 + +# Copyright (c) 2022 The WebRTC Project Authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import sys +from typing import Set + +import argparse +import dataclasses + + +# TODO(bugs.webrtc.org/14154): End date and bug should also be stored. +@dataclasses.dataclass(frozen=True) +class FieldTrial: + """Representation of all attributes associated with a field trial. + + Attributes: + key: Field trial key. + """ + key: str + + +# As per the policy in `g3doc/field-trials.md`, all field trials should be +# registered in the container below. Please keep the keys sorted. +REGISTERED_FIELD_TRIALS: Set[FieldTrial] = { + FieldTrial(''), # TODO(bugs.webrtc.org/14154): Populate +} + + +def RegistryHeader(field_trials: Set[FieldTrial] = None) -> str: + """Generates a C++ header with all field trial keys. + + Args: + field_trials: Field trials to include in the header. + + Returns: + String representation of a C++ header file containing all field trial keys. 
+ + >>> trials = {FieldTrial('B'), FieldTrial('A'), FieldTrial('B')} + >>> print(RegistryHeader(trials)) + // This file was automatically generated. Do not edit. + + #ifndef GEN_REGISTERED_FIELD_TRIALS_H_ + #define GEN_REGISTERED_FIELD_TRIALS_H_ + + #include "absl/strings/string_view.h" + + namespace webrtc { + + inline constexpr absl::string_view kRegisteredFieldTrials[] = { + "A", + "B", + }; + + } // namespace webrtc + + #endif // GEN_REGISTERED_FIELD_TRIALS_H_ + + """ + if not field_trials: + field_trials = REGISTERED_FIELD_TRIALS + registered_keys = [f.key for f in field_trials] + keys = '\n'.join(f' "{k}",' for k in sorted(registered_keys)) + return ('// This file was automatically generated. Do not edit.\n' + '\n' + '#ifndef GEN_REGISTERED_FIELD_TRIALS_H_\n' + '#define GEN_REGISTERED_FIELD_TRIALS_H_\n' + '\n' + '#include "absl/strings/string_view.h"\n' + '\n' + 'namespace webrtc {\n' + '\n' + 'inline constexpr absl::string_view kRegisteredFieldTrials[] = {\n' + f'{keys}\n' + '};\n' + '\n' + '} // namespace webrtc\n' + '\n' + '#endif // GEN_REGISTERED_FIELD_TRIALS_H_\n') + + +def CmdHeader(args: argparse.Namespace) -> None: + args.output.write(RegistryHeader()) + + +def main() -> None: + parser = argparse.ArgumentParser() + subcommand = parser.add_subparsers(dest='cmd') + parser_header = subcommand.add_parser( + 'header', + help='generate C++ header file containing registered field trial keys') + parser_header.add_argument('--output', + default=sys.stdout, + type=argparse.FileType('w'), + required=False, + help='output file') + parser_header.set_defaults(cmd=CmdHeader) + args = parser.parse_args() + + if not args.cmd: + parser.print_help(sys.stderr) + sys.exit(1) + + args.cmd(args) + + +if __name__ == '__main__': + main() diff --git a/g3doc/abseil-in-webrtc.md b/g3doc/abseil-in-webrtc.md index 8561975340..80572a3245 100644 --- a/g3doc/abseil-in-webrtc.md +++ b/g3doc/abseil-in-webrtc.md @@ -34,6 +34,7 @@ will generate a shared library. 
* `absl::string_view` * The functions in `absl/strings/ascii.h`, `absl/strings/match.h`, and `absl/strings/str_replace.h`. +* The functions in `absl/strings/escaping.h`. * `absl::is_trivially_copy_constructible`, `absl::is_trivially_copy_assignable`, and `absl::is_trivially_destructible` from `absl/meta/type_traits.h`. diff --git a/g3doc/style-guide.md b/g3doc/style-guide.md index 9d713c68c9..44c752173b 100644 --- a/g3doc/style-guide.md +++ b/g3doc/style-guide.md @@ -173,6 +173,16 @@ headers you need. [goog-forward-declarations]: https://google.github.io/styleguide/cppguide.html#Forward_Declarations +### RTTI and dynamic_cast + +The Google style guide [permits the use of dynamic_cast](https://google.github.io/styleguide/cppguide.html#Run-Time_Type_Information__RTTI_). + +However, WebRTC does not permit it. WebRTC (and Chrome) is compiled with the +-fno-rtti flag, and the overhead of enabling RTTI it is on the order of 220 +Kbytes (for Android Arm64). + +Use static_cast and take your own steps to ensure type safety. + ## C There's a substantial chunk of legacy C code in WebRTC, and a lot of it is old diff --git a/infra/config/PRESUBMIT.py b/infra/config/PRESUBMIT.py index 6aa75c7df5..b1112937e3 100644 --- a/infra/config/PRESUBMIT.py +++ b/infra/config/PRESUBMIT.py @@ -6,6 +6,8 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
+USE_PYTHON3 = True + def CheckChangeOnUpload(input_api, output_api): return input_api.RunTests( diff --git a/infra/config/commit-queue.cfg b/infra/config/commit-queue.cfg index 1c703c7022..e691169425 100644 --- a/infra/config/commit-queue.cfg +++ b/infra/config/commit-queue.cfg @@ -46,12 +46,20 @@ config_groups { builders { name: "webrtc/try/android_arm_rel" } + builders { + name: "webrtc/try/android_arm_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/android_chromium_compile" } builders { name: "webrtc/try/android_compile_arm64_rel" } + builders { + name: "webrtc/try/android_compile_arm_dbg" + experiment_percentage: 100 + } builders { name: "webrtc/try/android_compile_arm_rel" } @@ -73,6 +81,10 @@ config_groups { builders { name: "webrtc/try/ios_compile_arm64_rel" } + builders { + name: "webrtc/try/ios_compile_arm64_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/ios_sim_x64_dbg_ios12" } @@ -121,6 +133,10 @@ config_groups { builders { name: "webrtc/try/linux_rel" } + builders { + name: "webrtc/try/linux_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/linux_tsan2" } @@ -154,6 +170,10 @@ config_groups { builders { name: "webrtc/try/mac_rel_m1" } + builders { + name: "webrtc/try/mac_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/presubmit" disable_reuse: true @@ -173,6 +193,10 @@ config_groups { builders { name: "webrtc/try/win_compile_x64_clang_rel" } + builders { + name: "webrtc/try/win_compile_x64_clang_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/win_compile_x86_clang_dbg" } @@ -219,9 +243,17 @@ config_groups { builders { name: "webrtc/try/android_arm_rel" } + builders { + name: "webrtc/try/android_arm_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/android_compile_arm64_rel" } + builders { + name: "webrtc/try/android_compile_arm_dbg" + experiment_percentage: 100 + } builders { name: 
"webrtc/try/android_compile_arm_rel" } @@ -243,6 +275,10 @@ config_groups { builders { name: "webrtc/try/ios_compile_arm64_rel" } + builders { + name: "webrtc/try/ios_compile_arm64_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/ios_sim_x64_dbg_ios12" } @@ -285,6 +321,10 @@ config_groups { builders { name: "webrtc/try/linux_rel" } + builders { + name: "webrtc/try/linux_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/linux_tsan2" } @@ -315,6 +355,10 @@ config_groups { builders { name: "webrtc/try/mac_rel_m1" } + builders { + name: "webrtc/try/mac_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/presubmit" disable_reuse: true @@ -328,6 +372,10 @@ config_groups { builders { name: "webrtc/try/win_compile_x64_clang_rel" } + builders { + name: "webrtc/try/win_compile_x64_clang_rel_reclient" + experiment_percentage: 100 + } builders { name: "webrtc/try/win_compile_x86_clang_dbg" } diff --git a/infra/config/config.star b/infra/config/config.star index 51112a043c..589f9e1440 100755 --- a/infra/config/config.star +++ b/infra/config/config.star @@ -58,11 +58,6 @@ def make_reclient_properties(instance, jobs = None): # useful when a failure can be safely ignored while fixing it without # blocking the LKGR finder on it. skipped_lkgr_bots = [ - "Android32 (M Nexus5X)(reclient)", - "Linux64 Release (reclient)", - "Mac64 Release (reclient)", - "Win64 Release (Clang)(reclient)", - "iOS64 Release (reclient)", "Fuchsia Release", ] @@ -405,8 +400,7 @@ luci.tree_closer( "compile", "gn", ], - # TODO(b/239908030, b/243594984): remove reclient builders after migration. 
- failed_step_regexp_exclude = ".*(\\(experimental\\)|\\(reclient\\)).*", + failed_step_regexp_exclude = ".*\\(experimental\\).*", ) # Recipe definitions: @@ -603,6 +597,7 @@ def try_builder( properties = properties or {} properties["builder_group"] = "tryserver.webrtc" properties.update(make_goma_properties(enable_ats = goma_enable_ats, jobs = goma_jobs)) + properties.update(make_reclient_properties("rbe-webrtc-untrusted")) if cq != None: luci.cq_tryjob_verifier(name, cq_group = "cq", **cq) if branch_cq: @@ -634,9 +629,13 @@ def perf_builder(name, perf_cat, **kwargs): properties = make_goma_properties() properties.update(make_reclient_properties("rbe-webrtc-trusted")) properties["builder_group"] = "client.webrtc.perf" + dimensions = {"pool": "luci.webrtc.perf", "os": "Linux", "cores": "2"} + if "Android" in name: + # Android perf testers require more performant bots to finish under 3 hours. + dimensions["cores"] = "8" return webrtc_builder( name = name, - dimensions = {"pool": "luci.webrtc.perf", "os": "Linux"}, + dimensions = dimensions, properties = properties, bucket = "perf", service_account = "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com", @@ -709,23 +708,26 @@ ios_builder, ios_try_job = normal_builder_factory( # Actual builder configuration: -android_builder("Android32 (M Nexus5X)(dbg)", "Android|arm|dbg") -android_try_job("android_compile_arm_dbg", cq = None) +android_builder("Android32 (dbg)", "Android|arm|dbg") +android_try_job("android_compile_arm_dbg", cq = {"experiment_percentage": 100}) android_try_job("android_arm_dbg") -android_builder("Android32 (M Nexus5X)", "Android|arm|rel") -android_builder("Android32 (M Nexus5X)(reclient)", "Android|arm|re") +android_builder("Android32", "Android|arm|rel") android_try_job("android_arm_rel") +android_try_job("android_arm_rel_reclient", cq = {"experiment_percentage": 100}) android_builder("Android32 Builder arm", "Android|arm|size", perf_cat = "Android|arm|Builder|", prioritized = True) 
android_try_job("android_compile_arm_rel") perf_builder("Perf Android32 (M Nexus5)", "Android|arm|Tester|M Nexus5", triggered_by = ["Android32 Builder arm"]) perf_builder("Perf Android32 (M AOSP Nexus6)", "Android|arm|Tester|M AOSP Nexus6", triggered_by = ["Android32 Builder arm"]) +perf_builder("Perf Android32 (O Pixel2)", "Android|arm|Tester|O Pixel2", triggered_by = ["Android32 Builder arm"]) +perf_builder("Perf Android32 (R Pixel5)", "Android|arm|Tester|R Pixel5", triggered_by = ["Android32 Builder arm"]) android_try_job("android_compile_arm64_dbg", cq = None) android_try_job("android_arm64_dbg", cq = None) -android_builder("Android64 (M Nexus5X)", "Android|arm64|rel") +android_builder("Android64", "Android|arm64|rel") android_try_job("android_arm64_rel") android_builder("Android64 Builder arm64", "Android|arm64|size", perf_cat = "Android|arm64|Builder|", prioritized = True) perf_builder("Perf Android64 (M Nexus5X)", "Android|arm64|Tester|M Nexus5X", triggered_by = ["Android64 Builder arm64"]) perf_builder("Perf Android64 (O Pixel2)", "Android|arm64|Tester|O Pixel2", triggered_by = ["Android64 Builder arm64"]) +perf_builder("Perf Android64 (R Pixel5)", "Android|arm64|Tester|R Pixel5", triggered_by = ["Android64 Builder arm64"]) android_try_job("android_compile_arm64_rel") android_builder("Android64 Builder x64 (dbg)", "Android|x64|dbg") android_try_job("android_compile_x64_dbg") @@ -741,8 +743,8 @@ android_try_job("android_chromium_compile", recipe = "chromium_trybot", branch_c ios_builder("iOS64 Debug", "iOS|arm64|dbg") ios_try_job("ios_compile_arm64_dbg") ios_builder("iOS64 Release", "iOS|arm64|rel") -ios_builder("iOS64 Release (reclient)", "iOS|arm64|re") ios_try_job("ios_compile_arm64_rel") +ios_try_job("ios_compile_arm64_rel_reclient", cq = {"experiment_percentage": 100}) ios_builder("iOS64 Sim Debug (iOS 14)", "iOS|x64|14") ios_try_job("ios_sim_x64_dbg_ios14") ios_builder("iOS64 Sim Debug (iOS 13)", "iOS|x64|13") @@ -760,8 +762,8 @@ linux_builder("Linux64 
Debug", "Linux|x64|dbg") linux_try_job("linux_dbg", cq = None) linux_try_job("linux_compile_dbg") linux_builder("Linux64 Release", "Linux|x64|rel") -linux_builder("Linux64 Release (reclient)", "Linux|x64|re") linux_try_job("linux_rel") +linux_try_job("linux_rel_reclient", cq = {"experiment_percentage": 100}) linux_builder("Linux64 Builder", "Linux|x64|size", perf_cat = "Linux|x64|Builder|", prioritized = True) linux_try_job("linux_compile_rel") perf_builder("Perf Linux Bionic", "Linux|x64|Tester|Bionic", triggered_by = ["Linux64 Builder"]) @@ -789,18 +791,23 @@ linux_builder("Linux (more configs)", "Linux|x64|more") linux_try_job("linux_more_configs") linux_try_job("linux_chromium_compile", recipe = "chromium_trybot", branch_cq = False) linux_try_job("linux_chromium_compile_dbg", recipe = "chromium_trybot", branch_cq = False) +linux_try_job("linux_coverage", cq = None) + +linux_builder("Fuchsia Builder", ci_cat = None, perf_cat = "Fuchsia|x64|Builder|", prioritized = True) linux_builder("Fuchsia Release", "Fuchsia|x64|rel") linux_try_job("fuchsia_rel", cq = None) +perf_builder("Perf Fuchsia", "Fuchsia|x64|Tester|", triggered_by = ["Fuchsia Builder"]) mac_builder("Mac64 Debug", "Mac|x64|dbg") mac_try_job("mac_dbg", cq = None) mac_try_job("mac_compile_dbg") mac_builder("Mac64 Release", "Mac|x64|rel") -mac_builder("Mac64 Release (reclient)", "Mac|x64|re") + mac_try_job("mac_rel") +mac_try_job("mac_rel_reclient", cq = {"experiment_percentage": 100}) mac_try_job("mac_compile_rel", cq = None) mac_builder("Mac64 Builder", ci_cat = None, perf_cat = "Mac|x64|Builder|") -mac_builder("MacArm64 Builder", ci_cat = None, perf_cat = "Mac|arm64|Builder") +mac_builder("MacArm64 Builder", ci_cat = None, perf_cat = "Mac|arm64|Builder|") perf_builder("Perf Mac 11", "Mac|x64|Tester|11", triggered_by = ["Mac64 Builder"]) perf_builder("Perf Mac M1 Arm64 12", "Mac|arm64|Tester|12", triggered_by = ["MacArm64 Builder"]) @@ -817,16 +824,15 @@ win_try_job("win_compile_x86_clang_dbg") 
win_builder("Win32 Release (Clang)", "Win Clang|x86|rel") win_try_job("win_x86_clang_rel") win_try_job("win_compile_x86_clang_rel", cq = None) -win_builder("Win32 Builder (Clang)", ci_cat = None, perf_cat = "Win|x86|Builder|") -perf_builder("Perf Win7", "Win|x86|Tester|7", triggered_by = ["Win32 Builder (Clang)"]) +win_builder("Win64 Builder (Clang)", ci_cat = None, perf_cat = "Win|x64|Builder|") +perf_builder("Perf Win 10", "Win|x64|Tester|10", triggered_by = ["Win64 Builder (Clang)"]) win_builder("Win64 Debug (Clang)", "Win Clang|x64|dbg") win_try_job("win_x64_clang_dbg", cq = None) -win_try_job("win_x64_clang_dbg_win10", cq = None) win_try_job("win_compile_x64_clang_dbg") win_builder("Win64 Release (Clang)", "Win Clang|x64|rel") -win_builder("Win64 Release (Clang)(reclient)", "Win Clang|x64|re") win_try_job("win_x64_clang_rel", cq = None) win_try_job("win_compile_x64_clang_rel") +win_try_job("win_compile_x64_clang_rel_reclient", cq = {"experiment_percentage": 100}) win_builder("Win64 ASan", "Win Clang|x64|asan") win_try_job("win_asan") win_builder("Win (more configs)", "Win Clang|x86|more") @@ -877,7 +883,8 @@ lkgr_config = { "WebRTC Chromium FYI Android Builder (dbg)", "WebRTC Chromium FYI Android Builder ARM64 (dbg)", "WebRTC Chromium FYI Android Builder", - "WebRTC Chromium FYI Android Tests (dbg) (M Nexus5X)", + "WebRTC Chromium FYI Android Tests (dbg)", + "WebRTC Chromium FYI Android Tests ARM64 (dbg)", "WebRTC Chromium FYI Linux Builder (dbg)", "WebRTC Chromium FYI Linux Builder", "WebRTC Chromium FYI Linux Tester", diff --git a/infra/config/cr-buildbucket.cfg b/infra/config/cr-buildbucket.cfg index 87dd3d699f..6d8852c5e7 100644 --- a/infra/config/cr-buildbucket.cfg +++ b/infra/config/cr-buildbucket.cfg @@ -15,7 +15,7 @@ buckets { } swarming { builders { - name: "Android32 (M Nexus5X)" + name: "Android32" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -65,57 +65,7 @@ buckets { } } 
builders { - name: "Android32 (M Nexus5X)(dbg)" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cpu:x86-64" - dimensions: "os:Linux" - dimensions: "pool:luci.webrtc.ci" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/goma": {' - ' "server_host": "goma.chromium.org",' - ' "use_luci_auth": true' - ' },' - ' "$build/reclient": {' - ' "instance": "rbe-webrtc-trusted",' - ' "metrics_project": "chromium-reclient-metrics"' - ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "client.webrtc",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 7200 - build_numbers: YES - service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "ci_test_results" - test_results {} - } - } - } - builders { - name: "Android32 (M Nexus5X)(reclient)" + name: "Android32 (dbg)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -315,7 +265,7 @@ buckets { } } builders { - name: "Android64 (M Nexus5X)" + name: "Android64" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -1230,57 +1180,6 @@ buckets { } } } - builders { - name: "Linux64 Release (reclient)" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cpu:x86-64" - dimensions: "inside_docker:0" - dimensions: "os:Linux" - dimensions: "pool:luci.webrtc.ci" - exe { - cipd_package: 
"infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/goma": {' - ' "server_host": "goma.chromium.org",' - ' "use_luci_auth": true' - ' },' - ' "$build/reclient": {' - ' "instance": "rbe-webrtc-trusted",' - ' "metrics_project": "chromium-reclient-metrics"' - ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "client.webrtc",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 7200 - build_numbers: YES - service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "ci_test_results" - test_results {} - } - } - } builders { name: "Mac Asan" swarming_host: "chromium-swarm.appspot.com" @@ -1431,56 +1330,6 @@ buckets { } } } - builders { - name: "Mac64 Release (reclient)" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cpu:x86-64" - dimensions: "os:Mac" - dimensions: "pool:luci.webrtc.ci" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/goma": {' - ' "server_host": "goma.chromium.org",' - ' "use_luci_auth": true' - ' },' - ' "$build/reclient": {' - ' "instance": "rbe-webrtc-trusted",' - ' "metrics_project": "chromium-reclient-metrics"' - ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "client.webrtc",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 7200 - build_numbers: YES - 
service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "ci_test_results" - test_results {} - } - } - } builders { name: "MacARM64 M1 Release" swarming_host: "chromium-swarm.appspot.com" @@ -1831,56 +1680,6 @@ buckets { } } } - builders { - name: "Win64 Release (Clang)(reclient)" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cpu:x86-64" - dimensions: "os:Windows" - dimensions: "pool:luci.webrtc.ci" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/goma": {' - ' "server_host": "goma.chromium.org",' - ' "use_luci_auth": true' - ' },' - ' "$build/reclient": {' - ' "instance": "rbe-webrtc-trusted",' - ' "metrics_project": "chromium-reclient-metrics"' - ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "client.webrtc",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 7200 - build_numbers: YES - service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "ci_test_results" - test_results {} - } - } - } builders { name: "iOS API Framework Builder" swarming_host: "chromium-swarm.appspot.com" @@ -2046,61 +1845,6 @@ buckets { } } } - builders { - name: "iOS64 Release (reclient)" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cpu:x86-64" - dimensions: "os:Mac" - dimensions: 
"pool:luci.webrtc.ci" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/goma": {' - ' "server_host": "goma.chromium.org",' - ' "use_luci_auth": true' - ' },' - ' "$build/reclient": {' - ' "instance": "rbe-webrtc-trusted",' - ' "metrics_project": "chromium-reclient-metrics"' - ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "client.webrtc",' - ' "recipe": "webrtc/standalone",' - ' "xcode_build_version": "13c100"' - '}' - priority: 30 - execution_timeout_secs: 7200 - caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" - } - build_numbers: YES - service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "ci_test_results" - test_results {} - } - } - } builders { name: "iOS64 Sim Debug (iOS 12)" swarming_host: "chromium-swarm.appspot.com" @@ -2345,7 +2089,8 @@ buckets { ' "WebRTC Chromium FYI Android Builder (dbg)",' ' "WebRTC Chromium FYI Android Builder ARM64 (dbg)",' ' "WebRTC Chromium FYI Android Builder",' - ' "WebRTC Chromium FYI Android Tests (dbg) (M Nexus5X)",' + ' "WebRTC Chromium FYI Android Tests (dbg)",' + ' "WebRTC Chromium FYI Android Tests ARM64 (dbg)",' ' "WebRTC Chromium FYI Linux Builder (dbg)",' ' "WebRTC Chromium FYI Linux Builder",' ' "WebRTC Chromium FYI Linux Tester",' @@ -2361,12 +2106,12 @@ buckets { ' },' ' "webrtc/ci": {' ' "builders": [' - ' "Android32 (M Nexus5X)",' - ' "Android32 (M Nexus5X)(dbg)",' + ' "Android32",' + ' "Android32 (dbg)",' ' "Android32 (more configs)",' ' "Android32 Builder x86",' ' "Android32 Builder x86 (dbg)",' - ' "Android64 (M Nexus5X)",' + ' "Android64",' ' 
"Android64 Builder x64 (dbg)",' ' "Linux (more configs)",' ' "Linux Asan",' @@ -2589,6 +2334,57 @@ buckets { } } } + builders { + name: "Fuchsia Builder" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "inside_docker:0" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.ci" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-trusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "client.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 29 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "perf_test_results" + test_results {} + } + } + } builders { name: "Linux64 Builder" swarming_host: "chromium-swarm.appspot.com" @@ -2744,6 +2540,7 @@ buckets { name: "Perf Android32 (M AOSP Nexus6)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2793,6 +2590,107 @@ buckets { name: "Perf Android32 (M Nexus5)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.perf" + exe { + cipd_package: 
"infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-trusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "client.webrtc.perf",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 10800 + build_numbers: YES + service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "perf_test_results" + test_results {} + } + } + } + builders { + name: "Perf Android32 (O Pixel2)" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.perf" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-trusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "client.webrtc.perf",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 10800 + build_numbers: YES + service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { 
+ key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "perf_test_results" + test_results {} + } + } + } + builders { + name: "Perf Android32 (R Pixel5)" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2842,6 +2740,7 @@ buckets { name: "Perf Android64 (M Nexus5X)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2891,6 +2790,107 @@ buckets { name: "Perf Android64 (O Pixel2)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.perf" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-trusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "client.webrtc.perf",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 10800 + build_numbers: YES + service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "perf_test_results" + test_results {} + } + } + } + builders { + name: "Perf Android64 (R Pixel5)" + swarming_host: 
"chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:8" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.perf" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-trusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "client.webrtc.perf",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 10800 + build_numbers: YES + service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "perf_test_results" + test_results {} + } + } + } + builders { + name: "Perf Fuchsia" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2940,6 +2940,7 @@ buckets { name: "Perf Linux Bionic" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2989,6 +2990,7 @@ buckets { name: "Perf Mac 11" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -3038,6 +3040,7 @@ buckets { name: "Perf Mac M1 Arm64 12" swarming_host: "chromium-swarm.appspot.com" swarming_tags: 
"vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -3084,9 +3087,10 @@ buckets { } } builders { - name: "Perf Win7" + name: "Perf Win 10" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" + dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -3133,7 +3137,7 @@ buckets { } } builders { - name: "Win32 Builder (Clang)" + name: "Win64 Builder (Clang)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" dimensions: "cpu:x86-64" @@ -3216,6 +3220,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3262,6 +3270,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3308,6 +3320,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3354,6 +3370,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3400,6 +3420,60 @@ buckets { ' "server_host": "goma.chromium.org",' ' 
"use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "android_arm_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3446,6 +3520,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3492,6 +3570,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": 
"chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3538,6 +3620,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3584,6 +3670,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3630,6 +3720,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3676,6 +3770,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3722,6 +3820,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3768,6 +3870,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' 
"$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3814,6 +3920,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3861,6 +3971,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3907,6 +4021,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3958,6 +4076,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4009,6 +4131,65 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone",' + ' "xcode_build_version": "13c100"' + '}' + priority: 30 + execution_timeout_secs: 7200 + caches { + name: "xcode_ios_13c100" + path: 
"xcode_ios_13c100.app" + } + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "ios_compile_arm64_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "os:Mac" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4060,6 +4241,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4111,6 +4296,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4162,6 +4351,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' 
' "column_keys": [],' ' "grouping_keys": [' @@ -4214,6 +4407,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4261,6 +4458,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4308,6 +4509,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4355,6 +4560,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4402,6 +4611,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4449,6 +4662,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4496,6 
+4713,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4543,6 +4764,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4590,6 +4815,61 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "linux_coverage" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "inside_docker:0" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' 
"$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4637,6 +4917,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4684,6 +4968,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4731,6 +5019,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4778,6 +5070,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4825,6 +5121,61 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + 
execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "linux_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "inside_docker:0" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4872,6 +5223,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4919,6 +5274,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4966,6 +5325,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' 
"$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5013,6 +5376,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5060,6 +5427,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5106,6 +5477,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5152,6 +5527,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5198,6 +5577,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5244,6 +5627,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' 
"column_keys": [],' ' "grouping_keys": [' @@ -5290,6 +5677,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5336,6 +5727,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5382,6 +5777,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5428,6 +5827,60 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "mac_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: 
"cpu:x86-64" + dimensions: "os:Mac" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5475,6 +5928,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5524,6 +5981,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5572,6 +6033,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5620,6 +6085,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5667,6 +6136,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' 
"instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5714,6 +6187,61 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: "win_compile_x64_clang_rel_reclient" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "cpu:x86-64" + dimensions: "os:Windows" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/goma": {' + ' "enable_ats": false,' + ' "server_host": "goma.chromium.org",' + ' "use_luci_auth": true' + ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5761,6 +6289,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": 
"chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5808,6 +6340,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5855,52 +6391,9 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "tryserver.webrtc",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 7200 - build_numbers: YES - service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "try_test_results" - test_results {} - } - } - } - builders { - name: "win_x64_clang_dbg_win10" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cpu:x86-64" - dimensions: "os:Windows" - dimensions: "pool:luci.webrtc.try" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/goma": {' - ' "enable_ats": false,' - ' "server_host": "goma.chromium.org",' - ' "use_luci_auth": true' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' @@ -5949,6 +6442,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": 
{' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5996,6 +6493,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -6043,6 +6544,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -6090,6 +6595,10 @@ buckets { ' "server_host": "goma.chromium.org",' ' "use_luci_auth": true' ' },' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' diff --git a/infra/config/luci-milo.cfg b/infra/config/luci-milo.cfg index 3ed012bcd3..d8cb1074a0 100644 --- a/infra/config/luci-milo.cfg +++ b/infra/config/luci-milo.cfg @@ -12,27 +12,22 @@ consoles { refs: "regexp:refs/heads/main" manifest_name: "REVISION" builders { - name: "buildbucket/luci.webrtc.ci/Android32 (M Nexus5X)(dbg)" + name: "buildbucket/luci.webrtc.ci/Android32 (dbg)" category: "Android|arm" short_name: "dbg" } builders { - name: "buildbucket/luci.webrtc.ci/Android32 (M Nexus5X)" + name: "buildbucket/luci.webrtc.ci/Android32" category: "Android|arm" short_name: "rel" } - builders { - name: "buildbucket/luci.webrtc.ci/Android32 (M Nexus5X)(reclient)" - category: "Android|arm" - short_name: "re" - } builders { name: "buildbucket/luci.webrtc.perf/Android32 Builder arm" category: "Android|arm" short_name: 
"size" } builders { - name: "buildbucket/luci.webrtc.ci/Android64 (M Nexus5X)" + name: "buildbucket/luci.webrtc.ci/Android64" category: "Android|arm64" short_name: "rel" } @@ -71,11 +66,6 @@ consoles { category: "iOS|arm64" short_name: "rel" } - builders { - name: "buildbucket/luci.webrtc.ci/iOS64 Release (reclient)" - category: "iOS|arm64" - short_name: "re" - } builders { name: "buildbucket/luci.webrtc.ci/iOS64 Sim Debug (iOS 14)" category: "iOS|x64" @@ -116,11 +106,6 @@ consoles { category: "Linux|x64" short_name: "rel" } - builders { - name: "buildbucket/luci.webrtc.ci/Linux64 Release (reclient)" - category: "Linux|x64" - short_name: "re" - } builders { name: "buildbucket/luci.webrtc.perf/Linux64 Builder" category: "Linux|x64" @@ -196,11 +181,6 @@ consoles { category: "Mac|x64" short_name: "rel" } - builders { - name: "buildbucket/luci.webrtc.ci/Mac64 Release (reclient)" - category: "Mac|x64" - short_name: "re" - } builders { name: "buildbucket/luci.webrtc.ci/Mac Asan" category: "Mac|x64" @@ -231,11 +211,6 @@ consoles { category: "Win Clang|x64" short_name: "rel" } - builders { - name: "buildbucket/luci.webrtc.ci/Win64 Release (Clang)(reclient)" - category: "Win Clang|x64" - short_name: "re" - } builders { name: "buildbucket/luci.webrtc.ci/Win64 ASan" category: "Win Clang|x64" @@ -329,6 +304,16 @@ consoles { category: "Android|arm|Tester" short_name: "M AOSP Nexus6" } + builders { + name: "buildbucket/luci.webrtc.perf/Perf Android32 (O Pixel2)" + category: "Android|arm|Tester" + short_name: "O Pixel2" + } + builders { + name: "buildbucket/luci.webrtc.perf/Perf Android32 (R Pixel5)" + category: "Android|arm|Tester" + short_name: "R Pixel5" + } builders { name: "buildbucket/luci.webrtc.perf/Android64 Builder arm64" category: "Android|arm64|Builder" @@ -343,6 +328,11 @@ consoles { category: "Android|arm64|Tester" short_name: "O Pixel2" } + builders { + name: "buildbucket/luci.webrtc.perf/Perf Android64 (R Pixel5)" + category: "Android|arm64|Tester" + short_name: 
"R Pixel5" + } builders { name: "buildbucket/luci.webrtc.perf/Linux64 Builder" category: "Linux|x64|Builder" @@ -352,14 +342,21 @@ consoles { category: "Linux|x64|Tester" short_name: "Bionic" } + builders { + name: "buildbucket/luci.webrtc.perf/Fuchsia Builder" + category: "Fuchsia|x64|Builder" + } + builders { + name: "buildbucket/luci.webrtc.perf/Perf Fuchsia" + category: "Fuchsia|x64|Tester" + } builders { name: "buildbucket/luci.webrtc.perf/Mac64 Builder" category: "Mac|x64|Builder" } builders { name: "buildbucket/luci.webrtc.perf/MacArm64 Builder" - category: "Mac|arm64" - short_name: "Builder" + category: "Mac|arm64|Builder" } builders { name: "buildbucket/luci.webrtc.perf/Perf Mac 11" @@ -372,13 +369,13 @@ consoles { short_name: "12" } builders { - name: "buildbucket/luci.webrtc.perf/Win32 Builder (Clang)" - category: "Win|x86|Builder" + name: "buildbucket/luci.webrtc.perf/Win64 Builder (Clang)" + category: "Win|x64|Builder" } builders { - name: "buildbucket/luci.webrtc.perf/Perf Win7" - category: "Win|x86|Tester" - short_name: "7" + name: "buildbucket/luci.webrtc.perf/Perf Win 10" + category: "Win|x64|Tester" + short_name: "10" } header { links { @@ -468,6 +465,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/android_arm_rel" } + builders { + name: "buildbucket/luci.webrtc.try/android_arm_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/android_compile_arm_rel" } @@ -507,6 +507,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/ios_compile_arm64_rel" } + builders { + name: "buildbucket/luci.webrtc.try/ios_compile_arm64_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/ios_sim_x64_dbg_ios14" } @@ -534,6 +537,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/linux_rel" } + builders { + name: "buildbucket/luci.webrtc.try/linux_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/linux_compile_rel" } @@ -576,6 +582,9 @@ consoles { builders { name: 
"buildbucket/luci.webrtc.try/linux_chromium_compile_dbg" } + builders { + name: "buildbucket/luci.webrtc.try/linux_coverage" + } builders { name: "buildbucket/luci.webrtc.try/fuchsia_rel" } @@ -588,6 +597,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/mac_rel" } + builders { + name: "buildbucket/luci.webrtc.try/mac_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/mac_compile_rel" } @@ -618,9 +630,6 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/win_x64_clang_dbg" } - builders { - name: "buildbucket/luci.webrtc.try/win_x64_clang_dbg_win10" - } builders { name: "buildbucket/luci.webrtc.try/win_compile_x64_clang_dbg" } @@ -630,6 +639,9 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/win_compile_x64_clang_rel" } + builders { + name: "buildbucket/luci.webrtc.try/win_compile_x64_clang_rel_reclient" + } builders { name: "buildbucket/luci.webrtc.try/win_asan" } diff --git a/infra/config/luci-notify.cfg b/infra/config/luci-notify.cfg index c429301fba..c9038914b2 100644 --- a/infra/config/luci-notify.cfg +++ b/infra/config/luci-notify.cfg @@ -22,13 +22,13 @@ notifiers { } builders { bucket: "ci" - name: "Android32 (M Nexus5X)" + name: "Android32" repository: "https://webrtc.googlesource.com/src" } tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -49,35 +49,13 @@ notifiers { } builders { bucket: "ci" - name: "Android32 (M Nexus5X)(dbg)" + name: "Android32 (dbg)" repository: "https://webrtc.googlesource.com/src" } tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: 
".*(\\(experimental\\)|\\(reclient\\)).*" - } -} -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - notifications { - on_new_status: FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "build_failure" - notify_blamelist {} - } - builders { - bucket: "ci" - name: "Android32 (M Nexus5X)(reclient)" - repository: "https://webrtc.googlesource.com/src" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -104,7 +82,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -131,7 +109,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -158,7 +136,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -179,13 +157,13 @@ notifiers { } builders { bucket: "ci" - name: "Android64 (M Nexus5X)" + name: "Android64" repository: "https://webrtc.googlesource.com/src" } tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - 
failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -212,7 +190,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -261,7 +239,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -288,7 +266,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -315,7 +293,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -342,7 +320,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -369,7 +347,7 @@ notifiers { tree_closers { tree_status_host: 
"webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -396,7 +374,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -423,7 +401,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -450,7 +428,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -477,7 +455,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -504,7 +482,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: 
".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -531,7 +509,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -558,7 +536,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -585,7 +563,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -612,7 +590,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -639,29 +617,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" - } -} -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - 
notifications { - on_new_status: FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "build_failure" - notify_blamelist {} - } - builders { - bucket: "ci" - name: "Linux64 Release (reclient)" - repository: "https://webrtc.googlesource.com/src" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -688,7 +644,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -715,7 +671,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -742,29 +698,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" - } -} -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - notifications { - on_new_status: FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "build_failure" - notify_blamelist {} - } - builders { - bucket: "ci" - name: "Mac64 Release (reclient)" - repository: "https://webrtc.googlesource.com/src" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -791,7 +725,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: 
"bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -818,7 +752,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -845,7 +779,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -872,7 +806,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -899,7 +833,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -926,7 +860,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: 
".*\\(experimental\\).*" } } notifiers { @@ -953,29 +887,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" - } -} -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - notifications { - on_new_status: FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "build_failure" - notify_blamelist {} - } - builders { - bucket: "ci" - name: "Win64 Release (Clang)(reclient)" - repository: "https://webrtc.googlesource.com/src" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1002,7 +914,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1029,7 +941,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1056,29 +968,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" - } -} -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: 
"infra_failure" - } - notifications { - on_new_status: FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "build_failure" - notify_blamelist {} - } - builders { - bucket: "ci" - name: "iOS64 Release (reclient)" - repository: "https://webrtc.googlesource.com/src" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1105,7 +995,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1132,7 +1022,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1159,7 +1049,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1228,7 +1118,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1255,7 +1145,34 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract 
build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" + } +} +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + notifications { + on_new_status: FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "build_failure" + notify_blamelist {} + } + builders { + bucket: "perf" + name: "Fuchsia Builder" + repository: "https://webrtc.googlesource.com/src" + } + tree_closers { + tree_status_host: "webrtc-status.appspot.com" + failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1282,7 +1199,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1309,7 +1226,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1336,7 +1253,7 @@ notifiers { tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1383,6 +1300,50 @@ notifiers { repository: 
"https://webrtc.googlesource.com/src" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + notifications { + on_new_status: FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "build_failure" + notify_blamelist {} + } + builders { + bucket: "perf" + name: "Perf Android32 (O Pixel2)" + repository: "https://webrtc.googlesource.com/src" + } +} +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + notifications { + on_new_status: FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "build_failure" + notify_blamelist {} + } + builders { + bucket: "perf" + name: "Perf Android32 (R Pixel5)" + repository: "https://webrtc.googlesource.com/src" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1427,6 +1388,50 @@ notifiers { repository: "https://webrtc.googlesource.com/src" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + notifications { + on_new_status: FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "build_failure" + notify_blamelist {} + } + builders { + bucket: "perf" + name: "Perf Android64 (R Pixel5)" + repository: "https://webrtc.googlesource.com/src" + } +} +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + notifications { + on_new_status: FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "build_failure" + notify_blamelist {} + } + builders { + bucket: "perf" + name: "Perf Fuchsia" + repository: "https://webrtc.googlesource.com/src" + } +} notifiers { notifications { on_new_status: 
INFRA_FAILURE @@ -1511,7 +1516,7 @@ notifiers { } builders { bucket: "perf" - name: "Perf Win7" + name: "Perf Win 10" repository: "https://webrtc.googlesource.com/src" } } @@ -1533,13 +1538,13 @@ notifiers { } builders { bucket: "perf" - name: "Win32 Builder (Clang)" + name: "Win64 Builder (Clang)" repository: "https://webrtc.googlesource.com/src" } tree_closers { tree_status_host: "webrtc-status.appspot.com" failed_step_regexp: "bot_update|compile|gclient runhooks|runhooks|update|extract build|cleanup_temp|taskkill|compile|gn" - failed_step_regexp_exclude: ".*(\\(experimental\\)|\\(reclient\\)).*" + failed_step_regexp_exclude: ".*\\(experimental\\).*" } } notifiers { @@ -1607,6 +1612,19 @@ notifiers { name: "android_arm_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "android_arm_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1776,6 +1794,19 @@ notifiers { name: "ios_compile_arm64_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "ios_compile_arm64_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1932,6 +1963,19 @@ notifiers { name: "linux_compile_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "linux_coverage" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1997,6 +2041,19 @@ notifiers { name: "linux_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: 
"linux_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -2166,6 +2223,19 @@ notifiers { name: "mac_rel_m1" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "mac_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -2244,6 +2314,19 @@ notifiers { name: "win_compile_x64_clang_rel" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "win_compile_x64_clang_rel_reclient" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -2283,19 +2366,6 @@ notifiers { name: "win_x64_clang_dbg" } } -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - builders { - bucket: "try" - name: "win_x64_clang_dbg_win10" - } -} notifiers { notifications { on_new_status: INFRA_FAILURE diff --git a/infra/config/luci-scheduler.cfg b/infra/config/luci-scheduler.cfg index 1234aca9fa..b1158747b3 100644 --- a/infra/config/luci-scheduler.cfg +++ b/infra/config/luci-scheduler.cfg @@ -5,33 +5,23 @@ # https://luci-config.appspot.com/schemas/projects:luci-scheduler.cfg job { - id: "Android32 (M Nexus5X)" + id: "Android32" realm: "ci" acl_sets: "ci" buildbucket { server: "cr-buildbucket.appspot.com" bucket: "ci" - builder: "Android32 (M Nexus5X)" + builder: "Android32" } } job { - id: "Android32 (M Nexus5X)(dbg)" + id: "Android32 (dbg)" realm: "ci" acl_sets: "ci" buildbucket { server: "cr-buildbucket.appspot.com" bucket: "ci" - builder: "Android32 (M Nexus5X)(dbg)" - } -} -job { - id: "Android32 (M Nexus5X)(reclient)" - realm: "ci" - acl_sets: "ci" - buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "ci" - builder: "Android32 
(M Nexus5X)(reclient)" + builder: "Android32 (dbg)" } } job { @@ -80,13 +70,13 @@ job { } } job { - id: "Android64 (M Nexus5X)" + id: "Android64" realm: "ci" acl_sets: "ci" buildbucket { server: "cr-buildbucket.appspot.com" bucket: "ci" - builder: "Android64 (M Nexus5X)" + builder: "Android64" } } job { @@ -125,6 +115,21 @@ job { builder: "Auto-roll - WebRTC DEPS" } } +job { + id: "Fuchsia Builder" + realm: "perf" + acl_sets: "perf" + triggering_policy { + kind: GREEDY_BATCHING + max_concurrent_invocations: 3 + max_batch_size: 1 + } + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Fuchsia Builder" + } +} job { id: "Fuchsia Release" realm: "ci" @@ -300,16 +305,6 @@ job { builder: "Linux64 Release (Libfuzzer)" } } -job { - id: "Linux64 Release (reclient)" - realm: "ci" - acl_sets: "ci" - buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "ci" - builder: "Linux64 Release (reclient)" - } -} job { id: "Mac Asan" realm: "ci" @@ -350,16 +345,6 @@ job { builder: "Mac64 Release" } } -job { - id: "Mac64 Release (reclient)" - realm: "ci" - acl_sets: "ci" - buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "ci" - builder: "Mac64 Release (reclient)" - } -} job { id: "MacARM64 M1 Release" realm: "ci" @@ -416,6 +401,42 @@ job { builder: "Perf Android32 (M Nexus5)" } } +job { + id: "Perf Android32 (O Pixel2)" + realm: "perf" + acls { + role: TRIGGERER + granted_to: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + } + acl_sets: "perf" + triggering_policy { + kind: LOGARITHMIC_BATCHING + log_base: 1.7 + } + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Perf Android32 (O Pixel2)" + } +} +job { + id: "Perf Android32 (R Pixel5)" + realm: "perf" + acls { + role: TRIGGERER + granted_to: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + } + acl_sets: "perf" + triggering_policy { + kind: LOGARITHMIC_BATCHING + log_base: 1.7 + } + buildbucket { + server: 
"cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Perf Android32 (R Pixel5)" + } +} job { id: "Perf Android64 (M Nexus5X)" realm: "perf" @@ -452,6 +473,42 @@ job { builder: "Perf Android64 (O Pixel2)" } } +job { + id: "Perf Android64 (R Pixel5)" + realm: "perf" + acls { + role: TRIGGERER + granted_to: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + } + acl_sets: "perf" + triggering_policy { + kind: LOGARITHMIC_BATCHING + log_base: 1.7 + } + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Perf Android64 (R Pixel5)" + } +} +job { + id: "Perf Fuchsia" + realm: "perf" + acls { + role: TRIGGERER + granted_to: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + } + acl_sets: "perf" + triggering_policy { + kind: LOGARITHMIC_BATCHING + log_base: 1.7 + } + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Perf Fuchsia" + } +} job { id: "Perf Linux Bionic" realm: "perf" @@ -507,7 +564,7 @@ job { } } job { - id: "Perf Win7" + id: "Perf Win 10" realm: "perf" acls { role: TRIGGERER @@ -521,7 +578,7 @@ job { buildbucket { server: "cr-buildbucket.appspot.com" bucket: "perf" - builder: "Perf Win7" + builder: "Perf Win 10" } } job { @@ -556,16 +613,6 @@ job { builder: "Win (more configs)" } } -job { - id: "Win32 Builder (Clang)" - realm: "perf" - acl_sets: "perf" - buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "perf" - builder: "Win32 Builder (Clang)" - } -} job { id: "Win32 Debug (Clang)" realm: "ci" @@ -596,6 +643,16 @@ job { builder: "Win64 ASan" } } +job { + id: "Win64 Builder (Clang)" + realm: "perf" + acl_sets: "perf" + buildbucket { + server: "cr-buildbucket.appspot.com" + bucket: "perf" + builder: "Win64 Builder (Clang)" + } +} job { id: "Win64 Debug (Clang)" realm: "ci" @@ -616,16 +673,6 @@ job { builder: "Win64 Release (Clang)" } } -job { - id: "Win64 Release (Clang)(reclient)" - realm: "ci" - acl_sets: "ci" - buildbucket { - server: 
"cr-buildbucket.appspot.com" - bucket: "ci" - builder: "Win64 Release (Clang)(reclient)" - } -} job { id: "iOS API Framework Builder" realm: "ci" @@ -661,16 +708,6 @@ job { builder: "iOS64 Release" } } -job { - id: "iOS64 Release (reclient)" - realm: "ci" - acl_sets: "ci" - buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "ci" - builder: "iOS64 Release (reclient)" - } -} job { id: "iOS64 Sim Debug (iOS 12)" realm: "ci" @@ -705,13 +742,12 @@ trigger { id: "webrtc-gitiles-trigger-main" realm: "ci" acl_sets: "ci" - triggers: "Android32 (M Nexus5X)" - triggers: "Android32 (M Nexus5X)(dbg)" - triggers: "Android32 (M Nexus5X)(reclient)" + triggers: "Android32" + triggers: "Android32 (dbg)" triggers: "Android32 (more configs)" triggers: "Android32 Builder x86" triggers: "Android32 Builder x86 (dbg)" - triggers: "Android64 (M Nexus5X)" + triggers: "Android64" triggers: "Android64 Builder x64 (dbg)" triggers: "Fuchsia Release" triggers: "Linux (more configs)" @@ -729,11 +765,9 @@ trigger { triggers: "Linux64 Release" triggers: "Linux64 Release (ARM)" triggers: "Linux64 Release (Libfuzzer)" - triggers: "Linux64 Release (reclient)" triggers: "Mac Asan" triggers: "Mac64 Debug" triggers: "Mac64 Release" - triggers: "Mac64 Release (reclient)" triggers: "MacARM64 M1 Release" triggers: "Win (more configs)" triggers: "Win32 Debug (Clang)" @@ -741,20 +775,19 @@ trigger { triggers: "Win64 ASan" triggers: "Win64 Debug (Clang)" triggers: "Win64 Release (Clang)" - triggers: "Win64 Release (Clang)(reclient)" triggers: "iOS API Framework Builder" triggers: "iOS64 Debug" triggers: "iOS64 Release" - triggers: "iOS64 Release (reclient)" triggers: "iOS64 Sim Debug (iOS 12)" triggers: "iOS64 Sim Debug (iOS 13)" triggers: "iOS64 Sim Debug (iOS 14)" triggers: "Android32 Builder arm" triggers: "Android64 Builder arm64" + triggers: "Fuchsia Builder" triggers: "Linux64 Builder" triggers: "Mac64 Builder" triggers: "MacArm64 Builder" - triggers: "Win32 Builder (Clang)" + triggers: 
"Win64 Builder (Clang)" gitiles { repo: "https://webrtc.googlesource.com/src" refs: "regexp:refs/heads/main" diff --git a/infra/config/project.cfg b/infra/config/project.cfg index 64b05a2adc..d35a697fd0 100644 --- a/infra/config/project.cfg +++ b/infra/config/project.cfg @@ -7,7 +7,7 @@ name: "webrtc" access: "group:all" lucicfg { - version: "1.32.1" + version: "1.35.2" package_dir: "." config_dir: "." entry_point: "config.star" diff --git a/infra/config/realms.cfg b/infra/config/realms.cfg index 5e05e28ee9..c39fbdff61 100644 --- a/infra/config/realms.cfg +++ b/infra/config/realms.cfg @@ -113,12 +113,16 @@ realms { attribute: "scheduler.job.name" values: "Perf Android32 (M AOSP Nexus6)" values: "Perf Android32 (M Nexus5)" + values: "Perf Android32 (O Pixel2)" + values: "Perf Android32 (R Pixel5)" values: "Perf Android64 (M Nexus5X)" values: "Perf Android64 (O Pixel2)" + values: "Perf Android64 (R Pixel5)" + values: "Perf Fuchsia" values: "Perf Linux Bionic" values: "Perf Mac 11" values: "Perf Mac M1 Arm64 12" - values: "Perf Win7" + values: "Perf Win 10" } } } diff --git a/infra/specs/PRESUBMIT.py b/infra/specs/PRESUBMIT.py index 306a3f1952..f064cacaf8 100644 --- a/infra/specs/PRESUBMIT.py +++ b/infra/specs/PRESUBMIT.py @@ -9,6 +9,7 @@ # be found in the AUTHORS file in the root of the source tree. import os +import shlex # Runs PRESUBMIT.py in py3 mode by git cl presubmit. 
USE_PYTHON3 = True @@ -28,7 +29,8 @@ def CheckPatchFormatted(input_api, output_api): for f in affected_files: cmd = ['yapf', '-i', f.AbsoluteLocalPath()] if input_api.subprocess.call(cmd): - results.append(output_api.PresubmitError('Error calling "' + cmd + '"')) + results.append( + output_api.PresubmitError('Error calling "' + shlex.join(cmd) + '"')) if _HasLocalChanges(input_api): msg = ('Diff found after running "yapf -i" on modified .pyl files.\n' diff --git a/infra/specs/client.webrtc.json b/infra/specs/client.webrtc.json index 4f017933bf..d5608072b2 100644 --- a/infra/specs/client.webrtc.json +++ b/infra/specs/client.webrtc.json @@ -1,7 +1,7 @@ { "AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {}, "AAAAA2 See generate_buildbot_json.py to make changes": {}, - "Android32 (M Nexus5X)": { + "Android32": { "gtest_tests": [ { "merge": { @@ -14,21 +14,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -44,21 +37,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", 
"test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -74,21 +60,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -104,21 +83,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -134,21 +106,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -164,21 +129,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - 
"cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -194,21 +152,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -225,21 +176,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -256,21 +200,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", 
"os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -287,21 +224,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -317,21 +247,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -347,21 +270,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -377,21 +293,14 @@ }, "swarming": { 
"can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -408,21 +317,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -438,21 +340,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -469,21 +364,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - 
"device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -499,21 +387,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -529,21 +410,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -559,21 +433,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": 
"video_engine_tests", @@ -590,21 +457,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -620,21 +480,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -663,7 +516,7 @@ } ] }, - "Android32 (M Nexus5X)(dbg)": { + "Android32 (dbg)": { "gtest_tests": [ { "merge": { @@ -676,21 +529,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -706,21 +552,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - 
"cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -736,21 +575,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -766,21 +598,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -796,21 +621,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -826,21 +644,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -856,21 +667,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -887,21 +691,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -918,21 +715,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -949,21 +739,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -979,21 +762,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1009,21 +785,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1039,21 +808,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1070,21 +832,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1100,21 +855,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": 
"bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1131,21 +879,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1161,21 +902,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1191,21 +925,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" 
@@ -1221,21 +948,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1252,21 +972,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -1282,21 +995,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -1338,21 +1044,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], 
"dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -1363,7 +1062,7 @@ "Android32 Builder arm": {}, "Android32 Builder x86": {}, "Android32 Builder x86 (dbg)": {}, - "Android64 (M Nexus5X)": { + "Android64": { "gtest_tests": [ { "merge": { @@ -1376,21 +1075,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -1406,21 +1098,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -1436,21 +1121,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": 
[ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -1466,21 +1144,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -1496,21 +1167,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -1526,21 +1190,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + 
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -1556,21 +1213,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -1587,21 +1237,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -1618,21 +1261,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -1649,21 +1285,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - 
"location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -1679,21 +1308,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1709,21 +1331,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1739,21 +1354,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": 
"walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1770,21 +1378,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1800,21 +1401,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1831,21 +1425,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1861,21 +1448,14 @@ }, "swarming": { 
"can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1891,21 +1471,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -1921,21 +1494,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1952,21 +1518,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { 
"android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -1982,21 +1541,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2025,7 +1577,7 @@ } ] }, - "Android64 (M Nexus5X)(dbg)": { + "Android64 (dbg)": { "gtest_tests": [ { "merge": { @@ -2038,21 +1590,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -2068,21 +1613,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + 
"device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -2098,21 +1636,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -2128,21 +1659,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -2158,21 +1682,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -2188,21 +1705,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -2218,21 +1728,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -2249,21 +1752,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -2280,21 +1776,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -2311,21 +1800,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -2341,21 +1823,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -2371,21 +1846,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + 
"device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -2401,21 +1869,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -2432,21 +1893,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -2462,21 +1916,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -2493,21 +1940,14 @@ }, "swarming": { 
"can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -2523,21 +1963,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -2553,21 +1986,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -2583,21 +2009,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -2614,21 +2033,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -2644,21 +2056,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2689,6 +2094,7 @@ }, "Android64 Builder arm64": {}, "Android64 Builder x64 (dbg)": {}, + "Fuchsia Builder": {}, "Fuchsia Release": { "isolated_scripts": [ { @@ -2699,7 +2105,8 @@ }, "name": "audio_decoder_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2720,7 +2127,8 @@ }, "name": "common_audio_unittests", "resultdb": { - "result_format": 
"json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2741,7 +2149,8 @@ }, "name": "common_video_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2762,7 +2171,8 @@ }, "name": "dcsctp_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2786,7 +2196,8 @@ }, "name": "low_bandwidth_audio_test", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2799,72 +2210,6 @@ }, "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" }, - { - "isolate_name": "modules_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "modules_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ], - "shards": 2 - }, - "test_id_prefix": "ninja://modules:modules_tests/" - }, - { - "isolate_name": "modules_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "modules_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ], - "shards": 6 - }, - "test_id_prefix": "ninja://modules:modules_unittests/" - }, - { - "isolate_name": "peerconnection_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peerconnection_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - 
"can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ], - "shards": 4 - }, - "test_id_prefix": "ninja://pc:peerconnection_unittests/" - }, { "isolate_name": "rtc_media_unittests", "merge": { @@ -2873,7 +2218,8 @@ }, "name": "rtc_media_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2894,7 +2240,8 @@ }, "name": "rtc_pc_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -2907,70 +2254,6 @@ }, "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, - { - "isolate_name": "rtc_stats_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_stats_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" - }, - { - "isolate_name": "rtc_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ], - "shards": 6 - }, - "test_id_prefix": "ninja://:rtc_unittests/" - }, - { - "isolate_name": "slow_peer_connection_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "slow_peer_connection_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - 
"test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" - }, { "isolate_name": "svc_tests", "merge": { @@ -2979,7 +2262,8 @@ }, "name": "svc_tests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -3001,7 +2285,8 @@ }, "name": "system_wrappers_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -3014,48 +2299,6 @@ }, "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, - { - "isolate_name": "test_support_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "test_support_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://test:test_support_unittests/" - }, - { - "isolate_name": "tools_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "tools_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://rtc_tools:tools_unittests/" - }, { "isolate_name": "video_engine_tests", "merge": { @@ -3064,7 +2307,8 @@ }, "name": "video_engine_tests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -3086,7 +2330,8 @@ }, "name": "voip_unittests", "resultdb": { - "result_format": "json" + "enable": true, + "has_native_resultdb_integration": true }, "swarming": { "can_use_on_swarming_builders": true, @@ -3098,27 +2343,6 @@ ] }, "test_id_prefix": 
"ninja://:voip_unittests/" - }, - { - "isolate_name": "webrtc_nonparallel_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "webrtc_nonparallel_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, @@ -3409,6 +2633,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -4708,6 +3953,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -5141,6 +4407,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + 
"result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -6443,6 +5730,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -6877,6 +6185,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -8914,7 +8243,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8935,7 +8264,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8956,7 +8285,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -8977,7 +8306,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" 
} ] }, @@ -9001,7 +8330,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9022,7 +8351,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 2 @@ -9044,7 +8373,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 6 @@ -9066,7 +8395,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 4 @@ -9088,7 +8417,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9109,7 +8438,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9130,7 +8459,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9151,7 +8480,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 6 @@ -9173,7 +8502,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9194,7 +8523,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 4 @@ -9216,7 +8545,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9237,7 +8566,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9258,34 +8587,12 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, - { - "isolate_name": "video_capture_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10", - "pool": 
"WebRTC-baremetal" - } - ] - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, { "isolate_name": "video_engine_tests", "merge": { @@ -9301,7 +8608,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ], "shards": 4 @@ -9323,7 +8630,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9344,7 +8651,7 @@ "dimension_sets": [ { "cpu": "x86-64", - "os": "Windows-10" + "os": "Windows-10-19042" } ] }, @@ -9785,8 +9092,894 @@ } ] }, - "Win64 Debug (Clang)": {}, - "Win64 Release (Clang)": {}, + "Win64 Debug (Clang)": { + "isolated_scripts": [ + { + "isolate_name": "audio_decoder_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "isolate_name": "common_audio_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "isolate_name": "common_video_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": 
"ninja://common_video:common_video_unittests/" + }, + { + "isolate_name": "dcsctp_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "args": [ + "--quick" + ], + "isolate_name": "low_bandwidth_audio_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "low_bandwidth_audio_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "modules_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 2 + }, + "test_id_prefix": "ninja://modules:modules_tests/" + }, + { + "isolate_name": "modules_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://modules:modules_unittests/" + }, + { + "isolate_name": "peerconnection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": 
"peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + }, + { + "isolate_name": "rtc_media_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "rtc_stats_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "isolate_name": "rtc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://:rtc_unittests/" + 
}, + { + "isolate_name": "slow_peer_connection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "slow_peer_connection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" + }, + { + "isolate_name": "svc_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "test_support_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "test_support_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://test:test_support_unittests/" + }, + { + "isolate_name": "tools_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "tools_unittests", + "resultdb": { + "result_format": "json" + }, 
+ "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + }, + { + "isolate_name": "video_engine_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" + }, + { + "isolate_name": "webrtc_nonparallel_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_nonparallel_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10-19042" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + } + ] + }, + "Win64 Release (Clang)": { + "isolated_scripts": [ + { + "isolate_name": "audio_decoder_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": 
"ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "isolate_name": "common_audio_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "isolate_name": "common_video_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://common_video:common_video_unittests/" + }, + { + "isolate_name": "dcsctp_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "args": [ + "--quick" + ], + "isolate_name": "low_bandwidth_audio_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "low_bandwidth_audio_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "modules_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": 
"modules_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 2 + }, + "test_id_prefix": "ninja://modules:modules_tests/" + }, + { + "isolate_name": "modules_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://modules:modules_unittests/" + }, + { + "isolate_name": "peerconnection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + }, + { + "isolate_name": "rtc_media_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + 
}, + { + "isolate_name": "rtc_stats_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "isolate_name": "rtc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://:rtc_unittests/" + }, + { + "isolate_name": "slow_peer_connection_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "slow_peer_connection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" + }, + { + "isolate_name": "svc_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + 
"can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "test_support_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "test_support_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://test:test_support_unittests/" + }, + { + "isolate_name": "tools_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "tools_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + }, + { + "isolate_name": "video_capture_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_capture_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10", + "pool": "WebRTC-baremetal" + } + ] + }, + "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" + }, + { + "isolate_name": "video_engine_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + 
"merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" + }, + { + "isolate_name": "webrtc_nonparallel_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_nonparallel_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Windows-10" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + } + ] + }, "iOS64 Debug": {}, "iOS64 Release": {}, "iOS64 Sim Debug (iOS 12)": { @@ -9801,6 +9994,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -9847,7 +10041,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -9893,7 +10088,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -9939,7 +10135,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -9985,7 +10182,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -10031,7 +10229,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -10078,7 +10277,8 @@ "--xcode-build-version", 
"13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -10101,6 +10301,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -10125,7 +10326,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -10171,7 +10373,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -10217,7 +10420,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -10263,7 +10467,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -10311,6 +10516,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -10358,6 +10564,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -10404,7 +10611,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -10427,6 +10635,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -10451,7 +10660,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -10497,7 +10707,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -10543,7 +10754,8 @@ "--xcode-build-version", "13c100", "--out-dir", - 
"${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -10589,7 +10801,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -10635,7 +10848,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -10682,7 +10896,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -10728,7 +10943,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -10779,6 +10995,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -10825,7 +11042,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -10871,7 +11089,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -10917,7 +11136,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -10963,7 +11183,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -11009,7 +11230,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -11056,7 +11278,8 @@ "--xcode-build-version", "13c100", "--out-dir", - 
"${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -11079,6 +11302,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -11103,7 +11327,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -11149,7 +11374,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -11195,7 +11421,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -11241,7 +11468,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -11289,6 +11517,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -11336,6 +11565,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -11382,7 +11612,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -11405,6 +11636,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -11429,7 +11661,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -11475,7 +11708,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -11521,7 +11755,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + 
"${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -11567,7 +11802,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -11613,7 +11849,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -11660,7 +11897,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -11706,7 +11944,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -11757,6 +11996,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -11803,7 +12043,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -11849,7 +12090,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -11895,7 +12137,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -11941,7 +12184,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -11987,7 +12231,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -12034,7 +12279,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + 
"${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -12057,6 +12303,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -12081,7 +12328,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -12127,7 +12375,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -12173,7 +12422,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -12219,7 +12469,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -12267,6 +12518,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -12314,6 +12566,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -12360,7 +12613,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -12383,6 +12637,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -12407,7 +12662,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -12453,7 +12709,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -12499,7 +12756,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + 
"--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -12545,7 +12803,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -12591,7 +12850,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -12638,7 +12898,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -12684,7 +12945,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { diff --git a/infra/specs/client.webrtc.perf.json b/infra/specs/client.webrtc.perf.json index 7e4127088e..3357bf2417 100644 --- a/infra/specs/client.webrtc.perf.json +++ b/infra/specs/client.webrtc.perf.json @@ -30,13 +30,15 @@ "android_devices": "1", "device_os": "M", "device_type": "shamu", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -64,13 +66,15 @@ "android_devices": "1", "device_os": "M", "device_type": "shamu", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -106,13 +110,15 @@ "android_devices": "1", "device_os": "M", "device_type": "hammerhead", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 
10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -140,13 +146,171 @@ "android_devices": "1", "device_os": "M", "device_type": "hammerhead", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "webrtc_perf_tests", + "test_id_prefix": "ninja://:webrtc_perf_tests/" + } + ] + }, + "Perf Android32 (O Pixel2)": { + "gtest_tests": [ + { + "args": [ + ".", + "--remove", + "--android", + "--adb-path", + "../../third_party/android_sdk/public/platform-tools/adb", + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" + ], + "merge": { + "args": [ + "--test-suite", + "low_bandwidth_audio_perf_test" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "android_devices": "1", + "device_type": "walleye", + "gce": "0", + "os": "Android", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "low_bandwidth_audio_perf_test", + "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" + }, + { + "args": [ + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", + "--nologs" + ], + "merge": { + "args": [ + "--test-suite", + "webrtc_perf_tests" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + 
"swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "android_devices": "1", + "device_type": "walleye", + "gce": "0", + "os": "Android", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "webrtc_perf_tests", + "test_id_prefix": "ninja://:webrtc_perf_tests/" + } + ] + }, + "Perf Android32 (R Pixel5)": { + "gtest_tests": [ + { + "args": [ + ".", + "--remove", + "--android", + "--adb-path", + "../../third_party/android_sdk/public/platform-tools/adb", + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" + ], + "merge": { + "args": [ + "--test-suite", + "low_bandwidth_audio_perf_test" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "android_devices": "1", + "device_type": "redfin", + "gce": "0", + "os": "Android", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "low_bandwidth_audio_perf_test", + "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" + }, + { + "args": [ + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", + "--nologs" + ], + "merge": { + "args": [ + "--test-suite", + "webrtc_perf_tests" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "android_devices": "1", + "device_type": "redfin", + "gce": "0", + "os": "Android", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + 
"io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -182,13 +346,15 @@ "android_devices": "1", "device_os": "MMB29Q", "device_type": "bullhead", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -216,13 +382,15 @@ "android_devices": "1", "device_os": "MMB29Q", "device_type": "bullhead", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -257,13 +425,15 @@ { "android_devices": "1", "device_type": "walleye", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "low_bandwidth_audio_perf_test", "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" @@ -290,19 +460,137 @@ { "android_devices": "1", "device_type": "walleye", + "gce": "0", "os": "Android", "pool": "WebRTC-perf" } ], "hard_timeout": 10800, "idempotent": false, - "io_timeout": 10800 + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] }, + "Perf Android64 (R Pixel5)": { + "gtest_tests": [ + { + "args": [ + ".", + "--remove", + "--android", + "--adb-path", + 
"../../third_party/android_sdk/public/platform-tools/adb", + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" + ], + "merge": { + "args": [ + "--test-suite", + "low_bandwidth_audio_perf_test" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "android_devices": "1", + "device_type": "redfin", + "gce": "0", + "os": "Android", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "low_bandwidth_audio_perf_test", + "test_id_prefix": "ninja://audio:low_bandwidth_audio_perf_test/" + }, + { + "args": [ + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", + "--nologs" + ], + "merge": { + "args": [ + "--test-suite", + "webrtc_perf_tests" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "android_devices": "1", + "device_type": "redfin", + "gce": "0", + "os": "Android", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "webrtc_perf_tests", + "test_id_prefix": "ninja://:webrtc_perf_tests/" + } + ] + }, + "Perf Fuchsia": { + "isolated_scripts": [ + { + "args": [ + "--nologs" + ], + "isolate_name": "fuchsia_perf_tests", + "merge": { + "args": [ + "--test-suite", + "fuchsia_perf_tests" + ], + "script": "//tools_webrtc/perf/process_perf_results.py" + }, + "name": "fuchsia_perf_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + 
"swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "gce": "1", + "os": "Ubuntu-18.04", + "pool": "WebRTC-perf" + } + ], + "hard_timeout": 10800, + "idempotent": false, + "io_timeout": 10800, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test_id_prefix": "ninja://:fuchsia_perf_tests/" + } + ] + }, "Perf Linux Bionic": { "isolated_scripts": [ { @@ -328,6 +616,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", "os": "Ubuntu-18.04", "pool": "WebRTC-perf" } @@ -362,6 +651,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", "os": "Ubuntu-18.04", "pool": "WebRTC-perf" } @@ -400,6 +690,7 @@ "dimension_sets": [ { "cpu": "x86-64", + "gce": "0", "os": "Mac-11", "pool": "WebRTC-perf" } @@ -435,6 +726,7 @@ "dimension_sets": [ { "cpu": "x86-64", + "gce": "0", "os": "Mac-11", "pool": "WebRTC-perf" } @@ -472,6 +764,7 @@ "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", + "gce": "0", "os": "Mac-12", "pool": "WebRTC-perf" } @@ -484,7 +777,7 @@ } ] }, - "Perf Win7": { + "Perf Win 10": { "isolated_scripts": [ { "args": [ @@ -509,6 +802,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", "os": "Windows-10", "pool": "WebRTC-perf" } @@ -543,6 +837,7 @@ "can_use_on_swarming_builders": true, "dimension_sets": [ { + "gce": "0", "os": "Windows-10", "pool": "WebRTC-perf" } diff --git a/infra/specs/gn_isolate_map.pyl b/infra/specs/gn_isolate_map.pyl index d35a46c421..c7c472fcfa 100644 --- a/infra/specs/gn_isolate_map.pyl +++ b/infra/specs/gn_isolate_map.pyl @@ -55,10 +55,18 @@ "label": "//net/dcsctp:dcsctp_unittests", "type": "console_test_launcher", }, + "default": { + "label": "//:default", + "type": "additional_compile_target", + }, "android_instrumentation_test_apk": { "label": "//sdk/android:android_instrumentation_test_apk", "type": "console_test_launcher", }, + "fuchsia_perf_tests": { + "label": 
"//:fuchsia_perf_tests", + "type": "raw", + }, "low_bandwidth_audio_test": { "label": "//audio:low_bandwidth_audio_test", "type": "console_test_launcher", @@ -80,6 +88,11 @@ "label": "//pc:peerconnection_unittests", "type": "console_test_launcher", }, + "shared_screencast_stream_test": { + "label": "//modules/desktop_capture:shared_screencast_stream_test", + "type": "console_test_launcher", + "use_pipewire": True, + }, "rtc_media_unittests": { "label": "//media:rtc_media_unittests", "type": "console_test_launcher", diff --git a/infra/specs/internal.client.webrtc.json b/infra/specs/internal.client.webrtc.json index c40ec54b3f..b300db697b 100644 --- a/infra/specs/internal.client.webrtc.json +++ b/infra/specs/internal.client.webrtc.json @@ -6,7 +6,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -50,7 +49,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -95,7 +93,6 @@ "args": [ "--readline-timeout=1200", "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -142,7 +139,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -187,7 +183,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -231,7 +226,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -275,7 +269,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -319,7 +312,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -363,7 +355,6 @@ { "args": [ "--xctest", - 
"--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -407,7 +398,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -451,7 +441,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -555,7 +544,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -599,7 +587,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -644,7 +631,6 @@ "args": [ "--readline-timeout=1200", "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -691,7 +677,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -736,7 +721,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -780,7 +764,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -824,7 +807,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -868,7 +850,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -912,7 +893,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -956,7 +936,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", @@ -1000,7 +979,6 @@ { "args": [ "--xctest", - "--undefok=enable-run-ios-unittests-with-xctest", "--xcode-build-version", "13c100", "--out-dir", 
diff --git a/infra/specs/mixins.pyl b/infra/specs/mixins.pyl index eb5a68adf6..2f11efd3f8 100644 --- a/infra/specs/mixins.pyl +++ b/infra/specs/mixins.pyl @@ -124,6 +124,9 @@ } } }, + 'isolate_profile_data': { + 'isolate_profile_data': True + }, 'linux-bionic': { 'swarming': { 'dimensions': { @@ -131,18 +134,6 @@ } } }, - 'logdog-butler': { - 'swarming': { - 'cipd_packages': [{ - 'cipd_package': - 'infra/tools/luci/logdog/butler/${platform}', - 'location': - 'bin', - 'revision': - 'git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c' - }] - } - }, 'mac-m1-cpu': { 'swarming': { 'dimensions': { @@ -204,6 +195,15 @@ 'args': ['--out-dir', '${ISOLATED_OUTDIR}'] } }, + 'perf-fuchsia-perf-tests': { + 'merge': { + 'script': '//tools_webrtc/perf/process_perf_results.py', + 'args': ['--test-suite', 'fuchsia_perf_tests'] + }, + '$mixin_append': { + 'args': ['--nologs'] + } + }, 'perf-low-bandwidth-audio-perf-test': { 'merge': { 'script': '//tools_webrtc/perf/process_perf_results.py', @@ -221,7 +221,17 @@ 'swarming': { 'idempotent': False, 'dimensions': { - 'pool': 'WebRTC-perf' + 'pool': 'WebRTC-perf', + 'gce': '0' + } + } + }, + 'perf-pool-vm': { + 'swarming': { + 'idempotent': False, + 'dimensions': { + 'pool': 'WebRTC-perf', + 'gce': '1' } } }, @@ -236,8 +246,15 @@ }, 'quick-perf-tests': { '$mixin_append': { - 'args': - ['--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', '--nologs'] + 'args': ['--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', '--nologs'] + } + }, + 'redfin': { + 'swarming': { + 'dimensions': { + 'device_type': 'redfin', + 'os': 'Android' + } } }, 'resultdb-gtest-json-format': { @@ -299,7 +316,7 @@ }, 'webrtc-xctest': { '$mixin_append': { - 'args': ['--xctest', '--undefok=enable-run-ios-unittests-with-xctest'] + 'args': ['--xctest'] } }, 'win10': { diff --git a/infra/specs/mixins_webrtc.pyl b/infra/specs/mixins_webrtc.pyl index cfbd5cebbf..56de235da7 100644 --- a/infra/specs/mixins_webrtc.pyl +++ b/infra/specs/mixins_webrtc.pyl @@ -35,6 +35,14 @@ } } 
}, + 'hammerhead': { + 'swarming': { + 'dimensions': { + 'device_type': 'hammerhead', + 'os': 'Android' + } + } + }, 'ios-device-15.7': { 'swarming': { 'dimensions': { @@ -107,17 +115,6 @@ }, } }, - 'logdog-butler': { - 'swarming': { - 'cipd_packages': [ - { - "cipd_package": 'infra/tools/luci/logdog/butler/${platform}', - 'location': 'bin', - 'revision': 'git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c', - }, - ], - }, - }, 'mac-m1-cpu': { 'swarming': { 'dimensions': { @@ -132,6 +129,22 @@ } } }, + 'marshmallow_generic': { + 'swarming': { + 'dimensions': { + 'device_os': 'M' + } + } + }, + 'perf-fuchsia-perf-tests': { + 'merge': { + 'script': '//tools_webrtc/perf/process_perf_results.py', + 'args': ['--test-suite', 'fuchsia_perf_tests'], + }, + '$mixin_append': { + 'args': ['--nologs'], + }, + }, 'perf-low-bandwidth-audio-perf-test': { 'merge': { 'script': '//tools_webrtc/perf/process_perf_results.py', @@ -153,6 +166,16 @@ 'idempotent': False, 'dimensions': { 'pool': 'WebRTC-perf', + 'gce': '0', + }, + }, + }, + 'perf-pool-vm': { + 'swarming': { + 'idempotent': False, + 'dimensions': { + 'pool': 'WebRTC-perf', + 'gce': '1', }, }, }, @@ -173,6 +196,14 @@ ], } }, + 'redfin': { + 'swarming': { + 'dimensions': { + 'device_type': 'redfin', + 'os': 'Android' + } + } + }, 'resultdb-gtest-json-format': { '$mixin_append': { 'args': [ @@ -228,7 +259,6 @@ '$mixin_append': { 'args': [ '--xctest', - '--undefok=enable-run-ios-unittests-with-xctest', ], }, }, diff --git a/infra/specs/setup.cfg b/infra/specs/setup.cfg index 7dd0a8a68e..d5ed6957bb 100644 --- a/infra/specs/setup.cfg +++ b/infra/specs/setup.cfg @@ -8,4 +8,5 @@ # This is the style settings used when running yapf on .pyl files. 
[yapf] -continuation_indent_width = 2 \ No newline at end of file +continuation_indent_width = 2 +column_limit = 80 diff --git a/infra/specs/test_suites.pyl b/infra/specs/test_suites.pyl index d4b8cf9811..1706f6d94d 100644 --- a/infra/specs/test_suites.pyl +++ b/infra/specs/test_suites.pyl @@ -116,6 +116,40 @@ 'voip_unittests': {}, 'webrtc_nonparallel_tests': {}, }, + 'fuchsia_compatible_perf_tests': { + 'fuchsia_perf_tests': { + 'mixins': ['perf-fuchsia-perf-tests'], + }, + }, + 'fuchsia_compatible_tests': { + 'audio_decoder_unittests': {}, + 'common_audio_unittests': {}, + 'common_video_unittests': {}, + 'dcsctp_unittests': {}, + 'low_bandwidth_audio_test': { + 'args': ['--quick'] + }, + 'rtc_media_unittests': {}, + # TODO(bugs.webrtc.org/14705): Enable when NonGlobalFieldTrialsInstanceDoesNotModifyGlobalString is fixed. + # TODO(bugs.webrtc.org/14700): Enable when NetworkTest tests are fixed. + # 'rtc_unittests': {}, + 'rtc_pc_unittests': {}, + 'svc_tests': { + 'mixins': ['shards-4'], + }, + 'system_wrappers_unittests': {}, + # TODO(bugs.webrtc.org/14712): Enable once network issue is fixed. + # 'peerconnection_unittests': {}, + 'video_engine_tests': { + 'mixins': ['shards-4'], + }, + 'voip_unittests': {}, + # TODO(bugs.fuchsia.dev/115601): Enable when cpu time API's are implemented in Fuchsia + # 'test_support_unittests': {}, + # TODO(bugs.webrtc.org/14707): chromium.test component needs to allow creating listening ports. + # 'tools_unittests': {}, + # + }, 'ios_device_tests': { # TODO(bugs.webrtc.org/11362): Real XCTests fail to start on devices. 
#'apprtcmobile_tests': {'mixins': ['xcodebuild-device-runner']}, @@ -156,7 +190,7 @@ 'mixins': ['shards-2'], }, 'modules_unittests': { - 'mixins': ['shards-6'], + 'mixins': ['shards-6', 'cores-12'], }, 'rtc_media_unittests': {}, 'rtc_pc_unittests': {}, @@ -171,7 +205,7 @@ 'mixins': ['xcode_parallelization'] }, 'svc_tests': { - 'mixins': ['shards-4'], + 'mixins': ['shards-4', 'cores-12'], }, 'system_wrappers_unittests': {}, 'test_support_unittests': {}, @@ -183,6 +217,9 @@ 'voip_unittests': {}, 'webrtc_nonparallel_tests': {}, }, + 'linux_desktop_specific_tests': { + 'shared_screencast_stream_test': {}, + }, 'more_configs_tests': { 'peerconnection_unittests': { 'swarming': { @@ -225,5 +262,20 @@ 'desktop_tests', 'video_capture_tests', ], + 'linux_desktop_tests_tryserver': [ + 'desktop_tests', + 'linux_desktop_specific_tests', + 'video_capture_tests_tryserver', + 'webrtc_perf_tests_tryserver', + ], + 'linux_desktop_tests_with_video_capture': [ + 'desktop_tests', + 'linux_desktop_specific_tests', + 'video_capture_tests', + ], + 'linux_tests': [ + 'desktop_tests', + 'linux_desktop_specific_tests', + ], }, } diff --git a/infra/specs/tryserver.webrtc.json b/infra/specs/tryserver.webrtc.json index 6db19d0da4..5f65f32566 100644 --- a/infra/specs/tryserver.webrtc.json +++ b/infra/specs/tryserver.webrtc.json @@ -14,21 +14,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -44,21 +37,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - 
"cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -74,21 +60,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -104,21 +83,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -134,21 +106,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -164,21 +129,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -194,21 +152,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -225,21 +176,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -256,21 +200,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -287,21 +224,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -317,21 +247,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -347,21 +270,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -377,21 +293,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -408,21 +317,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -438,21 +340,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", 
+ "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -469,21 +364,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -499,21 +387,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -529,21 +410,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -559,21 
+433,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -590,21 +457,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -620,21 +480,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -654,21 +507,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], 
"dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -710,21 +556,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -740,21 +579,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -770,21 +602,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -800,21 +625,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -830,21 +648,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -860,21 +671,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ 
-890,21 +694,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -921,21 +718,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -952,21 +742,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -983,21 +766,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": 
"bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -1013,21 +789,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1043,21 +812,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1073,21 +835,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1104,21 +859,14 @@ }, 
"swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1134,21 +882,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1165,21 +906,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1195,21 +929,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1225,21 +952,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -1255,21 +975,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1286,21 +999,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -1316,21 +1022,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -1350,21 +1049,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -1406,21 +1098,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -1436,21 +1121,14 @@ }, "swarming": { 
"can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -1466,21 +1144,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -1496,21 +1167,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -1526,21 +1190,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - 
"location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" @@ -1556,21 +1213,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -1586,21 +1236,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -1617,21 +1260,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], 
+ "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -1648,21 +1284,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -1679,21 +1308,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -1709,21 +1331,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -1739,21 +1354,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": 
"infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -1769,21 +1377,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -1800,21 +1401,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -1830,21 +1424,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": 
"bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -1861,21 +1448,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -1891,21 +1471,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -1921,21 +1494,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": 
"ninja://rtc_tools:tools_unittests/" @@ -1951,21 +1517,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -1982,21 +1541,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -2012,21 +1564,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2046,21 +1591,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -2102,21 +1640,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -2137,21 +1668,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "AppRTCMobile_test_apk", "test_id_prefix": "ninja://examples:AppRTCMobile_test_apk/" @@ -2167,21 +1691,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": 
"chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "android_instrumentation_test_apk", "test_id_prefix": "ninja://sdk/android:android_instrumentation_test_apk/" @@ -2197,21 +1714,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" @@ -2227,21 +1737,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" @@ -2257,21 +1760,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "common_video_unittests", "test_id_prefix": 
"ninja://common_video:common_video_unittests/" @@ -2287,21 +1783,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" @@ -2317,21 +1806,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, "test": "modules_tests", @@ -2348,21 +1830,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "modules_unittests", @@ -2379,21 +1854,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } 
- ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "peerconnection_unittests", @@ -2410,21 +1878,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" @@ -2440,21 +1901,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" @@ -2470,21 +1924,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": 
"rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" @@ -2500,21 +1947,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, "test": "rtc_unittests", @@ -2531,21 +1971,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" @@ -2561,21 +1994,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "svc_tests", @@ -2592,21 +2018,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": 
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" @@ -2622,21 +2041,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" @@ -2652,21 +2064,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" @@ -2682,21 +2087,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": 
"walleye", "os": "Android" } ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, "test": "video_engine_tests", @@ -2713,21 +2111,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" @@ -2743,21 +2134,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" @@ -2777,21 +2161,14 @@ }, "swarming": { "can_use_on_swarming_builders": true, - "cipd_packages": [ - { - "cipd_package": "infra/tools/luci/logdog/butler/${platform}", - "location": "bin", - "revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c" - } - ], "dimension_sets": [ { "android_devices": "1", - "device_os": "MMB29Q", - "device_type": "bullhead", + "device_type": "walleye", "os": "Android" } - ] + ], + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" @@ -2828,7 +2205,257 @@ "android_compile_x64_rel": {}, 
"android_compile_x86_dbg": {}, "android_compile_x86_rel": {}, - "fuchsia_rel": {}, + "fuchsia_rel": { + "isolated_scripts": [ + { + "isolate_name": "audio_decoder_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "isolate_name": "common_audio_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "isolate_name": "common_video_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://common_video:common_video_unittests/" + }, + { + "isolate_name": "dcsctp_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": 
"ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "args": [ + "--quick" + ], + "isolate_name": "low_bandwidth_audio_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "low_bandwidth_audio_test", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "rtc_media_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "svc_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "merge": { + "args": [], + "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "video_engine_tests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" + } + ] + }, "ios_compile_arm64_dbg": {}, "ios_compile_arm64_rel": {}, "ios_sim_x64_dbg_ios12": { @@ -2843,6 +2470,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -2889,7 +2517,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -2935,7 +2564,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -2981,7 +2611,8 @@ "--xcode-build-version", "13c100", "--out-dir", - 
"${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -3027,7 +2658,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -3073,7 +2705,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -3120,7 +2753,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -3143,6 +2777,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -3167,7 +2802,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -3213,7 +2849,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -3259,7 +2896,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -3305,7 +2943,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -3353,6 +2992,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -3400,6 +3040,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -3446,7 +3087,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -3469,6 +3111,7 @@ ], "dimension_sets": [ { + "cores": 
"12", "cpu": "x86-64", "os": "Mac-12" } @@ -3493,7 +3136,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -3539,7 +3183,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -3585,7 +3230,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -3631,7 +3277,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -3677,7 +3324,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -3724,7 +3372,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -3770,7 +3419,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -3821,6 +3471,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -3867,7 +3518,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -3913,7 +3565,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -3959,7 +3612,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": 
"common_video_unittests", "merge": { @@ -4005,7 +3659,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -4051,7 +3706,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -4098,7 +3754,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -4121,6 +3778,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -4145,7 +3803,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -4191,7 +3850,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -4237,7 +3897,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -4283,7 +3944,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -4331,6 +3993,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -4378,6 +4041,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -4424,7 +4088,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -4447,6 +4112,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -4471,7 +4137,8 @@ 
"--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -4517,7 +4184,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -4563,7 +4231,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -4609,7 +4278,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -4655,7 +4325,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -4702,7 +4373,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -4748,7 +4420,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -4799,6 +4472,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "apprtcmobile_tests", @@ -4845,7 +4519,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "audio_decoder_unittests", "merge": { @@ -4891,7 +4566,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_audio_unittests", "merge": { @@ -4937,7 +4613,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "common_video_unittests", "merge": { @@ -4983,7 +4660,8 @@ 
"--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "dcsctp_unittests", "merge": { @@ -5029,7 +4707,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_tests", "merge": { @@ -5076,7 +4755,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "modules_unittests", "merge": { @@ -5099,6 +4779,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -5123,7 +4804,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_media_unittests", "merge": { @@ -5169,7 +4851,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_pc_unittests", "merge": { @@ -5215,7 +4898,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_stats_unittests", "merge": { @@ -5261,7 +4945,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "rtc_unittests", "merge": { @@ -5309,6 +4994,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_framework_unittests", @@ -5356,6 +5042,7 @@ "13c100", "--out-dir", "${ISOLATED_OUTDIR}", + "--xctest", "--xcode-parallelization" ], "isolate_name": "sdk_unittests", @@ -5402,7 +5089,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "svc_tests", "merge": { @@ -5425,6 +5113,7 @@ ], "dimension_sets": [ { + "cores": "12", "cpu": "x86-64", "os": "Mac-12" } @@ -5449,7 +5138,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + 
"${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "system_wrappers_unittests", "merge": { @@ -5495,7 +5185,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "test_support_unittests", "merge": { @@ -5541,7 +5232,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "tools_unittests", "merge": { @@ -5587,7 +5279,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_capture_tests", "merge": { @@ -5633,7 +5326,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "video_engine_tests", "merge": { @@ -5680,7 +5374,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "voip_unittests", "merge": { @@ -5726,7 +5421,8 @@ "--xcode-build-version", "13c100", "--out-dir", - "${ISOLATED_OUTDIR}" + "${ISOLATED_OUTDIR}", + "--xctest" ], "isolate_name": "webrtc_nonparallel_tests", "merge": { @@ -6026,6 +5722,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -6204,6 +5921,532 @@ "linux_compile_arm_rel": {}, "linux_compile_dbg": {}, "linux_compile_rel": {}, + "linux_coverage": { + "isolated_scripts": [ + { + "isolate_name": "audio_decoder_unittests", 
+ "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "isolate_name": "common_audio_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "isolate_name": "common_video_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://common_video:common_video_unittests/" + }, + { + "isolate_name": "dcsctp_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "args": [ + "--quick" + ], + "isolate_name": "low_bandwidth_audio_test", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "low_bandwidth_audio_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" + }, + { + "isolate_name": "modules_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 2 + }, + "test_id_prefix": "ninja://modules:modules_tests/" + }, + { + "isolate_name": "modules_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://modules:modules_unittests/" + }, + { + "isolate_name": "peerconnection_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + }, + { + "isolate_name": "rtc_media_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": 
{ + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "isolate_name": "rtc_pc_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "isolate_name": "rtc_stats_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "isolate_name": "rtc_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 6 + }, + "test_id_prefix": "ninja://:rtc_unittests/" + }, + { + "isolate_name": "shared_screencast_stream_test", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + 
} + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, + { + "isolate_name": "slow_peer_connection_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "slow_peer_connection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" + }, + { + "isolate_name": "svc_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "isolate_name": "system_wrappers_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "isolate_name": "test_support_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "test_support_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://test:test_support_unittests/" + }, + { + 
"isolate_name": "tools_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "tools_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + }, + { + "isolate_name": "video_capture_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_capture_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04", + "pool": "WebRTC-baremetal-try" + } + ] + }, + "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" + }, + { + "isolate_name": "video_engine_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ], + "shards": 4 + }, + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "isolate_name": "voip_unittests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:voip_unittests/" + }, + { + "isolate_name": "webrtc_nonparallel_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_nonparallel_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + }, + { + "args": [ + "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "isolate_name": "webrtc_perf_tests", + "isolate_profile_data": true, + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_perf_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://:webrtc_perf_tests/" + } + ] + }, "linux_dbg": { "isolated_scripts": [ { @@ -6465,6 +6708,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -6899,6 +7163,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + 
"swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -7791,6 +8076,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -8706,6 +9012,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -9139,6 +9466,27 @@ }, "test_id_prefix": "ninja://:rtc_unittests/" }, + { + "isolate_name": "shared_screencast_stream_test", + "merge": { + "args": [], + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "can_use_on_swarming_builders": true, + "dimension_sets": [ + { + "cpu": "x86-64", + "os": "Ubuntu-18.04" + } + ] + }, + "test_id_prefix": 
"ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, { "isolate_name": "slow_peer_connection_unittests", "merge": { @@ -13303,439 +13651,6 @@ } ] }, - "win_x64_clang_dbg_win10": { - "isolated_scripts": [ - { - "isolate_name": "audio_decoder_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "audio_decoder_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" - }, - { - "isolate_name": "common_audio_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "common_audio_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://common_audio:common_audio_unittests/" - }, - { - "isolate_name": "common_video_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "common_video_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://common_video:common_video_unittests/" - }, - { - "isolate_name": "dcsctp_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "dcsctp_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" 
- }, - { - "args": [ - "--quick" - ], - "isolate_name": "low_bandwidth_audio_test", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "low_bandwidth_audio_test", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://audio:low_bandwidth_audio_test/" - }, - { - "isolate_name": "modules_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "modules_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 2 - }, - "test_id_prefix": "ninja://modules:modules_tests/" - }, - { - "isolate_name": "modules_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "modules_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 6 - }, - "test_id_prefix": "ninja://modules:modules_unittests/" - }, - { - "isolate_name": "peerconnection_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peerconnection_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 4 - }, - "test_id_prefix": "ninja://pc:peerconnection_unittests/" - }, - { - "isolate_name": "rtc_media_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_media_unittests", - 
"resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://media:rtc_media_unittests/" - }, - { - "isolate_name": "rtc_pc_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_pc_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" - }, - { - "isolate_name": "rtc_stats_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_stats_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" - }, - { - "isolate_name": "rtc_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "rtc_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 6 - }, - "test_id_prefix": "ninja://:rtc_unittests/" - }, - { - "isolate_name": "slow_peer_connection_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "slow_peer_connection_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" - }, - { - 
"isolate_name": "svc_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "svc_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 4 - }, - "test_id_prefix": "ninja://pc:svc_tests/" - }, - { - "isolate_name": "system_wrappers_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "system_wrappers_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" - }, - { - "isolate_name": "test_support_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "test_support_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://test:test_support_unittests/" - }, - { - "isolate_name": "tools_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "tools_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://rtc_tools:tools_unittests/" - }, - { - "isolate_name": "video_engine_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_engine_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - 
"can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ], - "shards": 4 - }, - "test_id_prefix": "ninja://:video_engine_tests/" - }, - { - "isolate_name": "voip_unittests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "voip_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://:voip_unittests/" - }, - { - "isolate_name": "webrtc_nonparallel_tests", - "merge": { - "args": [], - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "webrtc_nonparallel_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19042" - } - ] - }, - "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" - } - ] - }, "win_x64_clang_rel": { "isolated_scripts": [ { diff --git a/infra/specs/waterfalls.pyl b/infra/specs/waterfalls.pyl index bad6afa01f..0db19e71e5 100644 --- a/infra/specs/waterfalls.pyl +++ b/infra/specs/waterfalls.pyl @@ -11,20 +11,20 @@ 'name': 'client.webrtc', 'mixins': [], 'machines': { - 'Android32 (M Nexus5X)': { + 'Android32': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests', 'junit_tests': 'android_junit_tests', }, }, - 'Android32 (M Nexus5X)(dbg)': { + 'Android32 (dbg)': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 
'gtest_tests': 'android_tests', @@ -33,8 +33,8 @@ }, 'Android32 (more configs)': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'more_configs_tests', @@ -43,20 +43,20 @@ 'Android32 Builder arm': {}, 'Android32 Builder x86': {}, 'Android32 Builder x86 (dbg)': {}, - 'Android64 (M Nexus5X)': { + 'Android64': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests', 'junit_tests': 'android_junit_tests', }, }, - 'Android64 (M Nexus5X)(dbg)': { + 'Android64 (dbg)': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests', @@ -65,11 +65,12 @@ }, 'Android64 Builder arm64': {}, 'Android64 Builder x64 (dbg)': {}, + 'Fuchsia Builder': {}, 'Fuchsia Release': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-bionic', 'x86-64', 'has_native_resultdb_integration'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'fuchsia_compatible_tests', }, }, 'Linux (more configs)': { @@ -83,13 +84,16 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'Linux MSan': { 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { + # TODO(crbug.com/webrtc/14568): Using 'linux_tests' + # fails on "MemorySanitizer: 
use-of-uninitialized-value in + # libpipewire-0.3.so." 'isolated_scripts': 'desktop_tests', }, }, @@ -97,6 +101,9 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { + # TODO(crbug.com/webrtc/14568): Using 'linux_tests' + # fails on "ThreadSanitizer: data race on vptr (ctor/dtor vs + # virtual call) in shared_screencast_stream_test." 'isolated_scripts': 'desktop_tests', }, }, @@ -104,14 +111,14 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'Linux UBSan vptr': { 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'Linux32 Debug': { @@ -135,7 +142,7 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'Linux64 Debug (ARM)': {}, @@ -143,7 +150,7 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests_with_video_capture', + 'isolated_scripts': 'linux_desktop_tests_with_video_capture', }, }, 'Linux64 Release (ARM)': {}, @@ -191,9 +198,9 @@ 'Win32 Debug (Clang)': {}, 'Win32 Release (Clang)': { 'os_type': 'win', - 'mixins': ['win10-any', 'x86-64', 'resultdb-json-format'], + 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests_with_video_capture', + 'isolated_scripts': 'desktop_tests', }, }, 'Win64 ASan': { @@ -203,15 +210,27 @@ 'isolated_scripts': 'desktop_tests', }, }, - 'Win64 Debug (Clang)': {}, - 'Win64 Release (Clang)': {}, + 'Win64 Debug (Clang)': { + 'os_type': 'win', + 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], + 'test_suites': { + 'isolated_scripts': 'desktop_tests', + }, + 
}, + 'Win64 Release (Clang)': { + 'os_type': 'win', + 'mixins': ['win10-any', 'x86-64', 'resultdb-json-format'], + 'test_suites': { + 'isolated_scripts': 'desktop_tests_with_video_capture', + }, + }, 'iOS64 Debug': {}, 'iOS64 Release': {}, 'iOS64 Sim Debug (iOS 12)': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-12.4', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-12.4', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -219,9 +238,9 @@ }, 'iOS64 Sim Debug (iOS 13)': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-13.6', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-13.6', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -229,9 +248,9 @@ }, 'iOS64 Sim Debug (iOS 14)': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-14.5', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-14.5', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -246,7 +265,8 @@ 'Perf Android32 (M AOSP Nexus6)': { 'mixins': [ 'shamu', 'marshmallow_generic', 'android-devices', 'perf-pool', - 'perf-output', 'timeout-3h', 'has_native_resultdb_integration' + 'perf-output', 'timeout-3h', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -255,7 +275,27 @@ 'Perf 
Android32 (M Nexus5)': { 'mixins': [ 'hammerhead', 'marshmallow_generic', 'android-devices', 'perf-pool', - 'perf-output', 'timeout-3h', 'has_native_resultdb_integration' + 'perf-output', 'timeout-3h', 'has_native_resultdb_integration', + 'chromium-tester-service-account' + ], + 'test_suites': { + 'gtest_tests': 'android_perf_tests', + }, + }, + 'Perf Android32 (O Pixel2)': { + 'mixins': [ + 'walleye', 'android-devices', 'perf-pool', 'timeout-3h', + 'perf-output', 'has_native_resultdb_integration', + 'chromium-tester-service-account' + ], + 'test_suites': { + 'gtest_tests': 'android_perf_tests', + }, + }, + 'Perf Android32 (R Pixel5)': { + 'mixins': [ + 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', 'perf-output', + 'has_native_resultdb_integration', 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -264,7 +304,8 @@ 'Perf Android64 (M Nexus5X)': { 'mixins': [ 'bullhead', 'marshmallow', 'android-devices', 'perf-pool', - 'perf-output', 'timeout-3h', 'has_native_resultdb_integration' + 'perf-output', 'timeout-3h', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', @@ -273,12 +314,33 @@ 'Perf Android64 (O Pixel2)': { 'mixins': [ 'walleye', 'android-devices', 'perf-pool', 'timeout-3h', - 'perf-output', 'has_native_resultdb_integration' + 'perf-output', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_perf_tests', }, }, + 'Perf Android64 (R Pixel5)': { + 'mixins': [ + 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', 'perf-output', + 'has_native_resultdb_integration', 'chromium-tester-service-account' + ], + 'test_suites': { + 'gtest_tests': 'android_perf_tests', + }, + }, + 'Perf Fuchsia': { + 'os_type': + 'linux', + 'mixins': [ + 'linux-bionic', 'x86-64', 'perf-pool-vm', 'timeout-3h', + 'has_native_resultdb_integration', 'chromium-tester-service-account' + ], + 
'test_suites': { + 'isolated_scripts': 'fuchsia_compatible_perf_tests', + } + }, 'Perf Linux Bionic': { 'os_type': 'linux', @@ -312,7 +374,7 @@ 'isolated_scripts': 'webrtc_perf_tests', }, }, - 'Perf Win7': { + 'Perf Win 10': { 'os_type': 'win', 'mixins': @@ -368,8 +430,8 @@ 'machines': { 'android_arm64_dbg': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -378,8 +440,8 @@ }, 'android_arm64_rel': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -388,8 +450,8 @@ }, 'android_arm_dbg': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -398,8 +460,8 @@ }, 'android_arm_more_configs': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'more_configs_tests', @@ -407,8 +469,8 @@ }, 'android_arm_rel': { 'mixins': [ - 'bullhead', 'marshmallow', 'android-devices', 'logdog-butler', - 'has_native_resultdb_integration' + 'walleye', 'android-devices', 'has_native_resultdb_integration', + 'chromium-tester-service-account' ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', @@ -425,18 +487,18 @@ 'android_compile_x86_rel': {}, 'fuchsia_rel': { 'os_type': 'linux', - 'mixins': 
['linux-bionic', 'x86-64', 'resultdb-json-format'], - 'test_suties': { - 'isolateds_scripts': 'desktop_tests', + 'mixins': ['linux-bionic', 'x86-64', 'has_native_resultdb_integration'], + 'test_suites': { + 'isolated_scripts': 'fuchsia_compatible_tests', }, }, 'ios_compile_arm64_dbg': {}, 'ios_compile_arm64_rel': {}, 'ios_sim_x64_dbg_ios12': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-12.4', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-12.4', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -444,9 +506,9 @@ }, 'ios_sim_x64_dbg_ios13': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-13.6', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-13.6', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -454,9 +516,9 @@ }, 'ios_sim_x64_dbg_ios14': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', - 'ios-simulator-14.5', 'xcode_13_main', 'mac_toolchain', - 'has_native_resultdb_integration', 'out_dir_arg' + 'mac_12_x64', 'chromium-tester-service-account', 'ios-simulator-14.5', + 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests', @@ -466,7 +528,7 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'linux_compile_arm64_dbg': {}, @@ -475,11 +537,22 @@ 'linux_compile_arm_rel': {}, 'linux_compile_dbg': {}, 
'linux_compile_rel': {}, + 'linux_coverage': { + 'os_type': + 'linux', + 'mixins': [ + 'linux-bionic', 'x86-64', 'resultdb-json-format', + 'isolate_profile_data' + ], + 'test_suites': { + 'isolated_scripts': 'linux_desktop_tests_tryserver', + }, + }, 'linux_dbg': { 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'linux_libfuzzer_rel': {}, @@ -487,7 +560,7 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'linux_more_configs': { @@ -501,6 +574,9 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { + # TODO(crbug.com/webrtc/14568): Using 'linux_tests' + # fails on "MemorySanitizer: use-of-uninitialized-value in + # libpipewire-0.3.so." 'isolated_scripts': 'desktop_tests', }, }, @@ -508,13 +584,16 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests_tryserver', + 'isolated_scripts': 'linux_desktop_tests_tryserver', }, }, 'linux_tsan2': { 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { + # TODO(crbug.com/webrtc/14568): Using 'linux_tests' + # fails on "ThreadSanitizer: data race on vptr (ctor/dtor vs + # virtual call) in shared_screencast_stream_test." 
'isolated_scripts': 'desktop_tests', }, }, @@ -522,14 +601,14 @@ 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'linux_ubsan_vptr': { 'os_type': 'linux', 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests', + 'isolated_scripts': 'linux_tests', }, }, 'linux_x86_dbg': { @@ -603,13 +682,6 @@ 'isolated_scripts': 'desktop_tests', }, }, - 'win_x64_clang_dbg_win10': { - 'os_type': 'win', - 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], - 'test_suites': { - 'isolated_scripts': 'desktop_tests', - }, - }, 'win_x64_clang_rel': { 'os_type': 'win', 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], diff --git a/media/BUILD.gn b/media/BUILD.gn index 4433f44e1b..b884b0d049 100644 --- a/media/BUILD.gn +++ b/media/BUILD.gn @@ -57,13 +57,16 @@ rtc_library("rtc_media_base") { "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", + "../api:transport_api", "../api/audio:audio_frame_processor", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", + "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/transport:datagram_transport_interface", "../api/transport:stun_types", @@ -73,6 +76,7 @@ rtc_library("rtc_media_base") { "../api/video:video_bitrate_allocator_factory", "../api/video:video_frame", "../api/video:video_rtp_headers", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../call:call_interfaces", "../call:video_stream_api", @@ -92,6 +96,7 @@ rtc_library("rtc_media_base") { "../rtc_base:socket", "../rtc_base:stringutils", "../rtc_base:timeutils", + "../rtc_base/network:sent_packet", "../rtc_base/synchronization:mutex", 
"../rtc_base/system:file_wrapper", "../rtc_base/system:no_unique_address", @@ -103,6 +108,7 @@ rtc_library("rtc_media_base") { absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -113,8 +119,9 @@ rtc_library("rtc_media_base") { "base/codec.cc", "base/codec.h", "base/delayable.h", - "base/media_channel.cc", "base/media_channel.h", + "base/media_channel_impl.cc", + "base/media_channel_impl.h", "base/media_constants.cc", "base/media_constants.h", "base/media_engine.cc", @@ -264,6 +271,7 @@ rtc_library("rtc_audio_video") { libs = [] deps = [ ":rtc_media_base", + "../api:array_view", "../api:call_api", "../api:field_trials_view", "../api:libjingle_peerconnection_api", @@ -617,6 +625,7 @@ if (rtc_include_tests) { "../call:call_interfaces", "../common_video", "../modules/audio_device:mock_audio_device", + "../modules/audio_mixer:audio_mixer_impl", "../modules/audio_processing", "../modules/audio_processing:api", "../modules/audio_processing:mocks", diff --git a/media/base/fake_media_engine.cc b/media/base/fake_media_engine.cc index 7692efe468..383939a3b5 100644 --- a/media/base/fake_media_engine.cc +++ b/media/base/fake_media_engine.cc @@ -427,7 +427,10 @@ void FakeVideoMediaChannel::SetRecordableEncodedFrameCallback( void FakeVideoMediaChannel::ClearRecordableEncodedFrameCallback(uint32_t ssrc) { } -void FakeVideoMediaChannel::GenerateKeyFrame(uint32_t ssrc) {} +void FakeVideoMediaChannel::RequestRecvKeyFrame(uint32_t ssrc) {} +void FakeVideoMediaChannel::GenerateSendKeyFrame( + uint32_t ssrc, + const std::vector& rids) {} FakeVoiceEngine::FakeVoiceEngine() : fail_create_channel_(false) { // Add a fake audio codec. 
Note that the name must not be "" as there are diff --git a/media/base/fake_media_engine.h b/media/base/fake_media_engine.h index 55c85d7c64..a03a8a6646 100644 --- a/media/base/fake_media_engine.h +++ b/media/base/fake_media_engine.h @@ -30,6 +30,7 @@ #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network_route.h" +#include "rtc_base/thread.h" using webrtc::RtpExtension; @@ -149,20 +150,25 @@ class RtpHelper : public Base { } virtual webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) { + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { auto parameters_iterator = rtp_send_parameters_.find(ssrc); if (parameters_iterator != rtp_send_parameters_.end()) { auto result = CheckRtpParametersInvalidModificationAndValues( parameters_iterator->second, parameters); - if (!result.ok()) - return result; + if (!result.ok()) { + return webrtc::InvokeSetParametersCallback(callback, result); + } parameters_iterator->second = parameters; - return webrtc::RTCError::OK(); + + return webrtc::InvokeSetParametersCallback(callback, + webrtc::RTCError::OK()); } // Replicate the behavior of the real media channel: return false // when setting parameters for unknown SSRCs. 
- return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } virtual webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const { @@ -232,6 +238,24 @@ class RtpHelper : public Base { rtcp_packets_.push_back(std::string(packet->cdata(), packet->size())); } + // Stuff that deals with encryptors, transformers and the like + void SetFrameEncryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_encryptor) override {} + void SetEncoderToPacketizerFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override {} + + void SetFrameDecryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_decryptor) override {} + + void SetDepacketizerToDecoderFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override {} + protected: bool MuteStream(uint32_t ssrc, bool mute) { if (!HasSendStream(ssrc) && ssrc != 0) { @@ -462,7 +486,9 @@ class FakeVideoMediaChannel : public RtpHelper { std::function callback) override; void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override; - void GenerateKeyFrame(uint32_t ssrc) override; + void RequestRecvKeyFrame(uint32_t ssrc) override; + void GenerateSendKeyFrame(uint32_t ssrc, + const std::vector& rids) override; private: bool SetRecvCodecs(const std::vector& codecs); diff --git a/media/base/fake_network_interface.h b/media/base/fake_network_interface.h index 099b7cad38..53c5563935 100644 --- a/media/base/fake_network_interface.h +++ b/media/base/fake_network_interface.h @@ -31,7 +31,7 @@ namespace cricket { // Fake NetworkInterface that sends/receives RTP/RTCP packets. 
-class FakeNetworkInterface : public MediaChannel::NetworkInterface { +class FakeNetworkInterface : public MediaChannelNetworkInterface { public: FakeNetworkInterface() : thread_(rtc::Thread::Current()), diff --git a/media/base/media_channel.h b/media/base/media_channel.h index 48bfde638d..8824411fa9 100644 --- a/media/base/media_channel.h +++ b/media/base/media_channel.h @@ -26,6 +26,7 @@ #include "api/media_stream_interface.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/data_channel_transport_interface.h" #include "api/transport/rtp/rtp_source.h" @@ -34,6 +35,7 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_timing.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_encoder_factory.h" #include "call/video_receive_stream.h" #include "common_video/include/quality_limitation_reason.h" @@ -61,6 +63,10 @@ class Timing; namespace webrtc { class AudioSinkInterface; class VideoFrame; + +webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, + RTCError error); + } // namespace webrtc namespace cricket { @@ -69,6 +75,10 @@ class AudioSource; class VideoCapturer; struct RtpHeader; struct VideoFormat; +class VideoMediaSendChannelInterface; +class VideoMediaReceiveChannelInterface; +class VoiceMediaSendChannelInterface; +class VoiceMediaReceiveChannelInterface; const int kScreencastDefaultFps = 5; @@ -157,7 +167,6 @@ struct VideoOptions { } }; - // RingRTC change for audio level methods // Higher is louder. 
typedef uint16_t AudioLevel; @@ -167,29 +176,32 @@ typedef struct { AudioLevel level; } ReceivedAudioLevel; -class MediaChannel { +class MediaChannelNetworkInterface { public: - class NetworkInterface { - public: - enum SocketType { ST_RTP, ST_RTCP }; - virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) = 0; - virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet, + enum SocketType { ST_RTP, ST_RTCP }; + virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) = 0; - virtual int SetOption(SocketType type, - rtc::Socket::Option opt, - int option) = 0; - virtual ~NetworkInterface() {} - }; - - explicit MediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false); - virtual ~MediaChannel(); + virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options) = 0; + virtual int SetOption(SocketType type, + rtc::Socket::Option opt, + int option) = 0; + virtual ~MediaChannelNetworkInterface() {} +}; +// Functions shared across all MediaChannel interfaces. +// Because there are implementation types that implement multiple +// interfaces, this is not a base class (no diamond inheritance). +template +class MediaBaseChannelInterface { + public: + virtual ~MediaBaseChannelInterface() = default; virtual cricket::MediaType media_type() const = 0; - // Sets the abstract interface class for sending RTP/RTCP data. - virtual void SetInterface(NetworkInterface* iface); + // Networking functions. We assume that both the send channel and the + // receive channel send RTP packets (RTCP packets in the case of a receive + // channel). + // Called on the network when an RTP packet is received. 
virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) = 0; @@ -202,6 +214,29 @@ class MediaChannel { virtual void OnNetworkRouteChanged( absl::string_view transport_name, const rtc::NetworkRoute& network_route) = 0; + + // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. + // Set to true if it's allowed to mix one- and two-byte RTP header extensions + // in the same stream. The setter and getter must only be called from + // worker_thread. + virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0; + virtual bool ExtmapAllowMixed() const = 0; +}; + +class MediaSendChannelInterface + : public MediaBaseChannelInterface { + public: + virtual ~MediaSendChannelInterface() = default; + + virtual VideoMediaSendChannelInterface* AsVideoSendChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + virtual VoiceMediaSendChannelInterface* AsVoiceSendChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + // Creates a new outgoing media stream with SSRCs and CNAME as described // by sp. virtual bool AddSendStream(const StreamParams& sp) = 0; @@ -210,6 +245,46 @@ class MediaChannel { // multiple SSRCs. In the case of an ssrc of 0, the possibly cached // StreamParams is removed. virtual bool RemoveSendStream(uint32_t ssrc) = 0; + // Set the frame encryptor to use on all outgoing frames. This is optional. + // This pointers lifetime is managed by the set of RtpSender it is attached + // to. + virtual void SetFrameEncryptor( + uint32_t ssrc, + rtc::scoped_refptr frame_encryptor) = 0; + + virtual webrtc::RTCError SetRtpSendParameters( + uint32_t ssrc, + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback = nullptr) = 0; + + virtual void SetEncoderToPacketizerFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr + frame_transformer) = 0; + + // note: The encoder_selector object must remain valid for the lifetime of the + // MediaChannel, unless replaced. 
+ virtual void SetEncoderSelector( + uint32_t ssrc, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + } + virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; +}; + +class MediaReceiveChannelInterface + : public MediaBaseChannelInterface, + public Delayable { + public: + virtual ~MediaReceiveChannelInterface() = default; + + virtual VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + virtual VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } // Creates a new incoming media stream with SSRCs, CNAME as described // by sp. In the case of a sp without SSRCs, the unsignaled sp is cached // to be used later for unsignaled streams received. @@ -235,107 +310,17 @@ class MediaChannel { // new unsignalled ssrcs. virtual void OnDemuxerCriteriaUpdatePending() = 0; virtual void OnDemuxerCriteriaUpdateComplete() = 0; - // Returns the absoulte sendtime extension id value from media channel. - virtual int GetRtpSendTimeExtnId() const; - // Set the frame encryptor to use on all outgoing frames. This is optional. - // This pointers lifetime is managed by the set of RtpSender it is attached - // to. - // TODO(benwright) make pure virtual once internal supports it. - virtual void SetFrameEncryptor( - uint32_t ssrc, - rtc::scoped_refptr frame_encryptor); // Set the frame decryptor to use on all incoming frames. This is optional. // This pointers lifetimes is managed by the set of RtpReceivers it is // attached to. - // TODO(benwright) make pure virtual once internal supports it. virtual void SetFrameDecryptor( uint32_t ssrc, - rtc::scoped_refptr frame_decryptor); + rtc::scoped_refptr frame_decryptor) = 0; - // Enable network condition based codec switching. 
- virtual void SetVideoCodecSwitchingEnabled(bool enabled); - - // note: The encoder_selector object must remain valid for the lifetime of the - // MediaChannel, unless replaced. - virtual void SetEncoderSelector( - uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { - } - - // Base method to send packet using NetworkInterface. - bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options); - - bool SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options); - - int SetOption(NetworkInterface::SocketType type, - rtc::Socket::Option opt, - int option); - - // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. - // Set to true if it's allowed to mix one- and two-byte RTP header extensions - // in the same stream. The setter and getter must only be called from - // worker_thread. - void SetExtmapAllowMixed(bool extmap_allow_mixed); - bool ExtmapAllowMixed() const; - - // Returns `true` if a non-null NetworkInterface pointer is held. - // Must be called on the network thread. - bool HasNetworkInterface() const; - - virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; - virtual webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters) = 0; - - virtual void SetEncoderToPacketizerFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer); virtual void SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer); - - protected: - int SetOptionLocked(NetworkInterface::SocketType type, - rtc::Socket::Option opt, - int option) RTC_RUN_ON(network_thread_); - - bool DscpEnabled() const; - - // This is the DSCP value used for both RTP and RTCP channels if DSCP is - // enabled. It can be changed at any time via `SetPreferredDscp`. 
- rtc::DiffServCodePoint PreferredDscp() const; - void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); - - rtc::scoped_refptr network_safety(); - - // Utility implementation for derived classes (video/voice) that applies - // the packet options and passes the data onwards to `SendPacket`. - void SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options); - - void SendRtcp(const uint8_t* data, size_t len); - - private: - // Apply the preferred DSCP setting to the underlying network interface RTP - // and RTCP channels. If DSCP is disabled, then apply the default DSCP value. - void UpdateDscp() RTC_RUN_ON(network_thread_); - - bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, - bool rtcp, - const rtc::PacketOptions& options); - - const bool enable_dscp_; - const rtc::scoped_refptr network_safety_ - RTC_PT_GUARDED_BY(network_thread_); - webrtc::TaskQueueBase* const network_thread_; - NetworkInterface* network_interface_ RTC_GUARDED_BY(network_thread_) = - nullptr; - rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = - rtc::DSCP_DEFAULT; - bool extmap_allow_mixed_ = false; + rtc::scoped_refptr + frame_transformer) = 0; }; // The stats information is structured as follows: @@ -415,11 +400,14 @@ struct MediaSenderInfo { // the SSRC of the corresponding outbound RTP stream, is unique. std::vector report_block_datas; absl::optional active; + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + webrtc::TimeDelta total_packet_send_delay = webrtc::TimeDelta::Zero(); }; struct MediaReceiverInfo { MediaReceiverInfo(); ~MediaReceiverInfo(); + void add_ssrc(const SsrcReceiverInfo& stat) { local_stats.push_back(stat); } // Temporary utility function for call sites that only provide SSRC. // As more info is added into SsrcSenderInfo, this function should go away. 
@@ -605,8 +593,6 @@ struct VideoSenderInfo : public MediaSenderInfo { uint64_t total_encode_time_ms = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalencodedbytestarget uint64_t total_encoded_bytes_target = 0; - // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay - webrtc::TimeDelta total_packet_send_delay = webrtc::TimeDelta::Zero(); bool has_entered_low_resolution = false; absl::optional qp_sum; webrtc::VideoContentType content_type = webrtc::VideoContentType::UNSPECIFIED; @@ -615,6 +601,8 @@ struct VideoSenderInfo : public MediaSenderInfo { uint32_t huge_frames_sent = 0; uint32_t aggregated_huge_frames_sent = 0; absl::optional rid; + absl::optional power_efficient_encoder; + absl::optional scalability_mode; }; struct VideoReceiverInfo : public MediaReceiverInfo { @@ -622,6 +610,7 @@ struct VideoReceiverInfo : public MediaReceiverInfo { ~VideoReceiverInfo(); std::vector ssrc_groups; std::string decoder_implementation_name; + absl::optional power_efficient_decoder; int packets_concealed = 0; int firs_sent = 0; int plis_sent = 0; @@ -811,24 +800,9 @@ struct AudioSendParameters : RtpSendParameters { struct AudioRecvParameters : RtpParameters {}; -class VoiceMediaChannel : public MediaChannel, public Delayable { +class VoiceMediaSendChannelInterface : public MediaSendChannelInterface { public: - VoiceMediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(network_thread, enable_dscp) {} - ~VoiceMediaChannel() override {} - - cricket::MediaType media_type() const override; virtual bool SetSendParameters(const AudioSendParameters& params) = 0; - virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0; - // Get the receive parameters for the incoming stream identified by `ssrc`. 
- virtual webrtc::RtpParameters GetRtpReceiveParameters( - uint32_t ssrc) const = 0; - // Retrieve the receive parameters for the default receive - // stream, which is used when SSRCs are not signaled. - virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; - // Starts or stops playout of received audio. - virtual void SetPlayout(bool playout) = 0; // Starts or stops sending (and potentially capture) of local audio. virtual void SetSend(bool send) = 0; // Configure stream for sending. @@ -836,10 +810,6 @@ class VoiceMediaChannel : public MediaChannel, public Delayable { bool enable, const AudioOptions* options, AudioSource* source) = 0; - // Set speaker output volume of the specified ssrc. - virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0; - // Set speaker output volume for future unsignaled streams. - virtual bool SetDefaultOutputVolume(double volume) = 0; // Returns if the telephone-event has been negotiated. virtual bool CanInsertDtmf() = 0; // Send a DTMF `event`. The DTMF out-of-band signal will be used. @@ -847,22 +817,34 @@ class VoiceMediaChannel : public MediaChannel, public Delayable { // The valid value for the `event` are 0 to 15 which corresponding to // DTMF event 0-9, *, #, A-D. virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0; - // Gets quality stats for the channel. - virtual bool GetStats(VoiceMediaInfo* info, - bool get_and_clear_legacy_stats) = 0; - - virtual void SetRawAudioSink( - uint32_t ssrc, - std::unique_ptr sink) = 0; - virtual void SetDefaultRawAudioSink( - std::unique_ptr sink) = 0; - - virtual std::vector GetSources(uint32_t ssrc) const = 0; // RingRTC change to configure opus virtual void ConfigureEncoders(const webrtc::AudioEncoder::Config& config) { RTC_LOG(LS_WARNING) << "Default VoiceMediaChannel::ConfigureEncoders(...) 
does nothing!"; } +}; + +class VoiceMediaReceiveChannelInterface : public MediaReceiveChannelInterface { + public: + virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0; + // Get the receive parameters for the incoming stream identified by `ssrc`. + virtual webrtc::RtpParameters GetRtpReceiveParameters( + uint32_t ssrc) const = 0; + virtual std::vector GetSources(uint32_t ssrc) const = 0; + // Retrieve the receive parameters for the default receive + // stream, which is used when SSRCs are not signaled. + virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; + // Starts or stops playout of received audio. + virtual void SetPlayout(bool playout) = 0; + // Set speaker output volume of the specified ssrc. + virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0; + // Set speaker output volume for future unsignaled streams. + virtual bool SetDefaultOutputVolume(double volume) = 0; + virtual void SetRawAudioSink( + uint32_t ssrc, + std::unique_ptr sink) = 0; + virtual void SetDefaultRawAudioSink( + std::unique_ptr sink) = 0; // RingRTC change to get audio levels virtual void GetAudioLevels( @@ -897,22 +879,9 @@ struct VideoSendParameters : RtpSendParameters { // encapsulate all the parameters needed for a video RtpReceiver. struct VideoRecvParameters : RtpParameters {}; -class VideoMediaChannel : public MediaChannel, public Delayable { +class VideoMediaSendChannelInterface : public MediaSendChannelInterface { public: - explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(network_thread, enable_dscp) {} - ~VideoMediaChannel() override {} - - cricket::MediaType media_type() const override; virtual bool SetSendParameters(const VideoSendParameters& params) = 0; - virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0; - // Get the receive parameters for the incoming stream identified by `ssrc`. 
- virtual webrtc::RtpParameters GetRtpReceiveParameters( - uint32_t ssrc) const = 0; - // Retrieve the receive parameters for the default receive - // stream, which is used when SSRCs are not signaled. - virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; // Gets the currently set codecs/payload types to be used for outgoing media. virtual bool GetSendCodec(VideoCodec* send_codec) = 0; // Starts or stops transmission (and potentially capture) of local video. @@ -923,33 +892,39 @@ class VideoMediaChannel : public MediaChannel, public Delayable { uint32_t ssrc, const VideoOptions* options, rtc::VideoSourceInterface* source) = 0; + // Cause generation of a keyframe for `ssrc` on a sending channel. + virtual void GenerateSendKeyFrame(uint32_t ssrc, + const std::vector& rids) = 0; + // Enable network condition based codec switching. + virtual void SetVideoCodecSwitchingEnabled(bool enabled) = 0; +}; + +class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { + public: + virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0; + // Get the receive parameters for the incoming stream identified by `ssrc`. + virtual webrtc::RtpParameters GetRtpReceiveParameters( + uint32_t ssrc) const = 0; + // Retrieve the receive parameters for the default receive + // stream, which is used when SSRCs are not signaled. + virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; // Sets the sink object to be used for the specified stream. virtual bool SetSink(uint32_t ssrc, rtc::VideoSinkInterface* sink) = 0; // The sink is used for the 'default' stream. virtual void SetDefaultSink( rtc::VideoSinkInterface* sink) = 0; - // This fills the "bitrate parts" (rtx, video bitrate) of the - // BandwidthEstimationInfo, since that part that isn't possible to get - // through webrtc::Call::GetStats, as they are statistics of the send - // streams. 
- // TODO(holmer): We should change this so that either BWE graphs doesn't - // need access to bitrates of the streams, or change the (RTC)StatsCollector - // so that it's getting the send stream stats separately by calling - // GetStats(), and merges with BandwidthEstimationInfo by itself. - virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0; - // Gets quality stats for the channel. - virtual bool GetStats(VideoMediaInfo* info) = 0; + // Request generation of a keyframe for `ssrc` on a receiving channel via + // RTCP feedback. + virtual void RequestRecvKeyFrame(uint32_t ssrc) = 0; + + virtual std::vector GetSources(uint32_t ssrc) const = 0; // Set recordable encoded frame callback for `ssrc` virtual void SetRecordableEncodedFrameCallback( uint32_t ssrc, std::function callback) = 0; // Clear recordable encoded frame callback for `ssrc` virtual void ClearRecordableEncodedFrameCallback(uint32_t ssrc) = 0; - // Cause generation of a keyframe for `ssrc` - virtual void GenerateKeyFrame(uint32_t ssrc) = 0; - - virtual std::vector GetSources(uint32_t ssrc) const = 0; }; // Info about data received in DataMediaChannel. For use in diff --git a/media/base/media_channel.cc b/media/base/media_channel_impl.cc similarity index 85% rename from media/base/media_channel.cc rename to media/base/media_channel_impl.cc index e01bfb1a82..626de3438f 100644 --- a/media/base/media_channel.cc +++ b/media/base/media_channel_impl.cc @@ -8,9 +8,39 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/audio_options.h" +#include "api/media_stream_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_sender_interface.h" +#include "api/units/time_delta.h" +#include "api/video/video_timing.h" +#include "common_video/include/quality_limitation_reason.h" +#include "media/base/codec.h" +#include "media/base/media_channel.h" #include "media/base/rtp_utils.h" +#include "media/base/stream_params.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, + RTCError error) { + if (callback) { + std::move(callback)(error); + callback = nullptr; + } + return error; +} + +} // namespace webrtc namespace cricket { using webrtc::FrameDecryptorInterface; @@ -34,7 +64,7 @@ MediaChannel::~MediaChannel() { RTC_DCHECK(!network_interface_); } -void MediaChannel::SetInterface(NetworkInterface* iface) { +void MediaChannel::SetInterface(MediaChannelNetworkInterface* iface) { RTC_DCHECK_RUN_ON(network_thread_); iface ? network_safety_->SetAlive() : network_safety_->SetNotAlive(); network_interface_ = iface; @@ -57,8 +87,6 @@ void MediaChannel::SetFrameDecryptor( // Placeholder should be pure virtual once internal supports it. 
} -void MediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {} - bool MediaChannel::SendPacket(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) { return DoSendPacket(packet, false, options); @@ -69,7 +97,7 @@ bool MediaChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet, return DoSendPacket(packet, true, options); } -int MediaChannel::SetOption(NetworkInterface::SocketType type, +int MediaChannel::SetOption(MediaChannelNetworkInterface::SocketType type, rtc::Socket::Option opt, int option) { RTC_DCHECK_RUN_ON(network_thread_); @@ -101,7 +129,7 @@ void MediaChannel::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) {} -int MediaChannel::SetOptionLocked(NetworkInterface::SocketType type, +int MediaChannel::SetOptionLocked(MediaChannelNetworkInterface::SocketType type, rtc::Socket::Option opt, int option) { if (!network_interface_) @@ -145,10 +173,11 @@ rtc::scoped_refptr MediaChannel::network_safety() { void MediaChannel::UpdateDscp() { rtc::DiffServCodePoint value = enable_dscp_ ? 
preferred_dscp_ : rtc::DSCP_DEFAULT; - int ret = - SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value); + int ret = SetOptionLocked(MediaChannelNetworkInterface::ST_RTP, + rtc::Socket::OPT_DSCP, value); if (ret == 0) - SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, value); + SetOptionLocked(MediaChannelNetworkInterface::ST_RTCP, + rtc::Socket::OPT_DSCP, value); } bool MediaChannel::DoSendPacket(rtc::CopyOnWriteBuffer* packet, @@ -260,4 +289,6 @@ cricket::MediaType VideoMediaChannel::media_type() const { return cricket::MediaType::MEDIA_TYPE_VIDEO; } +void VideoMediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {} + } // namespace cricket diff --git a/media/base/media_channel_impl.h b/media/base/media_channel_impl.h new file mode 100644 index 0000000000..41bead70b9 --- /dev/null +++ b/media/base/media_channel_impl.h @@ -0,0 +1,242 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_ +#define MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/call/transport.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_types.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "media/base/media_channel.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/dscp.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" +#include "rtc_base/thread_annotations.h" +// This file contains the base classes for classes that implement +// the MediaChannel interfaces. +// These implementation classes used to be the exposed interface names, +// but this is in the process of being changed. +// TODO(bugs.webrtc.org/13931): Consider removing these classes. + +namespace cricket { + +class VoiceMediaChannel; +class VideoMediaChannel; + +class MediaChannel : public MediaSendChannelInterface, + public MediaReceiveChannelInterface { + public: + explicit MediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false); + virtual ~MediaChannel(); + + // Downcasting to the implemented interfaces. + MediaSendChannelInterface* AsSendChannel() { return this; } + + MediaReceiveChannelInterface* AsReceiveChannel() { return this; } + + // Downcasting to the subclasses. 
+ virtual VideoMediaChannel* AsVideoChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + + virtual VoiceMediaChannel* AsVoiceChannel() { + RTC_CHECK_NOTREACHED(); + return nullptr; + } + + // Must declare the methods inherited from the base interface template, + // even when abstract, to tell the compiler that all instances of the name + // referred to by subclasses of this share the same implementation. + cricket::MediaType media_type() const override = 0; + void OnPacketReceived(rtc::CopyOnWriteBuffer packet, + int64_t packet_time_us) override = 0; + void OnPacketSent(const rtc::SentPacket& sent_packet) override = 0; + void OnReadyToSend(bool ready) override = 0; + void OnNetworkRouteChanged(absl::string_view transport_name, + const rtc::NetworkRoute& network_route) override = + 0; + + // Sets the abstract interface class for sending RTP/RTCP data. + virtual void SetInterface(MediaChannelNetworkInterface* iface); + // Returns the absolute sendtime extension id value from media channel. + virtual int GetRtpSendTimeExtnId() const; + // Base method to send packet using MediaChannelNetworkInterface. + bool SendPacket(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options); + + bool SendRtcp(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options); + + int SetOption(MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option); + + // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. + // Set to true if it's allowed to mix one- and two-byte RTP header extensions + // in the same stream. The setter and getter must only be called from + // worker_thread. + void SetExtmapAllowMixed(bool extmap_allow_mixed) override; + bool ExtmapAllowMixed() const override; + + // Returns `true` if a non-null MediaChannelNetworkInterface pointer is held. + // Must be called on the network thread. 
+ bool HasNetworkInterface() const; + + void SetFrameEncryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_encryptor) override; + void SetFrameDecryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_decryptor) override; + + void SetEncoderToPacketizerFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override; + void SetDepacketizerToDecoderFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override; + + protected: + int SetOptionLocked(MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option) RTC_RUN_ON(network_thread_); + + bool DscpEnabled() const; + + // This is the DSCP value used for both RTP and RTCP channels if DSCP is + // enabled. It can be changed at any time via `SetPreferredDscp`. + rtc::DiffServCodePoint PreferredDscp() const; + void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); + + rtc::scoped_refptr network_safety(); + + // Utility implementation for derived classes (video/voice) that applies + // the packet options and passes the data onwards to `SendPacket`. + void SendRtp(const uint8_t* data, + size_t len, + const webrtc::PacketOptions& options); + + void SendRtcp(const uint8_t* data, size_t len); + + private: + // Apply the preferred DSCP setting to the underlying network interface RTP + // and RTCP channels. If DSCP is disabled, then apply the default DSCP value. 
+ void UpdateDscp() RTC_RUN_ON(network_thread_); + + bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, + bool rtcp, + const rtc::PacketOptions& options); + + const bool enable_dscp_; + const rtc::scoped_refptr network_safety_ + RTC_PT_GUARDED_BY(network_thread_); + webrtc::TaskQueueBase* const network_thread_; + MediaChannelNetworkInterface* network_interface_ + RTC_GUARDED_BY(network_thread_) = nullptr; + rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = + rtc::DSCP_DEFAULT; + bool extmap_allow_mixed_ = false; +}; + +// Base class for implementation classes + +class VideoMediaChannel : public MediaChannel, + public VideoMediaSendChannelInterface, + public VideoMediaReceiveChannelInterface { + public: + explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false) + : MediaChannel(network_thread, enable_dscp) {} + ~VideoMediaChannel() override {} + + // Downcasting to the implemented interfaces. + VideoMediaSendChannelInterface* AsVideoSendChannel() override { return this; } + + VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { + return this; + } + cricket::MediaType media_type() const override; + + // Downcasting to the subclasses. + VideoMediaChannel* AsVideoChannel() override { return this; } + + void SetExtmapAllowMixed(bool mixed) override { + MediaChannel::SetExtmapAllowMixed(mixed); + } + bool ExtmapAllowMixed() const override { + return MediaChannel::ExtmapAllowMixed(); + } + // This fills the "bitrate parts" (rtx, video bitrate) of the + // BandwidthEstimationInfo, since that part that isn't possible to get + // through webrtc::Call::GetStats, as they are statistics of the send + // streams. + // TODO(holmer): We should change this so that either BWE graphs doesn't + // need access to bitrates of the streams, or change the (RTC)StatsCollector + // so that it's getting the send stream stats separately by calling + // GetStats(), and merges with BandwidthEstimationInfo by itself. 
+ virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0; + // Gets quality stats for the channel. + virtual bool GetStats(VideoMediaInfo* info) = 0; + // Enable network condition based codec switching. + void SetVideoCodecSwitchingEnabled(bool enabled) override; +}; + +// Base class for implementation classes +class VoiceMediaChannel : public MediaChannel, + public VoiceMediaSendChannelInterface, + public VoiceMediaReceiveChannelInterface { + public: + MediaType media_type() const override; + VoiceMediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false) + : MediaChannel(network_thread, enable_dscp) {} + ~VoiceMediaChannel() override {} + + // Downcasting to the implemented interfaces. + VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } + + VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { + return this; + } + + VoiceMediaChannel* AsVoiceChannel() override { return this; } + + void SetExtmapAllowMixed(bool mixed) override { + MediaChannel::SetExtmapAllowMixed(mixed); + } + bool ExtmapAllowMixed() const override { + return MediaChannel::ExtmapAllowMixed(); + } + + // Gets quality stats for the channel. + virtual bool GetStats(VoiceMediaInfo* info, + bool get_and_clear_legacy_stats) = 0; +}; + +} // namespace cricket + +#endif // MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_ diff --git a/media/base/media_engine.cc b/media/base/media_engine.cc index 694e690056..0efbd71bf7 100644 --- a/media/base/media_engine.cc +++ b/media/base/media_engine.cc @@ -65,8 +65,46 @@ std::vector GetDefaultEnabledRtpHeaderExtensions( return extensions; } +webrtc::RTCError CheckScalabilityModeValues( + const webrtc::RtpParameters& rtp_parameters, + rtc::ArrayView codecs) { + using webrtc::RTCErrorType; + + if (codecs.empty()) { + // This is an audio sender or an extra check in the stack where the codec + // list is not available and we can't check the scalability_mode values. 
+ return webrtc::RTCError::OK(); + } + + for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { + if (rtp_parameters.encodings[i].scalability_mode) { + bool scalabilityModeFound = false; + for (const cricket::VideoCodec& codec : codecs) { + for (const auto& scalability_mode : codec.scalability_modes) { + if (ScalabilityModeToString(scalability_mode) == + *rtp_parameters.encodings[i].scalability_mode) { + scalabilityModeFound = true; + break; + } + } + if (scalabilityModeFound) + break; + } + + if (!scalabilityModeFound) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Attempted to set RtpParameters scalabilityMode " + "to an unsupported value for the current codecs."); + } + } + } + + return webrtc::RTCError::OK(); +} + webrtc::RTCError CheckRtpParametersValues( - const webrtc::RtpParameters& rtp_parameters) { + const webrtc::RtpParameters& rtp_parameters, + rtc::ArrayView codecs) { using webrtc::RTCErrorType; for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { @@ -115,12 +153,20 @@ webrtc::RTCError CheckRtpParametersValues( } } - return webrtc::RTCError::OK(); + return CheckScalabilityModeValues(rtp_parameters, codecs); } webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( const webrtc::RtpParameters& old_rtp_parameters, const webrtc::RtpParameters& rtp_parameters) { + return CheckRtpParametersInvalidModificationAndValues(old_rtp_parameters, + rtp_parameters, {}); +} + +webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( + const webrtc::RtpParameters& old_rtp_parameters, + const webrtc::RtpParameters& rtp_parameters, + rtc::ArrayView codecs) { using webrtc::RTCErrorType; if (rtp_parameters.encodings.size() != old_rtp_parameters.encodings.size()) { LOG_AND_RETURN_ERROR( @@ -155,7 +201,7 @@ webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( "Attempted to set RtpParameters with modified SSRC"); } - return CheckRtpParametersValues(rtp_parameters); + return 
CheckRtpParametersValues(rtp_parameters, codecs); } CompositeMediaEngine::CompositeMediaEngine( diff --git a/media/base/media_engine.h b/media/base/media_engine.h index 2b08b3951b..96b54babcc 100644 --- a/media/base/media_engine.h +++ b/media/base/media_engine.h @@ -24,6 +24,7 @@ #include "call/audio_state.h" #include "media/base/codec.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "media/base/media_config.h" #include "media/base/video_common.h" #include "rtc_base/system/file_wrapper.h" @@ -37,9 +38,28 @@ class Call; namespace cricket { -webrtc::RTCError CheckRtpParametersValues( - const webrtc::RtpParameters& new_parameters); +// Checks that the scalability_mode value of each encoding is supported by at +// least one video codec of the list. If the list is empty, no check is done. +webrtc::RTCError CheckScalabilityModeValues( + const webrtc::RtpParameters& new_parameters, + rtc::ArrayView codecs); +// Checks the parameters have valid and supported values, and checks parameters +// with CheckScalabilityModeValues(). +webrtc::RTCError CheckRtpParametersValues( + const webrtc::RtpParameters& new_parameters, + rtc::ArrayView codecs); + +// Checks that the immutable values have not changed in new_parameters and +// checks all parameters with CheckRtpParametersValues(). +webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( + const webrtc::RtpParameters& old_parameters, + const webrtc::RtpParameters& new_parameters, + rtc::ArrayView codecs); + +// Checks that the immutable values have not changed in new_parameters and +// checks parameters (except SVC) with CheckRtpParametersValues(). It should +// usually be paired with a call to CheckScalabilityModeValues(). 
webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( const webrtc::RtpParameters& old_parameters, const webrtc::RtpParameters& new_parameters); diff --git a/media/engine/fake_webrtc_call.cc b/media/engine/fake_webrtc_call.cc index 48a8b12092..8046c3ad3a 100644 --- a/media/engine/fake_webrtc_call.cc +++ b/media/engine/fake_webrtc_call.cc @@ -15,6 +15,7 @@ #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" #include "api/call/audio_sink.h" +#include "media/base/media_channel.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/checks.h" #include "rtc_base/gunit.h" @@ -31,8 +32,10 @@ FakeAudioSendStream::FakeAudioSendStream( : id_(id), config_(config) {} void FakeAudioSendStream::Reconfigure( - const webrtc::AudioSendStream::Config& config) { + const webrtc::AudioSendStream::Config& config, + webrtc::SetParametersCallback callback) { config_ = config; + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } const webrtc::AudioSendStream::Config& FakeAudioSendStream::GetConfig() const { @@ -275,6 +278,12 @@ webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() { void FakeVideoSendStream::ReconfigureVideoEncoder( webrtc::VideoEncoderConfig config) { + ReconfigureVideoEncoder(std::move(config), nullptr); +} + +void FakeVideoSendStream::ReconfigureVideoEncoder( + webrtc::VideoEncoderConfig config, + webrtc::SetParametersCallback callback) { int width, height; if (last_frame_) { width = last_frame_->width(); @@ -326,9 +335,10 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( codec_settings_set_ = config.encoder_specific_settings != nullptr; encoder_config_ = std::move(config); ++num_encoder_reconfigurations_; + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } -void FakeVideoSendStream::UpdateActiveSimulcastLayers( +void FakeVideoSendStream::StartPerRtpStream( const std::vector active_layers) { sending_ = false; for (const bool active_layer : active_layers) { diff --git 
a/media/engine/fake_webrtc_call.h b/media/engine/fake_webrtc_call.h index 311e35a7a9..370b70700f 100644 --- a/media/engine/fake_webrtc_call.h +++ b/media/engine/fake_webrtc_call.h @@ -62,7 +62,8 @@ class FakeAudioSendStream final : public webrtc::AudioSendStream { private: // webrtc::AudioSendStream implementation. - void Reconfigure(const webrtc::AudioSendStream::Config& config) override; + void Reconfigure(const webrtc::AudioSendStream::Config& config, + webrtc::SetParametersCallback callback) override; void Start() override { sending_ = true; } void Stop() override { sending_ = false; } void SendAudioData(std::unique_ptr audio_frame) override { @@ -194,13 +195,14 @@ class FakeVideoSendStream final rtc::VideoSourceInterface* source() const { return source_; } + void GenerateKeyFrame(const std::vector& rids) override {} private: // rtc::VideoSinkInterface implementation. void OnFrame(const webrtc::VideoFrame& frame) override; // webrtc::VideoSendStream implementation. - void UpdateActiveSimulcastLayers(std::vector active_layers) override; + void StartPerRtpStream(std::vector active_layers) override; void Start() override; void Stop() override; bool started() override { return IsSending(); } @@ -212,7 +214,10 @@ class FakeVideoSendStream final rtc::VideoSourceInterface* source, const webrtc::DegradationPreference& degradation_preference) override; webrtc::VideoSendStream::Stats GetStats() override; + void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config, + webrtc::SetParametersCallback callback) override; bool sending_; webrtc::VideoSendStream::Config config_; diff --git a/media/engine/internal_decoder_factory_unittest.cc b/media/engine/internal_decoder_factory_unittest.cc index d37c1a8247..53811b4879 100644 --- a/media/engine/internal_decoder_factory_unittest.cc +++ b/media/engine/internal_decoder_factory_unittest.cc @@ -16,6 +16,7 @@ #include "api/video_codecs/vp9_profile.h" 
#include "media/base/media_constants.h" #include "system_wrappers/include/field_trial.h" +#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -122,7 +123,7 @@ TEST(InternalDecoderFactoryTest, Av1Profile1_Dav1dDecoderTrialEnabled) { } TEST(InternalDecoderFactoryTest, Av1Profile1_Dav1dDecoderTrialDisabled) { - InitFieldTrialsFromString(kDav1dDecoderFieldTrialDisabled); + test::ScopedFieldTrials disable_dav1d(kDav1dDecoderFieldTrialDisabled); InternalDecoderFactory factory; std::unique_ptr decoder = factory.CreateVideoDecoder( SdpVideoFormat(cricket::kAv1CodecName, diff --git a/media/engine/internal_encoder_factory.cc b/media/engine/internal_encoder_factory.cc index 4243f52481..7b5fc24e0a 100644 --- a/media/engine/internal_encoder_factory.cc +++ b/media/engine/internal_encoder_factory.cc @@ -38,32 +38,6 @@ using Factory = webrtc::LibaomAv1EncoderTemplateAdapter, #endif webrtc::LibvpxVp9EncoderTemplateAdapter>; - -absl::optional MatchOriginalFormat( - const SdpVideoFormat& format) { - const auto supported_formats = Factory().GetSupportedFormats(); - - absl::optional res; - int best_parameter_match = 0; - for (const auto& supported_format : supported_formats) { - if (absl::EqualsIgnoreCase(supported_format.name, format.name)) { - int matching_parameters = 0; - for (const auto& kv : supported_format.parameters) { - auto it = format.parameters.find(kv.first); - if (it != format.parameters.end() && it->second == kv.second) { - matching_parameters += 1; - } - } - - if (!res || matching_parameters > best_parameter_match) { - res = supported_format; - best_parameter_match = matching_parameters; - } - } - } - - return res; -} } // namespace std::vector InternalEncoderFactory::GetSupportedFormats() @@ -73,7 +47,8 @@ std::vector InternalEncoderFactory::GetSupportedFormats() std::unique_ptr InternalEncoderFactory::CreateVideoEncoder( const SdpVideoFormat& format) { - auto original_format = MatchOriginalFormat(format); + auto original_format = + 
FuzzyMatchSdpVideoFormat(Factory().GetSupportedFormats(), format); return original_format ? Factory().CreateVideoEncoder(*original_format) : nullptr; } @@ -81,7 +56,8 @@ std::unique_ptr InternalEncoderFactory::CreateVideoEncoder( VideoEncoderFactory::CodecSupport InternalEncoderFactory::QueryCodecSupport( const SdpVideoFormat& format, absl::optional scalability_mode) const { - auto original_format = MatchOriginalFormat(format); + auto original_format = + FuzzyMatchSdpVideoFormat(Factory().GetSupportedFormats(), format); return original_format ? Factory().QueryCodecSupport(*original_format, scalability_mode) : VideoEncoderFactory::CodecSupport{.is_supported = false}; diff --git a/media/engine/multiplex_codec_factory.cc b/media/engine/multiplex_codec_factory.cc index fb296811db..660c3594bc 100644 --- a/media/engine/multiplex_codec_factory.cc +++ b/media/engine/multiplex_codec_factory.cc @@ -82,16 +82,17 @@ MultiplexDecoderFactory::MultiplexDecoderFactory( std::vector MultiplexDecoderFactory::GetSupportedFormats() const { std::vector formats = factory_->GetSupportedFormats(); + std::vector augmented_formats = formats; for (const auto& format : formats) { if (absl::EqualsIgnoreCase(format.name, kMultiplexAssociatedCodecName)) { SdpVideoFormat multiplex_format = format; multiplex_format.parameters[cricket::kCodecParamAssociatedCodecName] = format.name; multiplex_format.name = cricket::kMultiplexCodecName; - formats.push_back(multiplex_format); + augmented_formats.push_back(multiplex_format); } } - return formats; + return augmented_formats; } std::unique_ptr MultiplexDecoderFactory::CreateVideoDecoder( diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc index e7f6205ab6..3a73a4ac10 100644 --- a/media/engine/simulcast_encoder_adapter.cc +++ b/media/engine/simulcast_encoder_adapter.cc @@ -86,22 +86,24 @@ int CountActiveStreams(const webrtc::VideoCodec& codec) { return active_streams_count; } -int VerifyCodec(const 
webrtc::VideoCodec* inst) { - if (inst == nullptr) { +int VerifyCodec(const webrtc::VideoCodec* codec_settings) { + if (codec_settings == nullptr) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->maxFramerate < 1) { + if (codec_settings->maxFramerate < 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } // allow zero to represent an unspecified maxBitRate - if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { + if (codec_settings->maxBitrate > 0 && + codec_settings->startBitrate > codec_settings->maxBitrate) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->width <= 1 || inst->height <= 1) { + if (codec_settings->width <= 1 || codec_settings->height <= 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->codecType == webrtc::kVideoCodecVP8 && - inst->VP8().automaticResizeOn && CountActiveStreams(*inst) > 1) { + if (codec_settings->codecType == webrtc::kVideoCodecVP8 && + codec_settings->VP8().automaticResizeOn && + CountActiveStreams(*codec_settings) > 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } return WEBRTC_VIDEO_CODEC_OK; @@ -299,7 +301,7 @@ int SimulcastEncoderAdapter::Release() { } int SimulcastEncoderAdapter::InitEncode( - const VideoCodec* inst, + const VideoCodec* codec_settings, const VideoEncoder::Settings& settings) { RTC_DCHECK_RUN_ON(&encoder_queue_); @@ -307,15 +309,15 @@ int SimulcastEncoderAdapter::InitEncode( return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - int ret = VerifyCodec(inst); + int ret = VerifyCodec(codec_settings); if (ret < 0) { return ret; } Release(); - codec_ = *inst; - total_streams_count_ = CountAllStreams(*inst); + codec_ = *codec_settings; + total_streams_count_ = CountAllStreams(*codec_settings); // TODO(ronghuawu): Remove once this is handled in LibvpxVp8Encoder. if (codec_.qpMax < kDefaultMinQp) { @@ -349,7 +351,7 @@ int SimulcastEncoderAdapter::InitEncode( // (active_streams_count >= 1). SEA creates N=active_streams_count encoders // and configures each to produce a single stream. 
- int active_streams_count = CountActiveStreams(*inst); + int active_streams_count = CountActiveStreams(*codec_settings); // If we only have a single active layer it is better to create an encoder // with only one configured layer than creating it with all-but-one disabled // layers because that way we control scaling. @@ -461,24 +463,14 @@ int SimulcastEncoderAdapter::Encode( } } - // All active streams should generate a key frame if - // a key frame is requested by any stream. bool is_keyframe_needed = false; - if (frame_types) { - for (const auto& frame_type : *frame_types) { - if (frame_type == VideoFrameType::kVideoFrameKey) { - is_keyframe_needed = true; - break; - } - } - } - - if (!is_keyframe_needed) { - for (const auto& layer : stream_contexts_) { - if (layer.is_keyframe_needed()) { - is_keyframe_needed = true; - break; - } + for (const auto& layer : stream_contexts_) { + if (layer.is_keyframe_needed()) { + // This is legacy behavior, generating a keyframe on all layers + // when generating one for a layer that became active for the first time + // or after being disabled. + is_keyframe_needed = true; + break; } } @@ -501,17 +493,38 @@ int SimulcastEncoderAdapter::Encode( // frame types for all streams should be passed to the encoder unchanged. // Otherwise a single per-encoder frame type is passed. std::vector stream_frame_types( - bypass_mode_ ? total_streams_count_ : 1); + bypass_mode_ + ? std::max(codec_.numberOfSimulcastStreams, 1) + : 1, + VideoFrameType::kVideoFrameDelta); + + bool keyframe_requested = false; if (is_keyframe_needed) { std::fill(stream_frame_types.begin(), stream_frame_types.end(), VideoFrameType::kVideoFrameKey); - layer.OnKeyframe(frame_timestamp); - } else { - if (layer.ShouldDropFrame(frame_timestamp)) { - continue; + keyframe_requested = true; + } else if (frame_types) { + if (bypass_mode_) { + // In bypass mode, we effectively pass on frame_types. 
+ RTC_DCHECK_EQ(frame_types->size(), stream_frame_types.size()); + stream_frame_types = *frame_types; + keyframe_requested = + absl::c_any_of(*frame_types, [](const VideoFrameType frame_type) { + return frame_type == VideoFrameType::kVideoFrameKey; + }); + } else { + size_t stream_idx = static_cast(layer.stream_idx()); + if (frame_types->size() > stream_idx && + (*frame_types)[stream_idx] == VideoFrameType::kVideoFrameKey) { + stream_frame_types[0] = VideoFrameType::kVideoFrameKey; + keyframe_requested = true; + } } - std::fill(stream_frame_types.begin(), stream_frame_types.end(), - VideoFrameType::kVideoFrameDelta); + } + if (keyframe_requested) { + layer.OnKeyframe(frame_timestamp); + } else if (layer.ShouldDropFrame(frame_timestamp)) { + continue; } // If scaling isn't required, because the input resolution diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc index e0e3ea86ec..15a8aeb71e 100644 --- a/media/engine/simulcast_encoder_adapter_unittest.cc +++ b/media/engine/simulcast_encoder_adapter_unittest.cc @@ -176,7 +176,7 @@ class MockVideoEncoderFactory : public VideoEncoderFactory { } void set_init_encode_return_value(int32_t value); void set_requested_resolution_alignments( - std::vector requested_resolution_alignments) { + std::vector requested_resolution_alignments) { requested_resolution_alignments_ = requested_resolution_alignments; } void set_supports_simulcast(bool supports_simulcast) { @@ -195,7 +195,7 @@ class MockVideoEncoderFactory : public VideoEncoderFactory { std::vector encoders_; std::vector encoder_names_; // Keep number of entries in sync with `kMaxSimulcastStreams`. 
- std::vector requested_resolution_alignments_ = {1, 1, 1}; + std::vector requested_resolution_alignments_ = {1, 1, 1}; bool supports_simulcast_ = false; std::vector resolution_bitrate_limits_; }; @@ -284,7 +284,8 @@ class MockVideoEncoder : public VideoEncoder { scaling_settings_ = settings; } - void set_requested_resolution_alignment(int requested_resolution_alignment) { + void set_requested_resolution_alignment( + uint32_t requested_resolution_alignment) { requested_resolution_alignment_ = requested_resolution_alignment; } @@ -332,7 +333,7 @@ class MockVideoEncoder : public VideoEncoder { bool supports_native_handle_ = false; std::string implementation_name_ = "unknown"; VideoEncoder::ScalingSettings scaling_settings_; - int requested_resolution_alignment_ = 1; + uint32_t requested_resolution_alignment_ = 1; bool apply_alignment_to_all_simulcast_layers_ = false; bool has_trusted_rate_controller_ = false; bool is_hardware_accelerated_ = false; @@ -1089,6 +1090,89 @@ TEST_F(TestSimulcastEncoderAdapterFake, NativeHandleForwardingOnlyIfSupported) { EXPECT_EQ(0, adapter_->Encode(input_frame, &frame_types)); } +TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { + // Set up common settings for three streams. + SimulcastTestFixtureImpl::DefaultSettings( + &codec_, static_cast(kTestTemporalLayerProfile), + kVideoCodecVP8); + rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + adapter_->RegisterEncodeCompleteCallback(this); + + // Input data. + rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + + // Encode with three streams. 
+ codec_.startBitrate = 3000; + EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); + + std::vector frame_types; + frame_types.resize(3, VideoFrameType::kVideoFrameKey); + + std::vector expected_keyframe(1, + VideoFrameType::kVideoFrameKey); + std::vector expected_deltaframe( + 1, VideoFrameType::kVideoFrameDelta); + + std::vector original_encoders = + helper_->factory()->encoders(); + ASSERT_EQ(3u, original_encoders.size()); + EXPECT_CALL(*original_encoders[0], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[1], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[2], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + VideoFrame first_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(0) + .set_timestamp_ms(0) + .build(); + EXPECT_EQ(0, adapter_->Encode(first_frame, &frame_types)); + + // Request [key, delta, delta]. + EXPECT_CALL(*original_encoders[0], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[1], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[2], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + frame_types[0] = VideoFrameType::kVideoFrameKey; + frame_types[1] = VideoFrameType::kVideoFrameDelta; + frame_types[2] = VideoFrameType::kVideoFrameDelta; + VideoFrame second_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(10000) + .set_timestamp_ms(100000) + .build(); + EXPECT_EQ(0, adapter_->Encode(second_frame, &frame_types)); + + // Request [delta, key, delta]. 
+ EXPECT_CALL(*original_encoders[0], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[1], + Encode(_, ::testing::Pointee(::testing::Eq(expected_keyframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + EXPECT_CALL(*original_encoders[2], + Encode(_, ::testing::Pointee(::testing::Eq(expected_deltaframe)))) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + frame_types[0] = VideoFrameType::kVideoFrameDelta; + frame_types[1] = VideoFrameType::kVideoFrameKey; + frame_types[2] = VideoFrameType::kVideoFrameDelta; + VideoFrame third_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(20000) + .set_timestamp_ms(200000) + .build(); + EXPECT_EQ(0, adapter_->Encode(third_frame, &frame_types)); +} + TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), @@ -1313,7 +1397,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, helper_->factory()->set_requested_resolution_alignments({2, 4, 7}); EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); - EXPECT_EQ(adapter_->GetEncoderInfo().requested_resolution_alignment, 28); + EXPECT_EQ(adapter_->GetEncoderInfo().requested_resolution_alignment, 28u); } TEST_F(TestSimulcastEncoderAdapterFake, @@ -1384,7 +1468,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, EncoderInfoFromFieldTrial) { EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); ASSERT_EQ(3u, helper_->factory()->encoders().size()); - EXPECT_EQ(8, adapter_->GetEncoderInfo().requested_resolution_alignment); + EXPECT_EQ(8u, adapter_->GetEncoderInfo().requested_resolution_alignment); EXPECT_TRUE( adapter_->GetEncoderInfo().apply_alignment_to_all_simulcast_layers); EXPECT_TRUE(adapter_->GetEncoderInfo().resolution_bitrate_limits.empty()); @@ -1407,7 +1491,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, EXPECT_EQ(0, adapter_->InitEncode(&codec_, 
kSettings)); ASSERT_EQ(1u, helper_->factory()->encoders().size()); - EXPECT_EQ(9, adapter_->GetEncoderInfo().requested_resolution_alignment); + EXPECT_EQ(9u, adapter_->GetEncoderInfo().requested_resolution_alignment); EXPECT_FALSE( adapter_->GetEncoderInfo().apply_alignment_to_all_simulcast_layers); EXPECT_THAT( diff --git a/media/engine/webrtc_media_engine.cc b/media/engine/webrtc_media_engine.cc index d0d6de2034..514e228780 100644 --- a/media/engine/webrtc_media_engine.cc +++ b/media/engine/webrtc_media_engine.cc @@ -10,13 +10,19 @@ #include "media/engine/webrtc_media_engine.h" +#include #include #include +#include #include #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "api/transport/field_trial_based_config.h" +#include "media/base/media_constants.h" #include "media/engine/webrtc_voice_engine.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" #ifdef HAVE_WEBRTC_VIDEO #include "media/engine/webrtc_video_engine.h" diff --git a/media/engine/webrtc_media_engine.h b/media/engine/webrtc_media_engine.h index 27d6f34c2f..e65824bd83 100644 --- a/media/engine/webrtc_media_engine.h +++ b/media/engine/webrtc_media_engine.h @@ -12,18 +12,19 @@ #define MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_H_ #include -#include #include +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/audio/audio_frame_processor.h" #include "api/audio/audio_mixer.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" #include "api/field_trials_view.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" -#include "api/transport/field_trial_based_config.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "media/base/codec.h" diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 
c700238fa2..ff5d3636b2 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -416,6 +416,28 @@ MergeInfoAboutOutboundRtpSubstreams( return rtp_substreams; } +bool IsActiveFromEncodings( + absl::optional ssrc, + const std::vector& encodings) { + if (ssrc.has_value()) { + // Report the `active` value of a specific ssrc, or false if an encoding + // with this ssrc does not exist. + auto encoding_it = std::find_if( + encodings.begin(), encodings.end(), + [ssrc = ssrc.value()](const webrtc::RtpEncodingParameters& encoding) { + return encoding.ssrc.has_value() && encoding.ssrc.value() == ssrc; + }); + return encoding_it != encodings.end() ? encoding_it->active : false; + } + // If `ssrc` is not specified then any encoding being active counts as active. + for (const auto& encoding : encodings) { + if (encoding.active) { + return true; + } + } + return false; +} + } // namespace // This constant is really an on/off, lower-level configurable NACK history @@ -1024,7 +1046,8 @@ webrtc::RtpParameters WebRtcVideoChannel::GetRtpSendParameters( webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) { + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetRtpSendParameters"); auto it = send_streams_.find(ssrc); @@ -1032,7 +1055,8 @@ webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters( RTC_LOG(LS_ERROR) << "Attempting to set RTP send parameters for stream " "with ssrc " << ssrc << " which doesn't exist."; - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } // TODO(deadbeef): Handle setting parameters with a list of codecs in a @@ -1041,7 +1065,8 @@ webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters( if 
(current_parameters.codecs != parameters.codecs) { RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs " "is not currently supported."; - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } if (!parameters.encodings.empty()) { @@ -1067,7 +1092,7 @@ webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters( SetPreferredDscp(new_dscp); } - return it->second->SetRtpParameters(parameters); + return it->second->SetRtpParameters(parameters, std::move(callback)); } webrtc::RtpParameters WebRtcVideoChannel::GetRtpReceiveParameters( @@ -1869,12 +1894,12 @@ void WebRtcVideoChannel::OnNetworkRouteChanged( })); } -void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { +void WebRtcVideoChannel::SetInterface(MediaChannelNetworkInterface* iface) { RTC_DCHECK_RUN_ON(&network_thread_checker_); MediaChannel::SetInterface(iface); // Set the RTP recv/send buffer to a bigger size. - MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_RCVBUF, - kVideoRtpRecvBufferSize); + MediaChannel::SetOption(MediaChannelNetworkInterface::ST_RTP, + rtc::Socket::OPT_RCVBUF, kVideoRtpRecvBufferSize); // Speculative change to increase the outbound socket buffer size. 
// In b/15152257, we are seeing a significant number of packets discarded @@ -1891,8 +1916,8 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { send_buffer_size = kVideoRtpSendBufferSize; } - MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_SNDBUF, - send_buffer_size); + MediaChannel::SetOption(MediaChannelNetworkInterface::ST_RTP, + rtc::Socket::OPT_SNDBUF, send_buffer_size); } void WebRtcVideoChannel::SetFrameDecryptor( @@ -2141,7 +2166,7 @@ bool WebRtcVideoChannel::WebRtcVideoSendStream::SetVideoSend( old_options.is_screencast = options->is_screencast; } if (parameters_.options != old_options) { - ReconfigureEncoder(); + ReconfigureEncoder(nullptr); } } @@ -2268,7 +2293,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetSendParameters( } if (params.max_bandwidth_bps) { parameters_.max_bitrate_bps = *params.max_bandwidth_bps; - ReconfigureEncoder(); + ReconfigureEncoder(nullptr); } if (params.conference_mode) { parameters_.conference_mode = *params.conference_mode; @@ -2290,11 +2315,17 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetSendParameters( } webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( - const webrtc::RtpParameters& new_parameters) { + const webrtc::RtpParameters& new_parameters, + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); + // This is checked higher in the stack (RtpSender), so this is only checking + // for users accessing the private APIs or tests, not specification + // conformance. + // TODO(orphis): Migrate tests to later make this a DCHECK only webrtc::RTCError error = CheckRtpParametersInvalidModificationAndValues( rtp_parameters_, new_parameters); if (!error.ok()) { + // Error is propagated to the callback at a higher level return error; } @@ -2346,7 +2377,9 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( // Codecs are currently handled at the WebRtcVideoChannel level. 
rtp_parameters_.codecs.clear(); if (reconfigure_encoder || new_send_state) { - ReconfigureEncoder(); + // Callback responsibility is delegated to ReconfigureEncoder() + ReconfigureEncoder(std::move(callback)); + callback = nullptr; } if (new_send_state) { UpdateSendState(); @@ -2356,7 +2389,7 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( stream_->SetSource(source_, GetDegradationPreference()); } } - return webrtc::RTCError::OK(); + return webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } webrtc::RtpParameters @@ -2413,7 +2446,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::UpdateSendState() { } // This updates what simulcast layers are sending, and possibly starts // or stops the VideoSendStream. - stream_->UpdateActiveSimulcastLayers(active_layers); + stream_->StartPerRtpStream(active_layers); } else { if (stream_ != nullptr) { stream_->Stop(); @@ -2544,11 +2577,13 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( return encoder_config; } -void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder() { +void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder( + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); if (!stream_) { // The webrtc::VideoSendStream `stream_` has not yet been created but other // parameters has changed. 
+ webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); return; } @@ -2563,7 +2598,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::ReconfigureEncoder() { encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings(codec_settings.codec); - stream_->ReconfigureVideoEncoder(encoder_config.Copy()); + stream_->ReconfigureVideoEncoder(encoder_config.Copy(), std::move(callback)); encoder_config.encoder_specific_settings = NULL; @@ -2627,21 +2662,18 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( common_info.content_type = stats.content_type; common_info.aggregated_framerate_sent = stats.encode_frame_rate; common_info.aggregated_huge_frames_sent = stats.huge_frames_sent; + common_info.power_efficient_encoder = stats.power_efficient_encoder; - // If we don't have any substreams, get the remaining metrics from `stats`. - // Otherwise, these values are obtained from `sub_stream` below. + // The normal case is that substreams are present, handled below. But if + // substreams are missing (can happen before negotiated/connected where we + // have no stats yet) a single outbound-rtp is created representing any and + // all layers. 
if (stats.substreams.empty()) { for (uint32_t ssrc : parameters_.config.rtp.ssrcs) { common_info.add_ssrc(ssrc); - auto encoding_it = std::find_if( - rtp_parameters_.encodings.begin(), rtp_parameters_.encodings.end(), - [&ssrc](const webrtc::RtpEncodingParameters& parameters) { - return parameters.ssrc && parameters.ssrc == ssrc; - }); - if (encoding_it != rtp_parameters_.encodings.end()) { - common_info.active = encoding_it->active; - } } + common_info.active = + IsActiveFromEncodings(absl::nullopt, rtp_parameters_.encodings); common_info.framerate_sent = stats.encode_frame_rate; common_info.frames_encoded = stats.frames_encoded; common_info.total_encode_time_ms = stats.total_encode_time_ms; @@ -2652,21 +2684,24 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( return infos; } } + // Merge `stats.substreams`, which may contain additional SSRCs for RTX or + // Flexfec, with media SSRCs. This results in a set of substreams that match + // with the outbound-rtp stats objects. auto outbound_rtp_substreams = MergeInfoAboutOutboundRtpSubstreams(stats.substreams); + // If SVC is used, one stream is configured but multiple encodings exist. This + // is not spec-compliant, but it is how we've implemented SVC so this affects + // how the RTP stream's "active" value is determined. + bool is_svc = (parameters_.encoder_config.number_of_streams == 1 && + rtp_parameters_.encodings.size() > 1); for (const auto& pair : outbound_rtp_substreams) { auto info = common_info; - info.add_ssrc(pair.first); - info.rid = parameters_.config.rtp.GetRidForSsrc(pair.first); - // Search the associated encoding by SSRC. 
- auto encoding_it = std::find_if( - rtp_parameters_.encodings.begin(), rtp_parameters_.encodings.end(), - [&pair](const webrtc::RtpEncodingParameters& parameters) { - return parameters.ssrc && pair.first == *parameters.ssrc; - }); - if (encoding_it != rtp_parameters_.encodings.end()) { - info.active = encoding_it->active; - } + uint32_t ssrc = pair.first; + info.add_ssrc(ssrc); + info.rid = parameters_.config.rtp.GetRidForSsrc(ssrc); + info.active = IsActiveFromEncodings( + !is_svc ? absl::optional(ssrc) : absl::nullopt, + rtp_parameters_.encodings); auto stream_stats = pair.second; RTC_DCHECK_EQ(stream_stats.type, webrtc::VideoSendStream::StreamStats::StreamType::kMedia); @@ -2703,6 +2738,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( info.total_encode_time_ms = stream_stats.total_encode_time_ms; info.total_encoded_bytes_target = stream_stats.total_encoded_bytes_target; info.huge_frames_sent = stream_stats.huge_frames_sent; + info.scalability_mode = stream_stats.scalability_mode; infos.push_back(info); } return infos; @@ -2818,7 +2854,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::RecreateWebRtcStream() { parameters_.encoder_config.encoder_specific_settings = NULL; - // Calls stream_->UpdateActiveSimulcastLayers() to start the VideoSendStream + // Calls stream_->StartPerRtpStream() to start the VideoSendStream // if necessary conditions are met. 
UpdateSendState(); @@ -2829,6 +2865,17 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::RecreateWebRtcStream() { } } +void WebRtcVideoChannel::WebRtcVideoSendStream::GenerateKeyFrame( + const std::vector& rids) { + RTC_DCHECK_RUN_ON(&thread_checker_); + if (stream_ != NULL) { + stream_->GenerateKeyFrame(rids); + } else { + RTC_LOG(LS_WARNING) + << "Absent send stream; ignoring request to generate keyframe."; + } +} + WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream( WebRtcVideoChannel* channel, webrtc::Call* call, @@ -3216,6 +3263,7 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.add_ssrc(config_.rtp.remote_ssrc); webrtc::VideoReceiveStreamInterface::Stats stats = stream_->GetStats(); info.decoder_implementation_name = stats.decoder_implementation_name; + info.power_efficient_decoder = stats.power_efficient_decoder; if (stats.current_payload_type != -1) { info.codec_payload_type = stats.current_payload_type; auto decoder_it = absl::c_find_if(config_.decoders, [&](const auto& d) { @@ -3277,8 +3325,6 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.pause_count = stats.pause_count; info.total_freezes_duration_ms = stats.total_freezes_duration_ms; info.total_pauses_duration_ms = stats.total_pauses_duration_ms; - info.total_frames_duration_ms = stats.total_frames_duration_ms; - info.sum_squared_frame_durations = stats.sum_squared_frame_durations; info.content_type = stats.content_type; @@ -3552,11 +3598,11 @@ void WebRtcVideoChannel::ClearRecordableEncodedFrameCallback(uint32_t ssrc) { } } -void WebRtcVideoChannel::GenerateKeyFrame(uint32_t ssrc) { +void WebRtcVideoChannel::RequestRecvKeyFrame(uint32_t ssrc) { RTC_DCHECK_RUN_ON(&thread_checker_); WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); if (stream) { - stream->GenerateKeyFrame(); + return stream->GenerateKeyFrame(); } else { RTC_LOG(LS_ERROR) << "Absent receive stream; ignoring key frame generation for ssrc " @@ -3564,6 
+3610,20 @@ void WebRtcVideoChannel::GenerateKeyFrame(uint32_t ssrc) { } } +void WebRtcVideoChannel::GenerateSendKeyFrame( + uint32_t ssrc, + const std::vector& rids) { + RTC_DCHECK_RUN_ON(&thread_checker_); + auto it = send_streams_.find(ssrc); + if (it != send_streams_.end()) { + it->second->GenerateKeyFrame(rids); + } else { + RTC_LOG(LS_ERROR) + << "Absent send stream; ignoring key frame generation for ssrc " + << ssrc; + } +} + void WebRtcVideoChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) { diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h index d87a612e72..03732330e5 100644 --- a/media/engine/webrtc_video_engine.h +++ b/media/engine/webrtc_video_engine.h @@ -149,7 +149,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) override; + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) override; webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override; webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; bool GetSendCodec(VideoCodec* send_codec) override; @@ -179,7 +180,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void OnReadyToSend(bool ready) override; void OnNetworkRouteChanged(absl::string_view transport_name, const rtc::NetworkRoute& network_route) override; - void SetInterface(NetworkInterface* iface) override; + void SetInterface(MediaChannelNetworkInterface* iface) override; // E2E Encrypted Video Frame API // Set a frame decryptor to a particular ssrc that will intercept all @@ -248,7 +249,9 @@ class WebRtcVideoChannel : public VideoMediaChannel, std::function callback) override; void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override; - void GenerateKeyFrame(uint32_t ssrc) override; + void 
RequestRecvKeyFrame(uint32_t ssrc) override; + void GenerateSendKeyFrame(uint32_t ssrc, + const std::vector& rids) override; void SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, @@ -361,7 +364,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, ~WebRtcVideoSendStream(); void SetSendParameters(const ChangedSendParameters& send_params); - webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters); + webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback); webrtc::RtpParameters GetRtpParameters() const; void SetFrameEncryptor( @@ -390,6 +394,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void SetEncoderToPacketizerFrameTransformer( rtc::scoped_refptr frame_transformer); + void GenerateKeyFrame(const std::vector& rids); private: // Parameters needed to reconstruct the underlying stream. @@ -419,7 +424,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void RecreateWebRtcStream(); webrtc::VideoEncoderConfig CreateVideoEncoderConfig( const VideoCodec& codec) const; - void ReconfigureEncoder(); + void ReconfigureEncoder(webrtc::SetParametersCallback callback); // Calls Start or Stop according to whether or not `sending_` is true, // and whether or not the encoding in `rtp_parameters_` is active. diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc index e866e85e70..91cd59ab37 100644 --- a/media/engine/webrtc_video_engine_unittest.cc +++ b/media/engine/webrtc_video_engine_unittest.cc @@ -437,7 +437,8 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(kSsrc))); // Add CVO extension. 
const int id = 1; @@ -481,7 +482,8 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) { parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, id)); EXPECT_TRUE(channel->SetSendParameters(parameters)); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(kSsrc))); // Set source. EXPECT_CALL( @@ -498,7 +500,8 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(kSsrc))); // Set capturer. EXPECT_CALL( @@ -541,7 +544,8 @@ TEST_F(WebRtcVideoEngineTest, SetSendFailsBeforeSettingCodecs) { call_.get(), GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(123))); + EXPECT_TRUE( + channel->AsSendChannel()->AddSendStream(StreamParams::CreateLegacy(123))); EXPECT_FALSE(channel->SetSend(true)) << "Channel should not start without codecs."; @@ -555,7 +559,8 @@ TEST_F(WebRtcVideoEngineTest, GetStatsWithoutSendCodecsSetDoesNotCrash) { std::unique_ptr channel(engine_.CreateMediaChannel( call_.get(), GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(123))); + EXPECT_TRUE( + channel->AsSendChannel()->AddSendStream(StreamParams::CreateLegacy(123))); VideoMediaInfo info; channel->GetStats(&info); } @@ -565,10 +570,10 @@ TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - channel->OnReadyToSend(true); + channel->AsSendChannel()->OnReadyToSend(true); - EXPECT_TRUE( - 
channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_EQ(0, encoder_factory_->GetNumCreatedEncoders()); EXPECT_TRUE(channel->SetSend(true)); webrtc::test::FrameForwarder frame_forwarder; @@ -593,7 +598,7 @@ TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { EXPECT_EQ(num_created_encoders, encoder_factory_->GetNumCreatedEncoders()); // Remove stream previously added to free the external encoder instance. - EXPECT_TRUE(channel->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel->AsSendChannel()->RemoveSendStream(kSsrc)); EXPECT_EQ(0u, encoder_factory_->encoders().size()); } @@ -645,8 +650,8 @@ TEST_F(WebRtcVideoEngineTest, CanConstructDecoderForVp9EncoderFactory) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); } #endif // defined(RTC_ENABLE_VP9) @@ -657,8 +662,8 @@ TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, @@ -701,7 +706,7 @@ TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { } // Remove stream previously added to free the external encoder instance. 
- EXPECT_TRUE(channel->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel->AsSendChannel()->RemoveSendStream(kSsrc)); } void WebRtcVideoEngineTest::AssignDefaultAptRtxTypes() { @@ -819,7 +824,8 @@ TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) { std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE(channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + CreateSimStreamParams("cname", ssrcs))); EXPECT_TRUE(channel->SetSend(true)); webrtc::test::FrameForwarder frame_forwarder; @@ -864,8 +870,8 @@ TEST_F(WebRtcVideoEngineTest, ChannelWithH264CanChangeToVp8) { parameters.codecs.push_back(GetEngineCodec("H264")); EXPECT_TRUE(channel->SetSendParameters(parameters)); - EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); // Sending one frame will have allocate the encoder. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -898,7 +904,8 @@ TEST_F(WebRtcVideoEngineTest, std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE(channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + CreateSimStreamParams("cname", ssrcs))); EXPECT_TRUE(channel->SetSend(true)); // Send a fake frame, or else the media engine will configure the simulcast @@ -933,8 +940,8 @@ TEST_F(WebRtcVideoEngineTest, parameters.codecs.push_back(GetEngineCodec("H264")); EXPECT_TRUE(channel->SetSendParameters(parameters)); - EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Send a frame of 720p. This should trigger a "real" encoder initialization. 
webrtc::test::FrameForwarder frame_forwarder; @@ -967,8 +974,8 @@ TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264BehindFieldTrial) { EXPECT_TRUE(channel->SetSendParameters(parameters)); const std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE( - channel->AddSendStream(cricket::CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::CreateSimStreamParams("cname", ssrcs))); // Send a frame of 720p. This should trigger a "real" encoder initialization. webrtc::test::FrameForwarder frame_forwarder; @@ -1086,8 +1093,8 @@ TEST_F(WebRtcVideoEngineTest, RegisterDecodersIfSupported) { std::unique_ptr channel( SetRecvParamsWithSupportedCodecs(parameters.codecs)); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Decoders are not created until they are used. time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); EXPECT_EQ(0u, decoder_factory_->decoders().size()); @@ -1097,7 +1104,7 @@ TEST_F(WebRtcVideoEngineTest, RegisterDecodersIfSupported) { EXPECT_EQ(0, decoder_factory_->GetNumCreatedDecoders()); // Remove stream previously added to free the external decoder instance. - EXPECT_TRUE(channel->RemoveRecvStream(kSsrc)); + EXPECT_TRUE(channel->AsReceiveChannel()->RemoveRecvStream(kSsrc)); EXPECT_EQ(0u, decoder_factory_->decoders().size()); } @@ -1114,8 +1121,8 @@ TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) { std::unique_ptr channel( SetRecvParamsWithSupportedCodecs(codecs)); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Decoders are not created until they are used. 
time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); ASSERT_EQ(0u, decoder_factory_->decoders().size()); @@ -1131,8 +1138,8 @@ TEST_F(WebRtcVideoEngineTest, GetSourcesWithNonExistingSsrc) { std::unique_ptr channel( SetRecvParamsWithSupportedCodecs(parameters.codecs)); - EXPECT_TRUE( - channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); // Call GetSources with |kSsrc + 1| which doesn't exist. std::vector sources = channel->GetSources(kSsrc + 1); @@ -1258,9 +1265,9 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { cricket::VideoSendParameters send_parameters; send_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(send_channel->SetSendParameters(send_parameters)); - send_channel->OnReadyToSend(true); - EXPECT_TRUE( - send_channel->AddSendStream(StreamParams::CreateLegacy(send_ssrc))); + send_channel->AsSendChannel()->OnReadyToSend(true); + EXPECT_TRUE(send_channel->AsSendChannel()->AddSendStream( + StreamParams::CreateLegacy(send_ssrc))); EXPECT_TRUE(send_channel->SetSend(true)); // Set capturer. @@ -1280,15 +1287,15 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { cricket::VideoRecvParameters recv_parameters; recv_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(recv_channel->SetRecvParameters(recv_parameters)); - EXPECT_TRUE(recv_channel->AddRecvStream( + EXPECT_TRUE(recv_channel->AsReceiveChannel()->AddRecvStream( cricket::StreamParams::CreateLegacy(recv_ssrc))); // Remove streams previously added to free the encoder and decoder instance. 
EXPECT_CALL(*encoder_factory, Die()); EXPECT_CALL(*decoder_factory, Die()); EXPECT_CALL(*rate_allocator_factory, Die()); - EXPECT_TRUE(send_channel->RemoveSendStream(send_ssrc)); - EXPECT_TRUE(recv_channel->RemoveRecvStream(recv_ssrc)); + EXPECT_TRUE(send_channel->AsSendChannel()->RemoveSendStream(send_ssrc)); + EXPECT_TRUE(recv_channel->AsReceiveChannel()->RemoveRecvStream(recv_ssrc)); } TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { @@ -1296,12 +1303,12 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { std::unique_ptr fake_call(new FakeCall()); std::unique_ptr channel( SetSendParamsWithAllSupportedCodecs()); - ASSERT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + ASSERT_TRUE(channel->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); cricket::VideoCodec codec = GetEngineCodec("VP8"); cricket::VideoSendParameters parameters; parameters.codecs.push_back(codec); - channel->OnReadyToSend(true); + channel->AsSendChannel()->OnReadyToSend(true); channel->SetSend(true); ASSERT_TRUE(channel->SetSendParameters(parameters)); @@ -1347,7 +1354,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { encoder_factory_->encoders().back()->GetCodecSettings().mode); // Remove stream previously added to free the external encoder instance. 
- EXPECT_TRUE(channel->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel->AsSendChannel()->RemoveSendStream(kSsrc)); EXPECT_EQ(0u, encoder_factory_->encoders().size()); } @@ -1467,7 +1474,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); DeliverKeyFrame(kSsrc); EXPECT_EQ_WAIT(1, renderer_.num_rendered_frames(), kTimeout); - channel_->RemoveRecvStream(kSsrc); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1480,7 +1487,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); EXPECT_EQ_WAIT(1, renderer_.num_rendered_frames(), kTimeout); - channel_->RemoveRecvStream(kSsrc); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1493,7 +1500,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); EXPECT_EQ_WAIT(1, renderer_.num_rendered_frames(), kTimeout); - channel_->RemoveRecvStream(kSsrc); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1508,7 +1515,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); // Expected to not cause function to fire. DeliverKeyFrameAndWait(kSsrc + 1); - channel_->RemoveRecvStream(kSsrc + 1); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc + 1); } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, @@ -1523,7 +1530,7 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); DeliverKeyFrame(kSsrc); // Expected to not cause function to fire. 
DeliverKeyFrameAndWait(kSsrc + 1); - channel_->RemoveRecvStream(kSsrc + 1); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc + 1); } class WebRtcVideoChannelBaseTest : public ::testing::Test { @@ -1554,14 +1561,15 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { static_cast(engine_.CreateMediaChannel( call_.get(), media_config, cricket::VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()))); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_TRUE(channel_.get() != NULL); network_interface_.SetDestination(channel_.get()); channel_->SetInterface(&network_interface_); cricket::VideoRecvParameters parameters; parameters.codecs = engine_.send_codecs(); channel_->SetRecvParameters(parameters); - EXPECT_TRUE(channel_->AddSendStream(DefaultSendStreamParams())); + EXPECT_TRUE( + channel_->AsSendChannel()->AddSendStream(DefaultSendStreamParams())); frame_forwarder_ = std::make_unique(); frame_source_ = std::make_unique( 640, 480, rtc::kNumMicrosecsPerSec / kFramerate); @@ -1573,7 +1581,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { void SetUpSecondStream() { SetUpSecondStreamWithNoRecv(); // Setup recv for second stream. - EXPECT_TRUE(channel_->AddRecvStream( + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc + 2))); // Make the second renderer available for use by a new stream. EXPECT_TRUE(channel_->SetSink(kSsrc + 2, &renderer2_)); @@ -1583,12 +1591,12 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // This is required if you want to test unsignalled recv of video rtp packets. void SetUpSecondStreamWithNoRecv() { // SetUp() already added kSsrc make sure duplicate SSRCs cant be added. 
- EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); - EXPECT_FALSE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel_->AddSendStream( + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrc + 2))); // We dont add recv for the second stream. @@ -1841,8 +1849,10 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) { parameters.conference_mode = true; EXPECT_TRUE(channel_->SetSendParameters(parameters)); EXPECT_TRUE(SetSend(true)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(2))); EXPECT_TRUE(channel_->SetSink(1, &renderer1)); EXPECT_TRUE(channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); @@ -1891,8 +1901,8 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; EXPECT_TRUE(channel_->SetSendParameters(parameters)); - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); EXPECT_TRUE(SetSend(true)); SendFrame(); @@ -1906,11 +1916,11 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { const int kTestHeight = 120; cricket::FakeFrameSource 
frame_source(kTestWidth, kTestHeight, rtc::kNumMicrosecsPerSec / 5); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(5678))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(5678))); EXPECT_TRUE(channel_->SetVideoSend(5678, nullptr, &frame_forwarder)); - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(5678))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(5678))); EXPECT_TRUE(channel_->SetSink(5678, &renderer2)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, kTestWidth, kTestHeight, @@ -1978,10 +1988,10 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrc) { // Test that we can set the SSRC even after codecs are set. TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrcAfterSetCodecs) { // Remove stream added in Setup. - EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); EXPECT_TRUE(SetDefaultCodec()); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(999))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(999))); EXPECT_TRUE(channel_->SetVideoSend(999u, nullptr, frame_forwarder_.get())); EXPECT_TRUE(SetSend(true)); EXPECT_TRUE(WaitAndSendFrame(0)); @@ -2027,11 +2037,11 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) { EXPECT_EQ(kSsrc, header.Ssrc()); // Remove the send stream that was added during Setup. 
- EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); int rtp_packets = NumRtpPackets(); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(789u))); EXPECT_TRUE(channel_->SetVideoSend(789u, nullptr, frame_forwarder_.get())); EXPECT_EQ(rtp_packets, NumRtpPackets()); // Wait 30ms to guarantee the engine does not drop the frame. @@ -2052,8 +2062,10 @@ TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) { parameters.conference_mode = true; EXPECT_TRUE(channel_->SetSendParameters(parameters)); EXPECT_TRUE(SetSend(true)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(2))); EXPECT_TRUE(channel_->SetSink(1, &renderer1)); EXPECT_TRUE(channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); @@ -2073,8 +2085,8 @@ TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) { EXPECT_EQ(kVideoHeight, renderer1.height()); EXPECT_EQ(kVideoWidth, renderer2.width()); EXPECT_EQ(kVideoHeight, renderer2.height()); - EXPECT_TRUE(channel_->RemoveRecvStream(2)); - EXPECT_TRUE(channel_->RemoveRecvStream(1)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(2)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(1)); } // Tests that we can add and remove capturers and frames are sent out properly @@ -2172,8 +2184,8 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // WebRTC implementation will drop frames if pushed to quickly. Wait the // interval time to avoid that. // Set up the stream associated with the engine. 
- EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); cricket::VideoFormat capture_format( kVideoWidth, kVideoHeight, @@ -2181,9 +2193,11 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Set up additional stream 1. cricket::FakeVideoRenderer renderer1; EXPECT_FALSE(channel_->SetSink(1, &renderer1)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); EXPECT_TRUE(channel_->SetSink(1, &renderer1)); - EXPECT_TRUE(channel_->AddSendStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(1))); webrtc::test::FrameForwarder frame_forwarder1; cricket::FakeFrameSource frame_source(kVideoWidth, kVideoHeight, @@ -2192,9 +2206,11 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Set up additional stream 2. cricket::FakeVideoRenderer renderer2; EXPECT_FALSE(channel_->SetSink(2, &renderer2)); - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(2))); EXPECT_TRUE(channel_->SetSink(2, &renderer2)); - EXPECT_TRUE(channel_->AddSendStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(2))); webrtc::test::FrameForwarder frame_forwarder2; // State for all the streams. @@ -2230,29 +2246,31 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Tests empty StreamParams is rejected. TEST_F(WebRtcVideoChannelBaseTest, RejectEmptyStreamParams) { // Remove the send stream that was added during Setup. 
- EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); cricket::StreamParams empty; - EXPECT_FALSE(channel_->AddSendStream(empty)); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u))); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(empty)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(789u))); } // Test that multiple send streams can be created and deleted properly. TEST_F(WebRtcVideoChannelBaseTest, MultipleSendStreams) { // Remove stream added in Setup. I.e. remove stream corresponding to default // channel. - EXPECT_TRUE(channel_->RemoveSendStream(kSsrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrc)); const unsigned int kSsrcsSize = sizeof(kSsrcs4) / sizeof(kSsrcs4[0]); for (unsigned int i = 0; i < kSsrcsSize; ++i) { - EXPECT_TRUE(channel_->AddSendStream( + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrcs4[i]))); } // Delete one of the non default channel streams, let the destructor delete // the remaining ones. - EXPECT_TRUE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); + EXPECT_TRUE( + channel_->AsSendChannel()->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); // Stream should already be deleted. 
- EXPECT_FALSE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); + EXPECT_FALSE( + channel_->AsSendChannel()->RemoveSendStream(kSsrcs4[kSsrcsSize - 1])); } TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8Vga) { @@ -2366,7 +2384,8 @@ TEST_F(WebRtcVideoChannelBaseTest, SendCodecIsMovedToFrontInRtpParameters) { EXPECT_TRUE(channel_->SetSendParameters(parameters)); channel_->SetVideoCodecSwitchingEnabled(true); - auto send_codecs = channel_->GetRtpSendParameters(kSsrc).codecs; + auto send_codecs = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc).codecs; ASSERT_EQ(send_codecs.size(), 2u); EXPECT_THAT("VP9", send_codecs[0].name); @@ -2375,7 +2394,7 @@ TEST_F(WebRtcVideoChannelBaseTest, SendCodecIsMovedToFrontInRtpParameters) { channel_->RequestEncoderFallback(); rtc::Thread::Current()->ProcessMessages(30); - send_codecs = channel_->GetRtpSendParameters(kSsrc).codecs; + send_codecs = channel_->AsSendChannel()->GetRtpSendParameters(kSsrc).codecs; ASSERT_EQ(send_codecs.size(), 2u); EXPECT_THAT("VP8", send_codecs[0].name); } @@ -2400,7 +2419,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); last_ssrc_ = 123; send_parameters_.codecs = engine_.send_codecs(); recv_parameters_.codecs = engine_.recv_codecs(); @@ -2434,7 +2453,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { // the unsignalled receive stream cooldown is no longer in effect. 
void ReceivePacketAndAdvanceTime(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { - channel_->OnPacketReceived(packet, packet_time_us); + channel_->AsReceiveChannel()->OnPacketReceived(packet, packet_time_us); rtc::Thread::Current()->ProcessMessages(0); time_controller_.AdvanceTime( webrtc::TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs)); @@ -2447,7 +2466,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { FakeVideoSendStream* AddSendStream(const StreamParams& sp) { size_t num_streams = fake_call_->GetVideoSendStreams().size(); - EXPECT_TRUE(channel_->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); std::vector streams = fake_call_->GetVideoSendStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -2464,7 +2483,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { FakeVideoReceiveStream* AddRecvStream(const StreamParams& sp) { size_t num_streams = fake_call_->GetVideoReceiveStreams().size(); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); std::vector streams = fake_call_->GetVideoReceiveStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -2509,8 +2528,8 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestExtmapAllowMixedCaller(bool extmap_allow_mixed) { // For a caller, the answer will be applied in set remote description // where SetSendParameters() is called. 
- EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); send_parameters_.extmap_allow_mixed = extmap_allow_mixed; EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); const webrtc::VideoSendStream::Config& config = @@ -2521,9 +2540,9 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestExtmapAllowMixedCallee(bool extmap_allow_mixed) { // For a callee, the answer will be applied in set local description // where SetExtmapAllowMixed() and AddSendStream() are called. - channel_->SetExtmapAllowMixed(extmap_allow_mixed); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + channel_->AsSendChannel()->SetExtmapAllowMixed(extmap_allow_mixed); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc))); const webrtc::VideoSendStream::Config& config = fake_call_->GetVideoSendStreams()[0]->GetConfig(); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); @@ -2702,12 +2721,14 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { limited_send_params.max_bandwidth_bps = global_max; EXPECT_TRUE(channel_->SetSendParameters(limited_send_params)); webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = stream_max; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Read back the parameteres and verify they have the correct value - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(stream_max, 
parameters.encodings[0].max_bitrate_bps); // Verify that the new value propagated down to the encoder @@ -2743,7 +2764,7 @@ TEST_F(WebRtcVideoChannelTest, SetsSyncGroupFromSyncLabel) { cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(kVideoSsrc); sp.set_stream_ids({kSyncLabel}); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); EXPECT_EQ(kSyncLabel, @@ -3037,7 +3058,8 @@ TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsDuplicateIds) { } TEST_F(WebRtcVideoChannelTest, AddRecvStreamOnlyUsesOneReceiveStream) { - EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(1))); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); } @@ -3318,7 +3340,7 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); channel_->SetSendParameters(send_parameters_); @@ -3329,7 +3351,7 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); channel_->SetSendParameters(send_parameters_); @@ -3498,13 +3520,15 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { EXPECT_TRUE(vp9_settings.automaticResizeOn); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_THAT( rtp_parameters.encodings, 
ElementsAre(Field(&webrtc::RtpEncodingParameters::scalability_mode, absl::nullopt))); rtp_parameters.encodings[0].scalability_mode = "L2T1"; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; EXPECT_TRUE(vp9_settings.denoisingOn); @@ -3512,12 +3536,14 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { EXPECT_FALSE(vp9_settings.automaticResizeOn) << "Automatic resize off for multiple spatial layers."; - rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + rtp_parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_THAT(rtp_parameters.encodings, ElementsAre(Field( &webrtc::RtpEncodingParameters::scalability_mode, "L2T1"))); rtp_parameters.encodings[0].scalability_mode = "L1T1"; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; EXPECT_TRUE(vp9_settings.denoisingOn); @@ -3619,14 +3645,17 @@ TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { FakeVideoSendStream* stream = AddSendStream(CreateSimStreamParams("cname", ssrcs)); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(ssrcs[0]); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(ssrcs[0]); ASSERT_EQ(kNumSpatialLayers, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); ASSERT_TRUE(parameters.encodings[1].active); ASSERT_TRUE(parameters.encodings[2].active); // Invert value to verify copying. 
parameters.encodings[1].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(ssrcs[0], parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(ssrcs[0], parameters) + .ok()); webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); @@ -3859,7 +3888,7 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) { channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); ASSERT_TRUE(channel_->SetSendParameters(parameters)); AddSendStream(); @@ -3909,7 +3938,7 @@ void WebRtcVideoChannelTest::TestDegradationPreference( channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_TRUE(channel_->SetSendParameters(parameters)); @@ -3942,7 +3971,7 @@ void WebRtcVideoChannelTest::TestCpuAdaptation(bool enable_overuse, channel_.reset(engine_.CreateMediaChannel( fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_TRUE(channel_->SetSendParameters(parameters)); @@ -4673,10 +4702,13 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(300000, video_send_stream->GetVideoStreams()[0].max_bitrate_bps); // The RtpParameter max bitrate overrides the codec's. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = 500000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1u, video_send_stream->GetVideoStreams().size()); EXPECT_EQ(parameters.encodings[0].max_bitrate_bps, video_send_stream->GetVideoStreams()[0].max_bitrate_bps); @@ -4693,16 +4725,21 @@ TEST_F(WebRtcVideoChannelTest, stream->GetVideoStreams()[0].max_bitrate_bps); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1u, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = 99999 - 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(parameters.encodings[0].max_bitrate_bps, stream->GetVideoStreams()[0].max_bitrate_bps); parameters.encodings[0].max_bitrate_bps = 99999 + 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(send_parameters_.max_bandwidth_bps, stream->GetVideoStreams()[0].max_bitrate_bps); } @@ -5288,16 +5325,21 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) { EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); // Create a send stream to configure - EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); - parameters = channel->GetRtpSendParameters(kSsrc); + EXPECT_TRUE(channel->AsSendChannel()->AddSendStream( + 
StreamParams::CreateLegacy(kSsrc))); + parameters = channel->AsSendChannel()->GetRtpSendParameters(kSsrc); ASSERT_FALSE(parameters.encodings.empty()); // Various priorities map to various dscp values. parameters.encodings[0].network_priority = webrtc::Priority::kHigh; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrc, parameters).ok()); + ASSERT_TRUE(channel->AsSendChannel() + ->SetRtpSendParameters(kSsrc, parameters, nullptr) + .ok()); EXPECT_EQ(rtc::DSCP_AF41, network_interface->dscp()); parameters.encodings[0].network_priority = webrtc::Priority::kVeryLow; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrc, parameters).ok()); + ASSERT_TRUE(channel->AsSendChannel() + ->SetRtpSendParameters(kSsrc, parameters, nullptr) + .ok()); EXPECT_EQ(rtc::DSCP_CS1, network_interface->dscp()); // Packets should also self-identify their dscp in PacketOptions. @@ -5326,7 +5368,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { FakeVideoSendStream* stream1 = AddSendStream(); EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_FALSE(rtp_parameters.rtcp.reduced_size); // Now enable reduced size mode. @@ -5334,7 +5376,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); stream1 = fake_call_->GetVideoSendStreams()[0]; EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); - rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + rtp_parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_TRUE(rtp_parameters.rtcp.reduced_size); // Create a new stream and ensure it picks up the reduced size mode. 
@@ -5368,13 +5410,13 @@ TEST_F(WebRtcVideoChannelTest, OnReadyToSendSignalsNetworkState) { EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(webrtc::MediaType::AUDIO)); - channel_->OnReadyToSend(false); + channel_->AsSendChannel()->OnReadyToSend(false); EXPECT_EQ(webrtc::kNetworkDown, fake_call_->GetNetworkState(webrtc::MediaType::VIDEO)); EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(webrtc::MediaType::AUDIO)); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(webrtc::MediaType::VIDEO)); EXPECT_EQ(webrtc::kNetworkUp, @@ -5405,6 +5447,17 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsEncoderImplementationName) { info.senders[0].encoder_implementation_name); } +TEST_F(WebRtcVideoChannelTest, GetStatsReportsPowerEfficientEncoder) { + FakeVideoSendStream* stream = AddSendStream(); + webrtc::VideoSendStream::Stats stats; + stats.power_efficient_encoder = true; + stream->SetStats(stats); + + cricket::VideoMediaInfo info; + ASSERT_TRUE(channel_->GetStats(&info)); + EXPECT_TRUE(info.senders[0].power_efficient_encoder); +} + TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuOveruseMetrics) { FakeVideoSendStream* stream = AddSendStream(); webrtc::VideoSendStream::Stats stats; @@ -5813,6 +5866,93 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) { EXPECT_EQ(sender.rid, absl::nullopt); } +TEST_F(WebRtcVideoChannelTest, + OutboundRtpIsActiveComesFromMatchingEncodingInSimulcast) { + constexpr uint32_t kSsrc1 = 123u; + constexpr uint32_t kSsrc2 = 456u; + + // Create simulcast stream from both SSRCs. + // `kSsrc1` is the "main" ssrc used for getting parameters. 
+ FakeVideoSendStream* stream = + AddSendStream(cricket::CreateSimStreamParams("cname", {kSsrc1, kSsrc2})); + + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc1); + ASSERT_EQ(2u, parameters.encodings.size()); + parameters.encodings[0].active = false; + parameters.encodings[1].active = true; + channel_->AsSendChannel()->SetRtpSendParameters(kSsrc1, parameters); + + // Fill in dummy stats. + auto stats = GetInitialisedStats(); + stats.substreams[kSsrc1]; + stats.substreams[kSsrc2]; + stream->SetStats(stats); + + // GetStats() and ensure `active` matches `encodings` for each SSRC. + cricket::VideoMediaInfo video_media_info; + ASSERT_TRUE(channel_->GetStats(&video_media_info)); + ASSERT_EQ(video_media_info.senders.size(), 2u); + ASSERT_TRUE(video_media_info.senders[0].active.has_value()); + EXPECT_FALSE(video_media_info.senders[0].active.value()); + ASSERT_TRUE(video_media_info.senders[1].active.has_value()); + EXPECT_TRUE(video_media_info.senders[1].active.value()); +} + +TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { + cricket::VideoSendParameters send_parameters; + send_parameters.codecs.push_back(GetEngineCodec("VP9")); + ASSERT_TRUE(channel_->SetSendParameters(send_parameters)); + + constexpr uint32_t kSsrc1 = 123u; + constexpr uint32_t kSsrc2 = 456u; + constexpr uint32_t kSsrc3 = 789u; + + // Configuring SVC is done the same way that simulcast is configured, the only + // difference is that the VP9 codec is used. This triggers special hacks that + // we depend on because we don't have a proper SVC API yet. + FakeVideoSendStream* stream = AddSendStream( + cricket::CreateSimStreamParams("cname", {kSsrc1, kSsrc2, kSsrc3})); + // Expect that we got SVC. 
+ EXPECT_EQ(stream->GetEncoderConfig().number_of_streams, 1u); + webrtc::VideoCodecVP9 vp9_settings; + ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)); + EXPECT_EQ(vp9_settings.numberOfSpatialLayers, 3u); + + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc1); + ASSERT_EQ(3u, parameters.encodings.size()); + parameters.encodings[0].active = false; + parameters.encodings[1].active = true; + parameters.encodings[2].active = false; + channel_->AsSendChannel()->SetRtpSendParameters(kSsrc1, parameters); + + // Fill in dummy stats. + auto stats = GetInitialisedStats(); + stats.substreams[kSsrc1]; + stream->SetStats(stats); + + // GetStats() and ensure `active` is true if ANY encoding is active. + cricket::VideoMediaInfo video_media_info; + ASSERT_TRUE(channel_->GetStats(&video_media_info)); + ASSERT_EQ(video_media_info.senders.size(), 1u); + // Middle layer is active. + ASSERT_TRUE(video_media_info.senders[0].active.has_value()); + EXPECT_TRUE(video_media_info.senders[0].active.value()); + + parameters = channel_->AsSendChannel()->GetRtpSendParameters(kSsrc1); + ASSERT_EQ(3u, parameters.encodings.size()); + parameters.encodings[0].active = false; + parameters.encodings[1].active = false; + parameters.encodings[2].active = false; + channel_->AsSendChannel()->SetRtpSendParameters(kSsrc1, parameters); + ASSERT_TRUE(channel_->GetStats(&video_media_info)); + ASSERT_EQ(video_media_info.senders.size(), 1u); + // No layer is active. 
+ ASSERT_TRUE(video_media_info.senders[0].active.has_value()); + EXPECT_FALSE(video_media_info.senders[0].active.value()); +} + TEST_F(WebRtcVideoChannelTest, MediaSubstreamMissingProducesEmpyStats) { FakeVideoSendStream* stream = AddSendStream(); @@ -6152,6 +6292,7 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) { stats.total_decode_time = webrtc::TimeDelta::Millis(16); stats.total_assembly_time = webrtc::TimeDelta::Millis(4); stats.frames_assembled_from_multiple_packets = 2; + stats.power_efficient_decoder = true; stream->SetStats(stats); cricket::VideoMediaInfo info; @@ -6183,6 +6324,7 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) { EXPECT_EQ(stats.total_assembly_time, info.receivers[0].total_assembly_time); EXPECT_EQ(stats.frames_assembled_from_multiple_packets, info.receivers[0].frames_assembled_from_multiple_packets); + EXPECT_TRUE(info.receivers[0].power_efficient_decoder); } TEST_F(WebRtcVideoChannelTest, @@ -6294,7 +6436,7 @@ TEST_F(WebRtcVideoChannelTest, DefaultReceiveStreamReconfiguresToUseRtx) { EXPECT_EQ(0u, recv_stream->GetConfig().rtp.rtx_ssrc) << "Default receive stream should not have configured RTX"; - EXPECT_TRUE(channel_->AddRecvStream( + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs))); ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()) << "AddRecvStream should have reconfigured, not added a new receiver."; @@ -6319,8 +6461,8 @@ TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithMissingSsrcsForRtx) { cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); sp.ssrcs = ssrcs; // Without RTXs, this is the important part. 
- EXPECT_FALSE(channel_->AddSendStream(sp)); - EXPECT_FALSE(channel_->AddRecvStream(sp)); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_FALSE(channel_->AsReceiveChannel()->AddRecvStream(sp)); } TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingRtxSsrcs) { @@ -6332,20 +6474,20 @@ TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingRtxSsrcs) { StreamParams sp = cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // The RTX SSRC is already used in previous streams, using it should fail. sp = cricket::StreamParams::CreateLegacy(rtx_ssrcs[0]); - EXPECT_FALSE(channel_->AddSendStream(sp)); - EXPECT_FALSE(channel_->AddRecvStream(sp)); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_FALSE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // After removing the original stream this should be fine to add (makes sure // that RTX ssrcs are not forever taken). 
- EXPECT_TRUE(channel_->RemoveSendStream(ssrcs[0])); - EXPECT_TRUE(channel_->RemoveRecvStream(ssrcs[0])); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(ssrcs[0])); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrcs[0])); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); } TEST_F(WebRtcVideoChannelTest, @@ -6357,21 +6499,23 @@ TEST_F(WebRtcVideoChannelTest, StreamParams sp = cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kFirstStreamSsrcs)); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // One of the SSRCs is already used in previous streams, using it should fail. sp = cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kOverlappingStreamSsrcs)); - EXPECT_FALSE(channel_->AddSendStream(sp)); - EXPECT_FALSE(channel_->AddRecvStream(sp)); + EXPECT_FALSE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_FALSE(channel_->AsReceiveChannel()->AddRecvStream(sp)); // After removing the original stream this should be fine to add (makes sure // that RTX ssrcs are not forever taken). 
- EXPECT_TRUE(channel_->RemoveSendStream(kFirstStreamSsrcs[0])); - EXPECT_TRUE(channel_->RemoveRecvStream(kFirstStreamSsrcs[0])); - EXPECT_TRUE(channel_->AddSendStream(sp)); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE( + channel_->AsSendChannel()->RemoveSendStream(kFirstStreamSsrcs[0])); + EXPECT_TRUE( + channel_->AsReceiveChannel()->RemoveRecvStream(kFirstStreamSsrcs[0])); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); } TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { @@ -6383,14 +6527,14 @@ TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { StreamParams sender_sp = cricket::CreateSimWithRtxStreamParams( "cname", MAKE_VECTOR(kSenderSsrcs), MAKE_VECTOR(kSenderRtxSsrcs)); - EXPECT_TRUE(channel_->AddSendStream(sender_sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sender_sp)); static const uint32_t kReceiverSsrcs[] = {3}; static const uint32_t kReceiverRtxSsrcs[] = {2}; StreamParams receiver_sp = cricket::CreateSimWithRtxStreamParams( "cname", MAKE_VECTOR(kReceiverSsrcs), MAKE_VECTOR(kReceiverRtxSsrcs)); - EXPECT_TRUE(channel_->AddRecvStream(receiver_sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(receiver_sp)); cricket::VideoMediaInfo info; ASSERT_TRUE(channel_->GetStats(&info)); @@ -6433,9 +6577,9 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) { const char kSyncLabel[] = "sync_label"; cricket::StreamParams unsignaled_stream; unsignaled_stream.set_stream_ids({kSyncLabel}); - ASSERT_TRUE(channel_->AddRecvStream(unsignaled_stream)); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + ASSERT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(unsignaled_stream)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // The stream shouldn't have 
been created at this point because it doesn't // have any SSRCs. @@ -6453,8 +6597,8 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) { // Reset the unsignaled stream to clear the cache. This deletes the receive // stream. - channel_->ResetUnsignaledRecvStream(); - channel_->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(0u, fake_call_->GetVideoReceiveStreams().size()); // Until the demuxer criteria has been updated, we ignore in-flight ssrcs of @@ -6465,7 +6609,7 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) { // After the demuxer criteria has been updated, we should proceed to create // unsignalled receive streams. This time when a default video receive stream // is created it won't have a sync_group. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); ReceivePacketAndAdvanceTime(packet.Buffer(), /* packet_time_us */ -1); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); EXPECT_TRUE( @@ -6489,9 +6633,9 @@ TEST_F(WebRtcVideoChannelTest, kIncomingUnsignalledSsrc); // Stream with another SSRC gets signaled. - channel_->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); constexpr uint32_t kIncomingSignalledSsrc = kIncomingUnsignalledSsrc + 1; - ASSERT_TRUE(channel_->AddRecvStream( + ASSERT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( cricket::StreamParams::CreateLegacy(kIncomingSignalledSsrc))); // New receiver is for the signaled stream. @@ -6506,9 +6650,10 @@ TEST_F(WebRtcVideoChannelTest, const uint32_t kSsrc2 = 2; // Starting point: receiving kSsrc1. 
- EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc1))); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc1))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); @@ -6519,7 +6664,7 @@ TEST_F(WebRtcVideoChannelTest, // Emulate a second m= section being created by updating the demuxer criteria // without adding any streams. - channel_->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); // Emulate there being in-flight packets for kSsrc1 and kSsrc2 arriving before // the demuxer is updated. @@ -6545,7 +6690,7 @@ TEST_F(WebRtcVideoChannelTest, // Signal that the demuxer update is complete. Because there are no more // pending demuxer updates, receiving unknown ssrcs (kSsrc2) should again // result in unsignalled receive streams being created. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // Receive packets for kSsrc1 and kSsrc2 again. @@ -6575,10 +6720,12 @@ TEST_F(WebRtcVideoChannelTest, const uint32_t kSsrc2 = 2; // Starting point: receiving kSsrc1 and kSsrc2. 
- EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc1))); - EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc2))); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc1))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc2))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 2u); EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc1), 0u); @@ -6586,8 +6733,8 @@ TEST_F(WebRtcVideoChannelTest, // Remove kSsrc1, signal that a demuxer criteria update is pending, but not // completed yet. - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc1)); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); // We only have a receiver for kSsrc2 now. EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); @@ -6615,7 +6762,7 @@ TEST_F(WebRtcVideoChannelTest, // Signal that the demuxer update is complete. This means we should stop // ignorning kSsrc1. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // Receive packets for kSsrc1 and kSsrc2 again. @@ -6643,20 +6790,22 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { const uint32_t kSsrc = 1; // Starting point: receiving kSsrc. 
- EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); - channel_->OnDemuxerCriteriaUpdatePending(); - channel_->OnDemuxerCriteriaUpdateComplete(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); ASSERT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); // Remove kSsrc... - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc)); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 0u); // And then add it back again, before the demuxer knows about the new // criteria! - EXPECT_TRUE(channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + StreamParams::CreateLegacy(kSsrc))); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); // In-flight packets should arrive because the stream was recreated, even @@ -6669,7 +6818,7 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 1u); // Signal that the demuxer knows about the first update: the removal. 
- channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // This still should not prevent in-flight packets from arriving because we @@ -6682,8 +6831,8 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u); // Remove the kSsrc again while previous demuxer updates are still pending. - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc)); - channel_->OnDemuxerCriteriaUpdatePending(); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc)); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdatePending(); EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 0u); // Now the packet should be dropped and not create an unsignalled receive @@ -6697,7 +6846,7 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u); // Signal that the demuxer knows about the second update: adding it back. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // The packets should continue to be dropped because removal happened after @@ -6711,7 +6860,7 @@ TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(kSsrc), 2u); // Signal that the demuxer knows about the last update: the second removal. - channel_->OnDemuxerCriteriaUpdateComplete(); + channel_->AsReceiveChannel()->OnDemuxerCriteriaUpdateComplete(); rtc::Thread::Current()->ProcessMessages(0); // If packets still arrive after the demuxer knows about the latest removal we @@ -6734,7 +6883,8 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Receive a packet for kSsrc1. 
RtpPacket packet; packet.SetSsrc(kSsrc1); - channel_->OnPacketReceived(packet.Buffer(), /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet.Buffer(), + /* packet_time_us */ -1); } rtc::Thread::Current()->ProcessMessages(0); time_controller_.AdvanceTime( @@ -6749,7 +6899,8 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Receive a packet for kSsrc2. RtpPacket packet; packet.SetSsrc(kSsrc2); - channel_->OnPacketReceived(packet.Buffer(), /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet.Buffer(), + /* packet_time_us */ -1); } rtc::Thread::Current()->ProcessMessages(0); @@ -6766,7 +6917,8 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Receive a packet for kSsrc2. RtpPacket packet; packet.SetSsrc(kSsrc2); - channel_->OnPacketReceived(packet.Buffer(), /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet.Buffer(), + /* packet_time_us */ -1); } rtc::Thread::Current()->ProcessMessages(0); @@ -6781,17 +6933,22 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // Test BaseMinimumPlayoutDelayMs on receive streams. TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMs) { // Test that set won't work for non-existing receive streams. - EXPECT_FALSE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc + 2, 200)); + EXPECT_FALSE(channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs( + kSsrc + 2, 200)); // Test that get won't work for non-existing receive streams. - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrc + 2)); + EXPECT_FALSE( + channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs(kSsrc + 2)); EXPECT_TRUE(AddRecvStream()); // Test that set works for the existing receive stream. 
- EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(last_ssrc_, 200)); + EXPECT_TRUE(channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs( + last_ssrc_, 200)); auto* recv_stream = fake_call_->GetVideoReceiveStream(last_ssrc_); EXPECT_TRUE(recv_stream); EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 200); - EXPECT_EQ(channel_->GetBaseMinimumPlayoutDelayMs(last_ssrc_).value_or(0), + EXPECT_EQ(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(last_ssrc_) + .value_or(0), 200); } @@ -6801,8 +6958,12 @@ TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMsUnsignaledRecvStream) { const FakeVideoReceiveStream* recv_stream; // Set default stream with SSRC 0 - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(0, 200)); - EXPECT_EQ(200, channel_->GetBaseMinimumPlayoutDelayMs(0).value_or(0)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(0, 200)); + EXPECT_EQ( + 200, + channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs(0).value_or( + 0)); // Spawn an unsignaled stream by sending a packet, it should inherit // default delay 200. @@ -6812,14 +6973,20 @@ TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMsUnsignaledRecvStream) { recv_stream = fake_call_->GetVideoReceiveStream(kIncomingUnsignalledSsrc); EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 200); - delay_ms = channel_->GetBaseMinimumPlayoutDelayMs(kIncomingUnsignalledSsrc); + delay_ms = channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs( + kIncomingUnsignalledSsrc); EXPECT_EQ(200, delay_ms.value_or(0)); // Check that now if we change delay for SSRC 0 it will change delay for the // default receiving stream as well. 
- EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(0, 300)); - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(0).value_or(0)); - delay_ms = channel_->GetBaseMinimumPlayoutDelayMs(kIncomingUnsignalledSsrc); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(0, 300)); + EXPECT_EQ( + 300, + channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs(0).value_or( + 0)); + delay_ms = channel_->AsReceiveChannel()->GetBaseMinimumPlayoutDelayMs( + kIncomingUnsignalledSsrc); EXPECT_EQ(300, delay_ms.value_or(0)); recv_stream = fake_call_->GetVideoReceiveStream(kIncomingUnsignalledSsrc); EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 300); @@ -7041,8 +7208,8 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc); // Signal the SSRC. - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrcs3[0]))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrcs3[0]))); ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); recv_stream0 = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc); @@ -7087,22 +7254,26 @@ TEST_F(WebRtcVideoChannelTest, CanSetMaxBitrateForExistingStream) { TEST_F(WebRtcVideoChannelTest, CannotSetMaxBitrateForNonexistentStream) { webrtc::RtpParameters nonexistent_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(0u, nonexistent_parameters.encodings.size()); nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE( - channel_->SetRtpSendParameters(last_ssrc_, nonexistent_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, nonexistent_parameters) + .ok()); } TEST_F(WebRtcVideoChannelTest, SetLowMaxBitrateOverwritesVideoStreamMinBitrate) { FakeVideoSendStream* stream = AddSendStream(); - 
webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].max_bitrate_bps.has_value()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Note that this is testing the behavior of the FakeVideoSendStream, which // also calls to CreateEncoderStreams to get the VideoStreams, so essentially @@ -7114,10 +7285,12 @@ TEST_F(WebRtcVideoChannelTest, // Set a low max bitrate & check that VideoStream.min_bitrate_bps is limited // by this amount. - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); int low_max_bitrate_bps = webrtc::kDefaultMinVideoBitrateBps - 1000; parameters.encodings[0].max_bitrate_bps = low_max_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(low_max_bitrate_bps, stream->GetVideoStreams()[0].min_bitrate_bps); @@ -7136,10 +7309,13 @@ TEST_F(WebRtcVideoChannelTest, int high_min_bitrate_bps = stream->GetVideoStreams()[0].max_bitrate_bps + 1; // Set a high min bitrate and check that max_bitrate_bps is adjusted up. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = high_min_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(high_min_bitrate_bps, stream->GetVideoStreams()[0].min_bitrate_bps); @@ -7160,10 +7336,13 @@ TEST_F(WebRtcVideoChannelTest, // Set min bitrate above global max bitrate and check that min_bitrate_bps is // adjusted down. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 99999 + 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(send_parameters_.max_bandwidth_bps, stream->GetVideoStreams()[0].min_bitrate_bps); @@ -7174,10 +7353,13 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].max_framerate.has_value()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Note that this is testing 
the behavior of the FakeVideoSendStream, which // also calls to CreateEncoderStreams to get the VideoStreams, so essentially @@ -7189,9 +7371,11 @@ TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { // Set max framerate and check that VideoStream.max_framerate is set. const int kNewMaxFramerate = kDefaultVideoMaxFramerate - 1; - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); parameters.encodings[0].max_framerate = kNewMaxFramerate; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(kNewMaxFramerate, stream->GetVideoStreams()[0].max_framerate); @@ -7200,10 +7384,13 @@ TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].num_temporal_layers.has_value()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Note that this is testing the behavior of the FakeVideoSendStream, which // also calls to CreateEncoderStreams to get the VideoStreams, so essentially @@ -7213,9 +7400,11 @@ TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { EXPECT_FALSE(stream->GetVideoStreams()[0].num_temporal_layers.has_value()); // Set temporal layers and check that VideoStream.num_temporal_layers is set. 
- parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); parameters.encodings[0].num_temporal_layers = 2; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(2UL, stream->GetVideoStreams()[0].num_temporal_layers); @@ -7224,13 +7413,18 @@ TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { TEST_F(WebRtcVideoChannelTest, CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); // Two or more encodings should result in failure. parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Zero encodings should also fail. parameters.encodings.clear(); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } TEST_F(WebRtcVideoChannelTest, @@ -7239,44 +7433,58 @@ TEST_F(WebRtcVideoChannelTest, StreamParams sp = CreateSimStreamParams("cname", ssrcs); AddSendStream(sp); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); // Additional encodings should result in failure. 
parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Zero encodings should also fail. parameters.encodings.clear(); - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } // Changing the SSRC through RtpParameters is not allowed. TEST_F(WebRtcVideoChannelTest, CannotSetSsrcInRtpSendParameters) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); parameters.encodings[0].ssrc = 0xdeadbeef; - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } // Tests that when RTCRtpEncodingParameters.bitrate_priority gets set to // a value <= 0, setting the parameters returns false. 
TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersInvalidBitratePriority) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); parameters.encodings[0].bitrate_priority = 0; - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); parameters.encodings[0].bitrate_priority = -2; - EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); } // Tests when the the RTCRtpEncodingParameters.bitrate_priority gets set // properly on the VideoChannel and propogates down to the video encoder. TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPriorityOneStream) { AddSendStream(); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); @@ -7284,11 +7492,13 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPriorityOneStream) { // Change the value and set it on the VideoChannel. double new_bitrate_priority = 2.0; parameters.encodings[0].bitrate_priority = new_bitrate_priority; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the encoding parameters bitrate_priority is set for the // VideoChannel. 
- parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(new_bitrate_priority, parameters.encodings[0].bitrate_priority); @@ -7332,17 +7542,19 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPrioritySimulcastStreams) { // Get and set the rtp encoding parameters. webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(primary_ssrc); + channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); // Change the value and set it on the VideoChannel. double new_bitrate_priority = 2.0; parameters.encodings[0].bitrate_priority = new_bitrate_priority; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Verify that the encoding parameters priority is set on the VideoChannel. - parameters = channel_->GetRtpSendParameters(primary_ssrc); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(new_bitrate_priority, parameters.encodings[0].bitrate_priority); @@ -7388,12 +7600,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in natural order (smallest to largest). 
{ - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 4.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 1.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7410,12 +7624,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in reverse natural order (largest to smallest). { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7432,12 +7648,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in mixed order. 
{ - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 10.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7454,12 +7672,14 @@ TEST_F(WebRtcVideoChannelTest, // Try with a missing scale setting, defaults to 1.0 if any other is set. { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by.reset(); rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7497,13 +7717,14 @@ TEST_F(WebRtcVideoChannelTest, channel_->SetSend(true); // Set `scale_resolution_down_by`'s. 
- auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(rtp_parameters.encodings.size(), 3u); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - const auto result = - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + const auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); // Use a capture resolution whose width and height are not divisible by 2^3. @@ -7544,12 +7765,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in natural order (smallest to largest). { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 4.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 1.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7566,12 +7789,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in reverse natural order (largest to smallest). 
{ - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7588,12 +7813,14 @@ TEST_F(WebRtcVideoChannelTest, // Try layers in mixed order. { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 10.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7610,12 +7837,14 @@ TEST_F(WebRtcVideoChannelTest, // Try with a missing scale setting, defaults to 1.0 if any other is set. 
{ - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by.reset(); rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -7653,13 +7882,14 @@ TEST_F(WebRtcVideoChannelTest, channel_->SetSend(true); // Set `scale_resolution_down_by`'s. - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(rtp_parameters.encodings.size(), 3u); rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - const auto result = - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + const auto result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); // Use a capture resolution whose width and height are not divisible by 2^3. @@ -7688,7 +7918,8 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) { EXPECT_FALSE(encoding.max_framerate); @@ -7698,10 +7929,12 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { parameters.encodings[0].max_framerate = 10; parameters.encodings[1].max_framerate = 20; parameters.encodings[2].max_framerate = 25; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the bitrates are set on the VideoChannel. - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(10, parameters.encodings[0].max_framerate); EXPECT_EQ(20, parameters.encodings[1].max_framerate); @@ -7714,16 +7947,21 @@ TEST_F(WebRtcVideoChannelTest, SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Num temporal layers should be in the range [1, kMaxTemporalStreams]. 
parameters.encodings[0].num_temporal_layers = 0; EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, - channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); + channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .type()); parameters.encodings[0].num_temporal_layers = webrtc::kMaxTemporalStreams + 1; EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, - channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); + channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .type()); } TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersNumTemporalLayers) { @@ -7731,7 +7969,8 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersNumTemporalLayers) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) EXPECT_FALSE(encoding.num_temporal_layers); @@ -7740,10 +7979,12 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersNumTemporalLayers) { parameters.encodings[0].num_temporal_layers = 3; parameters.encodings[1].num_temporal_layers = 3; parameters.encodings[2].num_temporal_layers = 3; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the number of temporal layers are set on the VideoChannel. 
- parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(3, parameters.encodings[0].num_temporal_layers); EXPECT_EQ(3, parameters.encodings[1].num_temporal_layers); @@ -7763,12 +8004,15 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].num_temporal_layers = 3; parameters.encodings[1].num_temporal_layers = 2; parameters.encodings[2].num_temporal_layers = 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value is propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -7788,7 +8032,9 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { EXPECT_EQ(1UL, stream->GetVideoStreams()[2].num_temporal_layers); // No parameter changed, encoder should not be reconfigured. - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); @@ -7808,11 +8054,14 @@ TEST_F(WebRtcVideoChannelTest, frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Change rtp encoding parameters. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].num_temporal_layers = 2; parameters.encodings[2].num_temporal_layers = 1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that no value is propagated down to the encoder. webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); @@ -7848,11 +8097,14 @@ TEST_F(WebRtcVideoChannelTest, // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].max_framerate = 15; parameters.encodings[2].max_framerate = 20; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -7880,7 +8132,8 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) { EXPECT_FALSE(encoding.min_bitrate_bps); @@ -7894,10 +8147,12 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) { parameters.encodings[1].max_bitrate_bps = 400000; parameters.encodings[2].min_bitrate_bps = 500000; parameters.encodings[2].max_bitrate_bps = 600000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the bitrates are set on the VideoChannel. - parameters = channel_->GetRtpSendParameters(last_ssrc_); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(100000, parameters.encodings[0].min_bitrate_bps); EXPECT_EQ(200000, parameters.encodings[0].max_bitrate_bps); @@ -7912,14 +8167,17 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersFailsWithIncorrectBitrate) { SetUpSimulcast(true, false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Max bitrate lower than min bitrate should fail. 
parameters.encodings[2].min_bitrate_bps = 100000; parameters.encodings[2].max_bitrate_bps = 100000 - 1; EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, - channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); + channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .type()); } // Test that min and max bitrate values set via RtpParameters are correctly @@ -7939,7 +8197,8 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 100000; parameters.encodings[0].max_bitrate_bps = 200000; @@ -7947,7 +8206,9 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { parameters.encodings[1].max_bitrate_bps = 400000; parameters.encodings[2].min_bitrate_bps = 500000; parameters.encodings[2].max_bitrate_bps = 600000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -7979,7 +8240,9 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { EXPECT_EQ(600000, stream->GetVideoStreams()[2].max_bitrate_bps); // No parameter changed, encoder should not be reconfigured. 
- EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); @@ -8001,7 +8264,8 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Change the value and set it on the VideoChannel. @@ -8011,7 +8275,9 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { // Layer 1: only configure max bitrate. const int kMaxBpsLayer1 = kDefault[1].max_bitrate_bps - 1; parameters.encodings[1].max_bitrate_bps = kMaxBpsLayer1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -8070,7 +8336,8 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Change the value and set it on the VideoChannel. 
@@ -8080,7 +8347,9 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { // For layer 1, set the max bitrate below the default min. const int kMaxBpsLayer1 = kDefault[1].min_bitrate_bps - 1; parameters.encodings[1].max_bitrate_bps = kMaxBpsLayer1; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Verify that the new value propagated down to the encoder. // FakeVideoSendStream calls CreateEncoderStreams, test that the vector of @@ -8118,11 +8387,14 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for all but the highest layer. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = kDefault[0].max_bitrate_bps; parameters.encodings[1].max_bitrate_bps = kDefault[1].max_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Set max bandwidth equal to total max bitrate. send_parameters_.max_bandwidth_bps = @@ -8167,10 +8439,13 @@ TEST_F(WebRtcVideoChannelTest, frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for the highest layer. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[2].max_bitrate_bps = kDefault[2].max_bitrate_bps; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Set max bandwidth above the total max bitrate. send_parameters_.max_bandwidth_bps = @@ -8195,11 +8470,14 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxBitratePropagatedToEncoder) { EXPECT_TRUE(stream->IsSending()); // Set min and max bitrate. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1u, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 80000; parameters.encodings[0].max_bitrate_bps = 150000; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); @@ -8250,16 +8528,21 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersOneEncodingActive) { EXPECT_TRUE(stream->IsSending()); // Get current parameters and change "active" to false. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); parameters.encodings[0].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_FALSE(stream->IsSending()); // Now change it back to active and verify we resume sending. parameters.encodings[0].active = true; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_TRUE(stream->IsSending()); } @@ -8287,7 +8570,7 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { // Check that all encodings are initially active. webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(primary_ssrc); + channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_TRUE(parameters.encodings[0].active); EXPECT_TRUE(parameters.encodings[1].active); @@ -8298,9 +8581,11 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { parameters.encodings[0].active = false; parameters.encodings[1].active = true; parameters.encodings[2].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Verify that the active fields are set on the VideoChannel. 
- parameters = channel_->GetRtpSendParameters(primary_ssrc); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].active); EXPECT_TRUE(parameters.encodings[1].active); @@ -8319,9 +8604,11 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { parameters.encodings[0].active = false; parameters.encodings[1].active = false; parameters.encodings[2].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Verify that the active fields are set on the VideoChannel. - parameters = channel_->GetRtpSendParameters(primary_ssrc); + parameters = channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].active); EXPECT_FALSE(parameters.encodings[1].active); @@ -8361,7 +8648,7 @@ TEST_F(WebRtcVideoChannelTest, // Check that all encodings are initially active. webrtc::RtpParameters parameters = - channel_->GetRtpSendParameters(primary_ssrc); + channel_->AsSendChannel()->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_TRUE(parameters.encodings[0].active); EXPECT_TRUE(parameters.encodings[1].active); @@ -8372,7 +8659,9 @@ TEST_F(WebRtcVideoChannelTest, parameters.encodings[0].active = false; parameters.encodings[1].active = false; parameters.encodings[2].active = true; - EXPECT_TRUE(channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(primary_ssrc, parameters) + .ok()); // Check that the VideoSendStream is updated appropriately. This means its // send state was updated and it was reconfigured. 
@@ -8405,13 +8694,16 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_TRUE(stream->IsSending()); // Get current parameters and change "active" to false. - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); parameters.encodings[0].active = false; EXPECT_EQ(1u, GetFakeSendStreams().size()); EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams()); - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, parameters) + .ok()); EXPECT_FALSE(stream->IsSending()); // Reorder the codec list, causing the stream to be reconfigured. @@ -8439,7 +8731,7 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersCodecs) { EXPECT_TRUE(channel_->SetSendParameters(parameters)); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(GetEngineCodec("VP8").ToCodecParameters(), rtp_parameters.codecs[0]); @@ -8453,7 +8745,8 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersRtcpCname) { params.cname = "rtcpcname"; AddSendStream(params); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrc); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrc); EXPECT_STREQ("rtcpcname", rtp_parameters.rtcp.cname.c_str()); } @@ -8463,7 +8756,7 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersSsrc) { AddSendStream(); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(last_ssrc_, rtp_parameters.encodings[0].ssrc); } @@ -8472,13 +8765,13 @@ 
TEST_F(WebRtcVideoChannelTest, DetectRtpSendParameterHeaderExtensionsChange) { AddSendStream(); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); rtp_parameters.header_extensions.emplace_back(); EXPECT_NE(0u, rtp_parameters.header_extensions.size()); - webrtc::RTCError result = - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + webrtc::RTCError result = channel_->AsSendChannel()->SetRtpSendParameters( + last_ssrc_, rtp_parameters); EXPECT_EQ(webrtc::RTCErrorType::INVALID_MODIFICATION, result.type()); } @@ -8489,15 +8782,17 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersDegradationPreference) { EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_FALSE(rtp_parameters.degradation_preference.has_value()); rtp_parameters.degradation_preference = webrtc::DegradationPreference::MAINTAIN_FRAMERATE; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); webrtc::RtpParameters updated_rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(updated_rtp_parameters.degradation_preference, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -8514,13 +8809,16 @@ TEST_F(WebRtcVideoChannelTest, SetAndGetRtpSendParameters) { EXPECT_TRUE(channel_->SetSendParameters(parameters)); webrtc::RtpParameters initial_params = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); // We should be able to set the params we just got. 
- EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, initial_params).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, initial_params) + .ok()); // ... And this shouldn't change the params returned by GetRtpSendParameters. - EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(last_ssrc_)); + EXPECT_EQ(initial_params, + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_)); } // Test that GetRtpReceiveParameters returns the currently configured codecs. @@ -8659,7 +8957,7 @@ TEST_F(WebRtcVideoChannelTest, cricket::StreamParams params = cricket::StreamParams::CreateLegacy(1); params.AddFidSsrc(1, 2); - EXPECT_TRUE(channel_->AddRecvStream(params)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(params)); } void WebRtcVideoChannelTest::TestReceiverLocalSsrcConfiguration( @@ -8691,13 +8989,13 @@ void WebRtcVideoChannelTest::TestReceiverLocalSsrcConfiguration( // Removing first sender should fall back to another (in this case the second) // local send stream's SSRC. AddSendStream(StreamParams::CreateLegacy(kSecondSenderSsrc)); - ASSERT_TRUE(channel_->RemoveSendStream(kSenderSsrc)); + ASSERT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSenderSsrc)); receive_streams = fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1u, receive_streams.size()); EXPECT_EQ(kSecondSenderSsrc, receive_streams[0]->GetConfig().rtp.local_ssrc); // Removing the last sender should fall back to default local SSRC. 
- ASSERT_TRUE(channel_->RemoveSendStream(kSecondSenderSsrc)); + ASSERT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSecondSenderSsrc)); receive_streams = fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1u, receive_streams.size()); EXPECT_EQ(kExpectedDefaultReceiverSsrc, @@ -8743,14 +9041,16 @@ TEST_F(WebRtcVideoChannelTest, FakeVideoSendStream* stream = SetUpSimulcast(true, false); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); ASSERT_TRUE(rtp_parameters.encodings[0].active); ASSERT_TRUE(rtp_parameters.encodings[1].active); ASSERT_TRUE(rtp_parameters.encodings[2].active); rtp_parameters.encodings[0].active = false; rtp_parameters.encodings[1].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(last_ssrc_, rtp_parameters) + .ok()); EXPECT_TRUE(stream->GetEncoderConfig().is_quality_scaling_allowed); } @@ -8775,7 +9075,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { channel_.reset(engine_.CreateMediaChannel( &fake_call_, GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), mock_rate_allocator_factory_.get())); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); last_ssrc_ = 123; } @@ -8814,7 +9114,8 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - auto rtp_parameters = channel_->GetRtpSendParameters(kSsrcs3[0]); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcs3[0]); EXPECT_EQ(num_configured_streams, rtp_parameters.encodings.size()); std::vector video_streams = stream->GetVideoStreams(); @@ -8890,7 +9191,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { FakeVideoSendStream* AddSendStream(const 
StreamParams& sp) { size_t num_streams = fake_call_.GetVideoSendStreams().size(); - EXPECT_TRUE(channel_->AddSendStream(sp)); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); std::vector streams = fake_call_.GetVideoSendStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -8907,7 +9208,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { FakeVideoReceiveStream* AddRecvStream(const StreamParams& sp) { size_t num_streams = fake_call_.GetVideoReceiveStreams().size(); - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); std::vector streams = fake_call_.GetVideoReceiveStreams(); EXPECT_EQ(num_streams + 1, streams.size()); @@ -9007,7 +9308,7 @@ TEST_F(WebRtcVideoChannelTest, SetsRidsOnSendStream) { } sp.set_rids(rid_descriptions); - ASSERT_TRUE(channel_->AddSendStream(sp)); + ASSERT_TRUE(channel_->AsSendChannel()->AddSendStream(sp)); const auto& streams = fake_call_->GetVideoSendStreams(); ASSERT_EQ(1u, streams.size()); auto stream = streams[0]; @@ -9057,11 +9358,11 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { { // TEST requested_resolution < frame size webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 640, .height = 360}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9072,11 +9373,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { } { // TEST requested_resolution == frame size - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); 
rtp_parameters.encodings[0].requested_resolution = {.width = 1280, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); @@ -9086,11 +9388,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { } { // TEST requested_resolution > frame size - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 2 * 1280, .height = 2 * 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); @@ -9114,11 +9417,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 720, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9129,11 +9433,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { } { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); 
rtp_parameters.encodings[0].requested_resolution = {.width = 1280, .height = 1280}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9144,11 +9449,12 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { } { - auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + auto rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 650, .height = 650}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); @@ -9172,7 +9478,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { { webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 320, .height = 180}; @@ -9180,7 +9486,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { .height = 360}; rtp_parameters.encodings[2].requested_resolution = {.width = 1280, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9194,7 +9500,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { { webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 320, 
.height = 180}; @@ -9202,7 +9508,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { rtp_parameters.encodings[2].requested_resolution = {.width = 1280, .height = 720}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -9215,7 +9521,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { { webrtc::RtpParameters rtp_parameters = - channel_->GetRtpSendParameters(last_ssrc_); + channel_->AsSendChannel()->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); rtp_parameters.encodings[0].requested_resolution = {.width = 320, .height = 180}; @@ -9224,7 +9530,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { .height = 360}; rtp_parameters.encodings[2].requested_resolution = {.width = 960, .height = 540}; - channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc index 9f686e695a..212bd25aaa 100644 --- a/media/engine/webrtc_voice_engine.cc +++ b/media/engine/webrtc_voice_engine.cc @@ -785,19 +785,19 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream void SetSendCodecSpec( const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) { UpdateSendCodecSpec(send_codec_spec); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetRtpExtensions(const std::vector& extensions) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.rtp.extensions = extensions; rtp_parameters_.header_extensions = extensions; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetExtmapAllowMixed(bool extmap_allow_mixed) { config_.rtp.extmap_allow_mixed = extmap_allow_mixed; - 
ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetMid(const std::string& mid) { @@ -806,14 +806,14 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream return; } config_.rtp.mid = mid; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetFrameEncryptor( rtc::scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_encryptor = frame_encryptor; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void SetAudioNetworkAdaptorConfig( @@ -826,7 +826,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream audio_network_adaptor_config_from_options_ = audio_network_adaptor_config; UpdateAudioNetworkAdaptorConfig(); UpdateAllowedBitrateRange(); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } bool SetMaxSendBitrate(int bps) { @@ -844,7 +844,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream if (send_rate != config_.send_codec_spec->target_bitrate_bps) { config_.send_codec_spec->target_bitrate_bps = send_rate; - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } return true; } @@ -954,11 +954,12 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream return rtp_parameters_; } - webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters) { + webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { webrtc::RTCError error = CheckRtpParametersInvalidModificationAndValues( rtp_parameters_, parameters); if (!error.ok()) { - return error; + return webrtc::InvokeSetParametersCallback(callback, error); } absl::optional send_rate; @@ -967,7 +968,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream parameters.encodings[0].max_bitrate_bps, *audio_codec_spec_); if (!send_rate) { - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, 
webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } } @@ -997,7 +999,9 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // used. UpdateAudioNetworkAdaptorConfig(); UpdateAllowedBitrateRange(); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(std::move(callback)); + } else { + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } rtp_parameters_.rtcp.cname = config_.rtp.c_name; @@ -1012,7 +1016,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_transformer = std::move(frame_transformer); - ReconfigureAudioSendStream(); + ReconfigureAudioSendStream(nullptr); } void ConfigureEncoder(const webrtc::AudioEncoder::Config& config) { @@ -1042,7 +1046,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // - a reasonable default of 32kbps min/max // - fixed target bitrate from codec spec // - lower min bitrate if adaptive ptime is enabled - // - bitrate configured in the rtp_parameter encodings settings const int kDefaultBitrateBps = 32000; config_.min_bitrate_bps = kDefaultBitrateBps; config_.max_bitrate_bps = kDefaultBitrateBps; @@ -1058,13 +1061,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream config_.min_bitrate_bps, static_cast(adaptive_ptime_config_.min_encoder_bitrate.bps())); } - - if (rtp_parameters_.encodings[0].min_bitrate_bps) { - config_.min_bitrate_bps = *rtp_parameters_.encodings[0].min_bitrate_bps; - } - if (rtp_parameters_.encodings[0].max_bitrate_bps) { - config_.max_bitrate_bps = *rtp_parameters_.encodings[0].max_bitrate_bps; - } } void UpdateSendCodecSpec( @@ -1111,10 +1107,10 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream audio_network_adaptor_config_from_options_; } - void ReconfigureAudioSendStream() { + void ReconfigureAudioSendStream(webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); - stream_->Reconfigure(config_); + 
stream_->Reconfigure(config_, std::move(callback)); } int NumPreferredChannels() const override { return num_encoded_channels_; } @@ -1399,14 +1395,16 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpSendParameters( webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) { + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) { RTC_DCHECK_RUN_ON(worker_thread_); auto it = send_streams_.find(ssrc); if (it == send_streams_.end()) { RTC_LOG(LS_WARNING) << "Attempting to set RTP send parameters for stream " "with ssrc " << ssrc << " which doesn't exist."; - return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } // TODO(deadbeef): Handle setting parameters with a list of codecs in a @@ -1415,7 +1413,8 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( if (current_parameters.codecs != parameters.codecs) { RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs " "is not currently supported."; - return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_PARAMETER); + return webrtc::InvokeSetParametersCallback( + callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } if (!parameters.encodings.empty()) { @@ -1450,7 +1449,7 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( // Codecs are handled at the WebRtcVoiceMediaChannel level. 
webrtc::RtpParameters reduced_params = parameters; reduced_params.codecs.clear(); - return it->second->SetRtpParameters(reduced_params); + return it->second->SetRtpParameters(reduced_params, std::move(callback)); } webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpReceiveParameters( @@ -2304,6 +2303,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, sinfo.header_and_padding_bytes_sent = stats.header_and_padding_bytes_sent; sinfo.retransmitted_bytes_sent = stats.retransmitted_bytes_sent; sinfo.packets_sent = stats.packets_sent; + sinfo.total_packet_send_delay = stats.total_packet_send_delay; sinfo.retransmitted_packets_sent = stats.retransmitted_packets_sent; sinfo.packets_lost = stats.packets_lost; sinfo.fraction_lost = stats.fraction_lost; diff --git a/media/engine/webrtc_voice_engine.h b/media/engine/webrtc_voice_engine.h index f8178f430b..20409f1e98 100644 --- a/media/engine/webrtc_voice_engine.h +++ b/media/engine/webrtc_voice_engine.h @@ -156,7 +156,8 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters) override; + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback) override; webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override; webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; diff --git a/media/engine/webrtc_voice_engine_unittest.cc b/media/engine/webrtc_voice_engine_unittest.cc index 16cc86e43d..91fc5851b0 100644 --- a/media/engine/webrtc_voice_engine_unittest.cc +++ b/media/engine/webrtc_voice_engine_unittest.cc @@ -29,6 +29,7 @@ #include "media/base/media_constants.h" #include "media/engine/fake_webrtc_call.h" #include "modules/audio_device/include/mock_audio_device.h" +#include "modules/audio_mixer/audio_mixer_impl.h" #include 
"modules/audio_processing/include/mock_audio_processing.h" #include "rtc_base/arraysize.h" #include "rtc_base/byte_order.h" @@ -53,7 +54,6 @@ using webrtc::BitrateConstraints; constexpr uint32_t kMaxUnsignaledRecvStreams = 4; const cricket::AudioCodec kPcmuCodec(0, "PCMU", 8000, 64000, 1); -const cricket::AudioCodec kIsacCodec(103, "ISAC", 16000, 32000, 1); const cricket::AudioCodec kOpusCodec(111, "opus", 48000, 32000, 2); const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1); const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1); @@ -247,7 +247,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { if (!SetupChannel()) { return false; } - if (!channel_->AddSendStream(sp)) { + if (!channel_->AsSendChannel()->AddSendStream(sp)) { return false; } if (!use_null_apm_) { @@ -260,21 +260,23 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { bool AddRecvStream(uint32_t ssrc) { EXPECT_TRUE(channel_); - return channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc)); + return channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(ssrc)); } void SetupForMultiSendStream() { EXPECT_TRUE(SetupSendStream()); // Remove stream added in Setup. EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX)); - EXPECT_TRUE(channel_->RemoveSendStream(kSsrcX)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrcX)); // Verify the channel does not exist. EXPECT_FALSE(call_.GetAudioSendStream(kSsrcX)); } void DeliverPacket(const void* data, int len) { rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len); - channel_->OnPacketReceived(packet, /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(packet, + /* packet_time_us */ -1); rtc::Thread::Current()->ProcessMessages(0); } @@ -342,8 +344,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { if (caller) { // If this is a caller, local description will be applied and add the // send stream. 
- EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); } // Test we can only InsertDtmf when the other side supports telephone-event. @@ -358,8 +360,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { if (!caller) { // If this is callee, there's no active send channel yet. EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123)); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); } // Check we fail if the ssrc is invalid. @@ -381,8 +383,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // For a caller, the answer will be applied in set remote description // where SetSendParameters() is called. EXPECT_TRUE(SetupChannel()); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); send_parameters_.extmap_allow_mixed = extmap_allow_mixed; SetSendParameters(send_parameters_); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); @@ -394,8 +396,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // where SetExtmapAllowMixed() and AddSendStream() are called. 
EXPECT_TRUE(SetupChannel()); channel_->SetExtmapAllowMixed(extmap_allow_mixed); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); @@ -423,11 +425,14 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // Sets the per-stream maximum bitrate limit for the specified SSRC. bool SetMaxBitrateForStream(int32_t ssrc, int bitrate) { - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(ssrc); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(ssrc); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = bitrate; - return channel_->SetRtpSendParameters(ssrc, parameters).ok(); + return channel_->AsSendChannel() + ->SetRtpSendParameters(ssrc, parameters) + .ok(); } void SetGlobalMaxBitrate(const cricket::AudioCodec& codec, int bitrate) { @@ -449,6 +454,10 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { return GetSendStreamConfig(ssrc).send_codec_spec->target_bitrate_bps; } + int GetMaxBitrate(int32_t ssrc) { + return GetSendStreamConfig(ssrc).max_bitrate_bps; + } + const absl::optional& GetAudioNetworkAdaptorConfig( int32_t ssrc) { return GetSendStreamConfig(ssrc).audio_network_adaptor_config; @@ -469,13 +478,14 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // Verify that reading back the parameters gives results // consistent with the Set() result. webrtc::RtpParameters resulting_parameters = - channel_->GetRtpSendParameters(kSsrcX); + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(1UL, resulting_parameters.encodings.size()); EXPECT_EQ(expected_result ? 
stream_max : -1, resulting_parameters.encodings[0].max_bitrate_bps); // Verify that the codec settings have the expected bitrate. EXPECT_EQ(expected_codec_bitrate, GetCodecBitrate(kSsrcX)); + EXPECT_EQ(expected_codec_bitrate, GetMaxBitrate(kSsrcX)); } void SetSendCodecsShouldWorkForBitrates(const char* min_bitrate_kbps, @@ -529,8 +539,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { EXPECT_EQ(id, GetSendStreamConfig(kSsrcX).rtp.extensions[0].id); // Ensure extension is set properly on new stream. - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcY))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcY))); EXPECT_NE(call_.GetAudioSendStream(kSsrcX), call_.GetAudioSendStream(kSsrcY)); EXPECT_EQ(1u, GetSendStreamConfig(kSsrcY).rtp.extensions.size()); @@ -711,8 +721,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { stats.header_and_padding_bytes_rcvd); EXPECT_EQ(rtc::checked_cast(info.packets_rcvd), stats.packets_rcvd); - EXPECT_EQ(rtc::checked_cast(info.packets_lost), - stats.packets_lost); + EXPECT_EQ(info.packets_lost, stats.packets_lost); EXPECT_EQ(info.codec_name, stats.codec_name); EXPECT_EQ(info.codec_payload_type, stats.codec_payload_type); EXPECT_EQ(rtc::checked_cast(info.jitter_ms), stats.jitter_ms); @@ -816,8 +825,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateMediaChannel) { // Test that we can add a send stream and that it has the correct defaults. 
TEST_P(WebRtcVoiceEngineTestFake, CreateSendStream) { EXPECT_TRUE(SetupChannel()); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); EXPECT_EQ(kSsrcX, config.rtp.ssrc); EXPECT_EQ("", config.rtp.c_name); @@ -857,7 +866,7 @@ TEST_P(WebRtcVoiceEngineTestFake, OpusSupportsTransportCc) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kTelephoneEventCodec2); @@ -868,7 +877,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {106, {"ISAC", 16000, 1}}, + {106, {"OPUS", 48000, 2}}, {126, {"telephone-event", 8000, 1}}, {107, {"telephone-event", 32000, 1}}}))); } @@ -877,7 +886,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(cricket::AudioCodec(127, "XYZ", 32000, 0, 1)); EXPECT_FALSE(channel_->SetRecvParameters(parameters)); } @@ -886,9 +895,9 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); - parameters.codecs[1].id = kIsacCodec.id; + 
parameters.codecs[1].id = kOpusCodec.id; EXPECT_FALSE(channel_->SetRecvParameters(parameters)); } @@ -896,32 +905,27 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( - {{0, {"PCMU", 8000, 1}}, - {103, {"ISAC", 16000, 1}}, - {111, {"opus", 48000, 2}}}))); + {{0, {"PCMU", 8000, 1}}, {111, {"opus", 48000, 2}}}))); } // Test that we can decode OPUS with stereo = 0. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); - parameters.codecs[2].params["stereo"] = "0"; + parameters.codecs[1].params["stereo"] = "0"; EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {103, {"ISAC", 16000, 1}}, {111, {"opus", 48000, 2, {{"stereo", "0"}}}}}))); } @@ -929,16 +933,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); - parameters.codecs[2].params["stereo"] = "1"; + parameters.codecs[1].params["stereo"] = "1"; EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); 
EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {103, {"ISAC", 16000, 1}}, {111, {"opus", 48000, 2, {{"stereo", "1"}}}}}))); } @@ -946,7 +948,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { EXPECT_TRUE(SetupChannel()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kTelephoneEventCodec2); @@ -958,7 +960,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { EXPECT_THAT(GetRecvStreamConfig(ssrc).decoder_map, (ContainerEq>( {{0, {"PCMU", 8000, 1}}, - {106, {"ISAC", 16000, 1}}, + {106, {"OPUS", 48000, 2}}, {126, {"telephone-event", 8000, 1}}, {107, {"telephone-event", 32000, 1}}}))); } @@ -967,20 +969,20 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 106; // collide with existing CN 32k EXPECT_TRUE(channel_->SetRecvParameters(parameters)); const auto& dm = GetRecvStreamConfig(kSsrcX).decoder_map; ASSERT_EQ(1u, dm.count(106)); - EXPECT_EQ(webrtc::SdpAudioFormat("isac", 16000, 1), dm.at(106)); + EXPECT_EQ(webrtc::SdpAudioFormat("opus", 48000, 2), dm.at(106)); } // Test that we can apply the same set of codecs again while playing. 
TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); channel_->SetPlayout(true); @@ -988,7 +990,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { // Remapping a payload type to a different codec should fail. parameters.codecs[0] = kOpusCodec; - parameters.codecs[0].id = kIsacCodec.id; + parameters.codecs[0].id = kPcmuCodec.id; EXPECT_FALSE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(GetRecvStream(kSsrcX).started()); } @@ -997,7 +999,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); channel_->SetPlayout(true); @@ -1012,7 +1014,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { TEST_P(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); ++parameters.codecs[0].id; @@ -1040,9 +1042,6 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) { // value. autobw is enabled for the following tests because the target // bitrate is <= 0. - // ISAC, default bitrate == 32000. - TestMaxSendBandwidth(kIsacCodec, 0, true, 32000); - // PCMU, default bitrate == 64000. 
TestMaxSendBandwidth(kPcmuCodec, -1, true, 64000); @@ -1053,11 +1052,6 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) { TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCaller) { EXPECT_TRUE(SetupSendStream()); - // ISAC, default bitrate == 32000. - TestMaxSendBandwidth(kIsacCodec, 16000, true, 16000); - // Rates above the max (56000) should be capped. - TestMaxSendBandwidth(kIsacCodec, 100000, true, 32000); - // opus, default bitrate == 64000. TestMaxSendBandwidth(kOpusCodec, 96000, true, 96000); TestMaxSendBandwidth(kOpusCodec, 48000, true, 48000); @@ -1089,8 +1083,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) { parameters.max_bandwidth_bps = kDesiredBitrate; SetSendParameters(parameters); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); EXPECT_EQ(kDesiredBitrate, GetCodecBitrate(kSsrcX)); } @@ -1141,12 +1135,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxBitratePerStream) { TEST_P(WebRtcVoiceEngineTestFake, CannotSetMaxBitrateForNonexistentStream) { EXPECT_TRUE(SetupChannel()); webrtc::RtpParameters nonexistent_parameters = - channel_->GetRtpSendParameters(kSsrcX); + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(0u, nonexistent_parameters.encodings.size()); nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE( - channel_->SetRtpSendParameters(kSsrcX, nonexistent_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, nonexistent_parameters) + .ok()); } TEST_P(WebRtcVoiceEngineTestFake, @@ -1157,21 +1152,26 @@ TEST_P(WebRtcVoiceEngineTestFake, // for each encoding individually. 
EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); // Two or more encodings should result in failure. parameters.encodings.push_back(webrtc::RtpEncodingParameters()); - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_FALSE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); // Zero encodings should also fail. parameters.encodings.clear(); - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_FALSE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); } // Changing the SSRC through RtpParameters is not allowed. TEST_P(WebRtcVoiceEngineTestFake, CannotSetSsrcInRtpSendParameters) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); parameters.encodings[0].ssrc = 0xdeadbeef; - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_FALSE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); } // Test that a stream will not be sending if its encoding is made @@ -1181,34 +1181,40 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) { SetSend(true); EXPECT_TRUE(GetSendStream(kSsrcX).IsSending()); // Get current parameters and change "active" to false. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); parameters.encodings[0].active = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_FALSE(GetSendStream(kSsrcX).IsSending()); // Now change it back to active and verify we resume sending. // This should occur even when other parameters are updated. parameters.encodings[0].active = true; parameters.encodings[0].max_bitrate_bps = absl::optional(6000); - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_TRUE(GetSendStream(kSsrcX).IsSending()); } TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersAdaptivePtime) { EXPECT_TRUE(SetupSendStream()); // Get current parameters and change "adaptive_ptime" to true. 
- webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(1u, parameters.encodings.size()); ASSERT_FALSE(parameters.encodings[0].adaptive_ptime); parameters.encodings[0].adaptive_ptime = true; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_TRUE(GetAudioNetworkAdaptorConfig(kSsrcX)); EXPECT_EQ(16000, GetSendStreamConfig(kSsrcX).min_bitrate_bps); parameters.encodings[0].adaptive_ptime = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_FALSE(GetAudioNetworkAdaptorConfig(kSsrcX)); EXPECT_EQ(32000, GetSendStreamConfig(kSsrcX).min_bitrate_bps); } @@ -1222,9 +1228,11 @@ TEST_P(WebRtcVoiceEngineTestFake, EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config, GetAudioNetworkAdaptorConfig(kSsrcX)); - webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); parameters.encodings[0].adaptive_ptime = false; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); + EXPECT_TRUE( + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config, GetAudioNetworkAdaptorConfig(kSsrcX)); } @@ -1242,8 +1250,8 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) { SetupForMultiSendStream(); // Create send streams. 
for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); } // Configure one stream to be limited by the stream config, another to be // limited by the global max, and the third one with no per-stream limit @@ -1269,13 +1277,14 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) { TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(2u, rtp_parameters.codecs.size()); - EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]); + EXPECT_EQ(kOpusCodec.ToCodecParameters(), rtp_parameters.codecs[0]); EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]); } @@ -1285,7 +1294,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersRtcpCname) { params.cname = "rtcpcname"; EXPECT_TRUE(SetupSendStream(params)); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_STREQ("rtcpcname", rtp_parameters.rtcp.cname.c_str()); } @@ -1293,20 +1303,22 @@ TEST_P(WebRtcVoiceEngineTestFake, DetectRtpSendParameterHeaderExtensionsChange) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); rtp_parameters.header_extensions.emplace_back(); EXPECT_NE(0u, 
rtp_parameters.header_extensions.size()); webrtc::RTCError result = - channel_->SetRtpSendParameters(kSsrcX, rtp_parameters); + channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, rtp_parameters); EXPECT_EQ(webrtc::RTCErrorType::INVALID_MODIFICATION, result.type()); } // Test that GetRtpSendParameters returns an SSRC. TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(kSsrcX, rtp_parameters.encodings[0].ssrc); } @@ -1315,18 +1327,23 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) { TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); - webrtc::RtpParameters initial_params = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters initial_params = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); // We should be able to set the params we just got. - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, initial_params).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, initial_params) + .ok()); // ... And this shouldn't change the params returned by GetRtpSendParameters. 
- webrtc::RtpParameters new_params = channel_->GetRtpSendParameters(kSsrcX); - EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(kSsrcX)); + webrtc::RtpParameters new_params = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); + EXPECT_EQ(initial_params, + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX)); } // Test that max_bitrate_bps in send stream config gets updated correctly when @@ -1337,13 +1354,16 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) { send_parameters.codecs.push_back(kOpusCodec); SetSendParameters(send_parameters); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); // Expect empty on parameters.encodings[0].max_bitrate_bps; EXPECT_FALSE(rtp_parameters.encodings[0].max_bitrate_bps); constexpr int kMaxBitrateBps = 6000; rtp_parameters.encodings[0].max_bitrate_bps = kMaxBitrateBps; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); const int max_bitrate = GetSendStreamConfig(kSsrcX).max_bitrate_bps; EXPECT_EQ(max_bitrate, kMaxBitrateBps); @@ -1353,35 +1373,44 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) { // a value <= 0, setting the parameters returns false. 
TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterInvalidBitratePriority) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, rtp_parameters.encodings[0].bitrate_priority); rtp_parameters.encodings[0].bitrate_priority = 0; - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); rtp_parameters.encodings[0].bitrate_priority = -1.0; - EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_FALSE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); } // Test that the bitrate_priority in the send stream config gets updated when // SetRtpSendParameters is set for the VoiceMediaChannel. TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) { EXPECT_TRUE(SetupSendStream()); - webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); + webrtc::RtpParameters rtp_parameters = + channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, rtp_parameters.encodings[0].bitrate_priority); double new_bitrate_priority = 2.0; rtp_parameters.encodings[0].bitrate_priority = new_bitrate_priority; - EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); + EXPECT_TRUE(channel_->AsSendChannel() + ->SetRtpSendParameters(kSsrcX, rtp_parameters) + .ok()); // The priority should get set for both the audio channel's rtp parameters // and the audio send stream's audio config. 
- EXPECT_EQ( - new_bitrate_priority, - channel_->GetRtpSendParameters(kSsrcX).encodings[0].bitrate_priority); + EXPECT_EQ(new_bitrate_priority, channel_->AsSendChannel() + ->GetRtpSendParameters(kSsrcX) + .encodings[0] + .bitrate_priority); EXPECT_EQ(new_bitrate_priority, GetSendStreamConfig(kSsrcX).bitrate_priority); } @@ -1389,14 +1418,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) { TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersCodecs) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); webrtc::RtpParameters rtp_parameters = channel_->GetRtpReceiveParameters(kSsrcX); ASSERT_EQ(2u, rtp_parameters.codecs.size()); - EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]); + EXPECT_EQ(kOpusCodec.ToCodecParameters(), rtp_parameters.codecs[0]); EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]); } @@ -1413,7 +1442,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersSsrc) { TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpReceiveParameters) { EXPECT_TRUE(SetupRecvStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); @@ -1435,7 +1464,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { // Call necessary methods to configure receiving a default stream as // soon as it arrives. 
cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); @@ -1467,7 +1496,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs[0].id = 96; @@ -1476,7 +1505,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); EXPECT_EQ(22000, send_codec_spec.target_bitrate_bps); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("OPUS", send_codec_spec.format.name.c_str()); EXPECT_NE(send_codec_spec.format.clockrate_hz, 8000); EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); EXPECT_FALSE(channel_->CanInsertDtmf()); @@ -1572,7 +1601,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpAmountOfRedundancy) { TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs[0].id = 96; @@ -1807,8 +1836,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) { // NACK should be enabled even with no send stream. 
EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); } // Test that we can enable NACK on receive streams. @@ -1845,7 +1874,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecDisableNackRecvStreams) { TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamEnableNack) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); @@ -1865,7 +1894,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TransportCcCanBeEnabledAndDisabled) { SetSendParameters(send_parameters); cricket::AudioRecvParameters recv_parameters; - recv_parameters.codecs.push_back(kIsacCodec); + recv_parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); ASSERT_TRUE(call_.GetAudioReceiveStream(kSsrcX) != nullptr); @@ -1877,8 +1906,8 @@ TEST_P(WebRtcVoiceEngineTestFake, TransportCcCanBeEnabledAndDisabled) { EXPECT_TRUE(call_.GetAudioReceiveStream(kSsrcX)->transport_cc()); } -// Test that we can switch back and forth between Opus and ISAC with CN. -TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsIsacOpusSwitching) { +// Test that we can switch back and forth between Opus and PCMU with CN. 
+TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters opus_parameters; @@ -1890,15 +1919,15 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsIsacOpusSwitching) { EXPECT_STRCASEEQ("opus", spec.format.name.c_str()); } - cricket::AudioSendParameters isac_parameters; - isac_parameters.codecs.push_back(kIsacCodec); - isac_parameters.codecs.push_back(kCn16000Codec); - isac_parameters.codecs.push_back(kOpusCodec); - SetSendParameters(isac_parameters); + cricket::AudioSendParameters pcmu_parameters; + pcmu_parameters.codecs.push_back(kPcmuCodec); + pcmu_parameters.codecs.push_back(kCn16000Codec); + pcmu_parameters.codecs.push_back(kOpusCodec); + SetSendParameters(pcmu_parameters); { const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); + EXPECT_EQ(0, spec.payload_type); + EXPECT_STRCASEEQ("PCMU", spec.format.name.c_str()); } SetSendParameters(opus_parameters); @@ -1913,33 +1942,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsIsacOpusSwitching) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsBitrate) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); // bitrate == 32000 - SetSendParameters(parameters); - { - const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); - EXPECT_EQ(32000, spec.target_bitrate_bps); - } - - parameters.codecs[0].bitrate = 0; // bitrate == default - SetSendParameters(parameters); - { - const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); - EXPECT_EQ(32000, spec.target_bitrate_bps); - } - parameters.codecs[0].bitrate = 28000; // bitrate == 28000 - SetSendParameters(parameters); - { - const auto& spec = 
*GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(103, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); - EXPECT_EQ(28000, spec.target_bitrate_bps); - } - - parameters.codecs[0] = kPcmuCodec; // bitrate == 64000 + parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); { const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; @@ -1981,14 +1984,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; parameters.codecs.push_back(kTelephoneEventCodec1); - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs[0].id = 98; // DTMF parameters.codecs[1].id = 96; SetSendParameters(parameters); const auto& spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, spec.payload_type); - EXPECT_STRCASEEQ("ISAC", spec.format.name.c_str()); + EXPECT_STRCASEEQ("OPUS", spec.format.name.c_str()); SetSend(true); EXPECT_TRUE(channel_->CanInsertDtmf()); } @@ -2014,7 +2017,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFPayloadTypeOutOfRange) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; parameters.codecs.push_back(kTelephoneEventCodec2); - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 0; // DTMF parameters.codecs[1].id = 96; SetSendParameters(parameters); @@ -2036,15 +2039,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFPayloadTypeOutOfRange) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kCn16000Codec); - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kPcmuCodec); - parameters.codecs[0].id = 98; // wideband CN - parameters.codecs[1].id = 96; + 
parameters.codecs[0].id = 98; // narrowband CN SetSendParameters(parameters); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_EQ(0, send_codec_spec.payload_type); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(98, send_codec_spec.cng_payload_type); } @@ -2052,19 +2053,17 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); - // TODO(juberti): cn 32000 parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs[0].id = 96; - parameters.codecs[2].id = 97; // wideband CN - parameters.codecs[4].id = 98; // DTMF + parameters.codecs[2].id = 97; // narrowband CN + parameters.codecs[3].id = 98; // DTMF SetSendParameters(parameters); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); @@ -2075,22 +2074,20 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { EXPECT_TRUE(SetupChannel()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); - // TODO(juberti): cn 32000 parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kTelephoneEventCodec2); 
parameters.codecs[0].id = 96; - parameters.codecs[2].id = 97; // wideband CN - parameters.codecs[4].id = 98; // DTMF + parameters.codecs[2].id = 97; // narrowband CN + parameters.codecs[3].id = 98; // DTMF SetSendParameters(parameters); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); @@ -2102,20 +2099,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - // Set ISAC(16K) and CN(16K). VAD should be activated. - parameters.codecs.push_back(kIsacCodec); + // Set PCMU(8K) and CN(16K). VAD should not be activated. + parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs[1].id = 97; SetSendParameters(parameters); - { - const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); - EXPECT_EQ(1u, send_codec_spec.format.num_channels); - EXPECT_EQ(97, send_codec_spec.cng_payload_type); - } - // Set PCMU(8K) and CN(16K). VAD should not be activated. 
- parameters.codecs[0] = kPcmuCodec; - SetSendParameters(parameters); { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); @@ -2130,12 +2118,12 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(13, send_codec_spec.cng_payload_type); } - // Set ISAC(16K) and CN(8K). VAD should not be activated. - parameters.codecs[0] = kIsacCodec; + // Set OPUS(48K) and CN(8K). VAD should not be activated. + parameters.codecs[0] = kOpusCodec; SetSendParameters(parameters); { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("OPUS", send_codec_spec.format.name.c_str()); EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); } } @@ -2144,19 +2132,18 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) { EXPECT_TRUE(SetupSendStream()); cricket::AudioSendParameters parameters; - parameters.codecs.push_back(kIsacCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kTelephoneEventCodec1); - parameters.codecs[0].name = "iSaC"; + parameters.codecs[0].name = "PcMu"; parameters.codecs[0].id = 96; - parameters.codecs[2].id = 97; // wideband CN - parameters.codecs[4].id = 98; // DTMF + parameters.codecs[2].id = 97; // narrowband CN + parameters.codecs[3].id = 98; // DTMF SetSendParameters(parameters); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, 
send_codec_spec.cng_payload_type); SetSend(true); @@ -2269,8 +2256,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) { SetSend(true); for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); SetAudioSend(ssrc, true, &fake_source_); // Verify that we are in a sending state for all the created streams. EXPECT_TRUE(GetSendStream(ssrc).IsSending()); @@ -2279,9 +2266,9 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) { // Delete the send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE(channel_->RemoveSendStream(ssrc)); + EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(ssrc)); EXPECT_FALSE(call_.GetAudioSendStream(ssrc)); - EXPECT_FALSE(channel_->RemoveSendStream(ssrc)); + EXPECT_FALSE(channel_->AsSendChannel()->RemoveSendStream(ssrc)); } EXPECT_EQ(0u, call_.GetAudioSendStreams().size()); } @@ -2292,29 +2279,30 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) { // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); } cricket::AudioSendParameters parameters; - // Set ISAC(16K) and CN(16K). VAD should be activated. - parameters.codecs.push_back(kIsacCodec); - parameters.codecs.push_back(kCn16000Codec); + // Set PCMU and CN(8K). VAD should be activated. + parameters.codecs.push_back(kPcmuCodec); + parameters.codecs.push_back(kCn8000Codec); parameters.codecs[1].id = 97; SetSendParameters(parameters); - // Verify ISAC and VAD are corrected configured on all send channels. + // Verify PCMU and VAD are corrected configured on all send channels. 
for (uint32_t ssrc : kSsrcs4) { ASSERT_TRUE(call_.GetAudioSendStream(ssrc) != nullptr); const auto& send_codec_spec = *call_.GetAudioSendStream(ssrc)->GetConfig().send_codec_spec; - EXPECT_STRCASEEQ("ISAC", send_codec_spec.format.name.c_str()); + EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); } // Change to PCMU(8K) and CN(16K). parameters.codecs[0] = kPcmuCodec; + parameters.codecs[1] = kCn16000Codec; SetSendParameters(parameters); for (uint32_t ssrc : kSsrcs4) { ASSERT_TRUE(call_.GetAudioSendStream(ssrc) != nullptr); @@ -2331,8 +2319,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) { // Create the send channels and they should be a "not sending" date. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); SetAudioSend(ssrc, true, &fake_source_); EXPECT_FALSE(GetSendStream(ssrc).IsSending()); } @@ -2358,8 +2346,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(ssrc))); } // Create a receive stream to check that none of the send streams end up in @@ -2393,7 +2381,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Remove the kSsrcY stream. No receiver stats. 
{ cricket::VoiceMediaInfo info; - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); EXPECT_EQ(true, channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true)); @@ -2454,8 +2442,8 @@ TEST_P(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) { EXPECT_TRUE(GetRecvStream(kSsrcZ).started()); // Now remove the recv streams. - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcZ)); - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcZ)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY)); } TEST_P(WebRtcVoiceEngineTestFake, SetAudioNetworkAdaptorViaOptions) { @@ -2545,7 +2533,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { // Remove the kSsrcY stream. No receiver stats. { cricket::VoiceMediaInfo info; - EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); EXPECT_EQ(true, channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true)); @@ -2583,8 +2571,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcWithMultipleStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) { EXPECT_TRUE(SetupChannel()); EXPECT_TRUE(AddRecvStream(kSsrcY)); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcX))); EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); } @@ -2646,9 +2634,9 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) { EXPECT_EQ(s3.received_packets(), 1); EXPECT_TRUE(s3.VerifyLastPacket(packets[3], sizeof(packets[3]))); - EXPECT_TRUE(channel_->RemoveRecvStream(ssrc3)); - 
EXPECT_TRUE(channel_->RemoveRecvStream(ssrc2)); - EXPECT_TRUE(channel_->RemoveRecvStream(ssrc1)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc3)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc2)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc1)); } // Test that receiving on an unsignaled stream works (a stream is created). @@ -2671,7 +2659,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledSsrcWithSignaledStreamId) { EXPECT_TRUE(SetupChannel()); cricket::StreamParams unsignaled_stream; unsignaled_stream.set_stream_ids({kSyncLabel}); - ASSERT_TRUE(channel_->AddRecvStream(unsignaled_stream)); + ASSERT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(unsignaled_stream)); // The stream shouldn't have been created at this point because it doesn't // have any SSRCs. EXPECT_EQ(0u, call_.GetAudioReceiveStreams().size()); @@ -2685,8 +2673,8 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledSsrcWithSignaledStreamId) { // Remset the unsignaled stream to clear the cached parameters. If a new // default unsignaled receive stream is created it will not have a sync group. - channel_->ResetUnsignaledRecvStream(); - channel_->RemoveRecvStream(kSsrc1); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1); DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); @@ -2715,7 +2703,7 @@ TEST_P(WebRtcVoiceEngineTestFake, ASSERT_EQ(receivers1.size(), 2u); // Should remove all default streams. 
- channel_->ResetUnsignaledRecvStream(); + channel_->AsReceiveChannel()->ResetUnsignaledRecvStream(); const auto& receivers2 = call_.GetAudioReceiveStreams(); EXPECT_EQ(0u, receivers2.size()); } @@ -2844,7 +2832,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Updates) { stream_params.ssrcs.push_back(1); stream_params.set_stream_ids({new_stream_id}); - EXPECT_TRUE(channel_->AddRecvStream(stream_params)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream_params)); EXPECT_EQ(1u, streams.size()); // The audio receive stream should not have been recreated. EXPECT_EQ(audio_receive_stream_id, streams.front()->id()); @@ -2864,13 +2852,13 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStream) { TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamUnsupportedCodec) { EXPECT_TRUE(SetupSendStream()); cricket::AudioRecvParameters parameters; - parameters.codecs.push_back(kIsacCodec); + parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( - {{0, {"PCMU", 8000, 1}}, {103, {"ISAC", 16000, 1}}}))); + {{0, {"PCMU", 8000, 1}}, {111, {"OPUS", 48000, 2}}}))); } // Test that we properly clean up any streams that were added, even if @@ -3212,10 +3200,10 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) { // Various priorities map to various dscp values. 
parameters.encodings[0].network_priority = webrtc::Priority::kHigh; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters).ok()); + ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters, nullptr).ok()); EXPECT_EQ(rtc::DSCP_EF, network_interface.dscp()); parameters.encodings[0].network_priority = webrtc::Priority::kVeryLow; - ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters).ok()); + ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters, nullptr).ok()); EXPECT_EQ(rtc::DSCP_CS1, network_interface.dscp()); // Packets should also self-identify their dscp in PacketOptions. @@ -3242,7 +3230,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolume) { EXPECT_FALSE(channel_->SetOutputVolume(kSsrcY, 0.5)); cricket::StreamParams stream; stream.ssrcs.push_back(kSsrcY); - EXPECT_TRUE(channel_->AddRecvStream(stream)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream)); EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrcY).gain()); EXPECT_TRUE(channel_->SetOutputVolume(kSsrcY, 3)); EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcY).gain()); @@ -3284,14 +3272,18 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolumeUnsignaledRecvStream) { TEST_P(WebRtcVoiceEngineTestFake, BaseMinimumPlayoutDelayMs) { EXPECT_TRUE(SetupChannel()); - EXPECT_FALSE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcY, 200)); - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcY, 200)); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); cricket::StreamParams stream; stream.ssrcs.push_back(kSsrcY); - EXPECT_TRUE(channel_->AddRecvStream(stream)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream)); EXPECT_EQ(0, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms()); - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcY, 300)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcY, 
300)); EXPECT_EQ(300, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms()); } @@ -3302,43 +3294,70 @@ TEST_P(WebRtcVoiceEngineTestFake, // Spawn an unsignaled stream by sending a packet - delay should be 0. DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); - EXPECT_EQ(0, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); + EXPECT_EQ(0, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc1) + .value_or(-1)); // Check that it doesn't provide default values for unknown ssrc. - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); // Check that default value for unsignaled streams is 0. - EXPECT_EQ(0, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); + EXPECT_EQ(0, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc0) + .value_or(-1)); // Should remember the delay 100 which will be set on new unsignaled streams, // and also set the delay to 100 on existing unsignaled streams. - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc0, 100)); - EXPECT_EQ(100, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrc0, 100)); + EXPECT_EQ(100, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc0) + .value_or(-1)); // Check that it doesn't provide default values for unknown ssrc. - EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); // Spawn an unsignaled stream by sending a packet - delay should be 100. 
unsigned char pcmuFrame2[sizeof(kPcmuFrame)]; memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame)); rtc::SetBE32(&pcmuFrame2[8], kSsrcX); DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2)); - EXPECT_EQ(100, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); + EXPECT_EQ(100, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcX) + .value_or(-1)); // Setting delay with SSRC=0 should affect all unsignaled streams. - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc0, 300)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrc0, 300)); if (kMaxUnsignaledRecvStreams > 1) { - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc1) + .value_or(-1)); } - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcX) + .value_or(-1)); // Setting delay on an individual stream affects only that. - EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcX, 400)); + EXPECT_TRUE( + channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcX, 400)); if (kMaxUnsignaledRecvStreams > 1) { - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc1) + .value_or(-1)); } - EXPECT_EQ(400, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); - EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); + EXPECT_EQ(400, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcX) + .value_or(-1)); + EXPECT_EQ(300, channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrc0) + .value_or(-1)); // Check that it doesn't provide default values for unknown ssrc. 
- EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); + EXPECT_FALSE(channel_->AsReceiveChannel() + ->GetBaseMinimumPlayoutDelayMs(kSsrcY) + .has_value()); } TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) { @@ -3350,9 +3369,9 @@ TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) { sp.set_stream_ids({kStreamId}); // Creating two channels to make sure that sync label is set properly for both // the default voice channel and following ones. - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); sp.ssrcs[0] += 1; - EXPECT_TRUE(channel_->AddRecvStream(sp)); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp)); ASSERT_EQ(2u, call_.GetAudioReceiveStreams().size()); EXPECT_EQ(kStreamId, @@ -3375,8 +3394,8 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { EXPECT_TRUE(SetupSendStream()); SetSendParameters(send_parameters_); for (uint32_t ssrc : ssrcs) { - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream( + cricket::StreamParams::CreateLegacy(ssrc))); } EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); @@ -3437,7 +3456,8 @@ TEST_P(WebRtcVoiceEngineTestFake, DeliverAudioPacket_Call) { const cricket::FakeAudioReceiveStream* s = call_.GetAudioReceiveStream(kAudioSsrc); EXPECT_EQ(0, s->received_packets()); - channel_->OnPacketReceived(kPcmuPacket, /* packet_time_us */ -1); + channel_->AsReceiveChannel()->OnPacketReceived(kPcmuPacket, + /* packet_time_us */ -1); rtc::Thread::Current()->ProcessMessages(0); EXPECT_EQ(1, s->received_packets()); @@ -3449,8 +3469,8 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) { EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(AddRecvStream(kSsrcY)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); - EXPECT_TRUE( - 
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcZ))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcZ))); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); EXPECT_TRUE(AddRecvStream(kSsrcW)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcW).rtp.local_ssrc); @@ -3459,13 +3479,13 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) { TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_RecvCreatedFirst) { EXPECT_TRUE(SetupRecvStream()); EXPECT_EQ(0xFA17FA17u, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcY))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcY))); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_TRUE(AddRecvStream(kSsrcZ)); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); - EXPECT_TRUE( - channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcW))); + EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrcW))); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); } @@ -3513,7 +3533,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) { EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); // If we remove and add a default stream, it should get the same sink. 
- EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc1)); + EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1)); DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); @@ -3563,13 +3583,13 @@ TEST_P(WebRtcVoiceEngineTestFake, OnReadyToSendSignalsNetworkState) { EXPECT_EQ(webrtc::kNetworkUp, call_.GetNetworkState(webrtc::MediaType::VIDEO)); - channel_->OnReadyToSend(false); + channel_->AsSendChannel()->OnReadyToSend(false); EXPECT_EQ(webrtc::kNetworkDown, call_.GetNetworkState(webrtc::MediaType::AUDIO)); EXPECT_EQ(webrtc::kNetworkUp, call_.GetNetworkState(webrtc::MediaType::VIDEO)); - channel_->OnReadyToSend(true); + channel_->AsSendChannel()->OnReadyToSend(true); EXPECT_EQ(webrtc::kNetworkUp, call_.GetNetworkState(webrtc::MediaType::AUDIO)); EXPECT_EQ(webrtc::kNetworkUp, @@ -3671,7 +3691,7 @@ TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) { } } -// Verify the payload id of common audio codecs, including CN, ISAC, and G722. +// Verify the payload id of common audio codecs, including CN and G722. 
TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) { for (bool use_null_apm : {false, true}) { std::unique_ptr task_queue_factory = @@ -3698,10 +3718,6 @@ TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) { EXPECT_EQ(105, codec.id); } else if (is_codec("CN", 32000)) { EXPECT_EQ(106, codec.id); - } else if (is_codec("ISAC", 16000)) { - EXPECT_EQ(103, codec.id); - } else if (is_codec("ISAC", 32000)) { - EXPECT_EQ(104, codec.id); } else if (is_codec("G722", 8000)) { EXPECT_EQ(9, codec.id); } else if (is_codec("telephone-event", 8000)) { @@ -3807,6 +3823,57 @@ TEST(WebRtcVoiceEngineTest, SetRecvCodecs) { } } +TEST(WebRtcVoiceEngineTest, SetRtpSendParametersMaxBitrate) { + rtc::AutoThread main_thread; + std::unique_ptr task_queue_factory = + webrtc::CreateDefaultTaskQueueFactory(); + rtc::scoped_refptr adm = + webrtc::test::MockAudioDeviceModule::CreateNice(); + webrtc::FieldTrialBasedConfig field_trials; + FakeAudioSource source; + cricket::WebRtcVoiceEngine engine(task_queue_factory.get(), adm.get(), + webrtc::CreateBuiltinAudioEncoderFactory(), + webrtc::CreateBuiltinAudioDecoderFactory(), + nullptr, nullptr, nullptr, field_trials); + engine.Init(); + webrtc::RtcEventLogNull event_log; + webrtc::Call::Config call_config(&event_log); + call_config.trials = &field_trials; + call_config.task_queue_factory = task_queue_factory.get(); + { + webrtc::AudioState::Config config; + config.audio_mixer = webrtc::AudioMixerImpl::Create(); + config.audio_device_module = + webrtc::test::MockAudioDeviceModule::CreateNice(); + call_config.audio_state = webrtc::AudioState::Create(config); + } + auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); + cricket::WebRtcVoiceMediaChannel channel(&engine, cricket::MediaConfig(), + cricket::AudioOptions(), + webrtc::CryptoOptions(), call.get()); + { + cricket::AudioSendParameters params; + params.codecs.push_back(cricket::AudioCodec(1, "opus", 48000, 32000, 2)); + params.extensions.push_back(webrtc::RtpExtension( + 
webrtc::RtpExtension::kTransportSequenceNumberUri, 1)); + EXPECT_TRUE(channel.SetSendParameters(params)); + } + constexpr int kSsrc = 1234; + { + cricket::StreamParams params; + params.add_ssrc(kSsrc); + channel.AddSendStream(params); + } + channel.SetAudioSend(kSsrc, true, nullptr, &source); + channel.SetSend(true); + webrtc::RtpParameters params = channel.GetRtpSendParameters(kSsrc); + for (int max_bitrate : {-10, -1, 0, 10000}) { + params.encodings[0].max_bitrate_bps = max_bitrate; + channel.SetRtpSendParameters( + kSsrc, params, [](webrtc::RTCError error) { EXPECT_TRUE(error.ok()); }); + } +} + TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) { for (bool use_null_apm : {false, true}) { std::vector specs; diff --git a/modules/audio_coding/BUILD.gn b/modules/audio_coding/BUILD.gn index 0d67cfa3a6..65c849d7eb 100644 --- a/modules/audio_coding/BUILD.gn +++ b/modules/audio_coding/BUILD.gn @@ -381,50 +381,8 @@ rtc_library("ilbc_c") { absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } -rtc_source_set("isac_common") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/audio_decoder_isac_t.h", - "codecs/isac/audio_decoder_isac_t_impl.h", - "codecs/isac/audio_encoder_isac_t.h", - "codecs/isac/audio_encoder_isac_t_impl.h", - ] - deps = [ - ":isac_bwinfo", - "../../api:scoped_refptr", - "../../api/audio_codecs:audio_codecs_api", - "../../api/units:time_delta", - "../../rtc_base:checks", - "../../rtc_base:safe_minmax", - "../../system_wrappers:field_trial", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - -rtc_library("isac") { - visibility += [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/main/include/audio_decoder_isac.h", - "codecs/isac/main/include/audio_encoder_isac.h", - "codecs/isac/main/source/audio_decoder_isac.cc", - "codecs/isac/main/source/audio_encoder_isac.cc", - ] - - deps = [ - ":isac_common", - "../../api/audio_codecs:audio_codecs_api", - ] - public_deps = [ ":isac_c" ] # 
no-presubmit-check TODO(webrtc:8603) -} - -rtc_source_set("isac_bwinfo") { - sources = [ "codecs/isac/bandwidth_info.h" ] - deps = [] -} - rtc_library("isac_vad") { - visibility += webrtc_default_visibility + visibility += [ "../audio_processing/vad:*" ] sources = [ "codecs/isac/main/source/filter_functions.c", "codecs/isac/main/source/filter_functions.h", @@ -447,247 +405,9 @@ rtc_library("isac_vad") { ] } -rtc_library("isac_c") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/main/include/isac.h", - "codecs/isac/main/source/arith_routines.c", - "codecs/isac/main/source/arith_routines.h", - "codecs/isac/main/source/arith_routines_hist.c", - "codecs/isac/main/source/arith_routines_logist.c", - "codecs/isac/main/source/bandwidth_estimator.c", - "codecs/isac/main/source/bandwidth_estimator.h", - "codecs/isac/main/source/codec.h", - "codecs/isac/main/source/crc.c", - "codecs/isac/main/source/crc.h", - "codecs/isac/main/source/decode.c", - "codecs/isac/main/source/decode_bwe.c", - "codecs/isac/main/source/encode.c", - "codecs/isac/main/source/encode_lpc_swb.c", - "codecs/isac/main/source/encode_lpc_swb.h", - "codecs/isac/main/source/entropy_coding.c", - "codecs/isac/main/source/entropy_coding.h", - "codecs/isac/main/source/filterbanks.c", - "codecs/isac/main/source/intialize.c", - "codecs/isac/main/source/isac.c", - "codecs/isac/main/source/isac_float_type.h", - "codecs/isac/main/source/lattice.c", - "codecs/isac/main/source/lpc_analysis.c", - "codecs/isac/main/source/lpc_analysis.h", - "codecs/isac/main/source/lpc_gain_swb_tables.c", - "codecs/isac/main/source/lpc_gain_swb_tables.h", - "codecs/isac/main/source/lpc_shape_swb12_tables.c", - "codecs/isac/main/source/lpc_shape_swb12_tables.h", - "codecs/isac/main/source/lpc_shape_swb16_tables.c", - "codecs/isac/main/source/lpc_shape_swb16_tables.h", - "codecs/isac/main/source/lpc_tables.c", - "codecs/isac/main/source/lpc_tables.h", - "codecs/isac/main/source/pitch_gain_tables.c", - 
"codecs/isac/main/source/pitch_gain_tables.h", - "codecs/isac/main/source/pitch_lag_tables.c", - "codecs/isac/main/source/pitch_lag_tables.h", - "codecs/isac/main/source/spectrum_ar_model_tables.c", - "codecs/isac/main/source/spectrum_ar_model_tables.h", - "codecs/isac/main/source/transform.c", - ] - - if (is_linux || is_chromeos) { - libs = [ "m" ] - } - - deps = [ - ":isac_bwinfo", - ":isac_vad", - "../../common_audio", - "../../common_audio:common_audio_c", - "../../rtc_base:checks", - "../../rtc_base:compile_assert_c", - "../../rtc_base/system:arch", - "../third_party/fft", - ] -} - -rtc_library("isac_fix") { - visibility += [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/fix/source/audio_decoder_isacfix.cc", - "codecs/isac/fix/source/audio_encoder_isacfix.cc", - ] - - deps = [ - ":isac_common", - "../../api/audio_codecs:audio_codecs_api", - "../../common_audio", - "../../system_wrappers", - ] - public_deps = [ ":isac_fix_c" ] # no-presubmit-check TODO(webrtc:8603) - - if (rtc_build_with_neon) { - deps += [ ":isac_neon" ] - } -} - -rtc_library("isac_fix_common") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/fix/source/codec.h", - "codecs/isac/fix/source/entropy_coding.h", - "codecs/isac/fix/source/fft.c", - "codecs/isac/fix/source/fft.h", - "codecs/isac/fix/source/filterbank_internal.h", - "codecs/isac/fix/source/settings.h", - "codecs/isac/fix/source/structs.h", - "codecs/isac/fix/source/transform_tables.c", - ] - deps = [ - ":isac_bwinfo", - "../../common_audio", - "../../common_audio:common_audio_c", - ] -} - -rtc_source_set("isac_fix_c_arm_asm") { - poisonous = [ "audio_codecs" ] - sources = [] - if (current_cpu == "arm" && arm_version >= 7) { - sources += [ - "codecs/isac/fix/source/lattice_armv7.S", - "codecs/isac/fix/source/pitch_filter_armv6.S", - ] - deps = [ - ":isac_fix_common", - "../../rtc_base/system:asm_defines", - ] - } -} - -rtc_library("isac_fix_c") { - poisonous = [ "audio_codecs" ] - sources = [ - 
"codecs/isac/fix/include/audio_decoder_isacfix.h", - "codecs/isac/fix/include/audio_encoder_isacfix.h", - "codecs/isac/fix/include/isacfix.h", - "codecs/isac/fix/source/arith_routines.c", - "codecs/isac/fix/source/arith_routines_hist.c", - "codecs/isac/fix/source/arith_routines_logist.c", - "codecs/isac/fix/source/arith_routins.h", - "codecs/isac/fix/source/bandwidth_estimator.c", - "codecs/isac/fix/source/bandwidth_estimator.h", - "codecs/isac/fix/source/decode.c", - "codecs/isac/fix/source/decode_bwe.c", - "codecs/isac/fix/source/decode_plc.c", - "codecs/isac/fix/source/encode.c", - "codecs/isac/fix/source/entropy_coding.c", - "codecs/isac/fix/source/filterbank_tables.c", - "codecs/isac/fix/source/filterbank_tables.h", - "codecs/isac/fix/source/filterbanks.c", - "codecs/isac/fix/source/filters.c", - "codecs/isac/fix/source/initialize.c", - "codecs/isac/fix/source/isac_fix_type.h", - "codecs/isac/fix/source/isacfix.c", - "codecs/isac/fix/source/lattice.c", - "codecs/isac/fix/source/lattice_c.c", - "codecs/isac/fix/source/lpc_masking_model.c", - "codecs/isac/fix/source/lpc_masking_model.h", - "codecs/isac/fix/source/lpc_tables.c", - "codecs/isac/fix/source/lpc_tables.h", - "codecs/isac/fix/source/pitch_estimator.c", - "codecs/isac/fix/source/pitch_estimator.h", - "codecs/isac/fix/source/pitch_estimator_c.c", - "codecs/isac/fix/source/pitch_filter.c", - "codecs/isac/fix/source/pitch_filter_c.c", - "codecs/isac/fix/source/pitch_gain_tables.c", - "codecs/isac/fix/source/pitch_gain_tables.h", - "codecs/isac/fix/source/pitch_lag_tables.c", - "codecs/isac/fix/source/pitch_lag_tables.h", - "codecs/isac/fix/source/spectrum_ar_model_tables.c", - "codecs/isac/fix/source/spectrum_ar_model_tables.h", - "codecs/isac/fix/source/transform.c", - ] - - deps = [ - ":isac_bwinfo", - ":isac_common", - ":isac_fix_common", - "../../api/audio_codecs:audio_codecs_api", - "../../common_audio", - "../../common_audio:common_audio_c", - "../../rtc_base:checks", - 
"../../rtc_base:compile_assert_c", - "../../rtc_base:sanitizer", - "../../system_wrappers", - "../third_party/fft", - ] - - if (rtc_build_with_neon) { - deps += [ ":isac_neon" ] - - # TODO(bugs.webrtc.org/9579): Consider moving the usage of NEON from - # pitch_estimator_c.c into the "isac_neon" target and delete this flag: - if (current_cpu != "arm64") { - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] - } - } - - if (current_cpu == "arm" && arm_version >= 7) { - sources -= [ - "codecs/isac/fix/source/lattice_c.c", - "codecs/isac/fix/source/pitch_filter_c.c", - ] - deps += [ ":isac_fix_c_arm_asm" ] - } - - if (current_cpu == "mipsel") { - sources += [ - "codecs/isac/fix/source/entropy_coding_mips.c", - "codecs/isac/fix/source/filters_mips.c", - "codecs/isac/fix/source/lattice_mips.c", - "codecs/isac/fix/source/pitch_estimator_mips.c", - "codecs/isac/fix/source/transform_mips.c", - ] - sources -= [ - "codecs/isac/fix/source/lattice_c.c", - "codecs/isac/fix/source/pitch_estimator_c.c", - ] - if (mips_dsp_rev > 0) { - sources += [ "codecs/isac/fix/source/filterbanks_mips.c" ] - } - if (mips_dsp_rev > 1) { - sources += [ - "codecs/isac/fix/source/lpc_masking_model_mips.c", - "codecs/isac/fix/source/pitch_filter_mips.c", - ] - sources -= [ "codecs/isac/fix/source/pitch_filter_c.c" ] - } - } -} - -if (rtc_build_with_neon) { - rtc_library("isac_neon") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/isac/fix/source/entropy_coding_neon.c", - "codecs/isac/fix/source/filterbanks_neon.c", - "codecs/isac/fix/source/filters_neon.c", - "codecs/isac/fix/source/lattice_neon.c", - "codecs/isac/fix/source/transform_neon.c", - ] - - if (current_cpu != "arm64") { - # Enable compilation for the NEON instruction set. 
- suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] - } - - deps = [ - ":isac_fix_common", - "../../common_audio", - "../../common_audio:common_audio_c", - "../../rtc_base:checks", - ] - } +rtc_source_set("isac_bwinfo") { + sources = [ "codecs/isac/bandwidth_info.h" ] + deps = [] } rtc_library("pcm16b") { @@ -1291,11 +1011,6 @@ if (rtc_include_tests) { if (rtc_include_opus) { audio_coding_deps += [ ":webrtc_opus" ] } - if (current_cpu == "arm") { - audio_coding_deps += [ ":isac_fix" ] - } else { - audio_coding_deps += [ ":isac" ] - } if (!build_with_mozilla && !build_with_chromium) { audio_coding_deps += [ ":red" ] } @@ -1327,11 +1042,7 @@ if (rtc_include_tests) { ":g711_test", ":g722_test", ":ilbc_test", - ":isac_api_test", - ":isac_switch_samprate_test", - ":isac_test", ":neteq_ilbc_quality_test", - ":neteq_isac_quality_test", ":neteq_opus_quality_test", ":neteq_pcm16b_quality_test", ":neteq_pcmu_quality_test", @@ -1371,8 +1082,6 @@ if (rtc_include_tests) { "test/Tester.cc", "test/TwoWayCommunication.cc", "test/TwoWayCommunication.h", - "test/iSACTest.cc", - "test/iSACTest.h", "test/target_delay_unittest.cc", ] deps = [ @@ -1397,8 +1106,6 @@ if (rtc_include_tests) { "../../api/audio_codecs/g722:audio_encoder_g722", "../../api/audio_codecs/ilbc:audio_decoder_ilbc", "../../api/audio_codecs/ilbc:audio_encoder_ilbc", - "../../api/audio_codecs/isac:audio_decoder_isac_float", - "../../api/audio_codecs/isac:audio_encoder_isac_float", "../../api/audio_codecs/opus:audio_decoder_opus", "../../api/audio_codecs/opus:audio_encoder_opus", "../../common_audio", @@ -1513,8 +1220,6 @@ if (rtc_include_tests) { deps = [ ":ilbc", - ":isac", - ":isac_fix", ":neteq", ":neteq_input_audio_tools", ":neteq_tools", @@ -1619,12 +1324,10 @@ if (rtc_include_tests) { testonly = true defines = [] deps = [ - ":isac_fix_common", "../../rtc_base:macromagic", "../../test:fileutils", ] sources = [ - "codecs/isac/fix/test/isac_speed_test.cc", 
"codecs/opus/opus_speed_test.cc", "codecs/tools/audio_codec_speed_test.cc", "codecs/tools/audio_codec_speed_test.h", @@ -1647,7 +1350,6 @@ if (rtc_include_tests) { } deps += [ - ":isac_fix", ":webrtc_opus", "../../rtc_base:checks", "../../test:test_main", @@ -1723,7 +1425,6 @@ if (rtc_include_tests) { "../../api/audio_codecs/g711:audio_encoder_g711", "../../api/audio_codecs/g722:audio_encoder_g722", "../../api/audio_codecs/ilbc:audio_encoder_ilbc", - "../../api/audio_codecs/isac:audio_encoder_isac", "../../api/audio_codecs/opus:audio_encoder_opus", "../../rtc_base:safe_conversions", "//third_party/abseil-cpp/absl/flags:flag", @@ -1832,21 +1533,6 @@ if (rtc_include_tests) { ] } - rtc_executable("neteq_isac_quality_test") { - testonly = true - - sources = [ "neteq/test/neteq_isac_quality_test.cc" ] - - deps = [ - ":isac_fix", - ":neteq", - ":neteq_quality_test_support", - "../../test:test_main", - "//testing/gtest", - "//third_party/abseil-cpp/absl/flags:flag", - ] - } - rtc_executable("neteq_pcmu_quality_test") { testonly = true @@ -1884,28 +1570,6 @@ if (rtc_include_tests) { } } - rtc_library("isac_test_util") { - testonly = true - sources = [ - "codecs/isac/main/util/utility.c", - "codecs/isac/main/util/utility.h", - ] - } - - if (!build_with_chromium) { - rtc_executable("isac_test") { - testonly = true - - sources = [ "codecs/isac/main/test/simpleKenny.c" ] - - deps = [ - ":isac", - ":isac_test_util", - "../../rtc_base:macromagic", - ] - } - } - rtc_executable("g711_test") { testonly = true @@ -1923,32 +1587,6 @@ if (rtc_include_tests) { } if (!build_with_chromium) { - rtc_executable("isac_api_test") { - testonly = true - - sources = [ "codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc" ] - - deps = [ - ":isac", - ":isac_test_util", - "../../rtc_base:macromagic", - ] - } - - rtc_executable("isac_switch_samprate_test") { - testonly = true - - sources = - [ "codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc" ] - - deps = [ - ":isac", - 
":isac_test_util", - "../../common_audio", - "../../common_audio:common_audio_c", - ] - } - rtc_executable("ilbc_test") { testonly = true @@ -1997,13 +1635,6 @@ if (rtc_include_tests) { "codecs/cng/audio_encoder_cng_unittest.cc", "codecs/cng/cng_unittest.cc", "codecs/ilbc/ilbc_unittest.cc", - "codecs/isac/fix/source/filterbanks_unittest.cc", - "codecs/isac/fix/source/filters_unittest.cc", - "codecs/isac/fix/source/lpc_masking_model_unittest.cc", - "codecs/isac/fix/source/transform_unittest.cc", - "codecs/isac/isac_webrtc_api_test.cc", - "codecs/isac/main/source/audio_encoder_isac_unittest.cc", - "codecs/isac/main/source/isac_unittest.cc", "codecs/legacy_encoded_audio_frame_unittest.cc", "codecs/opus/audio_decoder_multi_channel_opus_unittest.cc", "codecs/opus/audio_encoder_multi_channel_opus_unittest.cc", @@ -2070,11 +1701,6 @@ if (rtc_include_tests) { ":default_neteq_factory", ":g711", ":ilbc", - ":isac", - ":isac_c", - ":isac_common", - ":isac_fix", - ":isac_fix_common", ":legacy_encoded_audio_frame", ":mocks", ":neteq", @@ -2094,10 +1720,6 @@ if (rtc_include_tests) { "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", "../../api/audio_codecs:builtin_audio_encoder_factory", - "../../api/audio_codecs/isac:audio_decoder_isac_fix", - "../../api/audio_codecs/isac:audio_decoder_isac_float", - "../../api/audio_codecs/isac:audio_encoder_isac_fix", - "../../api/audio_codecs/isac:audio_encoder_isac_float", "../../api/audio_codecs/opus:audio_decoder_multiopus", "../../api/audio_codecs/opus:audio_decoder_opus", "../../api/audio_codecs/opus:audio_encoder_multiopus", diff --git a/modules/audio_coding/acm2/acm_receiver_unittest.cc b/modules/audio_coding/acm2/acm_receiver_unittest.cc index e73acc2338..6dd44b696e 100644 --- a/modules/audio_coding/acm2/acm_receiver_unittest.cc +++ b/modules/audio_coding/acm2/acm_receiver_unittest.cc @@ -13,6 +13,7 @@ #include // std::min #include +#include "absl/types/optional.h" #include 
"api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" @@ -64,12 +65,14 @@ class AcmReceiverTestOldApi : public AudioPacketizationCallback, const SdpAudioFormat& format, const std::map cng_payload_types = {}) { // Create the speech encoder. - AudioCodecInfo info = encoder_factory_->QueryAudioEncoder(format).value(); + absl::optional info = + encoder_factory_->QueryAudioEncoder(format); + RTC_CHECK(info.has_value()); std::unique_ptr enc = encoder_factory_->MakeAudioEncoder(payload_type, format, absl::nullopt); // If we have a compatible CN specification, stack a CNG on top. - auto it = cng_payload_types.find(info.sample_rate_hz); + auto it = cng_payload_types.find(info->sample_rate_hz); if (it != cng_payload_types.end()) { AudioEncoderCngConfig config; config.speech_encoder = std::move(enc); @@ -81,7 +84,7 @@ class AcmReceiverTestOldApi : public AudioPacketizationCallback, // Actually start using the new encoder. acm_->SetEncoder(std::move(enc)); - return info; + return *info; } int InsertOnePacketOfSilence(const AudioCodecInfo& info) { @@ -148,8 +151,7 @@ class AcmReceiverTestOldApi : public AudioPacketizationCallback, #define MAYBE_SampleRate SampleRate #endif TEST_F(AcmReceiverTestOldApi, MAYBE_SampleRate) { - const std::map codecs = {{0, {"ISAC", 16000, 1}}, - {1, {"ISAC", 32000, 1}}}; + const std::map codecs = {{0, {"OPUS", 48000, 2}}}; receiver_->SetCodecs(codecs); constexpr int kOutSampleRateHz = 8000; // Different than codec sample rate. 
@@ -232,15 +234,6 @@ TEST_F(AcmReceiverTestFaxModeOldApi, MAYBE_VerifyAudioFramePCMU) { RunVerifyAudioFrame({"PCMU", 8000, 1}); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_VerifyAudioFrameISAC DISABLED_VerifyAudioFrameISAC -#else -#define MAYBE_VerifyAudioFrameISAC VerifyAudioFrameISAC -#endif -TEST_F(AcmReceiverTestFaxModeOldApi, MAYBE_VerifyAudioFrameISAC) { - RunVerifyAudioFrame({"ISAC", 16000, 1}); -} - #if defined(WEBRTC_ANDROID) #define MAYBE_VerifyAudioFrameOpus DISABLED_VerifyAudioFrameOpus #else @@ -310,12 +303,10 @@ TEST_F(AcmReceiverTestPostDecodeVadPassiveOldApi, MAYBE_PostdecodingVad) { #else #define MAYBE_LastAudioCodec LastAudioCodec #endif -#if defined(WEBRTC_CODEC_ISAC) +#if defined(WEBRTC_CODEC_OPUS) TEST_F(AcmReceiverTestOldApi, MAYBE_LastAudioCodec) { - const std::map codecs = {{0, {"ISAC", 16000, 1}}, - {1, {"PCMA", 8000, 1}}, - {2, {"ISAC", 32000, 1}}, - {3, {"L16", 32000, 1}}}; + const std::map codecs = { + {0, {"PCMU", 8000, 1}}, {1, {"PCMA", 8000, 1}}, {2, {"L16", 32000, 1}}}; const std::map cng_payload_types = { {8000, 100}, {16000, 101}, {32000, 102}}; { diff --git a/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/modules/audio_coding/acm2/audio_coding_module_unittest.cc index 6d3ebbfd8d..f1eb81c015 100644 --- a/modules/audio_coding/acm2/audio_coding_module_unittest.cc +++ b/modules/audio_coding/acm2/audio_coding_module_unittest.cc @@ -30,7 +30,6 @@ #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" #include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_coding/neteq/tools/audio_checksum.h" #include "modules/audio_coding/neteq/tools/audio_loop.h" @@ -302,44 +301,6 @@ TEST_F(AudioCodingModuleTestOldApi, TransportCallbackIsInvokedForEachPacket) { 
EXPECT_EQ(AudioFrameType::kAudioFrameSpeech, packet_cb_.last_frame_type()); } -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) -// Verifies that the RTP timestamp series is not reset when the codec is -// changed. -TEST_F(AudioCodingModuleTestOldApi, TimestampSeriesContinuesWhenCodecChanges) { - RegisterCodec(); // This registers the default codec. - uint32_t expected_ts = input_frame_.timestamp_; - int blocks_per_packet = pac_size_ / (kSampleRateHz / 100); - // Encode 5 packets of the first codec type. - const int kNumPackets1 = 5; - for (int j = 0; j < kNumPackets1; ++j) { - for (int i = 0; i < blocks_per_packet; ++i) { - EXPECT_EQ(j, packet_cb_.num_calls()); - InsertAudio(); - } - EXPECT_EQ(j + 1, packet_cb_.num_calls()); - EXPECT_EQ(expected_ts, packet_cb_.last_timestamp()); - expected_ts += pac_size_; - } - - // Change codec. - audio_format_ = SdpAudioFormat("ISAC", kSampleRateHz, 1); - pac_size_ = 480; - RegisterCodec(); - blocks_per_packet = pac_size_ / (kSampleRateHz / 100); - // Encode another 5 packets. - const int kNumPackets2 = 5; - for (int j = 0; j < kNumPackets2; ++j) { - for (int i = 0; i < blocks_per_packet; ++i) { - EXPECT_EQ(kNumPackets1 + j, packet_cb_.num_calls()); - InsertAudio(); - } - EXPECT_EQ(kNumPackets1 + j + 1, packet_cb_.num_calls()); - EXPECT_EQ(expected_ts, packet_cb_.last_timestamp()); - expected_ts += pac_size_; - } -} -#endif - // Introduce this class to set different expectations on the number of encoded // bytes. This class expects all encoded packets to be 9 bytes (matching one // CNG SID frame) or 0 bytes. This test depends on `input_frame_` containing @@ -420,8 +381,7 @@ TEST_F(AudioCodingModuleTestWithComfortNoiseOldApi, DoTest(k10MsBlocksPerPacket, kCngPayloadType); } -// A multi-threaded test for ACM. This base class is using the PCM16b 16 kHz -// codec, while the derive class AcmIsacMtTest is using iSAC. +// A multi-threaded test for ACM that uses the PCM16b 16 kHz codec. 
class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi { protected: static const int kNumPackets = 500; @@ -560,272 +520,6 @@ TEST_F(AudioCodingModuleMtTestOldApi, MAYBE_DoTest) { EXPECT_TRUE(RunTest()); } -// This is a multi-threaded ACM test using iSAC. The test encodes audio -// from a PCM file. The most recent encoded frame is used as input to the -// receiving part. Depending on timing, it may happen that the same RTP packet -// is inserted into the receiver multiple times, but this is a valid use-case, -// and simplifies the test code a lot. -class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi { - protected: - static const int kNumPackets = 500; - static const int kNumPullCalls = 500; - - AcmIsacMtTestOldApi() - : AudioCodingModuleMtTestOldApi(), last_packet_number_(0) {} - - ~AcmIsacMtTestOldApi() {} - - void SetUp() override { - AudioCodingModuleTestOldApi::SetUp(); - RegisterCodec(); // Must be called before the threads start below. - - // Set up input audio source to read from specified file, loop after 5 - // seconds, and deliver blocks of 10 ms. - const std::string input_file_name = - webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm"); - audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms); - - // Generate one packet to have something to insert. - int loop_counter = 0; - while (packet_cb_.last_payload_len_bytes() == 0) { - InsertAudio(); - ASSERT_LT(loop_counter++, 10); - } - // Set `last_packet_number_` to one less that `num_calls` so that the packet - // will be fetched in the next InsertPacket() call. 
- last_packet_number_ = packet_cb_.num_calls() - 1; - - StartThreads(); - } - - void RegisterCodec() override { - static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz"); - audio_format_ = SdpAudioFormat("isac", kSampleRateHz, 1); - pac_size_ = 480; - - // Register iSAC codec in ACM, effectively unregistering the PCM16B codec - // registered in AudioCodingModuleTestOldApi::SetUp(); - acm_->SetReceiveCodecs({{kPayloadType, *audio_format_}}); - acm_->SetEncoder(CreateBuiltinAudioEncoderFactory()->MakeAudioEncoder( - kPayloadType, *audio_format_, absl::nullopt)); - } - - void InsertPacket() override { - int num_calls = packet_cb_.num_calls(); // Store locally for thread safety. - if (num_calls > last_packet_number_) { - // Get the new payload out from the callback handler. - // Note that since we swap buffers here instead of directly inserting - // a pointer to the data in `packet_cb_`, we avoid locking the callback - // for the duration of the IncomingPacket() call. - packet_cb_.SwapBuffers(&last_payload_vec_); - ASSERT_GT(last_payload_vec_.size(), 0u); - rtp_utility_->Forward(&rtp_header_); - last_packet_number_ = num_calls; - } - ASSERT_GT(last_payload_vec_.size(), 0u); - ASSERT_EQ(0, acm_->IncomingPacket(&last_payload_vec_[0], - last_payload_vec_.size(), rtp_header_)); - } - - void InsertAudio() override { - // TODO(kwiberg): Use std::copy here. Might be complications because AFAICS - // this call confuses the number of samples with the number of bytes, and - // ends up copying only half of what it should. - memcpy(input_frame_.mutable_data(), audio_loop_.GetNextBlock().data(), - kNumSamples10ms); - AudioCodingModuleTestOldApi::InsertAudio(); - } - - // Override the verification function with no-op, since iSAC produces variable - // payload sizes. - void VerifyEncoding() override {} - - // This method is the same as AudioCodingModuleMtTestOldApi::TestDone(), but - // here it is using the constants defined in this class (i.e., shorter test - // run). 
- bool TestDone() override { - if (packet_cb_.num_calls() > kNumPackets) { - MutexLock lock(&mutex_); - if (pull_audio_count_ > kNumPullCalls) { - // Both conditions for completion are met. End the test. - return true; - } - } - return false; - } - - int last_packet_number_; - std::vector last_payload_vec_; - test::AudioLoop audio_loop_; -}; - -#if defined(WEBRTC_IOS) -#define MAYBE_DoTest DISABLED_DoTest -#else -#define MAYBE_DoTest DoTest -#endif -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) -TEST_F(AcmIsacMtTestOldApi, MAYBE_DoTest) { - EXPECT_TRUE(RunTest()); -} -#endif - -class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi { - protected: - static const int kRegisterAfterNumPackets = 5; - static const int kNumPackets = 10; - static const int kPacketSizeMs = 30; - static const int kPacketSizeSamples = kPacketSizeMs * 16; - - AcmReRegisterIsacMtTestOldApi() - : AudioCodingModuleTestOldApi(), - codec_registered_(false), - receive_packet_count_(0), - next_insert_packet_time_ms_(0), - fake_clock_(new SimulatedClock(0)) { - AudioEncoderIsacFloatImpl::Config config; - config.payload_type = kPayloadType; - isac_encoder_.reset(new AudioEncoderIsacFloatImpl(config)); - clock_ = fake_clock_.get(); - } - - void SetUp() override { - AudioCodingModuleTestOldApi::SetUp(); - // Set up input audio source to read from specified file, loop after 5 - // seconds, and deliver blocks of 10 ms. - const std::string input_file_name = - webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm"); - audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms); - RegisterCodec(); // Must be called before the threads start below. - StartThreads(); - } - - void RegisterCodec() override { - // Register iSAC codec in ACM, effectively unregistering the PCM16B codec - // registered in AudioCodingModuleTestOldApi::SetUp(); - // Only register the decoder for now. The encoder is registered later. 
- static_assert(kSampleRateHz == 16000, "test designed for iSAC 16 kHz"); - acm_->SetReceiveCodecs({{kPayloadType, {"ISAC", kSampleRateHz, 1}}}); - } - - void StartThreads() { - quit_.store(false); - const auto attributes = - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime); - receive_thread_ = rtc::PlatformThread::SpawnJoinable( - [this] { - while (!quit_.load() && CbReceiveImpl()) { - } - }, - "receive", attributes); - codec_registration_thread_ = rtc::PlatformThread::SpawnJoinable( - [this] { - while (!quit_.load()) { - CbCodecRegistrationImpl(); - } - }, - "codec_registration", attributes); - } - - void TearDown() override { - AudioCodingModuleTestOldApi::TearDown(); - quit_.store(true); - receive_thread_.Finalize(); - codec_registration_thread_.Finalize(); - } - - bool RunTest() { return test_complete_.Wait(TimeDelta::Minutes(10)); } - - bool CbReceiveImpl() { - SleepMs(1); - rtc::Buffer encoded; - AudioEncoder::EncodedInfo info; - { - MutexLock lock(&mutex_); - if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) { - return true; - } - next_insert_packet_time_ms_ += kPacketSizeMs; - ++receive_packet_count_; - - // Encode new frame. - uint32_t input_timestamp = rtp_header_.timestamp; - while (info.encoded_bytes == 0) { - info = isac_encoder_->Encode(input_timestamp, - audio_loop_.GetNextBlock(), &encoded); - input_timestamp += 160; // 10 ms at 16 kHz. - } - EXPECT_EQ(rtp_header_.timestamp + kPacketSizeSamples, input_timestamp); - EXPECT_EQ(rtp_header_.timestamp, info.encoded_timestamp); - EXPECT_EQ(rtp_header_.payloadType, info.payload_type); - } - // Now we're not holding the crit sect when calling ACM. - - // Insert into ACM. - EXPECT_EQ(0, acm_->IncomingPacket(encoded.data(), info.encoded_bytes, - rtp_header_)); - - // Pull audio. 
- for (int i = 0; i < rtc::CheckedDivExact(kPacketSizeMs, 10); ++i) { - AudioFrame audio_frame; - bool muted; - EXPECT_EQ(0, acm_->PlayoutData10Ms(-1 /* default output frequency */, - &audio_frame, &muted)); - if (muted) { - ADD_FAILURE(); - return false; - } - fake_clock_->AdvanceTimeMilliseconds(10); - } - rtp_utility_->Forward(&rtp_header_); - return true; - } - - void CbCodecRegistrationImpl() { - SleepMs(1); - if (HasFatalFailure()) { - // End the test early if a fatal failure (ASSERT_*) has occurred. - test_complete_.Set(); - } - MutexLock lock(&mutex_); - if (!codec_registered_ && - receive_packet_count_ > kRegisterAfterNumPackets) { - // Register the iSAC encoder. - acm_->SetEncoder(CreateBuiltinAudioEncoderFactory()->MakeAudioEncoder( - kPayloadType, *audio_format_, absl::nullopt)); - codec_registered_ = true; - } - if (codec_registered_ && receive_packet_count_ > kNumPackets) { - test_complete_.Set(); - } - } - - rtc::PlatformThread receive_thread_; - rtc::PlatformThread codec_registration_thread_; - // Used to force worker threads to stop looping. - std::atomic quit_; - - rtc::Event test_complete_; - Mutex mutex_; - bool codec_registered_ RTC_GUARDED_BY(mutex_); - int receive_packet_count_ RTC_GUARDED_BY(mutex_); - int64_t next_insert_packet_time_ms_ RTC_GUARDED_BY(mutex_); - std::unique_ptr isac_encoder_; - std::unique_ptr fake_clock_; - test::AudioLoop audio_loop_; -}; - -#if defined(WEBRTC_IOS) -#define MAYBE_DoTest DISABLED_DoTest -#else -#define MAYBE_DoTest DoTest -#endif -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) -TEST_F(AcmReRegisterIsacMtTestOldApi, MAYBE_DoTest) { - EXPECT_TRUE(RunTest()); -} -#endif - // Disabling all of these tests on iOS until file support has been added. // See https://code.google.com/p/webrtc/issues/detail?id=4752 for details. 
#if !defined(WEBRTC_IOS) @@ -1025,38 +719,6 @@ class AcmSenderBitExactnessOldApi : public ::testing::Test, class AcmSenderBitExactnessNewApi : public AcmSenderBitExactnessOldApi {}; -// Run bit exactness tests only for release builds. -#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \ - defined(NDEBUG) && defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) -TEST_F(AcmSenderBitExactnessOldApi, IsacWb30ms) { - ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 480, 480)); - Run(/*audio_checksum_ref=*/"a3077ac01b0137e8bbc237fb1f9816a5", - /*payload_checksum_ref=*/"3c79f16f34218271f3dca4e2b1dfe1bb", - /*expected_packets=*/33, - /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); -} - -TEST_F(AcmSenderBitExactnessOldApi, IsacWb60ms) { - ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 16000, 1, 103, 960, 960)); - Run(/*audio_checksum_ref=*/"76da9b7514f986fc2bb32b1c3170e8d4", - /*payload_checksum_ref=*/"9e0a0ab743ad987b55b8e14802769c56", - /*expected_packets=*/16, - /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); -} -#endif - -// Run bit exactness test only for release build. 
-#if defined(WEBRTC_CODEC_ISAC) && defined(NDEBUG) && defined(WEBRTC_LINUX) && \ - defined(WEBRTC_ARCH_X86_64) -TEST_F(AcmSenderBitExactnessOldApi, IsacSwb30ms) { - ASSERT_NO_FATAL_FAILURE(SetUpTest("ISAC", 32000, 1, 104, 960, 960)); - Run(/*audio_checksum_ref=*/"f4cf577f28a0dcbac33358b757518e0c", - /*payload_checksum_ref=*/"ce86106a93419aefb063097108ec94ab", - /*expected_packets=*/33, - /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); -} -#endif - TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80)); Run(/*audio_checksum_ref=*/"69118ed438ac76252d023e0463819471", @@ -1067,7 +729,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcm16_16000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 1, 108, 160, 160)); - Run(/*audio_checksum_ref=*/"bc6ab94d12a464921763d7544fdbd07e", + Run(/*audio_checksum_ref=*/"f95c87bdd33f631bcf80f4b19445bbd2", /*payload_checksum_ref=*/"ad786526383178b08d80d6eee06e9bad", /*expected_packets=*/100, /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); @@ -1151,7 +813,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Ilbc_30ms) { #if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) TEST_F(AcmSenderBitExactnessOldApi, G722_20ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 1, 9, 320, 160)); - Run(/*audio_checksum_ref=*/"a87a91ec0124510a64967f5d768554ff", + Run(/*audio_checksum_ref=*/"f5264affff25cf2cbd2e1e8a5217f9a3", /*payload_checksum_ref=*/"fc68a87e1380614e658087cb35d5ca10", /*expected_packets=*/50, /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); diff --git a/modules/audio_coding/audio_coding.gni b/modules/audio_coding/audio_coding.gni index bf67d9cb8d..3b147091de 100644 --- a/modules/audio_coding/audio_coding.gni +++ b/modules/audio_coding/audio_coding.gni @@ -20,11 +20,6 @@ if (rtc_opus_support_120ms_ptime) { } else { audio_codec_defines += [ 
"WEBRTC_OPUS_SUPPORT_120MS_PTIME=0" ] } -if (current_cpu == "arm") { - audio_codec_defines += [ "WEBRTC_CODEC_ISACFX" ] -} else { - audio_codec_defines += [ "WEBRTC_CODEC_ISAC" ] -} audio_coding_defines = audio_codec_defines neteq_defines = audio_codec_defines diff --git a/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc b/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc index 3155f198a4..9c593b818b 100644 --- a/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc @@ -74,8 +74,6 @@ TEST(AnaBitrateControllerTest, OutputInitValueWhenOverheadUnknown) { } TEST(AnaBitrateControllerTest, ChangeBitrateOnTargetBitrateChanged) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -98,8 +96,6 @@ TEST(AnaBitrateControllerTest, UpdateMultipleNetworkMetricsAtOnce) { // BitrateController::UpdateNetworkMetrics(...) can handle multiple // network updates at once. This is, however, not a common use case in current // audio_network_adaptor_impl.cc. 
- test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -116,8 +112,6 @@ TEST(AnaBitrateControllerTest, UpdateMultipleNetworkMetricsAtOnce) { } TEST(AnaBitrateControllerTest, TreatUnknownFrameLengthAsFrameLengthUnchanged) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -131,8 +125,6 @@ TEST(AnaBitrateControllerTest, TreatUnknownFrameLengthAsFrameLengthUnchanged) { } TEST(AnaBitrateControllerTest, IncreaseBitrateOnFrameLengthIncreased) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 20; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -155,8 +147,6 @@ TEST(AnaBitrateControllerTest, IncreaseBitrateOnFrameLengthIncreased) { } TEST(AnaBitrateControllerTest, DecreaseBitrateOnFrameLengthDecreased) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); constexpr int kInitialFrameLengthMs = 60; BitrateController controller( BitrateController::Config(32000, kInitialFrameLengthMs, 0, 0)); @@ -179,8 +169,6 @@ TEST(AnaBitrateControllerTest, DecreaseBitrateOnFrameLengthDecreased) { } TEST(AnaBitrateControllerTest, BitrateNeverBecomesNegative) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); BitrateController controller(BitrateController::Config(32000, 20, 0, 0)); constexpr size_t kOverheadBytesPerPacket = 64; constexpr int kFrameLengthMs = 60; @@ -192,8 +180,6 @@ TEST(AnaBitrateControllerTest, BitrateNeverBecomesNegative) { } TEST(AnaBitrateControllerTest, CheckBehaviorOnChangingCondition) { - 
test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); BitrateController controller(BitrateController::Config(32000, 20, 0, 0)); // Start from an arbitrary overall bitrate. diff --git a/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc b/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc index 109da78eea..4a2b261a59 100644 --- a/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc +++ b/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc @@ -75,31 +75,6 @@ TEST(AudioDecoderFactoryTest, CreateIlbc) { adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 16000, 1), absl::nullopt)); } -TEST(AudioDecoderFactoryTest, CreateIsac) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); - ASSERT_TRUE(adf); - // iSAC supports 16 kHz, 1 channel. The float implementation additionally - // supports 32 kHz, 1 channel. - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 0), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 1), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 2), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 8000, 1), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 48000, 1), absl::nullopt)); -#ifdef WEBRTC_ARCH_ARM - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1), absl::nullopt)); -#else - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1), absl::nullopt)); -#endif -} - TEST(AudioDecoderFactoryTest, CreateL16) { rtc::scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); @@ -125,9 +100,6 @@ TEST(AudioDecoderFactoryTest, MaxNrOfChannels) { #ifdef WEBRTC_CODEC_OPUS "opus", #endif -#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) - "isac", -#endif #ifdef WEBRTC_CODEC_ILBC "ilbc", #endif diff --git 
a/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h b/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h deleted file mode 100644 index aae708f295..0000000000 --- a/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_H_ - -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/scoped_refptr.h" - -namespace webrtc { - -template -class AudioDecoderIsacT final : public AudioDecoder { - public: - struct Config { - bool IsOk() const; - int sample_rate_hz = 16000; - }; - explicit AudioDecoderIsacT(const Config& config); - virtual ~AudioDecoderIsacT() override; - - AudioDecoderIsacT(const AudioDecoderIsacT&) = delete; - AudioDecoderIsacT& operator=(const AudioDecoderIsacT&) = delete; - - bool HasDecodePlc() const override; - size_t DecodePlc(size_t num_frames, int16_t* decoded) override; - void Reset() override; - int ErrorCode() override; - int SampleRateHz() const override; - size_t Channels() const override; - int DecodeInternal(const uint8_t* encoded, - size_t encoded_len, - int sample_rate_hz, - int16_t* decoded, - SpeechType* speech_type) override; - - private: - typename T::instance_type* isac_state_; - int sample_rate_hz_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_H_ diff --git a/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h 
b/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h deleted file mode 100644 index 9aa498866b..0000000000 --- a/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -template -bool AudioDecoderIsacT::Config::IsOk() const { - return (sample_rate_hz == 16000 || sample_rate_hz == 32000); -} - -template -AudioDecoderIsacT::AudioDecoderIsacT(const Config& config) - : sample_rate_hz_(config.sample_rate_hz) { - RTC_CHECK(config.IsOk()) << "Unsupported sample rate " - << config.sample_rate_hz; - RTC_CHECK_EQ(0, T::Create(&isac_state_)); - T::DecoderInit(isac_state_); - RTC_CHECK_EQ(0, T::SetDecSampRate(isac_state_, sample_rate_hz_)); -} - -template -AudioDecoderIsacT::~AudioDecoderIsacT() { - RTC_CHECK_EQ(0, T::Free(isac_state_)); -} - -template -int AudioDecoderIsacT::DecodeInternal(const uint8_t* encoded, - size_t encoded_len, - int sample_rate_hz, - int16_t* decoded, - SpeechType* speech_type) { - RTC_CHECK_EQ(sample_rate_hz_, sample_rate_hz); - int16_t temp_type = 1; // Default is speech. 
- int ret = - T::DecodeInternal(isac_state_, encoded, encoded_len, decoded, &temp_type); - *speech_type = ConvertSpeechType(temp_type); - return ret; -} - -template -bool AudioDecoderIsacT::HasDecodePlc() const { - return false; -} - -template -size_t AudioDecoderIsacT::DecodePlc(size_t num_frames, int16_t* decoded) { - return T::DecodePlc(isac_state_, decoded, num_frames); -} - -template -void AudioDecoderIsacT::Reset() { - T::DecoderInit(isac_state_); -} - -template -int AudioDecoderIsacT::ErrorCode() { - return T::GetErrorCode(isac_state_); -} - -template -int AudioDecoderIsacT::SampleRateHz() const { - return sample_rate_hz_; -} - -template -size_t AudioDecoderIsacT::Channels() const { - return 1; -} - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ diff --git a/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h deleted file mode 100644 index c382ea076e..0000000000 --- a/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/scoped_refptr.h" -#include "api/units/time_delta.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { - -template -class AudioEncoderIsacT final : public AudioEncoder { - public: - // Allowed combinations of sample rate, frame size, and bit rate are - // - 16000 Hz, 30 ms, 10000-32000 bps - // - 16000 Hz, 60 ms, 10000-32000 bps - // - 32000 Hz, 30 ms, 10000-56000 bps (if T has super-wideband support) - struct Config { - bool IsOk() const; - int payload_type = 103; - int sample_rate_hz = 16000; - int frame_size_ms = 30; - int bit_rate = kDefaultBitRate; // Limit on the short-term average bit - // rate, in bits/s. - int max_payload_size_bytes = -1; - int max_bit_rate = -1; - }; - - explicit AudioEncoderIsacT(const Config& config); - ~AudioEncoderIsacT() override; - - AudioEncoderIsacT(const AudioEncoderIsacT&) = delete; - AudioEncoderIsacT& operator=(const AudioEncoderIsacT&) = delete; - - int SampleRateHz() const override; - size_t NumChannels() const override; - size_t Num10MsFramesInNextPacket() const override; - size_t Max10MsFramesInAPacket() const override; - int GetTargetBitrate() const override; - void SetTargetBitrate(int target_bps) override; - void OnReceivedTargetAudioBitrate(int target_bps) override; - void OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional bwe_period_ms) override; - void OnReceivedUplinkAllocation(BitrateAllocationUpdate update) override; - void OnReceivedOverhead(size_t overhead_bytes_per_packet) override; - EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; - void Reset() override; - absl::optional> GetFrameLengthRange() - const override; - - private: - // This value is taken from 
STREAM_SIZE_MAX_60 for iSAC float (60 ms) and - // STREAM_MAXW16_60MS for iSAC fix (60 ms). - static const size_t kSufficientEncodeBufferSizeBytes = 400; - - static constexpr int kDefaultBitRate = 32000; - static constexpr int kMinBitrateBps = 10000; - static constexpr int MaxBitrateBps(int sample_rate_hz) { - return sample_rate_hz == 32000 ? 56000 : 32000; - } - - void SetTargetBitrate(int target_bps, bool subtract_per_packet_overhead); - - // Recreate the iSAC encoder instance with the given settings, and save them. - void RecreateEncoderInstance(const Config& config); - - Config config_; - typename T::instance_type* isac_state_ = nullptr; - - // Have we accepted input but not yet emitted it in a packet? - bool packet_in_progress_ = false; - - // Timestamp of the first input of the currently in-progress packet. - uint32_t packet_timestamp_; - - // Timestamp of the previously encoded packet. - uint32_t last_encoded_timestamp_; - - // Cache the value of the "WebRTC-SendSideBwe-WithOverhead" field trial. - const bool send_side_bwe_with_overhead_ = - !field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead"); - - // When we send a packet, expect this many bytes of headers to be added to it. - // Start out with a reasonable default that we can use until we receive a real - // value. - DataSize overhead_per_packet_ = DataSize::Bytes(28); -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_H_ diff --git a/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h deleted file mode 100644 index 1bd27cf80d..0000000000 --- a/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t.h" -#include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_minmax.h" - -namespace webrtc { - -template -bool AudioEncoderIsacT::Config::IsOk() const { - if (max_bit_rate < 32000 && max_bit_rate != -1) - return false; - if (max_payload_size_bytes < 120 && max_payload_size_bytes != -1) - return false; - - switch (sample_rate_hz) { - case 16000: - if (max_bit_rate > 53400) - return false; - if (max_payload_size_bytes > 400) - return false; - return (frame_size_ms == 30 || frame_size_ms == 60) && - (bit_rate == 0 || (bit_rate >= 10000 && bit_rate <= 32000)); - case 32000: - if (max_bit_rate > 160000) - return false; - if (max_payload_size_bytes > 600) - return false; - return T::has_swb && - (frame_size_ms == 30 && - (bit_rate == 0 || (bit_rate >= 10000 && bit_rate <= 56000))); - default: - return false; - } -} - -template -AudioEncoderIsacT::AudioEncoderIsacT(const Config& config) { - RecreateEncoderInstance(config); -} - -template -AudioEncoderIsacT::~AudioEncoderIsacT() { - RTC_CHECK_EQ(0, T::Free(isac_state_)); -} - -template -int AudioEncoderIsacT::SampleRateHz() const { - return T::EncSampRate(isac_state_); -} - -template -size_t AudioEncoderIsacT::NumChannels() const { - return 1; -} - -template -size_t AudioEncoderIsacT::Num10MsFramesInNextPacket() const { - const int samples_in_next_packet = T::GetNewFrameLen(isac_state_); - return static_cast(rtc::CheckedDivExact( - samples_in_next_packet, rtc::CheckedDivExact(SampleRateHz(), 100))); -} - -template -size_t AudioEncoderIsacT::Max10MsFramesInAPacket() const { - return 6; // iSAC puts at most 60 ms in a 
packet. -} - -template -int AudioEncoderIsacT::GetTargetBitrate() const { - return config_.bit_rate == 0 ? kDefaultBitRate : config_.bit_rate; -} - -template -void AudioEncoderIsacT::SetTargetBitrate(int target_bps) { - // Set target bitrate directly without subtracting per-packet overhead, - // because that's what AudioEncoderOpus does. - SetTargetBitrate(target_bps, - /*subtract_per_packet_overhead=*/false); -} - -template -void AudioEncoderIsacT::OnReceivedTargetAudioBitrate(int target_bps) { - // Set target bitrate directly without subtracting per-packet overhead, - // because that's what AudioEncoderOpus does. - SetTargetBitrate(target_bps, - /*subtract_per_packet_overhead=*/false); -} - -template -void AudioEncoderIsacT::OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional /*bwe_period_ms*/) { - // Set target bitrate, subtracting the per-packet overhead if - // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what - // AudioEncoderOpus does. - SetTargetBitrate( - target_audio_bitrate_bps, - /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_); -} - -template -void AudioEncoderIsacT::OnReceivedUplinkAllocation( - BitrateAllocationUpdate update) { - // Set target bitrate, subtracting the per-packet overhead if - // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what - // AudioEncoderOpus does. - SetTargetBitrate( - update.target_bitrate.bps(), - /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_); -} - -template -void AudioEncoderIsacT::OnReceivedOverhead( - size_t overhead_bytes_per_packet) { - overhead_per_packet_ = DataSize::Bytes(overhead_bytes_per_packet); -} - -template -AudioEncoder::EncodedInfo AudioEncoderIsacT::EncodeImpl( - uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { - if (!packet_in_progress_) { - // Starting a new packet; remember the timestamp for later. 
- packet_in_progress_ = true; - packet_timestamp_ = rtp_timestamp; - } - size_t encoded_bytes = encoded->AppendData( - kSufficientEncodeBufferSizeBytes, [&](rtc::ArrayView encoded) { - int r = T::Encode(isac_state_, audio.data(), encoded.data()); - - if (T::GetErrorCode(isac_state_) == 6450) { - // Isac is not able to effectively compress all types of signals. This - // is a limitation of the codec that cannot be easily fixed. - r = 0; - } - RTC_CHECK_GE(r, 0) << "Encode failed (error code " - << T::GetErrorCode(isac_state_) << ")"; - - return static_cast(r); - }); - - if (encoded_bytes == 0) - return EncodedInfo(); - - // Got enough input to produce a packet. Return the saved timestamp from - // the first chunk of input that went into the packet. - packet_in_progress_ = false; - EncodedInfo info; - info.encoded_bytes = encoded_bytes; - info.encoded_timestamp = packet_timestamp_; - info.payload_type = config_.payload_type; - info.encoder_type = CodecType::kIsac; - return info; -} - -template -void AudioEncoderIsacT::Reset() { - RecreateEncoderInstance(config_); -} - -template -absl::optional> -AudioEncoderIsacT::GetFrameLengthRange() const { - return {{TimeDelta::Millis(config_.frame_size_ms), - TimeDelta::Millis(config_.frame_size_ms)}}; -} - -template -void AudioEncoderIsacT::SetTargetBitrate(int target_bps, - bool subtract_per_packet_overhead) { - if (subtract_per_packet_overhead) { - const DataRate overhead_rate = - overhead_per_packet_ / TimeDelta::Millis(config_.frame_size_ms); - target_bps -= overhead_rate.bps(); - } - target_bps = rtc::SafeClamp(target_bps, kMinBitrateBps, - MaxBitrateBps(config_.sample_rate_hz)); - int result = T::Control(isac_state_, target_bps, config_.frame_size_ms); - RTC_DCHECK_EQ(result, 0); - config_.bit_rate = target_bps; -} - -template -void AudioEncoderIsacT::RecreateEncoderInstance(const Config& config) { - RTC_CHECK(config.IsOk()); - packet_in_progress_ = false; - if (isac_state_) - RTC_CHECK_EQ(0, T::Free(isac_state_)); - 
RTC_CHECK_EQ(0, T::Create(&isac_state_)); - RTC_CHECK_EQ(0, T::EncoderInit(isac_state_, /*coding_mode=*/1)); - RTC_CHECK_EQ(0, T::SetEncSampRate(isac_state_, config.sample_rate_hz)); - const int bit_rate = config.bit_rate == 0 ? kDefaultBitRate : config.bit_rate; - RTC_CHECK_EQ(0, T::Control(isac_state_, bit_rate, config.frame_size_ms)); - - if (config.max_payload_size_bytes != -1) - RTC_CHECK_EQ( - 0, T::SetMaxPayloadSize(isac_state_, config.max_payload_size_bytes)); - if (config.max_bit_rate != -1) - RTC_CHECK_EQ(0, T::SetMaxRate(isac_state_, config.max_bit_rate)); - - // Set the decoder sample rate even though we just use the encoder. This - // doesn't appear to be necessary to produce a valid encoding, but without it - // we get an encoding that isn't bit-for-bit identical with what a combined - // encoder+decoder object produces. - RTC_CHECK_EQ(0, T::SetDecSampRate(isac_state_, config.sample_rate_hz)); - - config_ = config; -} - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ diff --git a/modules/audio_coding/codecs/isac/empty.cc b/modules/audio_coding/codecs/isac/empty.cc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h b/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h deleted file mode 100644 index 0b4eadd448..0000000000 --- a/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_DECODER_ISACFIX_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_DECODER_ISACFIX_H_ - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h" - -namespace webrtc { - -using AudioDecoderIsacFixImpl = AudioDecoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_DECODER_ISACFIX_H_ diff --git a/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h b/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h deleted file mode 100644 index f0cc038328..0000000000 --- a/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_ENCODER_ISACFIX_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_ENCODER_ISACFIX_H_ - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h" - -namespace webrtc { - -using AudioEncoderIsacFixImpl = AudioEncoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_AUDIO_ENCODER_ISACFIX_H_ diff --git a/modules/audio_coding/codecs/isac/fix/include/isacfix.h b/modules/audio_coding/codecs/isac/fix/include/isacfix.h deleted file mode 100644 index dcc7b0991d..0000000000 --- a/modules/audio_coding/codecs/isac/fix/include/isacfix.h +++ /dev/null @@ -1,486 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_ISACFIX_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_ISACFIX_H_ - -#include - -#include "modules/audio_coding/codecs/isac/bandwidth_info.h" - -typedef struct { - void* dummy; -} ISACFIX_MainStruct; - -#if defined(__cplusplus) -extern "C" { -#endif - -/**************************************************************************** - * WebRtcIsacfix_Create(...) - * - * This function creates an ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. 
- * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Create(ISACFIX_MainStruct** ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_Free(...) - * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : a ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Free(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_EncoderInit(...) - * - * This function initializes an ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 - Bit rate and frame length are automatically - * adjusted to available bandwidth on - * transmission channel. - * 1 - User sets a frame length and a target bit - * rate which is taken as the maximum short-term - * average bit rate. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct* ISAC_main_inst, - int16_t CodingMode); - -/**************************************************************************** - * WebRtcIsacfix_Encode(...) - * - * This function encodes 10ms frame(s) and inserts it into a package. - * Input speech length has to be 160 samples (10ms). The encoder buffers those - * 10ms frames until it reaches the chosen Framesize (480 or 960 samples - * corresponding to 30 or 60 ms frames), and then proceeds to the encoding. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. - * - * Output: - * - encoded : the encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * 0 - The buffer didn't reach the chosen framesize - * so it keeps buffering speech samples. 
- * -1 - Error - */ - -int WebRtcIsacfix_Encode(ISACFIX_MainStruct* ISAC_main_inst, - const int16_t* speechIn, - uint8_t* encoded); - -/**************************************************************************** - * WebRtcIsacfix_DecoderInit(...) - * - * This function initializes an ISAC instance prior to the decoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - */ - -void WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate1(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet in bytes. - * - rtp_seq_number : the RTP number of the packet. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t arr_ts); - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet in bytes. - * - rtp_seq_number : the RTP number of the packet. - * - send_ts : the send time of the packet from RTP header, - * in samples. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. 
- * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts); - -/**************************************************************************** - * WebRtcIsacfix_Decode(...) - * - * This function decodes an ISAC frame. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - -int WebRtcIsacfix_Decode(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); - -/**************************************************************************** - * WebRtcIsacfix_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s) in wide-band (16kHz sampling). - * Output speech length will be "480*noOfLostFrames" samples - * that is equevalent of "30*noOfLostFrames" millisecond. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - noOfLostFrames : Number of PLC frames (480sample = 30ms) - * to produce - * NOTE! Maximum number is 2 (960 samples = 60ms) - * - * Output: - * - decoded : The decoded vector - * - * Return value : Number of samples in decoded PLC vector - */ - -size_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames); - -/**************************************************************************** - * WebRtcIsacfix_ReadFrameLen(...) - * - * This function returns the length of the frame represented in the packet. - * - * Input: - * - encoded : Encoded bitstream - * - encoded_len_bytes : Length of the bitstream in bytes. 
- * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ - -int16_t WebRtcIsacfix_ReadFrameLen(const uint8_t* encoded, - size_t encoded_len_bytes, - size_t* frameLength); - -/**************************************************************************** - * WebRtcIsacfix_Control(...) - * - * This function sets the limit on the short-term average bit rate and the - * frame length. Should be used only in Instantaneous mode. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rate : limit on the short-term average bit rate, - * in bits/second (between 10000 and 32000) - * - framesize : number of milliseconds per frame (30 or 60) - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rate, - int framesize); - -void WebRtcIsacfix_SetInitialBweBottleneck(ISACFIX_MainStruct* ISAC_main_inst, - int bottleneck_bits_per_second); - -/**************************************************************************** - * WebRtcIsacfix_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. Through this API, users can - * enforce a frame-size for all values of bottleneck. Then iSAC will not - * automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 32000 is accepted - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through out - * the adaptation process, 0 to let iSAC change - * the frame-size if required. - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rateBPS, - int frameSizeMs, - int16_t enforceFrameSize); - -/**************************************************************************** - * WebRtcIsacfix_version(...) 
- * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ - -void WebRtcIsacfix_version(char* version); - -/**************************************************************************** - * WebRtcIsacfix_GetErrorCode(...) - * - * This function can be used to check the error code of an iSAC instance. When - * a function returns -1 a error code will be set for that instance. The - * function below extract the code of the last error that occured in the - * specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ - -int16_t WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_GetUplinkBw(...) - * - * This function return iSAC send bitrate - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : <0 Error code - * else bitrate - */ - -int32_t WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 msec packets. - * The absolute max will be valid until next time the function is called. - * NOTE! This function may override the function WebRtcIsacfix_SetMaxRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 100 and 400 bytes - * - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct* ISAC_main_inst, - int16_t maxPayloadBytes); - -/**************************************************************************** - * WebRtcIsacfix_SetMaxRate(...) 
- * - * This function sets the maximum rate which the codec may not exceed for a - * singel packet. The maximum rate is set in bits per second. - * The codec has an absolute maximum rate of 53400 bits per second (200 bytes - * per 30 msec). - * It is possible to set a maximum rate between 32000 and 53400 bits per second. - * - * The rate limit is valid until next time the function is called. - * - * NOTE! Packet size will never go above the value set if calling - * WebRtcIsacfix_SetMaxPayloadSize() (default max packet size is 400 bytes). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRateInBytes : maximum rate in bits per second, - * valid values are 32000 to 53400 bits - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct* ISAC_main_inst, - int32_t maxRate); - -/**************************************************************************** - * WebRtcIsacfix_CreateInternal(...) - * - * This function creates the memory that is used to store data in the encoder - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_FreeInternal(...) - * - * This function frees the internal memory for storing encoder data. - * - * Input: - * - ISAC_main_inst : an ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsacfix_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. It should always return a complete packet, i.e. only called once - * even for 60 msec frames - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - bweIndex : index of bandwidth estimate to put in new - * bitstream - scale : factor for rate change (0.4 ~=> half the - * rate, 1 no change). - * - * Output: - * - encoded : the encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * -1 - Error - */ - -int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct* ISAC_main_inst, - int16_t bweIndex, - float scale, - uint8_t* encoded); - -/**************************************************************************** - * WebRtcIsacfix_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * other side to this side. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - rateIndex : Bandwidth estimate to transmit to other side. - * - */ - -int16_t WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* rateIndex); - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - rateIndex : Bandwidth estimate from other side. - * - */ - -int16_t WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rateIndex); - -/**************************************************************************** - * WebRtcIsacfix_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the bitstream. - * - * Input: - * - encoded : Encoded bitstream - * - encoded_len_bytes : Length of the bitstream in bytes. - * - * Output: - * - rateIndex : Bandwidth estimate in bitstream - * - */ - -int16_t WebRtcIsacfix_ReadBwIndex(const uint8_t* encoded, - size_t encoded_len_bytes, - int16_t* rateIndex); - -/**************************************************************************** - * WebRtcIsacfix_GetNewFrameLen(...) 
- * - * This function return the next frame length (in samples) of iSAC. - * - * Input: - * -ISAC_main_inst : iSAC instance - * - * Return value : frame lenght in samples - */ - -int16_t WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct* ISAC_main_inst); - -#if defined(__cplusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_INCLUDE_ISACFIX_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/arith_routines.c b/modules/audio_coding/codecs/isac/fix/source/arith_routines.c deleted file mode 100644 index eaeef50f04..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/arith_routines.c +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routins.c - * - * This C file contains a function for finalizing the bitstream - * after arithmetic coding. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" - - -/**************************************************************************** - * WebRtcIsacfix_EncTerminate(...) - * - * Final call to the arithmetic coder for an encoder call. This function - * terminates and return byte stream. 
- * - * Input: - * - streamData : in-/output struct containing bitstream - * - * Return value : number of bytes in the stream - */ -int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData) -{ - uint16_t *streamPtr; - uint16_t negCarry; - - /* point to the right place in the stream buffer */ - streamPtr = streamData->stream + streamData->stream_index; - - /* find minimum length (determined by current interval width) */ - if ( streamData->W_upper > 0x01FFFFFF ) - { - streamData->streamval += 0x01000000; - - /* if result is less than the added value we must take care of the carry */ - if (streamData->streamval < 0x01000000) - { - /* propagate carry */ - if (streamData->full == 0) { - /* Add value to current value */ - negCarry = *streamPtr; - negCarry += 0x0100; - *streamPtr = negCarry; - - /* if value is too big, propagate carry to next byte, and so on */ - while (!(negCarry)) - { - negCarry = *--streamPtr; - negCarry++; - *streamPtr = negCarry; - } - } else { - /* propagate carry by adding one to the previous byte in the - * stream if that byte is 0xFFFF we need to propagate the carry - * furhter back in the stream */ - while ( !(++(*--streamPtr)) ); - } - - /* put pointer back to the old value */ - streamPtr = streamData->stream + streamData->stream_index; - } - /* write remaining data to bitstream, if "full == 0" first byte has data */ - if (streamData->full == 0) { - *streamPtr++ += (uint16_t)(streamData->streamval >> 24); - streamData->full = 1; - } else { - *streamPtr = (uint16_t)((streamData->streamval >> 24) << 8); - streamData->full = 0; - } - } - else - { - streamData->streamval += 0x00010000; - - /* if result is less than the added value we must take care of the carry */ - if (streamData->streamval < 0x00010000) - { - /* propagate carry */ - if (streamData->full == 0) { - /* Add value to current value */ - negCarry = *streamPtr; - negCarry += 0x0100; - *streamPtr = negCarry; - - /* if value to big, propagate carry to next byte, and so on */ - while 
(!(negCarry)) - { - negCarry = *--streamPtr; - negCarry++; - *streamPtr = negCarry; - } - } else { - /* Add carry to previous byte */ - while ( !(++(*--streamPtr)) ); - } - - /* put pointer back to the old value */ - streamPtr = streamData->stream + streamData->stream_index; - } - /* write remaining data (2 bytes) to bitstream */ - if (streamData->full) { - *streamPtr++ = (uint16_t)(streamData->streamval >> 16); - } else { - *streamPtr++ |= (uint16_t)(streamData->streamval >> 24); - *streamPtr = (uint16_t)(streamData->streamval >> 8) & 0xFF00; - } - } - - /* calculate stream length in bytes */ - return (((streamPtr - streamData->stream)<<1) + !(streamData->full)); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c b/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c deleted file mode 100644 index cad3056b37..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c +++ /dev/null @@ -1,401 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routinshist.c - * - * This C file contains arithmetic encoding and decoding. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" - - -/**************************************************************************** - * WebRtcIsacfix_EncHistMulti(...) 
- * - * Encode the histogram interval - * - * Input: - * - streamData : in-/output struct containing bitstream - * - data : data vector - * - cdf : array of cdf arrays - * - lenData : data vector length - * - * Return value : 0 if ok - * <0 if error detected - */ -int WebRtcIsacfix_EncHistMulti(Bitstr_enc *streamData, - const int16_t *data, - const uint16_t *const *cdf, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint32_t W_upper_LSB; - uint32_t W_upper_MSB; - uint16_t *streamPtr; - uint16_t negCarry; - uint16_t *maxStreamPtr; - uint16_t *streamPtrCarry; - uint32_t cdfLo; - uint32_t cdfHi; - int k; - - - /* point to beginning of stream buffer - * and set maximum streamPtr value */ - streamPtr = streamData->stream + streamData->stream_index; - maxStreamPtr = streamData->stream + STREAM_MAXW16_60MS - 1; - - W_upper = streamData->W_upper; - - for (k = lenData; k > 0; k--) - { - /* fetch cdf_lower and cdf_upper from cdf tables */ - cdfLo = (uint32_t) *(*cdf + (uint32_t)*data); - cdfHi = (uint32_t) *(*cdf++ + (uint32_t)*data++ + 1); - - /* update interval */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - W_lower = WEBRTC_SPL_UMUL(W_upper_MSB, cdfLo); - W_lower += ((W_upper_LSB * cdfLo) >> 16); - W_upper = WEBRTC_SPL_UMUL(W_upper_MSB, cdfHi); - W_upper += ((W_upper_LSB * cdfHi) >> 16); - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamData->streamval += W_lower; - - /* handle carry */ - if (streamData->streamval < W_lower) - { - /* propagate carry */ - streamPtrCarry = streamPtr; - if (streamData->full == 0) { - negCarry = *streamPtrCarry; - negCarry += 0x0100; - *streamPtrCarry = negCarry; - while (!(negCarry)) - { - negCarry = *--streamPtrCarry; - negCarry++; - *streamPtrCarry = negCarry; - } - } else { - while ( !(++(*--streamPtrCarry)) ); - } - } - - /* renormalize interval, store most significant byte of streamval and update streamval - * W_upper < 
2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - W_upper <<= 8; - if (streamData->full == 0) { - *streamPtr++ += (uint16_t)(streamData->streamval >> 24); - streamData->full = 1; - } else { - *streamPtr = (uint16_t)((streamData->streamval >> 24) << 8); - streamData->full = 0; - } - - if( streamPtr > maxStreamPtr ) { - return -ISAC_DISALLOWED_BITSTREAM_LENGTH; - } - streamData->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_DecHistBisectMulti(...) - * - * Function to decode more symbols from the arithmetic bytestream, using - * method of bisection cdf tables should be of size 2^k-1 (which corresponds - * to an alphabet size of 2^k-2) - * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - cdfSize : array of cdf table sizes+1 (power of two: 2^k) - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in the stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistBisectMulti(int16_t *data, - Bitstr_dec *streamData, - const uint16_t *const *cdf, - const uint16_t *cdfSize, - const int16_t lenData) -{ - uint32_t W_lower = 0; - uint32_t W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB; - uint32_t W_upper_MSB; - uint32_t streamval; - const uint16_t *streamPtr; - const uint16_t *cdfPtr; - int16_t sizeTmp; - int k; - - - streamPtr = streamData->stream + streamData->stream_index; - W_upper = streamData->W_upper; - - /* Error check: should not be possible in normal operation */ - if (W_upper == 0) { - return -2; - } - - /* first time decoder is called for this stream */ - if (streamData->stream_index == 0) - { - /* read first word from bytestream */ - streamval = (uint32_t)*streamPtr++ << 16; - streamval |= *streamPtr++; - } else 
{ - streamval = streamData->streamval; - } - - for (k = lenData; k > 0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* start halfway the cdf range */ - sizeTmp = *cdfSize++ / 2; - cdfPtr = *cdf + (sizeTmp - 1); - - /* method of bisection */ - for ( ;; ) - { - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - sizeTmp /= 2; - if (sizeTmp == 0) { - break; - } - - if (streamval > W_tmp) - { - W_lower = W_tmp; - cdfPtr += sizeTmp; - } else { - W_upper = W_tmp; - cdfPtr -= sizeTmp; - } - } - if (streamval > W_tmp) - { - W_lower = W_tmp; - *data++ = cdfPtr - *cdf++; - } else { - W_upper = W_tmp; - *data++ = cdfPtr - *cdf++ - 1; - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - /* W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - /* read next byte from stream */ - if (streamData->full == 0) { - streamval = (streamval << 8) | (*streamPtr++ & 0x00FF); - streamData->full = 1; - } else { - streamval = (streamval << 8) | (*streamPtr >> 8); - streamData->full = 0; - } - W_upper <<= 8; - } - - - /* Error check: should not be possible in normal operation */ - if (W_upper == 0) { - return -2; - } - - } - - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - streamData->streamval = streamval; - - if ( W_upper > 0x01FFFFFF ) { - return (streamData->stream_index*2 - 3 + !streamData->full); - } else { - return (streamData->stream_index*2 - 2 + !streamData->full); - } -} - - -/**************************************************************************** - * WebRtcIsacfix_DecHistOneStepMulti(...) - * - * Function to decode more symbols from the arithmetic bytestream, taking - * single step up or down at a time. 
- * cdf tables can be of arbitrary size, but large tables may take a lot of - * iterations. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - initIndex : vector of initial cdf table search entries - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in original stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistOneStepMulti(int16_t *data, - Bitstr_dec *streamData, - const uint16_t *const *cdf, - const uint16_t *initIndex, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB; - uint32_t W_upper_MSB; - uint32_t streamval; - const uint16_t *streamPtr; - const uint16_t *cdfPtr; - int k; - - - streamPtr = streamData->stream + streamData->stream_index; - W_upper = streamData->W_upper; - /* Error check: Should not be possible in normal operation */ - if (W_upper == 0) { - return -2; - } - - /* Check if it is the first time decoder is called for this stream */ - if (streamData->stream_index == 0) - { - /* read first word from bytestream */ - streamval = (uint32_t)(*streamPtr++) << 16; - streamval |= *streamPtr++; - } else { - streamval = streamData->streamval; - } - - for (k = lenData; k > 0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = WEBRTC_SPL_RSHIFT_U32(W_upper, 16); - - /* start at the specified table entry */ - cdfPtr = *cdf + (*initIndex++); - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - - if (streamval > W_tmp) - { - for ( ;; ) - { - W_lower = W_tmp; - - /* range check */ - if (cdfPtr[0] == 65535) { - return -3; - } - - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *++cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - - if (streamval <= W_tmp) { - break; - } - } - W_upper = W_tmp; - *data++ = cdfPtr - *cdf++ - 1; - } else 
{ - for ( ;; ) - { - W_upper = W_tmp; - --cdfPtr; - - /* range check */ - if (cdfPtr < *cdf) { - return -3; - } - - W_tmp = WEBRTC_SPL_UMUL_32_16(W_upper_MSB, *cdfPtr); - W_tmp += (W_upper_LSB * (*cdfPtr)) >> 16; - - if (streamval > W_tmp) { - break; - } - } - W_lower = W_tmp; - *data++ = cdfPtr - *cdf++; - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - /* W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - /* read next byte from stream */ - if (streamData->full == 0) { - streamval = (streamval << 8) | (*streamPtr++ & 0x00FF); - streamData->full = 1; - } else { - streamval = (streamval << 8) | (*streamPtr >> 8); - streamData->full = 0; - } - W_upper <<= 8; - } - } - - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - streamData->streamval = streamval; - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) { - return (streamData->stream_index*2 - 3 + !streamData->full); - } else { - return (streamData->stream_index*2 - 2 + !streamData->full); - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c b/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c deleted file mode 100644 index 8e97960461..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c +++ /dev/null @@ -1,413 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * arith_routinslogist.c - * - * This C file contains arithmetic encode and decode logistic - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" - -/* Tables for piecewise linear cdf functions: y = k*x */ - -/* x Points for function piecewise() in Q15 */ -static const int32_t kHistEdges[51] = { - -327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716, - -196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858, -91751, -78644, - -65536, -52429, -39322, -26215, -13108, 0, 13107, 26214, 39321, 52428, - 65536, 78643, 91750, 104857, 117964, 131072, 144179, 157286, 170393, 183500, - 196608, 209715, 222822, 235929, 249036, 262144, 275251, 288358, 301465, 314572, - 327680 -}; - - -/* k Points for function piecewise() in Q0 */ -static const uint16_t kCdfSlope[51] = { - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 5, 5, 13, 23, 47, 87, 154, 315, 700, 1088, - 2471, 6064, 14221, 21463, 36634, 36924, 19750, 13270, 5806, 2312, - 1095, 660, 316, 145, 86, 41, 32, 5, 5, 5, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 2, - 0 -}; - -/* y Points for function piecewise() in Q0 */ -static const uint16_t kCdfLogistic[51] = { - 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, - 20, 22, 24, 29, 38, 57, 92, 153, 279, 559, - 994, 1983, 4408, 10097, 18682, 33336, 48105, 56005, 61313, 63636, - 64560, 64998, 65262, 65389, 65447, 65481, 65497, 65510, 65512, 65514, - 65516, 65518, 65520, 65522, 65524, 65526, 65528, 65530, 65532, 65534, - 65535 -}; - - -/**************************************************************************** - * WebRtcIsacfix_Piecewise(...) 
- * - * Piecewise linear function - * - * Input: - * - xinQ15 : input value x in Q15 - * - * Return value : korresponding y-value in Q0 - */ - - -static __inline uint16_t WebRtcIsacfix_Piecewise(int32_t xinQ15) { - int32_t ind; - int32_t qtmp1; - uint16_t qtmp2; - - /* Find index for x-value */ - qtmp1 = WEBRTC_SPL_SAT(kHistEdges[50],xinQ15,kHistEdges[0]); - ind = WEBRTC_SPL_MUL(5, qtmp1 - kHistEdges[0]); - ind >>= 16; - - /* Calculate corresponding y-value ans return*/ - qtmp1 = qtmp1 - kHistEdges[ind]; - qtmp2 = (uint16_t)WEBRTC_SPL_RSHIFT_U32( - WEBRTC_SPL_UMUL_32_16(qtmp1,kCdfSlope[ind]), 15); - return (kCdfLogistic[ind] + qtmp2); -} - -/**************************************************************************** - * WebRtcIsacfix_EncLogisticMulti2(...) - * - * Arithmetic coding of spectrum. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - dataQ7 : data vector in Q7 - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Return value : 0 if ok, - * <0 otherwise. 
- */ -int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc *streamData, - int16_t *dataQ7, - const uint16_t *envQ8, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint16_t W_upper_LSB; - uint16_t W_upper_MSB; - uint16_t *streamPtr; - uint16_t *maxStreamPtr; - uint16_t *streamPtrCarry; - uint16_t negcarry; - uint32_t cdfLo; - uint32_t cdfHi; - int k; - - /* point to beginning of stream buffer - * and set maximum streamPtr value */ - streamPtr = streamData->stream + streamData->stream_index; - maxStreamPtr = streamData->stream + STREAM_MAXW16_60MS - 1; - W_upper = streamData->W_upper; - - for (k = 0; k < lenData; k++) - { - /* compute cdf_lower and cdf_upper by evaluating the - * WebRtcIsacfix_Piecewise linear cdf */ - cdfLo = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(*dataQ7 - 64, *envQ8)); - cdfHi = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(*dataQ7 + 64, *envQ8)); - - /* test and clip if probability gets too small */ - while ((cdfLo + 1) >= cdfHi) { - /* clip */ - if (*dataQ7 > 0) { - *dataQ7 -= 128; - cdfHi = cdfLo; - cdfLo = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(*dataQ7 - 64, *envQ8)); - } else { - *dataQ7 += 128; - cdfLo = cdfHi; - cdfHi = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(*dataQ7 + 64, *envQ8)); - } - } - - dataQ7++; - /* increment only once per 4 iterations */ - envQ8 += (k & 1) & (k >> 1); - - - /* update interval */ - W_upper_LSB = (uint16_t)W_upper; - W_upper_MSB = (uint16_t)WEBRTC_SPL_RSHIFT_U32(W_upper, 16); - W_lower = WEBRTC_SPL_UMUL_32_16(cdfLo, W_upper_MSB); - W_lower += (cdfLo * W_upper_LSB) >> 16; - W_upper = WEBRTC_SPL_UMUL_32_16(cdfHi, W_upper_MSB); - W_upper += (cdfHi * W_upper_LSB) >> 16; - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamData->streamval += W_lower; - - /* handle carry */ - if (streamData->streamval < W_lower) - { - /* propagate carry */ - streamPtrCarry = streamPtr; - if (streamData->full == 0) { - negcarry = 
*streamPtrCarry; - negcarry += 0x0100; - *streamPtrCarry = negcarry; - while (!(negcarry)) - { - negcarry = *--streamPtrCarry; - negcarry++; - *streamPtrCarry = negcarry; - } - } else { - while (!(++(*--streamPtrCarry))); - } - } - - /* renormalize interval, store most significant byte of streamval and update streamval - * W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - W_upper <<= 8; - if (streamData->full == 0) { - *streamPtr++ += (uint16_t) WEBRTC_SPL_RSHIFT_U32( - streamData->streamval, 24); - streamData->full = 1; - } else { - *streamPtr = (uint16_t)((streamData->streamval >> 24) << 8); - streamData->full = 0; - } - - if( streamPtr > maxStreamPtr ) - return -ISAC_DISALLOWED_BITSTREAM_LENGTH; - - streamData->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamData->stream_index = streamPtr - streamData->stream; - streamData->W_upper = W_upper; - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_DecLogisticMulti2(...) - * - * Arithmetic decoding of spectrum. 
- * - * Input: - * - streamData : in-/output struct containing bitstream - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Input/Output: - * - dataQ7 : input: dither vector, output: data vector - * - * Return value : number of bytes in the stream so far - * -1 if error detected - */ -int WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7, - Bitstr_dec *streamData, - const int32_t *envQ8, - const int16_t lenData) -{ - uint32_t W_lower; - uint32_t W_upper; - uint32_t W_tmp; - uint16_t W_upper_LSB; - uint16_t W_upper_MSB; - uint32_t streamVal; - uint16_t cdfTmp; - int32_t res; - int32_t inSqrt; - int32_t newRes; - const uint16_t *streamPtr; - int16_t candQ7; - int16_t envCount; - uint16_t tmpARSpecQ8 = 0; - int k, i; - int offset = 0; - - /* point to beginning of stream buffer */ - streamPtr = streamData->stream + streamData->stream_index; - W_upper = streamData->W_upper; - - /* Check if it is first time decoder is called for this stream */ - if (streamData->stream_index == 0) - { - /* read first word from bytestream */ - streamVal = (uint32_t)(*streamPtr++) << 16; - streamVal |= *streamPtr++; - - } else { - streamVal = streamData->streamval; - } - - - res = 1 << (WebRtcSpl_GetSizeInBits(envQ8[0]) >> 1); - envCount = 0; - - /* code assumes lenData%4 == 0 */ - for (k = 0; k < lenData; k += 4) - { - int k4; - - /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */ - inSqrt = envQ8[envCount]; - i = 10; - - /* For safty reasons */ - if (inSqrt < 0) - inSqrt=-inSqrt; - - newRes = (inSqrt / res + res) >> 1; - do - { - res = newRes; - newRes = (inSqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - tmpARSpecQ8 = (uint16_t)newRes; - - for(k4 = 0; k4 < 4; k4++) - { - /* find the integer *data for which streamVal lies in [W_lower+1, W_upper] */ - W_upper_LSB = (uint16_t) (W_upper & 0x0000FFFF); - W_upper_MSB = (uint16_t) WEBRTC_SPL_RSHIFT_U32(W_upper, 16); - - /* find first 
candidate by inverting the logistic cdf - * Input dither value collected from io-stream */ - candQ7 = - *dataQ7 + 64; - cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - if (streamVal > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - while (streamVal > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdfTmp = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - /* error check */ - if (W_lower == W_tmp) { - return -1; - } - } - W_upper = W_tmp; - - /* Output value put in dataQ7: another sample decoded */ - *dataQ7 = candQ7 - 64; - } - else - { - W_upper = W_tmp; - candQ7 -= 128; - cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - while ( !(streamVal > W_tmp) ) - { - W_upper = W_tmp; - candQ7 -= 128; - cdfTmp = WebRtcIsacfix_Piecewise( - WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8)); - - W_tmp = (uint32_t)cdfTmp * W_upper_MSB; - W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16; - - /* error check */ - if (W_upper == W_tmp){ - return -1; - } - } - W_lower = W_tmp; - - /* Output value put in dataQ7: another sample decoded */ - *dataQ7 = candQ7 + 64; - } - - dataQ7++; - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamVal -= W_lower; - - /* renormalize interval and update streamVal - * W_upper < 2^24 */ - while ( !(W_upper & 0xFF000000) ) - { - if (streamPtr < streamData->stream + streamData->stream_size) { - /* read next byte 
from stream */ - if (streamData->full == 0) { - streamVal = (streamVal << 8) | (*streamPtr++ & 0x00FF); - streamData->full = 1; - } else { - streamVal = (streamVal << 8) | (*streamPtr >> 8); - streamData->full = 0; - } - } else { - /* Intending to read outside the stream. This can happen for the last - * two or three bytes. It is how the algorithm is implemented. Do - * not read from the bit stream and insert zeros instead. */ - streamVal <<= 8; - if (streamData->full == 0) { - offset++; // We would have incremented the pointer in this case. - streamData->full = 1; - } else { - streamData->full = 0; - } - } - W_upper <<= 8; - } - } - envCount++; - } - - streamData->stream_index = streamPtr + offset - streamData->stream; - streamData->W_upper = W_upper; - streamData->streamval = streamVal; - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return (streamData->stream_index*2 - 3 + !streamData->full); - else - return (streamData->stream_index*2 - 2 + !streamData->full); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/arith_routins.h b/modules/audio_coding/codecs/isac/fix/source/arith_routins.h deleted file mode 100644 index d112bfe7f2..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/arith_routins.h +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routins.h - * - * Functions for arithmetic coding. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -/**************************************************************************** - * WebRtcIsacfix_EncLogisticMulti2(...) - * - * Arithmetic coding of spectrum. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - dataQ7 : data vector in Q7 - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Return value : 0 if ok, - * <0 otherwise. - */ -int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc* streamData, - int16_t* dataQ7, - const uint16_t* env, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_EncTerminate(...) - * - * Final call to the arithmetic coder for an encoder call. This function - * terminates and return byte stream. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - * Return value : number of bytes in the stream - */ -int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc* streamData); - -/**************************************************************************** - * WebRtcIsacfix_DecLogisticMulti2(...) - * - * Arithmetic decoding of spectrum. - * - * Input: - * - streamData : in-/output struct containing bitstream - * - envQ8 : side info vector defining the width of the pdf - * in Q8 - * - lenData : data vector length - * - * Input/Output: - * - dataQ7 : input: dither vector, output: data vector, in Q7 - * - * Return value : number of bytes in the stream so far - * <0 if error detected - */ -int WebRtcIsacfix_DecLogisticMulti2(int16_t* data, - Bitstr_dec* streamData, - const int32_t* env, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_EncHistMulti(...) 
- * - * Encode the histogram interval - * - * Input: - * - streamData : in-/output struct containing bitstream - * - data : data vector - * - cdf : array of cdf arrays - * - lenData : data vector length - * - * Return value : 0 if ok - * <0 if error detected - */ -int WebRtcIsacfix_EncHistMulti(Bitstr_enc* streamData, - const int16_t* data, - const uint16_t* const* cdf, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_DecHistBisectMulti(...) - * - * Function to decode more symbols from the arithmetic bytestream, using - * method of bisection. - * C df tables should be of size 2^k-1 (which corresponds to an - * alphabet size of 2^k-2) - * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - cdfSize : array of cdf table sizes+1 (power of two: 2^k) - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in the stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistBisectMulti(int16_t* data, - Bitstr_dec* streamData, - const uint16_t* const* cdf, - const uint16_t* cdfSize, - int16_t lenData); - -/**************************************************************************** - * WebRtcIsacfix_DecHistOneStepMulti(...) - * - * Function to decode more symbols from the arithmetic bytestream, taking - * single step up or down at a time. - * cdf tables can be of arbitrary size, but large tables may take a lot of - * iterations. 
- * - * Input: - * - streamData : in-/output struct containing bitstream - * - cdf : array of cdf arrays - * - initIndex : vector of initial cdf table search entries - * - lenData : data vector length - * - * Output: - * - data : data vector - * - * Return value : number of bytes in original stream - * <0 if error detected - */ -int16_t WebRtcIsacfix_DecHistOneStepMulti(int16_t* data, - Bitstr_dec* streamData, - const uint16_t* const* cdf, - const uint16_t* initIndex, - int16_t lenData); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c b/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c deleted file mode 100644 index 8845357d59..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c +++ /dev/null @@ -1,1021 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * bandwidth_estimator.c - * - * This file contains the code for the Bandwidth Estimator designed - * for iSAC. - * - * NOTE! Castings needed for C55, do not remove! 
- * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/checks.h" - -/* array of quantization levels for bottle neck info; Matlab code: */ -/* sprintf('%4.1ff, ', logspace(log10(5000), log10(40000), 12)) */ -static const int16_t kQRateTable[12] = { - 10000, 11115, 12355, 13733, 15265, 16967, - 18860, 20963, 23301, 25900, 28789, 32000 -}; - -/* 0.1 times the values in the table kQRateTable */ -/* values are in Q16 */ -static const int32_t KQRate01[12] = { - 65536000, 72843264, 80969728, 90000589, 100040704, 111194931, - 123600896, 137383117, 152705434, 169738240, 188671590, 209715200 -}; - -/* Bits per Bytes Seconds - * 8 bits/byte * 1000 msec/sec * 1/framelength (in msec)->bits/byte*sec - * frame length will either be 30 or 60 msec. 8738 is 1/60 in Q19 and 1/30 in Q18 - * The following number is either in Q15 or Q14 depending on the current frame length */ -static const int32_t kBitsByteSec = 4369000; - -/* Received header rate. First value is for 30 ms packets and second for 60 ms */ -static const int16_t kRecHeaderRate[2] = { - 9333, 4666 -}; - -/* Inverted minimum and maximum bandwidth in Q30. - minBwInv 30 ms, maxBwInv 30 ms, - minBwInv 60 ms, maxBwInv 69 ms -*/ -static const int32_t kInvBandwidth[4] = { - 55539, 25978, - 73213, 29284 -}; - -/* Number of samples in 25 msec */ -static const int32_t kSamplesIn25msec = 400; - - -/**************************************************************************** - * WebRtcIsacfix_InitBandwidthEstimator(...) - * - * This function initializes the struct for the bandwidth estimator - * - * Input/Output: - * - bweStr : Struct containing bandwidth information. 
- * - * Return value : 0 - */ -int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bweStr) -{ - bweStr->prevFrameSizeMs = INIT_FRAME_LEN; - bweStr->prevRtpNumber = 0; - bweStr->prevSendTime = 0; - bweStr->prevArrivalTime = 0; - bweStr->prevRtpRate = 1; - bweStr->lastUpdate = 0; - bweStr->lastReduction = 0; - bweStr->countUpdates = -9; - - /* INIT_BN_EST = 20000 - * INIT_BN_EST_Q7 = 2560000 - * INIT_HDR_RATE = 4666 - * INIT_REC_BN_EST_Q5 = 789312 - * - * recBwInv = 1/(INIT_BN_EST + INIT_HDR_RATE) in Q30 - * recBwAvg = INIT_BN_EST + INIT_HDR_RATE in Q5 - */ - bweStr->recBwInv = 43531; - bweStr->recBw = INIT_BN_EST; - bweStr->recBwAvgQ = INIT_BN_EST_Q7; - bweStr->recBwAvg = INIT_REC_BN_EST_Q5; - bweStr->recJitter = (int32_t) 327680; /* 10 in Q15 */ - bweStr->recJitterShortTerm = 0; - bweStr->recJitterShortTermAbs = (int32_t) 40960; /* 5 in Q13 */ - bweStr->recMaxDelay = (int32_t) 10; - bweStr->recMaxDelayAvgQ = (int32_t) 5120; /* 10 in Q9 */ - bweStr->recHeaderRate = INIT_HDR_RATE; - bweStr->countRecPkts = 0; - bweStr->sendBwAvg = INIT_BN_EST_Q7; - bweStr->sendMaxDelayAvg = (int32_t) 5120; /* 10 in Q9 */ - - bweStr->countHighSpeedRec = 0; - bweStr->highSpeedRec = 0; - bweStr->countHighSpeedSent = 0; - bweStr->highSpeedSend = 0; - bweStr->inWaitPeriod = 0; - - /* Find the inverse of the max bw and min bw in Q30 - * (1 / (MAX_ISAC_BW + INIT_HDR_RATE) in Q30 - * (1 / (MIN_ISAC_BW + INIT_HDR_RATE) in Q30 - */ - bweStr->maxBwInv = kInvBandwidth[3]; - bweStr->minBwInv = kInvBandwidth[2]; - - bweStr->external_bw_info.in_use = 0; - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBwImpl(...) - * - * This function updates bottle neck rate received from other side in payload - * and calculates a new bottle neck to send to the other side. - * - * Input/Output: - * - bweStr : struct containing bandwidth information. 
- * - rtpNumber : value from RTP packet, from NetEq - * - frameSize : length of signal frame in ms, from iSAC decoder - * - sendTime : value in RTP header giving send time in samples - * - arrivalTime : value given by timeGetTime() time of arrival in - * samples of packet from NetEq - * - pksize : size of packet in bytes, from NetEq - * - Index : integer (range 0...23) indicating bottle neck & - * jitter as estimated by other side - * - * Return value : 0 if everything went fine, - * -1 otherwise - */ -int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bweStr, - const uint16_t rtpNumber, - const int16_t frameSize, - const uint32_t sendTime, - const uint32_t arrivalTime, - const size_t pksize, - const uint16_t Index) -{ - uint16_t weight = 0; - uint32_t currBwInv = 0; - uint16_t recRtpRate; - uint32_t arrTimeProj; - int32_t arrTimeDiff; - int32_t arrTimeNoise; - int32_t arrTimeNoiseAbs; - int32_t sendTimeDiff; - - int32_t delayCorrFactor = DELAY_CORRECTION_MED; - int32_t lateDiff = 0; - int16_t immediateSet = 0; - int32_t frameSizeSampl; - - int32_t temp; - int32_t msec; - uint32_t exponent; - uint32_t reductionFactor; - uint32_t numBytesInv; - int32_t sign; - - uint32_t byteSecondsPerBit; - uint32_t tempLower; - uint32_t tempUpper; - int32_t recBwAvgInv; - int32_t numPktsExpected; - - int16_t errCode; - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - - /* The function also checks if Index has a valid value */ - errCode = WebRtcIsacfix_UpdateUplinkBwRec(bweStr, Index); - if (errCode <0) { - return(errCode); - } - - - /* UPDATE ESTIMATES ON THIS SIDE */ - - /* Bits per second per byte * 1/30 or 1/60 */ - if (frameSize == 60) { - /* If frameSize changed since last call, from 30 to 60, recalculate some values */ - if ( (frameSize != bweStr->prevFrameSizeMs) && (bweStr->countUpdates > 0)) { - bweStr->countUpdates = 10; - bweStr->recHeaderRate = kRecHeaderRate[1]; - - bweStr->maxBwInv = kInvBandwidth[3]; - 
bweStr->minBwInv = kInvBandwidth[2]; - bweStr->recBwInv = 1073741824 / (bweStr->recBw + bweStr->recHeaderRate); - } - - /* kBitsByteSec is in Q15 */ - recRtpRate = (int16_t)((kBitsByteSec * pksize) >> 15) + - bweStr->recHeaderRate; - - } else { - /* If frameSize changed since last call, from 60 to 30, recalculate some values */ - if ( (frameSize != bweStr->prevFrameSizeMs) && (bweStr->countUpdates > 0)) { - bweStr->countUpdates = 10; - bweStr->recHeaderRate = kRecHeaderRate[0]; - - bweStr->maxBwInv = kInvBandwidth[1]; - bweStr->minBwInv = kInvBandwidth[0]; - bweStr->recBwInv = 1073741824 / (bweStr->recBw + bweStr->recHeaderRate); - } - - /* kBitsByteSec is in Q14 */ - recRtpRate = (uint16_t)((kBitsByteSec * pksize) >> 14) + - bweStr->recHeaderRate; - } - - - /* Check for timer wrap-around */ - if (arrivalTime < bweStr->prevArrivalTime) { - bweStr->prevArrivalTime = arrivalTime; - bweStr->lastUpdate = arrivalTime; - bweStr->lastReduction = arrivalTime + FS3; - - bweStr->countRecPkts = 0; - - /* store frame size */ - bweStr->prevFrameSizeMs = frameSize; - - /* store far-side transmission rate */ - bweStr->prevRtpRate = recRtpRate; - - /* store far-side RTP time stamp */ - bweStr->prevRtpNumber = rtpNumber; - - return 0; - } - - bweStr->countRecPkts++; - - /* Calculate framesize in msec */ - frameSizeSampl = SAMPLES_PER_MSEC * frameSize; - - /* Check that it's not one of the first 9 packets */ - if ( bweStr->countUpdates > 0 ) { - - /* Stay in Wait Period for 1.5 seconds (no updates in wait period) */ - if(bweStr->inWaitPeriod) { - if ((arrivalTime - bweStr->startWaitPeriod)> FS_1_HALF) { - bweStr->inWaitPeriod = 0; - } - } - - /* If not been updated for a long time, reduce the BN estimate */ - - /* Check send time difference between this packet and previous received */ - sendTimeDiff = sendTime - bweStr->prevSendTime; - if (sendTimeDiff <= frameSizeSampl * 2) { - - /* Only update if 3 seconds has past since last update */ - if ((arrivalTime - bweStr->lastUpdate) > 
FS3) { - - /* Calculate expected number of received packets since last update */ - numPktsExpected = (arrivalTime - bweStr->lastUpdate) / frameSizeSampl; - - /* If received number of packets is more than 90% of expected (922 = 0.9 in Q10): */ - /* do the update, else not */ - if ((int32_t)bweStr->countRecPkts << 10 > 922 * numPktsExpected) { - /* Q4 chosen to approx dividing by 16 */ - msec = (arrivalTime - bweStr->lastReduction); - - /* the number below represents 13 seconds, highly unlikely - but to insure no overflow when reduction factor is multiplied by recBw inverse */ - if (msec > 208000) { - msec = 208000; - } - - /* Q20 2^(negative number: - 76/1048576) = .99995 - product is Q24 */ - exponent = WEBRTC_SPL_UMUL(0x0000004C, msec); - - /* do the approx with positive exponent so that value is actually rf^-1 - and multiply by bw inverse */ - reductionFactor = WEBRTC_SPL_RSHIFT_U32(0x01000000 | (exponent & 0x00FFFFFF), - WEBRTC_SPL_RSHIFT_U32(exponent, 24)); - - /* reductionFactor in Q13 */ - reductionFactor = WEBRTC_SPL_RSHIFT_U32(reductionFactor, 11); - - if ( reductionFactor != 0 ) { - bweStr->recBwInv = WEBRTC_SPL_MUL((int32_t)bweStr->recBwInv, (int32_t)reductionFactor); - bweStr->recBwInv = (int32_t)bweStr->recBwInv >> 13; - - } else { - static const uint32_t kInitRate = INIT_BN_EST + INIT_HDR_RATE; - /* recBwInv = 1 / kInitRate in Q26 (Q30??)*/ - bweStr->recBwInv = (1073741824 + kInitRate / 2) / kInitRate; - } - - /* reset time-since-update counter */ - bweStr->lastReduction = arrivalTime; - } else { - /* Delay last reduction with 3 seconds */ - bweStr->lastReduction = arrivalTime + FS3; - bweStr->lastUpdate = arrivalTime; - bweStr->countRecPkts = 0; - } - } - } else { - bweStr->lastReduction = arrivalTime + FS3; - bweStr->lastUpdate = arrivalTime; - bweStr->countRecPkts = 0; - } - - - /* update only if previous packet was not lost */ - if ( rtpNumber == bweStr->prevRtpNumber + 1 ) { - arrTimeDiff = arrivalTime - bweStr->prevArrivalTime; - - if 
(!(bweStr->highSpeedSend && bweStr->highSpeedRec)) { - if (arrTimeDiff > frameSizeSampl) { - if (sendTimeDiff > 0) { - lateDiff = arrTimeDiff - sendTimeDiff - frameSizeSampl * 2; - } else { - lateDiff = arrTimeDiff - frameSizeSampl; - } - - /* 8000 is 1/2 second (in samples at FS) */ - if (lateDiff > 8000) { - delayCorrFactor = (int32_t) DELAY_CORRECTION_MAX; - bweStr->inWaitPeriod = 1; - bweStr->startWaitPeriod = arrivalTime; - immediateSet = 1; - } else if (lateDiff > 5120) { - delayCorrFactor = (int32_t) DELAY_CORRECTION_MED; - immediateSet = 1; - bweStr->inWaitPeriod = 1; - bweStr->startWaitPeriod = arrivalTime; - } - } - } - - if ((bweStr->prevRtpRate > (int32_t)bweStr->recBwAvg >> 5) && - (recRtpRate > (int32_t)bweStr->recBwAvg >> 5) && - !bweStr->inWaitPeriod) { - - /* test if still in initiation period and increment counter */ - if (bweStr->countUpdates++ > 99) { - /* constant weight after initiation part, 0.01 in Q13 */ - weight = (uint16_t) 82; - } else { - /* weight decreases with number of updates, 1/countUpdates in Q13 */ - weight = (uint16_t) WebRtcSpl_DivW32W16( - 8192 + (bweStr->countUpdates >> 1), - (int16_t)bweStr->countUpdates); - } - - /* Bottle Neck Estimation */ - - /* limit outliers, if more than 25 ms too much */ - if (arrTimeDiff > frameSizeSampl + kSamplesIn25msec) { - arrTimeDiff = frameSizeSampl + kSamplesIn25msec; - } - - /* don't allow it to be less than frame rate - 10 ms */ - if (arrTimeDiff < frameSizeSampl - FRAMESAMPLES_10ms) { - arrTimeDiff = frameSizeSampl - FRAMESAMPLES_10ms; - } - - /* compute inverse receiving rate for last packet, in Q19 */ - numBytesInv = (uint16_t) WebRtcSpl_DivW32W16( - (int32_t)(524288 + ((pksize + HEADER_SIZE) >> 1)), - (int16_t)(pksize + HEADER_SIZE)); - - /* 8389 is ~ 1/128000 in Q30 */ - byteSecondsPerBit = (uint32_t)(arrTimeDiff * 8389); - - /* get upper N bits */ - tempUpper = WEBRTC_SPL_RSHIFT_U32(byteSecondsPerBit, 15); - - /* get lower 15 bits */ - tempLower = byteSecondsPerBit & 0x00007FFF; - - 
tempUpper = WEBRTC_SPL_MUL(tempUpper, numBytesInv); - tempLower = WEBRTC_SPL_MUL(tempLower, numBytesInv); - tempLower = WEBRTC_SPL_RSHIFT_U32(tempLower, 15); - - currBwInv = tempUpper + tempLower; - currBwInv = WEBRTC_SPL_RSHIFT_U32(currBwInv, 4); - - /* Limit inv rate. Note that minBwInv > maxBwInv! */ - if(currBwInv < bweStr->maxBwInv) { - currBwInv = bweStr->maxBwInv; - } else if(currBwInv > bweStr->minBwInv) { - currBwInv = bweStr->minBwInv; - } - - /* update bottle neck rate estimate */ - bweStr->recBwInv = WEBRTC_SPL_UMUL(weight, currBwInv) + - WEBRTC_SPL_UMUL((uint32_t) 8192 - weight, bweStr->recBwInv); - - /* Shift back to Q30 from Q40 (actual used bits shouldn't be more than 27 based on minBwInv) - up to 30 bits used with Q13 weight */ - bweStr->recBwInv = WEBRTC_SPL_RSHIFT_U32(bweStr->recBwInv, 13); - - /* reset time-since-update counter */ - bweStr->lastUpdate = arrivalTime; - bweStr->lastReduction = arrivalTime + FS3; - bweStr->countRecPkts = 0; - - /* to save resolution compute the inverse of recBwAvg in Q26 by left shifting numerator to 2^31 - and NOT right shifting recBwAvg 5 bits to an integer - At max 13 bits are used - shift to Q5 */ - recBwAvgInv = (0x80000000 + bweStr->recBwAvg / 2) / bweStr->recBwAvg; - - /* Calculate Projected arrival time difference */ - - /* The numerator of the quotient can be 22 bits so right shift inv by 4 to avoid overflow - result in Q22 */ - arrTimeProj = WEBRTC_SPL_MUL((int32_t)8000, recBwAvgInv); - /* shift to Q22 */ - arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 4); - /* complete calulation */ - arrTimeProj = WEBRTC_SPL_MUL(((int32_t)pksize + HEADER_SIZE), arrTimeProj); - /* shift to Q10 */ - arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 12); - - /* difference between projected and actual arrival time differences */ - /* Q9 (only shift arrTimeDiff by 5 to simulate divide by 16 (need to revisit if change sampling rate) DH */ - if ((arrTimeDiff << 6) > (int32_t)arrTimeProj) { - arrTimeNoise = (arrTimeDiff << 6) 
- arrTimeProj; - sign = 1; - } else { - arrTimeNoise = arrTimeProj - (arrTimeDiff << 6); - sign = -1; - } - - /* Q9 */ - arrTimeNoiseAbs = arrTimeNoise; - - /* long term averaged absolute jitter, Q15 */ - weight >>= 3; - bweStr->recJitter = weight * (arrTimeNoiseAbs << 5) + - (1024 - weight) * bweStr->recJitter; - - /* remove the fractional portion */ - bweStr->recJitter >>= 10; - - /* Maximum jitter is 10 msec in Q15 */ - if (bweStr->recJitter > (int32_t)327680) { - bweStr->recJitter = (int32_t)327680; - } - - /* short term averaged absolute jitter */ - /* Calculation in Q13 products in Q23 */ - bweStr->recJitterShortTermAbs = 51 * (arrTimeNoiseAbs << 3) + - WEBRTC_SPL_MUL(973, bweStr->recJitterShortTermAbs); - bweStr->recJitterShortTermAbs >>= 10; - - /* short term averaged jitter */ - /* Calculation in Q13 products in Q23 */ - bweStr->recJitterShortTerm = 205 * (arrTimeNoise << 3) * sign + - WEBRTC_SPL_MUL(3891, bweStr->recJitterShortTerm); - - if (bweStr->recJitterShortTerm < 0) { - temp = -bweStr->recJitterShortTerm; - temp >>= 12; - bweStr->recJitterShortTerm = -temp; - } else { - bweStr->recJitterShortTerm >>= 12; - } - } - } - } else { - /* reset time-since-update counter when receiving the first 9 packets */ - bweStr->lastUpdate = arrivalTime; - bweStr->lastReduction = arrivalTime + FS3; - bweStr->countRecPkts = 0; - bweStr->countUpdates++; - } - - /* Limit to minimum or maximum bottle neck rate (in Q30) */ - if (bweStr->recBwInv > bweStr->minBwInv) { - bweStr->recBwInv = bweStr->minBwInv; - } else if (bweStr->recBwInv < bweStr->maxBwInv) { - bweStr->recBwInv = bweStr->maxBwInv; - } - - - /* store frame length */ - bweStr->prevFrameSizeMs = frameSize; - - /* store far-side transmission rate */ - bweStr->prevRtpRate = recRtpRate; - - /* store far-side RTP time stamp */ - bweStr->prevRtpNumber = rtpNumber; - - /* Replace bweStr->recMaxDelay by the new value (atomic operation) */ - if (bweStr->prevArrivalTime != 0xffffffff) { - bweStr->recMaxDelay = 
WEBRTC_SPL_MUL(3, bweStr->recJitter); - } - - /* store arrival time stamp */ - bweStr->prevArrivalTime = arrivalTime; - bweStr->prevSendTime = sendTime; - - /* Replace bweStr->recBw by the new value */ - bweStr->recBw = 1073741824 / bweStr->recBwInv - bweStr->recHeaderRate; - - if (immediateSet) { - /* delay correction factor is in Q10 */ - bweStr->recBw = WEBRTC_SPL_UMUL(delayCorrFactor, bweStr->recBw); - bweStr->recBw = WEBRTC_SPL_RSHIFT_U32(bweStr->recBw, 10); - - if (bweStr->recBw < (int32_t) MIN_ISAC_BW) { - bweStr->recBw = (int32_t) MIN_ISAC_BW; - } - - bweStr->recBwAvg = (bweStr->recBw + bweStr->recHeaderRate) << 5; - - bweStr->recBwAvgQ = bweStr->recBw << 7; - - bweStr->recJitterShortTerm = 0; - - bweStr->recBwInv = 1073741824 / (bweStr->recBw + bweStr->recHeaderRate); - - immediateSet = 0; - } - - - return 0; -} - -/* This function updates the send bottle neck rate */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bweStr, - const int16_t Index) -{ - uint16_t RateInd; - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - if ( (Index < 0) || (Index > 23) ) { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - - if ( Index > 11 ) { - RateInd = Index - 12; - /* compute the jitter estimate as decoded on the other side in Q9 */ - /* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MAX_ISAC_MD */ - bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) + - 51 * (MAX_ISAC_MD << 9); - bweStr->sendMaxDelayAvg >>= 9; - - } else { - RateInd = Index; - /* compute the jitter estimate as decoded on the other side in Q9 */ - /* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MIN_ISAC_MD */ - bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) + - 51 * (MIN_ISAC_MD << 9); - bweStr->sendMaxDelayAvg >>= 9; - - } - - - /* compute the BN estimate 
as decoded on the other side */ - /* sendBwAvg = 0.9 * sendBwAvg + 0.1 * kQRateTable[RateInd]; */ - bweStr->sendBwAvg = 461 * bweStr->sendBwAvg + - 51 * ((uint32_t)kQRateTable[RateInd] << 7); - bweStr->sendBwAvg = WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 9); - - - if (WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 7) > 28000 && !bweStr->highSpeedSend) { - bweStr->countHighSpeedSent++; - - /* approx 2 seconds with 30ms frames */ - if (bweStr->countHighSpeedSent >= 66) { - bweStr->highSpeedSend = 1; - } - } else if (!bweStr->highSpeedSend) { - bweStr->countHighSpeedSent = 0; - } - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_GetDownlinkBwIndexImpl(...) - * - * This function calculates and returns the bandwidth/jitter estimation code - * (integer 0...23) to put in the sending iSAC payload. - * - * Input: - * - bweStr : BWE struct - * - * Return: - * bandwith and jitter index (0..23) - */ -uint16_t WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bweStr) -{ - int32_t rate; - int32_t maxDelay; - uint16_t rateInd; - uint16_t maxDelayBit; - int32_t tempTerm1; - int32_t tempTerm2; - int32_t tempTermX; - int32_t tempTermY; - int32_t tempMin; - int32_t tempMax; - - if (bweStr->external_bw_info.in_use) - return bweStr->external_bw_info.bottleneck_idx; - - /* Get Rate Index */ - - /* Get unquantized rate. Always returns 10000 <= rate <= 32000 */ - rate = WebRtcIsacfix_GetDownlinkBandwidth(bweStr); - - /* Compute the averaged BN estimate on this side */ - - /* recBwAvg = 0.9 * recBwAvg + 0.1 * (rate + bweStr->recHeaderRate), 0.9 and 0.1 in Q9 */ - bweStr->recBwAvg = 922 * bweStr->recBwAvg + - 102 * (((uint32_t)rate + bweStr->recHeaderRate) << 5); - bweStr->recBwAvg = WEBRTC_SPL_RSHIFT_U32(bweStr->recBwAvg, 10); - - /* Find quantization index that gives the closest rate after averaging. 
- * Note that we don't need to check the last value, rate <= kQRateTable[11], - * because we will use rateInd = 11 even if rate > kQRateTable[11]. */ - for (rateInd = 1; rateInd < 11; rateInd++) { - if (rate <= kQRateTable[rateInd]){ - break; - } - } - - /* find closest quantization index, and update quantized average by taking: */ - /* 0.9*recBwAvgQ + 0.1*kQRateTable[rateInd] */ - - /* 0.9 times recBwAvgQ in Q16 */ - /* 461/512 - 25/65536 =0.900009 */ - tempTerm1 = WEBRTC_SPL_MUL(bweStr->recBwAvgQ, 25); - tempTerm1 >>= 7; - tempTermX = WEBRTC_SPL_UMUL(461, bweStr->recBwAvgQ) - tempTerm1; - - /* rate in Q16 */ - tempTermY = rate << 16; - - /* 0.1 * kQRateTable[rateInd] = KQRate01[rateInd] */ - tempTerm1 = tempTermX + KQRate01[rateInd] - tempTermY; - tempTerm2 = tempTermY - tempTermX - KQRate01[rateInd-1]; - - /* Compare (0.9 * recBwAvgQ + 0.1 * kQRateTable[rateInd] - rate) > - (rate - 0.9 * recBwAvgQ - 0.1 * kQRateTable[rateInd-1]) */ - if (tempTerm1 > tempTerm2) { - rateInd--; - } - - /* Update quantized average by taking: */ - /* 0.9*recBwAvgQ + 0.1*kQRateTable[rateInd] */ - - /* Add 0.1 times kQRateTable[rateInd], in Q16 */ - tempTermX += KQRate01[rateInd]; - - /* Shift back to Q7 */ - bweStr->recBwAvgQ = tempTermX >> 9; - - /* Count consecutive received bandwidth above 28000 kbps (28000 in Q7 = 3584000) */ - /* If 66 high estimates in a row, set highSpeedRec to one */ - /* 66 corresponds to ~2 seconds in 30 msec mode */ - if ((bweStr->recBwAvgQ > 3584000) && !bweStr->highSpeedRec) { - bweStr->countHighSpeedRec++; - if (bweStr->countHighSpeedRec >= 66) { - bweStr->highSpeedRec = 1; - } - } else if (!bweStr->highSpeedRec) { - bweStr->countHighSpeedRec = 0; - } - - /* Get Max Delay Bit */ - - /* get unquantized max delay */ - maxDelay = WebRtcIsacfix_GetDownlinkMaxDelay(bweStr); - - /* Update quantized max delay average */ - tempMax = 652800; /* MAX_ISAC_MD * 0.1 in Q18 */ - tempMin = 130560; /* MIN_ISAC_MD * 0.1 in Q18 */ - tempTermX = 
WEBRTC_SPL_MUL((int32_t)bweStr->recMaxDelayAvgQ, (int32_t)461); - tempTermY = maxDelay << 18; - - tempTerm1 = tempTermX + tempMax - tempTermY; - tempTerm2 = tempTermY - tempTermX - tempMin; - - if ( tempTerm1 > tempTerm2) { - maxDelayBit = 0; - tempTerm1 = tempTermX + tempMin; - - /* update quantized average, shift back to Q9 */ - bweStr->recMaxDelayAvgQ = tempTerm1 >> 9; - } else { - maxDelayBit = 12; - tempTerm1 = tempTermX + tempMax; - - /* update quantized average, shift back to Q9 */ - bweStr->recMaxDelayAvgQ = tempTerm1 >> 9; - } - - /* Return bandwitdh and jitter index (0..23) */ - return (uint16_t)(rateInd + maxDelayBit); -} - -/* get the bottle neck rate from far side to here, as estimated on this side */ -uint16_t WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bweStr) -{ - uint32_t recBw; - int32_t jitter_sign; /* Q8 */ - int32_t bw_adjust; /* Q16 */ - int32_t rec_jitter_short_term_abs_inv; /* Q18 */ - int32_t temp; - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - /* Q18 rec jitter short term abs is in Q13, multiply it by 2^13 to save precision - 2^18 then needs to be shifted 13 bits to 2^31 */ - rec_jitter_short_term_abs_inv = 0x80000000u / bweStr->recJitterShortTermAbs; - - /* Q27 = 9 + 18 */ - jitter_sign = (bweStr->recJitterShortTerm >> 4) * - rec_jitter_short_term_abs_inv; - - if (jitter_sign < 0) { - temp = -jitter_sign; - temp >>= 19; - jitter_sign = -temp; - } else { - jitter_sign >>= 19; - } - - /* adjust bw proportionally to negative average jitter sign */ - //bw_adjust = 1.0f - jitter_sign * (0.15f + 0.15f * jitter_sign * jitter_sign); - //Q8 -> Q16 .15 +.15 * jitter^2 first term is .15 in Q16 latter term is Q8*Q8*Q8 - //38 in Q8 ~.15 9830 in Q16 ~.15 - temp = 9830 + ((38 * jitter_sign * jitter_sign) >> 8); - - if (jitter_sign < 0) { - temp = WEBRTC_SPL_MUL(jitter_sign, temp); - temp = -temp; - temp >>= 8; - bw_adjust = (uint32_t)65536 + temp; /* (1 << 16) + temp; */ - } else { - /* (1 << 16) - ((jitter_sign * temp) >> 8); */ - 
bw_adjust = 65536 - ((jitter_sign * temp) >> 8); - } - - //make sure following multiplication won't overflow - //bw adjust now Q14 - bw_adjust >>= 2; // See if good resolution is maintained. - - /* adjust Rate if jitter sign is mostly constant */ - recBw = WEBRTC_SPL_UMUL(bweStr->recBw, bw_adjust); - - recBw >>= 14; - - /* limit range of bottle neck rate */ - if (recBw < MIN_ISAC_BW) { - recBw = MIN_ISAC_BW; - } else if (recBw > MAX_ISAC_BW) { - recBw = MAX_ISAC_BW; - } - - return (uint16_t) recBw; -} - -/* Returns the mmax delay (in ms) */ -int16_t WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bweStr) -{ - int16_t recMaxDelay = (int16_t)(bweStr->recMaxDelay >> 15); - - RTC_DCHECK(!bweStr->external_bw_info.in_use); - - /* limit range of jitter estimate */ - if (recMaxDelay < MIN_ISAC_MD) { - recMaxDelay = MIN_ISAC_MD; - } else if (recMaxDelay > MAX_ISAC_MD) { - recMaxDelay = MAX_ISAC_MD; - } - - return recMaxDelay; -} - -/* Clamp val to the closed interval [min,max]. */ -static int16_t clamp(int16_t val, int16_t min, int16_t max) { - RTC_DCHECK_LE(min, max); - return val < min ? min : (val > max ? max : val); -} - -int16_t WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr* bweStr) { - return bweStr->external_bw_info.in_use - ? bweStr->external_bw_info.send_bw_avg - : clamp(bweStr->sendBwAvg >> 7, MIN_ISAC_BW, MAX_ISAC_BW); -} - -int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr* bweStr) { - return bweStr->external_bw_info.in_use - ? 
bweStr->external_bw_info.send_max_delay_avg - : clamp(bweStr->sendMaxDelayAvg >> 9, MIN_ISAC_MD, MAX_ISAC_MD); -} - -/* - * update long-term average bitrate and amount of data in buffer - * returns minimum payload size (bytes) - */ -uint16_t WebRtcIsacfix_GetMinBytes(RateModel *State, - int16_t StreamSize, /* bytes in bitstream */ - const int16_t FrameSamples, /* samples per frame */ - const int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ - const int16_t DelayBuildUp) /* max delay from bottle neck buffering (ms) */ -{ - int32_t MinRate = 0; - uint16_t MinBytes; - int16_t TransmissionTime; - int32_t inv_Q12; - int32_t den; - - - /* first 10 packets @ low rate, then INIT_BURST_LEN packets @ fixed rate of INIT_RATE bps */ - if (State->InitCounter > 0) { - if (State->InitCounter-- <= INIT_BURST_LEN) { - MinRate = INIT_RATE; - } else { - MinRate = 0; - } - } else { - /* handle burst */ - if (State->BurstCounter) { - if (State->StillBuffered < - (((512 - 512 / BURST_LEN) * DelayBuildUp) >> 9)) { - /* max bps derived from BottleNeck and DelayBuildUp values */ - inv_Q12 = 4096 / (BURST_LEN * FrameSamples); - MinRate = (512 + SAMPLES_PER_MSEC * ((DelayBuildUp * inv_Q12) >> 3)) * - BottleNeck; - } else { - /* max bps derived from StillBuffered and DelayBuildUp values */ - inv_Q12 = 4096 / FrameSamples; - if (DelayBuildUp > State->StillBuffered) { - MinRate = (512 + SAMPLES_PER_MSEC * (((DelayBuildUp - - State->StillBuffered) * inv_Q12) >> 3)) * BottleNeck; - } else if ((den = WEBRTC_SPL_MUL(SAMPLES_PER_MSEC, (State->StillBuffered - DelayBuildUp))) >= FrameSamples) { - /* MinRate will be negative here */ - MinRate = 0; - } else { - MinRate = (512 - ((den * inv_Q12) >> 3)) * BottleNeck; - } - //if (MinRate < 1.04 * BottleNeck) - // MinRate = 1.04 * BottleNeck; - //Q9 - if (MinRate < WEBRTC_SPL_MUL(532, BottleNeck)) { - MinRate += WEBRTC_SPL_MUL(22, BottleNeck); - } - } - - State->BurstCounter--; - } - } - - - /* convert rate from bits/second to bytes/packet */ - 
//round and shift before conversion - MinRate += 256; - MinRate >>= 9; - MinBytes = MinRate * FrameSamples / FS8; - - /* StreamSize will be adjusted if less than MinBytes */ - if (StreamSize < MinBytes) { - StreamSize = MinBytes; - } - - /* keep track of when bottle neck was last exceeded by at least 1% */ - //517/512 ~ 1.01 - if ((StreamSize * (int32_t)FS8) / FrameSamples > (517 * BottleNeck) >> 9) { - if (State->PrevExceed) { - /* bottle_neck exceded twice in a row, decrease ExceedAgo */ - State->ExceedAgo -= BURST_INTERVAL / (BURST_LEN - 1); - if (State->ExceedAgo < 0) { - State->ExceedAgo = 0; - } - } else { - State->ExceedAgo += FrameSamples / SAMPLES_PER_MSEC; /* ms */ - State->PrevExceed = 1; - } - } else { - State->PrevExceed = 0; - State->ExceedAgo += FrameSamples / SAMPLES_PER_MSEC; /* ms */ - } - - /* set burst flag if bottle neck not exceeded for long time */ - if ((State->ExceedAgo > BURST_INTERVAL) && (State->BurstCounter == 0)) { - if (State->PrevExceed) { - State->BurstCounter = BURST_LEN - 1; - } else { - State->BurstCounter = BURST_LEN; - } - } - - - /* Update buffer delay */ - TransmissionTime = (StreamSize * 8000) / BottleNeck; /* ms */ - State->StillBuffered += TransmissionTime; - State->StillBuffered -= FrameSamples / SAMPLES_PER_MSEC; /* ms */ - if (State->StillBuffered < 0) { - State->StillBuffered = 0; - } - - if (State->StillBuffered > 2000) { - State->StillBuffered = 2000; - } - - return MinBytes; -} - - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsacfix_UpdateRateModel(RateModel *State, - int16_t StreamSize, /* bytes in bitstream */ - const int16_t FrameSamples, /* samples per frame */ - const int16_t BottleNeck) /* bottle neck rate; excl headers (bps) */ -{ - const int16_t TransmissionTime = (StreamSize * 8000) / BottleNeck; /* ms */ - - /* avoid the initial "high-rate" burst */ - State->InitCounter = 0; - - /* Update buffer delay */ - State->StillBuffered += TransmissionTime; - 
State->StillBuffered -= FrameSamples >> 4; /* ms */ - if (State->StillBuffered < 0) { - State->StillBuffered = 0; - } - -} - - -void WebRtcIsacfix_InitRateModel(RateModel *State) -{ - State->PrevExceed = 0; /* boolean */ - State->ExceedAgo = 0; /* ms */ - State->BurstCounter = 0; /* packets */ - State->InitCounter = INIT_BURST_LEN + 10; /* packets */ - State->StillBuffered = 1; /* ms */ -} - - - - - -int16_t WebRtcIsacfix_GetNewFrameLength(int16_t bottle_neck, int16_t current_framesamples) -{ - int16_t new_framesamples; - - new_framesamples = current_framesamples; - - /* find new framelength */ - switch(current_framesamples) { - case 480: - if (bottle_neck < Thld_30_60) { - new_framesamples = 960; - } - break; - case 960: - if (bottle_neck >= Thld_60_30) { - new_framesamples = 480; - } - break; - default: - new_framesamples = -1; /* Error */ - } - - return new_framesamples; -} - -int16_t WebRtcIsacfix_GetSnr(int16_t bottle_neck, int16_t framesamples) -{ - int16_t s2nr = 0; - - /* find new SNR value */ - //consider BottleNeck to be in Q10 ( * 1 in Q10) - switch(framesamples) { - // TODO(bjornv): The comments below confuses me. I don't know if there is a - // difference between frame lengths (in which case the implementation is - // wrong), or if it is frame length independent in which case we should - // correct the comment and simplify the implementation. 
- case 480: - /*s2nr = -1*(a_30 << 10) + ((b_30 * bottle_neck) >> 10);*/ - s2nr = -22500 + (int16_t)(500 * bottle_neck >> 10); - break; - case 960: - /*s2nr = -1*(a_60 << 10) + ((b_60 * bottle_neck) >> 10);*/ - s2nr = -22500 + (int16_t)(500 * bottle_neck >> 10); - break; - default: - s2nr = -1; /* Error */ - } - - return s2nr; //return in Q10 - -} diff --git a/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h b/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h deleted file mode 100644 index f106746f14..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * bandwidth_estimator.h - * - * This header file contains the API for the Bandwidth Estimator - * designed for iSAC. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -/**************************************************************************** - * WebRtcIsacfix_InitBandwidthEstimator(...) - * - * This function initializes the struct for the bandwidth estimator - * - * Input/Output: - * - bwest_str : Struct containing bandwidth information. - * - * Return value : 0 - */ - -int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr* bwest_str); - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBwImpl(...) 
- * - * This function updates bottle neck rate received from other side in payload - * and calculates a new bottle neck to send to the other side. - * - * Input/Output: - * - bweStr : struct containing bandwidth information. - * - rtpNumber : value from RTP packet, from NetEq - * - frameSize : length of signal frame in ms, from iSAC decoder - * - sendTime : value in RTP header giving send time in samples - * - arrivalTime : value given by timeGetTime() time of arrival in - * samples of packet from NetEq - * - pksize : size of packet in bytes, from NetEq - * - Index : integer (range 0...23) indicating bottle neck & - * jitter as estimated by other side - * - * Return value : 0 if everything went fine, - * -1 otherwise - */ - -int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr* bwest_str, - uint16_t rtp_number, - int16_t frameSize, - uint32_t send_ts, - uint32_t arr_ts, - size_t pksize, - uint16_t Index); - -/* Update receiving estimates. Used when we only receive BWE index, no iSAC data - * packet. */ -int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr* bwest_str, - int16_t Index); - -/**************************************************************************** - * WebRtcIsacfix_GetDownlinkBwIndexImpl(...) - * - * This function calculates and returns the bandwidth/jitter estimation code - * (integer 0...23) to put in the sending iSAC payload. 
- * - * Input: - * - bweStr : BWE struct - * - * Return: - * bandwith and jitter index (0..23) - */ -uint16_t WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr* bwest_str); - -/* Returns the bandwidth estimation (in bps) */ -uint16_t WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the bandwidth that iSAC should send with in bps */ -int16_t WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the max delay (in ms) */ -int16_t WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* Returns the max delay value from the other side in ms */ -int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* - * update amount of data in bottle neck buffer and burst handling - * returns minimum payload size (bytes) - */ -uint16_t WebRtcIsacfix_GetMinBytes( - RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - int16_t FrameLen, /* ms per frame */ - int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ - int16_t DelayBuildUp); /* max delay from bottle neck buffering (ms) */ - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsacfix_UpdateRateModel( - RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - int16_t FrameSamples, /* samples per frame */ - int16_t BottleNeck); /* bottle neck rate; excl headers (bps) */ - -void WebRtcIsacfix_InitRateModel(RateModel* State); - -/* Returns the new framelength value (input argument: bottle_neck) */ -int16_t WebRtcIsacfix_GetNewFrameLength(int16_t bottle_neck, - int16_t current_framelength); - -/* Returns the new SNR value (input argument: bottle_neck) */ -// returns snr in Q10 -int16_t WebRtcIsacfix_GetSnr(int16_t bottle_neck, int16_t framesamples); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ \ - */ diff --git a/modules/audio_coding/codecs/isac/fix/source/codec.h b/modules/audio_coding/codecs/isac/fix/source/codec.h deleted file 
mode 100644 index 01d6fb907e..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/codec.h +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * codec.h - * - * This header file contains the calls to the internal encoder - * and decoder functions. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -#ifdef __cplusplus -extern "C" { -#endif - -int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr* bwest_str, - Bitstr_dec* streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts); - -int WebRtcIsacfix_DecodeImpl(int16_t* signal_out16, - IsacFixDecoderInstance* ISACdec_obj, - size_t* current_framesamples); - -void WebRtcIsacfix_DecodePlcImpl(int16_t* decoded, - IsacFixDecoderInstance* ISACdec_obj, - size_t* current_framesample); - -int WebRtcIsacfix_EncodeImpl(int16_t* in, - IsacFixEncoderInstance* ISACenc_obj, - BwEstimatorstr* bw_estimatordata, - int16_t CodingMode); - -int WebRtcIsacfix_EncodeStoredData(IsacFixEncoderInstance* ISACenc_obj, - int BWnumber, - float scale); - -/* initialization functions */ - -void WebRtcIsacfix_InitMaskingEnc(MaskFiltstr_enc* maskdata); -void WebRtcIsacfix_InitMaskingDec(MaskFiltstr_dec* maskdata); - -void WebRtcIsacfix_InitPreFilterbank(PreFiltBankstr* prefiltdata); - -void WebRtcIsacfix_InitPostFilterbank(PostFiltBankstr* postfiltdata); - -void WebRtcIsacfix_InitPitchFilter(PitchFiltstr* pitchfiltdata); - -void 
WebRtcIsacfix_InitPitchAnalysis(PitchAnalysisStruct* State); - -void WebRtcIsacfix_InitPlc(PLCstr* State); - -/* transform functions */ - -void WebRtcIsacfix_InitTransform(void); - -typedef void (*Time2Spec)(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -typedef void (*Spec2Time)(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); - -extern Time2Spec WebRtcIsacfix_Time2Spec; -extern Spec2Time WebRtcIsacfix_Spec2Time; - -void WebRtcIsacfix_Time2SpecC(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -void WebRtcIsacfix_Spec2TimeC(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); - -#if defined(WEBRTC_HAS_NEON) -void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -void WebRtcIsacfix_Spec2TimeNeon(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); -#endif - -#if defined(MIPS32_LE) -void WebRtcIsacfix_Time2SpecMIPS(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outre, - int16_t* outim); -void WebRtcIsacfix_Spec2TimeMIPS(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16); -#endif - -/* filterbank functions */ - -void WebRtcIsacfix_SplitAndFilter1(int16_t* in, - int16_t* LP16, - int16_t* HP16, - PreFiltBankstr* prefiltdata); - -void WebRtcIsacfix_FilterAndCombine1(int16_t* tempin_ch1, - int16_t* tempin_ch2, - int16_t* out16, - PostFiltBankstr* postfiltdata); - -/* normalized lattice filters */ - -void WebRtcIsacfix_NormLatticeFilterMa(size_t orderCoef, - int32_t* stateGQ15, - int16_t* lat_inQ0, - int16_t* filt_coefQ15, - int32_t* gain_lo_hiQ17, - int16_t lo_hi, - int16_t* lat_outQ9); - -void WebRtcIsacfix_NormLatticeFilterAr(size_t orderCoef, - int16_t* stateGQ0, - int32_t* lat_inQ25, - int16_t* filt_coefQ15, - int32_t* gain_lo_hiQ17, - int16_t lo_hi, - int16_t* lat_outQ0); - -/* TODO(kma): Remove the following functions into individual header 
files. */ - -/* Internal functions in both C and ARM Neon versions */ - -int WebRtcIsacfix_AutocorrC(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); - -void WebRtcIsacfix_FilterMaLoopC(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); - -#if defined(WEBRTC_HAS_NEON) -int WebRtcIsacfix_AutocorrNeon(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); - -void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); -#endif - -#if defined(MIPS32_LE) -int WebRtcIsacfix_AutocorrMIPS(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); - -void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); -#endif - -/* Function pointers associated with the above functions. */ - -typedef int (*AutocorrFix)(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale); -extern AutocorrFix WebRtcIsacfix_AutocorrFix; - -typedef void (*FilterMaLoopFix)(int16_t input0, - int16_t input1, - int32_t input2, - int32_t* ptr0, - int32_t* ptr1, - int32_t* ptr2); -extern FilterMaLoopFix WebRtcIsacfix_FilterMaLoopFix; - -#ifdef __cplusplus -} // extern "C" -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_CODEC_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/decode.c b/modules/audio_coding/codecs/isac/fix/source/decode.c deleted file mode 100644 index 144208818a..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/decode.c +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode.c - * - * This C file contains the internal decoding function. - * - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - - - - -int WebRtcIsacfix_DecodeImpl(int16_t* signal_out16, - IsacFixDecoderInstance* ISACdec_obj, - size_t* current_framesamples) -{ - int k; - int err; - int16_t BWno; - int len = 0; - - int16_t model; - - - int16_t Vector_Word16_1[FRAMESAMPLES/2]; - int16_t Vector_Word16_2[FRAMESAMPLES/2]; - - int32_t Vector_Word32_1[FRAMESAMPLES/2]; - int32_t Vector_Word32_2[FRAMESAMPLES/2]; - - int16_t lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs - int16_t hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. 
coeffs - int32_t gain_lo_hiQ17[2*SUBFRAMES]; - - int16_t PitchLags_Q7[PITCH_SUBFRAMES]; - int16_t PitchGains_Q12[PITCH_SUBFRAMES]; - int16_t AvgPitchGain_Q12; - - int16_t tmp_1, tmp_2; - int32_t tmp32a; - int16_t gainQ13; - - - size_t frame_nb; /* counter */ - size_t frame_mode; /* 0 for 30ms, 1 for 60ms */ - static const size_t kProcessedSamples = 480; /* 480 (for both 30, 60 ms) */ - - /* PLC */ - int16_t overlapWin[ 240 ]; - - (ISACdec_obj->bitstr_obj).W_upper = 0xFFFFFFFF; - (ISACdec_obj->bitstr_obj).streamval = 0; - (ISACdec_obj->bitstr_obj).stream_index = 0; - (ISACdec_obj->bitstr_obj).full = 1; - - - /* decode framelength and BW estimation - not used, only for stream pointer*/ - err = WebRtcIsacfix_DecodeFrameLen(&ISACdec_obj->bitstr_obj, current_framesamples); - if (err<0) // error check - return err; - - frame_mode = *current_framesamples / MAX_FRAMESAMPLES; /* 0, or 1 */ - - err = WebRtcIsacfix_DecodeSendBandwidth(&ISACdec_obj->bitstr_obj, &BWno); - if (err<0) // error check - return err; - - /* one loop if it's one frame (30ms), two loops if two frames bundled together - * (60ms) */ - for (frame_nb = 0; frame_nb <= frame_mode; frame_nb++) { - - /* decode & dequantize pitch parameters */ - err = WebRtcIsacfix_DecodePitchGain(&(ISACdec_obj->bitstr_obj), PitchGains_Q12); - if (err<0) // error check - return err; - - err = WebRtcIsacfix_DecodePitchLag(&ISACdec_obj->bitstr_obj, PitchGains_Q12, PitchLags_Q7); - if (err<0) // error check - return err; - - AvgPitchGain_Q12 = (int16_t)(((int32_t)PitchGains_Q12[0] + PitchGains_Q12[1] + PitchGains_Q12[2] + PitchGains_Q12[3])>>2); - - /* decode & dequantize FiltCoef */ - err = WebRtcIsacfix_DecodeLpc(gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15, - &ISACdec_obj->bitstr_obj, &model); - - if (err<0) // error check - return err; - - /* decode & dequantize spectrum */ - len = WebRtcIsacfix_DecodeSpec(&ISACdec_obj->bitstr_obj, Vector_Word16_1, Vector_Word16_2, AvgPitchGain_Q12); - if (len < 0) // error check - return 
len; - - // Why does this need Q16 in and out? /JS - WebRtcIsacfix_Spec2Time(Vector_Word16_1, Vector_Word16_2, Vector_Word32_1, Vector_Word32_2); - - for (k=0; k Q9. - Vector_Word16_1[k] = (int16_t)((Vector_Word32_1[k] + 64) >> 7); - } - - /* ---- If this is recovery frame ---- */ - if( (ISACdec_obj->plcstr_obj).used == PLC_WAS_USED ) - { - (ISACdec_obj->plcstr_obj).used = PLC_NOT_USED; - if( (ISACdec_obj->plcstr_obj).B < 1000 ) - { - (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 4000; - } - - ISACdec_obj->plcstr_obj.decayCoeffPriodic = WEBRTC_SPL_WORD16_MAX; /* DECAY_RATE is in Q15 */ - ISACdec_obj->plcstr_obj.decayCoeffNoise = WEBRTC_SPL_WORD16_MAX; /* DECAY_RATE is in Q15 */ - ISACdec_obj->plcstr_obj.pitchCycles = 0; - - PitchGains_Q12[0] = (int16_t)(PitchGains_Q12[0] * 700 >> 10); - - /* ---- Add-overlap ---- */ - WebRtcSpl_GetHanningWindow( overlapWin, RECOVERY_OVERLAP ); - for( k = 0; k < RECOVERY_OVERLAP; k++ ) - Vector_Word16_1[k] = WebRtcSpl_AddSatW16( - (int16_t)(ISACdec_obj->plcstr_obj.overlapLP[k] * - overlapWin[RECOVERY_OVERLAP - k - 1] >> 14), - (int16_t)(Vector_Word16_1[k] * overlapWin[k] >> 14)); - - - - } - - /* --- Store side info --- */ - if( frame_nb == frame_mode ) - { - /* --- LPC info */ - WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).lofilt_coefQ15, &lofilt_coefQ15[(SUBFRAMES-1)*ORDERLO], ORDERLO ); - WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).hifilt_coefQ15, &hifilt_coefQ15[(SUBFRAMES-1)*ORDERHI], ORDERHI ); - (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[0] = gain_lo_hiQ17[(SUBFRAMES-1) * 2]; - (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[1] = gain_lo_hiQ17[(SUBFRAMES-1) * 2 + 1]; - - /* --- LTP info */ - (ISACdec_obj->plcstr_obj).AvgPitchGain_Q12 = PitchGains_Q12[3]; - (ISACdec_obj->plcstr_obj).lastPitchGain_Q12 = PitchGains_Q12[3]; - (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 = PitchLags_Q7[3]; - - if( PitchLags_Q7[3] < 3000 ) - (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 += PitchLags_Q7[3]; - - WEBRTC_SPL_MEMCPY_W16( 
(ISACdec_obj->plcstr_obj).prevPitchInvIn, Vector_Word16_1, FRAMESAMPLES/2 ); - - } - /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ - - /* inverse pitch filter */ - WebRtcIsacfix_PitchFilter(Vector_Word16_1, Vector_Word16_2, &ISACdec_obj->pitchfiltstr_obj, PitchLags_Q7, PitchGains_Q12, 4); - - if( frame_nb == frame_mode ) - { - WEBRTC_SPL_MEMCPY_W16( (ISACdec_obj->plcstr_obj).prevPitchInvOut, &(Vector_Word16_2[FRAMESAMPLES/2 - (PITCH_MAX_LAG + 10)]), PITCH_MAX_LAG ); - } - - - /* reduce gain to compensate for pitch enhancer */ - /* gain = 1.0f - 0.45f * AvgPitchGain; */ - tmp32a = AvgPitchGain_Q12 * 29; // Q18 - gainQ13 = (int16_t)((262144 - tmp32a) >> 5); // Q18 -> Q13. - - for (k = 0; k < FRAMESAMPLES/2; k++) - { - Vector_Word32_1[k] = (Vector_Word16_2[k] * gainQ13) * (1 << 3); // Q25 - } - - - /* perceptual post-filtering (using normalized lattice filter) */ - WebRtcIsacfix_NormLatticeFilterAr(ORDERLO, (ISACdec_obj->maskfiltstr_obj).PostStateLoGQ0, - Vector_Word32_1, lofilt_coefQ15, gain_lo_hiQ17, 0, Vector_Word16_1); - - /* --- Store Highpass Residual --- */ - for (k = 0; k < FRAMESAMPLES/2; k++) - Vector_Word32_1[k] = Vector_Word32_2[k] * (1 << 9); // Q16 -> Q25 - - for( k = 0; k < PITCH_MAX_LAG + 10; k++ ) - (ISACdec_obj->plcstr_obj).prevHP[k] = Vector_Word32_1[FRAMESAMPLES/2 - (PITCH_MAX_LAG + 10) + k]; - - - WebRtcIsacfix_NormLatticeFilterAr(ORDERHI, (ISACdec_obj->maskfiltstr_obj).PostStateHiGQ0, - Vector_Word32_1, hifilt_coefQ15, gain_lo_hiQ17, 1, Vector_Word16_2); - - /* recombine the 2 bands */ - - /* Form the polyphase signals, and compensate for DC offset */ - for (k=0;kpostfiltbankstr_obj); - - } - return len; -} diff --git a/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c b/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c deleted file mode 100644 index 99676504cd..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2011 The 
WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode_bwe.c - * - * This C file contains the internal decode bandwidth estimate function. - * - */ - - -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - - - - -int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr *bwest_str, - Bitstr_dec *streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts) -{ - int16_t index; - size_t frame_samples; - int err; - - /* decode framelength */ - err = WebRtcIsacfix_DecodeFrameLen(streamdata, &frame_samples); - /* error check */ - if (err<0) { - return err; - } - - /* decode BW estimation */ - err = WebRtcIsacfix_DecodeSendBandwidth(streamdata, &index); - /* error check */ - if (err<0) { - return err; - } - - /* Update BWE with received data */ - err = WebRtcIsacfix_UpdateUplinkBwImpl( - bwest_str, - rtp_seq_number, - (int16_t)(frame_samples * 1000 / FS), - send_ts, - arr_ts, - packet_size, /* in bytes */ - index); - - /* error check */ - if (err<0) { - return err; - } - - /* Succesful */ - return 0; -} diff --git a/modules/audio_coding/codecs/isac/fix/source/decode_plc.c b/modules/audio_coding/codecs/isac/fix/source/decode_plc.c deleted file mode 100644 index 873cf951ba..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/decode_plc.c +++ /dev/null @@ -1,805 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode_plc.c - * - * Packet Loss Concealment. - * - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - - -#define NO_OF_PRIMES 8 -#define NOISE_FILTER_LEN 30 - -/* - * function to decode the bitstream - * returns the total number of bytes in the stream - */ - -static int16_t plc_filterma_Fast( - int16_t *In, /* (i) Vector to be filtered. 
InOut[-orderCoef+1] - to InOut[-1] contains state */ - int16_t *Out, /* (o) Filtered vector */ - int16_t *B, /* (i) The filter coefficients (in Q0) */ - int16_t Blen, /* (i) Number of B coefficients */ - int16_t len, /* (i) Number of samples to be filtered */ - int16_t reduceDecay, - int16_t decay, - int16_t rshift ) -{ - int i, j; - int32_t o; - int32_t lim = (1 << (15 + rshift)) - 1; - - for (i = 0; i < len; i++) - { - const int16_t *b_ptr = &B[0]; - const int16_t *x_ptr = &In[i]; - - o = (int32_t)0; - - for (j = 0;j < Blen; j++) - { - o = WebRtcSpl_AddSatW32(o, *b_ptr * *x_ptr); - b_ptr++; - x_ptr--; - } - - /* to round off correctly */ - o = WebRtcSpl_AddSatW32(o, 1 << (rshift - 1)); - - /* saturate according to the domain of the filter coefficients */ - o = WEBRTC_SPL_SAT((int32_t)lim, o, (int32_t)-lim); - - /* o should be in the range of int16_t */ - o >>= rshift; - - /* decay the output signal; this is specific to plc */ - *Out++ = (int16_t)((int16_t)o * decay >> 15); - - /* change the decay */ - decay -= reduceDecay; - if( decay < 0 ) - decay = 0; - } - return( decay ); -} - - - - - - - - -static __inline int32_t log2_Q8_T( uint32_t x ) { - - int32_t zeros; - int16_t frac; - - zeros=WebRtcSpl_NormU32(x); - frac = (int16_t)(((x << zeros) & 0x7FFFFFFF) >> 23); - - /* log2(magn(i)) */ - return ((31 - zeros) << 8) + frac; -} - -static __inline int16_t exp2_Q10_T(int16_t x) { // Both in and out in Q10 - - int16_t tmp16_1, tmp16_2; - - tmp16_2=(int16_t)(0x0400|(x&0x03FF)); - tmp16_1 = -(x >> 10); - if(tmp16_1>0) - return tmp16_2 >> tmp16_1; - else - return tmp16_2 << -tmp16_1; - -} - - -/* - This is a fixed-point version of the above code with limLow = 700 and limHigh = 5000, - hard-coded. The values 700 and 5000 were experimentally obtained. - - The function implements membership values for two sets. The mebership functions are - of second orders corresponding to half-bell-shapped pulses. 
-*/ -static void MemshipValQ15( int16_t in, int16_t *A, int16_t *B ) -{ - int16_t x; - - in -= 700; /* translate the lowLim to 0, limHigh = 5000 - 700, M = 2150 */ - - if( in <= 2150 ) - { - if( in > 0 ) - { - /* b = in^2 / (2 * M^2), a = 1 - b in Q0. - We have to compute in Q15 */ - - /* x = in / 2150 {in Q15} = x * 15.2409 {in Q15} = - x*15 + (x*983)/(2^12); note that 983/2^12 = 0.23999 */ - - /* we are sure that x is in the range of int16_t */ - x = (int16_t)(in * 15 + (in * 983 >> 12)); - /* b = x^2 / 2 {in Q15} so a shift of 16 is required to - be in correct domain and one more for the division by 2 */ - *B = (int16_t)((x * x + 0x00010000) >> 17); - *A = WEBRTC_SPL_WORD16_MAX - *B; - } - else - { - *B = 0; - *A = WEBRTC_SPL_WORD16_MAX; - } - } - else - { - if( in < 4300 ) - { - /* This is a mirror case of the above */ - in = 4300 - in; - x = (int16_t)(in * 15 + (in * 983 >> 12)); - /* b = x^2 / 2 {in Q15} so a shift of 16 is required to - be in correct domain and one more for the division by 2 */ - *A = (int16_t)((x * x + 0x00010000) >> 17); - *B = WEBRTC_SPL_WORD16_MAX - *A; - - } - else - { - *A = 0; - *B = WEBRTC_SPL_WORD16_MAX; - } - } -} - - - - -static void LinearResampler(int16_t* in, - int16_t* out, - size_t lenIn, - size_t lenOut) -{ - size_t n = (lenIn - 1) * RESAMP_RES; - int16_t resOut, relativePos, diff; /* */ - size_t i, j; - uint16_t udiff; - - if( lenIn == lenOut ) - { - WEBRTC_SPL_MEMCPY_W16( out, in, lenIn ); - return; - } - - resOut = WebRtcSpl_DivW32W16ResW16( (int32_t)n, (int16_t)(lenOut-1) ); - - out[0] = in[0]; - for( i = 1, j = 0, relativePos = 0; i < lenOut; i++ ) - { - - relativePos += resOut; - while( relativePos > RESAMP_RES ) - { - j++; - relativePos -= RESAMP_RES; - } - - - /* an overflow may happen and the differce in sample values may - * require more than 16 bits. 
We like to avoid 32 bit arithmatic - * as much as possible */ - - if( (in[ j ] > 0) && (in[j + 1] < 0) ) - { - udiff = (uint16_t)(in[ j ] - in[j + 1]); - out[ i ] = in[ j ] - (uint16_t)( ((int32_t)( udiff * relativePos )) >> RESAMP_RES_BIT); - } - else - { - if( (in[j] < 0) && (in[j+1] > 0) ) - { - udiff = (uint16_t)( in[j + 1] - in[ j ] ); - out[ i ] = in[ j ] + (uint16_t)( ((int32_t)( udiff * relativePos )) >> RESAMP_RES_BIT); - } - else - { - diff = in[ j + 1 ] - in[ j ]; - out[i] = in[j] + (int16_t)(diff * relativePos >> RESAMP_RES_BIT); - } - } - } -} - - - - - -void WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16, - IsacFixDecoderInstance *ISACdec_obj, - size_t *current_framesamples ) -{ - int subframecnt; - - int16_t* Vector_Word16_1; - int16_t Vector_Word16_Extended_1[FRAMESAMPLES_HALF + NOISE_FILTER_LEN]; - int16_t* Vector_Word16_2; - int16_t Vector_Word16_Extended_2[FRAMESAMPLES_HALF + NOISE_FILTER_LEN]; - - int32_t Vector_Word32_1[FRAMESAMPLES_HALF]; - int32_t Vector_Word32_2[FRAMESAMPLES_HALF]; - - int16_t lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs - int16_t hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. 
coeffs - - int16_t pitchLags_Q7[PITCH_SUBFRAMES]; - int16_t pitchGains_Q12[PITCH_SUBFRAMES]; - - int16_t tmp_1, tmp_2; - int32_t tmp32a, tmp32b; - int16_t gainQ13; - - int16_t myDecayRate; - - /* ---------- PLC variables ------------ */ - size_t lag0, i, k; - int16_t noiseIndex; - int16_t stretchPitchLP[PITCH_MAX_LAG + 10], stretchPitchLP1[PITCH_MAX_LAG + 10]; - - int32_t gain_lo_hiQ17[2*SUBFRAMES]; - - int16_t nLP, pLP, wNoisyLP, wPriodicLP, tmp16; - size_t minIdx; - int32_t nHP, pHP, wNoisyHP, wPriodicHP, corr, minCorr, maxCoeff; - int16_t noise1, rshift; - - - int16_t ltpGain, pitchGain, myVoiceIndicator, myAbs, maxAbs; - int32_t varIn, varOut, logVarIn, logVarOut, Q, logMaxAbs; - int rightShiftIn, rightShiftOut; - - - /* ------------------------------------- */ - - - myDecayRate = (DECAY_RATE); - Vector_Word16_1 = &Vector_Word16_Extended_1[NOISE_FILTER_LEN]; - Vector_Word16_2 = &Vector_Word16_Extended_2[NOISE_FILTER_LEN]; - - - /* ----- Simply Copy Previous LPC parameters ------ */ - for( subframecnt = 0; subframecnt < SUBFRAMES; subframecnt++ ) - { - /* lower Band */ - WEBRTC_SPL_MEMCPY_W16(&lofilt_coefQ15[ subframecnt * ORDERLO ], - (ISACdec_obj->plcstr_obj).lofilt_coefQ15, ORDERLO); - gain_lo_hiQ17[2*subframecnt] = (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[0]; - - /* Upper Band */ - WEBRTC_SPL_MEMCPY_W16(&hifilt_coefQ15[ subframecnt * ORDERHI ], - (ISACdec_obj->plcstr_obj).hifilt_coefQ15, ORDERHI); - gain_lo_hiQ17[2*subframecnt + 1] = (ISACdec_obj->plcstr_obj).gain_lo_hiQ17[1]; - } - - - - - lag0 = (size_t)(((ISACdec_obj->plcstr_obj.lastPitchLag_Q7 + 64) >> 7) + 1); - - - if( (ISACdec_obj->plcstr_obj).used != PLC_WAS_USED ) - { - (ISACdec_obj->plcstr_obj).pitchCycles = 0; - - (ISACdec_obj->plcstr_obj).lastPitchLP = - &((ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - lag0]); - minCorr = WEBRTC_SPL_WORD32_MAX; - - if ((FRAMESAMPLES_HALF - 10) > 2 * lag0) - { - minIdx = 11; - for( i = 0; i < 21; i++ ) - { - corr = 0; - for( k = 0; k < lag0; k++ ) - { 
- corr = WebRtcSpl_AddSatW32(corr, WEBRTC_SPL_ABS_W32( - WebRtcSpl_SubSatW16( - (ISACdec_obj->plcstr_obj).lastPitchLP[k], - (ISACdec_obj->plcstr_obj).prevPitchInvIn[ - FRAMESAMPLES_HALF - 2*lag0 - 10 + i + k ] ) ) ); - } - if( corr < minCorr ) - { - minCorr = corr; - minIdx = i; - } - } - (ISACdec_obj->plcstr_obj).prevPitchLP = - &( (ISACdec_obj->plcstr_obj).prevPitchInvIn[ - FRAMESAMPLES_HALF - lag0*2 - 10 + minIdx] ); - } - else - { - (ISACdec_obj->plcstr_obj).prevPitchLP = - (ISACdec_obj->plcstr_obj).lastPitchLP; - } - pitchGain = (ISACdec_obj->plcstr_obj).lastPitchGain_Q12; - - WebRtcSpl_AutoCorrelation( - &(ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - lag0], - lag0, 0, &varIn, &rightShiftIn); - WebRtcSpl_AutoCorrelation( - &(ISACdec_obj->plcstr_obj).prevPitchInvOut[PITCH_MAX_LAG + 10 - lag0], - lag0, 0, &varOut, &rightShiftOut); - - maxAbs = 0; - for( i = 0; i< lag0; i++) - { - myAbs = WEBRTC_SPL_ABS_W16( - (ISACdec_obj->plcstr_obj).prevPitchInvOut[ - PITCH_MAX_LAG + 10 - lag0 + i] ); - maxAbs = (myAbs > maxAbs)? myAbs:maxAbs; - } - logVarIn = log2_Q8_T( (uint32_t)( varIn ) ) + - (int32_t)(rightShiftIn << 8); - logVarOut = log2_Q8_T( (uint32_t)( varOut ) ) + - (int32_t)(rightShiftOut << 8); - logMaxAbs = log2_Q8_T( (uint32_t)( maxAbs ) ); - - ltpGain = (int16_t)(logVarOut - logVarIn); - Q = 2 * logMaxAbs - ( logVarOut - 1512 ); - - /* - * --- - * We are computing sqrt( (VarIn/lag0) / var( noise ) ) - * var( noise ) is almost 256. we have already computed log2( VarIn ) in Q8 - * so we actually compute 2^( 0.5*(log2( VarIn ) - log2( lag0 ) - log2( var(noise ) ) ). - * Note that put log function is in Q8 but the exponential function is in Q10. 
- * -- - */ - - logVarIn -= log2_Q8_T( (uint32_t)( lag0 ) ); - tmp16 = (int16_t)((logVarIn<<1) - (4<<10) ); - rightShiftIn = 0; - if( tmp16 > 4096 ) - { - tmp16 -= 4096; - tmp16 = exp2_Q10_T( tmp16 ); - tmp16 >>= 6; - } - else - tmp16 = exp2_Q10_T( tmp16 )>>10; - - (ISACdec_obj->plcstr_obj).std = tmp16 - 4; - - if( (ltpGain < 110) || (ltpGain > 230) ) - { - if( ltpGain < 100 && (pitchGain < 1800) ) - { - (ISACdec_obj->plcstr_obj).A = WEBRTC_SPL_WORD16_MAX; - } - else - { - (ISACdec_obj->plcstr_obj).A = ((ltpGain < 110) && (Q < 800) - )? WEBRTC_SPL_WORD16_MAX:0; - } - (ISACdec_obj->plcstr_obj).B = WEBRTC_SPL_WORD16_MAX - - (ISACdec_obj->plcstr_obj).A; - } - else - { - if( (pitchGain < 450) || (pitchGain > 1600) ) - { - (ISACdec_obj->plcstr_obj).A = ((pitchGain < 450) - )? WEBRTC_SPL_WORD16_MAX:0; - (ISACdec_obj->plcstr_obj).B = WEBRTC_SPL_WORD16_MAX - - (ISACdec_obj->plcstr_obj).A; - } - else - { - myVoiceIndicator = ltpGain * 2 + pitchGain; - MemshipValQ15( myVoiceIndicator, - &(ISACdec_obj->plcstr_obj).A, &(ISACdec_obj->plcstr_obj).B ); - } - } - - - - myVoiceIndicator = ltpGain * 16 + pitchGain * 2 + (pitchGain >> 8); - MemshipValQ15( myVoiceIndicator, - &(ISACdec_obj->plcstr_obj).A, &(ISACdec_obj->plcstr_obj).B ); - - - - (ISACdec_obj->plcstr_obj).stretchLag = lag0; - (ISACdec_obj->plcstr_obj).pitchIndex = 0; - - } - else - { - myDecayRate = (DECAY_RATE<<2); - } - - if( (ISACdec_obj->plcstr_obj).B < 1000 ) - { - myDecayRate += (DECAY_RATE<<3); - } - - /* ------------ reconstructing the residual signal ------------------ */ - - LinearResampler( (ISACdec_obj->plcstr_obj).lastPitchLP, - stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - /* inverse pitch filter */ - - pitchLags_Q7[0] = pitchLags_Q7[1] = pitchLags_Q7[2] = pitchLags_Q7[3] = - (int16_t)((ISACdec_obj->plcstr_obj).stretchLag<<7); - pitchGains_Q12[3] = ( (ISACdec_obj->plcstr_obj).lastPitchGain_Q12); - pitchGains_Q12[2] = (int16_t)(pitchGains_Q12[3] * 1010 >> 10); - pitchGains_Q12[1] = 
(int16_t)(pitchGains_Q12[2] * 1010 >> 10); - pitchGains_Q12[0] = (int16_t)(pitchGains_Q12[1] * 1010 >> 10); - - - /* most of the time either B or A are zero so seperating */ - if( (ISACdec_obj->plcstr_obj).B == 0 ) - { - for( i = 0; i < FRAMESAMPLES_HALF; i++ ) - { - /* --- Low Pass */ - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_1[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - /* --- Highpass */ - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_2[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - } - for( i = 1; i < NOISE_FILTER_LEN; i++ ) - { - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_Extended_1[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - Vector_Word16_Extended_2[i] = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - } - plc_filterma_Fast(Vector_Word16_1, Vector_Word16_Extended_1, - &(ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - - NOISE_FILTER_LEN], (int16_t) NOISE_FILTER_LEN, - (int16_t) FRAMESAMPLES_HALF, (int16_t)(5), - (ISACdec_obj->plcstr_obj).decayCoeffNoise, (int16_t)(6)); - - maxCoeff = WebRtcSpl_MaxAbsValueW32( - &(ISACdec_obj->plcstr_obj).prevHP[ - PITCH_MAX_LAG + 10 - NOISE_FILTER_LEN], NOISE_FILTER_LEN ); - - rshift = 0; - while( maxCoeff > WEBRTC_SPL_WORD16_MAX ) - { - maxCoeff >>= 1; - rshift++; - } - for( i = 0; i < NOISE_FILTER_LEN; i++ ) { - Vector_Word16_1[FRAMESAMPLES_HALF - NOISE_FILTER_LEN + i] =(int16_t)( - ISACdec_obj->plcstr_obj.prevHP[PITCH_MAX_LAG + 10 - NOISE_FILTER_LEN + - i] >> rshift); - } - (ISACdec_obj->plcstr_obj).decayCoeffNoise = plc_filterma_Fast( - Vector_Word16_2, - Vector_Word16_Extended_2, - &Vector_Word16_1[FRAMESAMPLES_HALF - NOISE_FILTER_LEN], - (int16_t) NOISE_FILTER_LEN, - (int16_t) FRAMESAMPLES_HALF, - (int16_t) (5), - 
(ISACdec_obj->plcstr_obj).decayCoeffNoise, - (int16_t) (7) ); - - for( i = 0; i < FRAMESAMPLES_HALF; i++ ) - Vector_Word32_2[i] = Vector_Word16_Extended_2[i] << rshift; - - Vector_Word16_1 = Vector_Word16_Extended_1; - } - else - { - if( (ISACdec_obj->plcstr_obj).A == 0 ) - { - /* ------ Periodic Vector --- */ - for( i = 0, noiseIndex = 0; i < FRAMESAMPLES_HALF; i++, noiseIndex++ ) - { - /* --- Lowpass */ - pLP = (int16_t)(stretchPitchLP[ISACdec_obj->plcstr_obj.pitchIndex] * - ISACdec_obj->plcstr_obj.decayCoeffPriodic >> 15); - - /* --- Highpass */ - pHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).decayCoeffPriodic, - (ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 - - (ISACdec_obj->plcstr_obj).stretchLag + - (ISACdec_obj->plcstr_obj).pitchIndex] ); - - /* --- lower the muliplier (more decay at next sample) --- */ - (ISACdec_obj->plcstr_obj).decayCoeffPriodic -= (myDecayRate); - if( (ISACdec_obj->plcstr_obj).decayCoeffPriodic < 0 ) - (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 0; - - (ISACdec_obj->plcstr_obj).pitchIndex++; - - if( (ISACdec_obj->plcstr_obj).pitchIndex == - (ISACdec_obj->plcstr_obj).stretchLag ) - { - (ISACdec_obj->plcstr_obj).pitchIndex = 0; - (ISACdec_obj->plcstr_obj).pitchCycles++; - - if( (ISACdec_obj->plcstr_obj).stretchLag != (lag0 + 1) ) - { - (ISACdec_obj->plcstr_obj).stretchLag = lag0 + 1; - } - else - { - (ISACdec_obj->plcstr_obj).stretchLag = lag0; - } - - (ISACdec_obj->plcstr_obj).stretchLag = ( - (ISACdec_obj->plcstr_obj).stretchLag > PITCH_MAX_LAG - )? 
(PITCH_MAX_LAG):(ISACdec_obj->plcstr_obj).stretchLag; - - LinearResampler( (ISACdec_obj->plcstr_obj).lastPitchLP, - stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - LinearResampler( (ISACdec_obj->plcstr_obj).prevPitchLP, - stretchPitchLP1, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - switch( (ISACdec_obj->plcstr_obj).pitchCycles ) - { - case 1: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k]* 3 + - (int32_t)stretchPitchLP1[k])>>2); - } - break; - } - case 2: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k] )>>1); - } - break; - } - case 3: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)((stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k]*3 )>>2); - } - break; - } - } - - if( (ISACdec_obj->plcstr_obj).pitchCycles == 3 ) - { - myDecayRate += 35; //(myDecayRate>>1); - (ISACdec_obj->plcstr_obj).pitchCycles = 0; - } - - } - - /* ------ Sum the noisy and periodic signals ------ */ - Vector_Word16_1[i] = pLP; - Vector_Word32_2[i] = pHP; - } - } - else - { - for( i = 0, noiseIndex = 0; i < FRAMESAMPLES_HALF; i++, noiseIndex++ ) - { - - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - - noise1 = (ISACdec_obj->plcstr_obj.seed >> 10) - 16; - - nLP = (int16_t)((int16_t)(noise1 * ISACdec_obj->plcstr_obj.std) * - ISACdec_obj->plcstr_obj.decayCoeffNoise >> 15); - - /* --- Highpass */ - (ISACdec_obj->plcstr_obj).seed = WEBRTC_SPL_RAND( - (ISACdec_obj->plcstr_obj).seed ); - noise1 = (ISACdec_obj->plcstr_obj.seed >> 11) - 8; - - nHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).decayCoeffNoise, - (int32_t)(noise1*(ISACdec_obj->plcstr_obj).std) ); - - /* --- lower the muliplier (more decay at next sample) --- */ - (ISACdec_obj->plcstr_obj).decayCoeffNoise -= 
(myDecayRate); - if( (ISACdec_obj->plcstr_obj).decayCoeffNoise < 0 ) - (ISACdec_obj->plcstr_obj).decayCoeffNoise = 0; - - /* ------ Periodic Vector --- */ - /* --- Lowpass */ - pLP = (int16_t)(stretchPitchLP[ISACdec_obj->plcstr_obj.pitchIndex] * - ISACdec_obj->plcstr_obj.decayCoeffPriodic >> 15); - - /* --- Highpass */ - pHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).decayCoeffPriodic, - (ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 - - (ISACdec_obj->plcstr_obj).stretchLag + - (ISACdec_obj->plcstr_obj).pitchIndex] ); - - /* --- lower the muliplier (more decay at next sample) --- */ - (ISACdec_obj->plcstr_obj).decayCoeffPriodic -= (myDecayRate); - if( (ISACdec_obj->plcstr_obj).decayCoeffPriodic < 0 ) - { - (ISACdec_obj->plcstr_obj).decayCoeffPriodic = 0; - } - - /* ------ Weighting the noisy and periodic vectors ------- */ - wNoisyLP = (int16_t)(ISACdec_obj->plcstr_obj.A * nLP >> 15); - wNoisyHP = (int32_t)(WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).A, (nHP) ) ); - - wPriodicLP = (int16_t)(ISACdec_obj->plcstr_obj.B * pLP >> 15); - wPriodicHP = (int32_t)(WEBRTC_SPL_MUL_16_32_RSFT15( - (ISACdec_obj->plcstr_obj).B, pHP)); - - (ISACdec_obj->plcstr_obj).pitchIndex++; - - if((ISACdec_obj->plcstr_obj).pitchIndex == - (ISACdec_obj->plcstr_obj).stretchLag) - { - (ISACdec_obj->plcstr_obj).pitchIndex = 0; - (ISACdec_obj->plcstr_obj).pitchCycles++; - - if( (ISACdec_obj->plcstr_obj).stretchLag != (lag0 + 1) ) - (ISACdec_obj->plcstr_obj).stretchLag = lag0 + 1; - else - (ISACdec_obj->plcstr_obj).stretchLag = lag0; - - (ISACdec_obj->plcstr_obj).stretchLag = ( - (ISACdec_obj->plcstr_obj).stretchLag > PITCH_MAX_LAG - )? 
(PITCH_MAX_LAG):(ISACdec_obj->plcstr_obj).stretchLag; - LinearResampler( - (ISACdec_obj->plcstr_obj).lastPitchLP, - stretchPitchLP, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - LinearResampler((ISACdec_obj->plcstr_obj).prevPitchLP, - stretchPitchLP1, lag0, (ISACdec_obj->plcstr_obj).stretchLag ); - - switch((ISACdec_obj->plcstr_obj).pitchCycles) - { - case 1: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k]* 3 + - (int32_t)stretchPitchLP1[k] )>>2); - } - break; - } - case 2: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)(( - (int32_t)stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k])>>1); - } - break; - } - case 3: - { - for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ ) - { - stretchPitchLP[k] = (int16_t)( - (stretchPitchLP[k] + - (int32_t)stretchPitchLP1[k]*3 )>>2); - } - break; - } - } - - if( (ISACdec_obj->plcstr_obj).pitchCycles == 3 ) - { - myDecayRate += 55; //(myDecayRate>>1); - (ISACdec_obj->plcstr_obj).pitchCycles = 0; - } - } - - /* ------ Sum the noisy and periodic signals ------ */ - Vector_Word16_1[i] = WebRtcSpl_AddSatW16(wNoisyLP, wPriodicLP); - Vector_Word32_2[i] = WebRtcSpl_AddSatW32(wNoisyHP, wPriodicHP); - } - } - } - /* ----------------- residual signal is reconstructed ------------------ */ - - k = (ISACdec_obj->plcstr_obj).pitchIndex; - /* --- Write one pitch cycle for recovery block --- */ - - for( i = 0; i < RECOVERY_OVERLAP; i++ ) - { - ISACdec_obj->plcstr_obj.overlapLP[i] = (int16_t)( - stretchPitchLP[k] * ISACdec_obj->plcstr_obj.decayCoeffPriodic >> 15); - k = ( k < ((ISACdec_obj->plcstr_obj).stretchLag - 1) )? 
(k+1):0; - } - - (ISACdec_obj->plcstr_obj).lastPitchLag_Q7 = - (int16_t)((ISACdec_obj->plcstr_obj).stretchLag << 7); - - - /* --- Inverse Pitch Filter --- */ - WebRtcIsacfix_PitchFilter(Vector_Word16_1, Vector_Word16_2, - &ISACdec_obj->pitchfiltstr_obj, pitchLags_Q7, pitchGains_Q12, 4); - - /* reduce gain to compensate for pitch enhancer */ - /* gain = 1.0f - 0.45f * AvgPitchGain; */ - tmp32a = ISACdec_obj->plcstr_obj.AvgPitchGain_Q12 * 29; // Q18 - tmp32b = 262144 - tmp32a; // Q18 - gainQ13 = (int16_t) (tmp32b >> 5); // Q13 - - /* perceptual post-filtering (using normalized lattice filter) */ - for (k = 0; k < FRAMESAMPLES_HALF; k++) - Vector_Word32_1[k] = (Vector_Word16_2[k] * gainQ13) << 3; // Q25 - - - WebRtcIsacfix_NormLatticeFilterAr(ORDERLO, - (ISACdec_obj->maskfiltstr_obj).PostStateLoGQ0, - Vector_Word32_1, lofilt_coefQ15, gain_lo_hiQ17, 0, Vector_Word16_1); - - WebRtcIsacfix_NormLatticeFilterAr(ORDERHI, - (ISACdec_obj->maskfiltstr_obj).PostStateHiGQ0, - Vector_Word32_2, hifilt_coefQ15, gain_lo_hiQ17, 1, Vector_Word16_2); - - /* recombine the 2 bands */ - - /* Form the polyphase signals, and compensate for DC offset */ - for (k=0;kpostfiltbankstr_obj); - - (ISACdec_obj->plcstr_obj).used = PLC_WAS_USED; - *current_framesamples = 480; -} diff --git a/modules/audio_coding/codecs/isac/fix/source/encode.c b/modules/audio_coding/codecs/isac/fix/source/encode.c deleted file mode 100644 index ef3e320e2c..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/encode.c +++ /dev/null @@ -1,635 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * encode.c - * - * Encoding function for the iSAC coder. - * - */ - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - - -int WebRtcIsacfix_EncodeImpl(int16_t *in, - IsacFixEncoderInstance *ISACenc_obj, - BwEstimatorstr *bw_estimatordata, - int16_t CodingMode) -{ - int16_t stream_length = 0; - int16_t usefulstr_len = 0; - int k; - int16_t BWno; - - int16_t lofilt_coefQ15[(ORDERLO)*SUBFRAMES]; - int16_t hifilt_coefQ15[(ORDERHI)*SUBFRAMES]; - int32_t gain_lo_hiQ17[2*SUBFRAMES]; - - int16_t LPandHP[FRAMESAMPLES/2 + QLOOKAHEAD]; - int16_t LP16a[FRAMESAMPLES/2 + QLOOKAHEAD]; - int16_t HP16a[FRAMESAMPLES/2 + QLOOKAHEAD]; - - int16_t PitchLags_Q7[PITCH_SUBFRAMES]; - int16_t PitchGains_Q12[PITCH_SUBFRAMES]; - int16_t AvgPitchGain_Q12; - - int16_t frame_mode; /* 0 for 30ms, 1 for 60ms */ - int16_t processed_samples; - int status; - - int32_t bits_gainsQ11; - int16_t MinBytes; - int16_t bmodel; - - transcode_obj transcodingParam; - int16_t payloadLimitBytes; - int16_t arithLenBeforeEncodingDFT; - int16_t iterCntr; - - /* copy new frame length and bottle neck rate only for the first 10 ms data */ - if (ISACenc_obj->buffer_index == 0) { - /* set the framelength for the next packet */ - ISACenc_obj->current_framesamples = ISACenc_obj->new_framelength; - } - - 
frame_mode = ISACenc_obj->current_framesamples/MAX_FRAMESAMPLES; /* 0 (30 ms) or 1 (60 ms) */ - processed_samples = ISACenc_obj->current_framesamples/(frame_mode+1); /* 480 (30, 60 ms) */ - - /* buffer speech samples (by 10ms packet) until the framelength is reached (30 or 60 ms) */ - /**************************************************************************************/ - /* fill the buffer with 10ms input data */ - for(k=0; kdata_buffer_fix[k + ISACenc_obj->buffer_index] = in[k]; - } - /* if buffersize is not equal to current framesize, and end of file is not reached yet, */ - /* increase index and go back to main to get more speech samples */ - if (ISACenc_obj->buffer_index + FRAMESAMPLES_10ms != processed_samples) { - ISACenc_obj->buffer_index = ISACenc_obj->buffer_index + FRAMESAMPLES_10ms; - return 0; - } - /* if buffer reached the right size, reset index and continue with encoding the frame */ - ISACenc_obj->buffer_index = 0; - - /* end of buffer function */ - /**************************/ - - /* encoding */ - /************/ - - if (frame_mode == 0 || ISACenc_obj->frame_nb == 0 ) - { - /* reset bitstream */ - ISACenc_obj->bitstr_obj.W_upper = 0xFFFFFFFF; - ISACenc_obj->bitstr_obj.streamval = 0; - ISACenc_obj->bitstr_obj.stream_index = 0; - ISACenc_obj->bitstr_obj.full = 1; - - if (CodingMode == 0) { - ISACenc_obj->BottleNeck = WebRtcIsacfix_GetUplinkBandwidth(bw_estimatordata); - ISACenc_obj->MaxDelay = WebRtcIsacfix_GetUplinkMaxDelay(bw_estimatordata); - } - if (CodingMode == 0 && frame_mode == 0 && (ISACenc_obj->enforceFrameSize == 0)) { - ISACenc_obj->new_framelength = WebRtcIsacfix_GetNewFrameLength(ISACenc_obj->BottleNeck, - ISACenc_obj->current_framesamples); - } - - // multiply the bottleneck by 0.88 before computing SNR, 0.88 is tuned by experimenting on TIMIT - // 901/1024 is 0.87988281250000 - ISACenc_obj->s2nr = WebRtcIsacfix_GetSnr( - (int16_t)(ISACenc_obj->BottleNeck * 901 >> 10), - ISACenc_obj->current_framesamples); - - /* encode frame length 
*/ - status = WebRtcIsacfix_EncodeFrameLen(ISACenc_obj->current_framesamples, &ISACenc_obj->bitstr_obj); - if (status < 0) - { - /* Wrong frame size */ - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - - /* Save framelength for multiple packets memory */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - (ISACenc_obj->SaveEnc_ptr)->framelength=ISACenc_obj->current_framesamples; - } - - /* bandwidth estimation and coding */ - BWno = WebRtcIsacfix_GetDownlinkBwIndexImpl(bw_estimatordata); - status = WebRtcIsacfix_EncodeReceiveBandwidth(&BWno, &ISACenc_obj->bitstr_obj); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - } - - /* split signal in two bands */ - WebRtcIsacfix_SplitAndFilter1(ISACenc_obj->data_buffer_fix, LP16a, HP16a, &ISACenc_obj->prefiltbankstr_obj ); - - /* estimate pitch parameters and pitch-filter lookahead signal */ - WebRtcIsacfix_PitchAnalysis(LP16a+QLOOKAHEAD, LPandHP, - &ISACenc_obj->pitchanalysisstr_obj, PitchLags_Q7, PitchGains_Q12); /* LPandHP = LP_lookahead_pfQ0, */ - - /* Set where to store data in multiple packets memory */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - if (frame_mode == 0 || ISACenc_obj->frame_nb == 0) - { - (ISACenc_obj->SaveEnc_ptr)->startIdx = 0; - } - else - { - (ISACenc_obj->SaveEnc_ptr)->startIdx = 1; - } - } - - /* quantize & encode pitch parameters */ - status = WebRtcIsacfix_EncodePitchGain(PitchGains_Q12, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. 
- ISACenc_obj->frame_nb = 0; - } - return status; - } - status = WebRtcIsacfix_EncodePitchLag(PitchLags_Q7 , PitchGains_Q12, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] + - PitchGains_Q12[2] + PitchGains_Q12[3]) >> 2; - - /* find coefficients for perceptual pre-filters */ - WebRtcIsacfix_GetLpcCoef(LPandHP, HP16a+QLOOKAHEAD, &ISACenc_obj->maskfiltstr_obj, - ISACenc_obj->s2nr, PitchGains_Q12, - gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15); /*LPandHP = LP_lookahead_pfQ0*/ - - // record LPC Gains for possible bit-rate reduction - for(k = 0; k < KLT_ORDER_GAIN; k++) - { - transcodingParam.lpcGains[k] = gain_lo_hiQ17[k]; - } - - /* code LPC model and shape - gains not quantized yet */ - status = WebRtcIsacfix_EncodeLpc(gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15, - &bmodel, &bits_gainsQ11, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr, &transcodingParam); - if (status < 0) - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. 
- ISACenc_obj->frame_nb = 0; - } - return status; - } - arithLenBeforeEncodingDFT = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full); - - /* low-band filtering */ - WebRtcIsacfix_NormLatticeFilterMa(ORDERLO, ISACenc_obj->maskfiltstr_obj.PreStateLoGQ15, - LP16a, lofilt_coefQ15, gain_lo_hiQ17, 0, LPandHP);/* LPandHP = LP16b */ - - /* pitch filter */ - WebRtcIsacfix_PitchFilter(LPandHP, LP16a, &ISACenc_obj->pitchfiltstr_obj, PitchLags_Q7, PitchGains_Q12, 1);/* LPandHP = LP16b */ - - /* high-band filtering */ - WebRtcIsacfix_NormLatticeFilterMa(ORDERHI, ISACenc_obj->maskfiltstr_obj.PreStateHiGQ15, - HP16a, hifilt_coefQ15, gain_lo_hiQ17, 1, LPandHP);/*LPandHP = HP16b*/ - - /* transform */ - WebRtcIsacfix_Time2Spec(LP16a, LPandHP, LP16a, LPandHP); /*LPandHP = HP16b*/ - - /* Save data for multiple packets memory */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - (ISACenc_obj->SaveEnc_ptr)->fre[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LP16a[k]; - (ISACenc_obj->SaveEnc_ptr)->fim[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LPandHP[k]; - } - (ISACenc_obj->SaveEnc_ptr)->AvgPitchGain[(ISACenc_obj->SaveEnc_ptr)->startIdx] = AvgPitchGain_Q12; - } - - /* quantization and lossless coding */ - status = WebRtcIsacfix_EncodeSpec(LP16a, LPandHP, &ISACenc_obj->bitstr_obj, AvgPitchGain_Q12); - if((status <= -1) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) /*LPandHP = HP16b*/ - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that in the next call - // encoder starts fresh. 
- ISACenc_obj->frame_nb = 0; - } - return status; - } - - if((frame_mode == 1) && (ISACenc_obj->frame_nb == 0)) - { - // it is a 60ms and we are in the first 30ms - // then the limit at this point should be half of the assigned value - payloadLimitBytes = ISACenc_obj->payloadLimitBytes60 >> 1; - } - else if (frame_mode == 0) - { - // it is a 30ms frame - payloadLimitBytes = (ISACenc_obj->payloadLimitBytes30) - 3; - } - else - { - // this is the second half of a 60ms frame. - payloadLimitBytes = ISACenc_obj->payloadLimitBytes60 - 3; // subract 3 because termination process may add 3 bytes - } - - iterCntr = 0; - while((((ISACenc_obj->bitstr_obj.stream_index) << 1) > payloadLimitBytes) || - (status == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) - { - int16_t arithLenDFTByte; - int16_t bytesLeftQ5; - int16_t ratioQ5[8] = {0, 6, 9, 12, 16, 19, 22, 25}; - - // According to experiments on TIMIT the following is proper for audio, but it is not agressive enough for tonal inputs - // such as DTMF, sweep-sine, ... - // - // (0.55 - (0.8 - ratio[i]/32) * 5 / 6) * 2^14 - // int16_t scaleQ14[8] = {0, 648, 1928, 3208, 4915, 6195, 7475, 8755}; - - - // This is a supper-agressive scaling passed the tests (tonal inputs) tone with one iteration for payload limit - // of 120 (32kbps bottleneck), number of frames needed a rate-reduction was 58403 - // - int16_t scaleQ14[8] = {0, 348, 828, 1408, 2015, 3195, 3500, 3500}; - int16_t idx; - - if(iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) - { - // We were not able to limit the payload size - - if((frame_mode == 1) && (ISACenc_obj->frame_nb == 0)) - { - // This was the first 30ms of a 60ms frame. Although the payload is larger than it - // should be but we let the second 30ms be encoded. Maybe togetehr we won't exceed - // the limit. 
- ISACenc_obj->frame_nb = 1; - return 0; - } - else if((frame_mode == 1) && (ISACenc_obj->frame_nb == 1)) - { - ISACenc_obj->frame_nb = 0; - } - - if(status != -ISAC_DISALLOWED_BITSTREAM_LENGTH) - { - return -ISAC_PAYLOAD_LARGER_THAN_LIMIT; - } - else - { - return status; - } - } - if(status != -ISAC_DISALLOWED_BITSTREAM_LENGTH) - { - arithLenDFTByte = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full) - arithLenBeforeEncodingDFT; - bytesLeftQ5 = (payloadLimitBytes - arithLenBeforeEncodingDFT) << 5; - - // bytesLeft / arithLenDFTBytes indicates how much scaling is required a rough estimate (agressive) - // scale = 0.55 - (0.8 - bytesLeft / arithLenDFTBytes) * 5 / 6 - // bytesLeft / arithLenDFTBytes below 0.2 will have a scale of zero and above 0.8 are treated as 0.8 - // to avoid division we do more simplification. - // - // values of (bytesLeft / arithLenDFTBytes)*32 between ratioQ5[i] and ratioQ5[i+1] are rounded to ratioQ5[i] - // and the corresponding scale is chosen - - // we compare bytesLeftQ5 with ratioQ5[]*arithLenDFTByte; - idx = 4; - idx += (bytesLeftQ5 >= ratioQ5[idx] * arithLenDFTByte) ? 2 : -2; - idx += (bytesLeftQ5 >= ratioQ5[idx] * arithLenDFTByte) ? 1 : -1; - idx += (bytesLeftQ5 >= ratioQ5[idx] * arithLenDFTByte) ? 0 : -1; - } - else - { - // we are here because the bit-stream did not fit into the buffer, in this case, the stream_index is not - // trustable, especially if the is the first 30ms of a packet. Thereforem, we will go for the most agressive - // case. 
- idx = 0; - } - // scale FFT coefficients to reduce the bit-rate - for(k = 0; k < FRAMESAMPLES_HALF; k++) - { - LP16a[k] = (int16_t)(LP16a[k] * scaleQ14[idx] >> 14); - LPandHP[k] = (int16_t)(LPandHP[k] * scaleQ14[idx] >> 14); - } - - // Save data for multiple packets memory - if (ISACenc_obj->SaveEnc_ptr != NULL) - { - for(k = 0; k < FRAMESAMPLES_HALF; k++) - { - (ISACenc_obj->SaveEnc_ptr)->fre[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LP16a[k]; - (ISACenc_obj->SaveEnc_ptr)->fim[k + (ISACenc_obj->SaveEnc_ptr)->startIdx*FRAMESAMPLES_HALF] = LPandHP[k]; - } - } - - // scale the unquantized LPC gains and save the scaled version for the future use - for(k = 0; k < KLT_ORDER_GAIN; k++) - { - gain_lo_hiQ17[k] = WEBRTC_SPL_MUL_16_32_RSFT14(scaleQ14[idx], transcodingParam.lpcGains[k]);//transcodingParam.lpcGains[k]; // - transcodingParam.lpcGains[k] = gain_lo_hiQ17[k]; - } - - // reset the bit-stream object to the state which it had before encoding LPC Gains - ISACenc_obj->bitstr_obj.full = transcodingParam.full; - ISACenc_obj->bitstr_obj.stream_index = transcodingParam.stream_index; - ISACenc_obj->bitstr_obj.streamval = transcodingParam.streamval; - ISACenc_obj->bitstr_obj.W_upper = transcodingParam.W_upper; - ISACenc_obj->bitstr_obj.stream[transcodingParam.stream_index-1] = transcodingParam.beforeLastWord; - ISACenc_obj->bitstr_obj.stream[transcodingParam.stream_index] = transcodingParam.lastWord; - - - // quantize and encode LPC gain - WebRtcIsacfix_EstCodeLpcGain(gain_lo_hiQ17, &ISACenc_obj->bitstr_obj, ISACenc_obj->SaveEnc_ptr); - arithLenBeforeEncodingDFT = (ISACenc_obj->bitstr_obj.stream_index << 1) + (1-ISACenc_obj->bitstr_obj.full); - status = WebRtcIsacfix_EncodeSpec(LP16a, LPandHP, &ISACenc_obj->bitstr_obj, AvgPitchGain_Q12); - if((status <= -1) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) /*LPandHP = HP16b*/ - { - if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - // If this is the second 30ms of a 60ms frame reset this such that 
in the next call - // encoder starts fresh. - ISACenc_obj->frame_nb = 0; - } - return status; - } - iterCntr++; - } - - if (frame_mode == 1 && ISACenc_obj->frame_nb == 0) - /* i.e. 60 ms framesize and just processed the first 30ms, */ - /* go back to main function to buffer the other 30ms speech frame */ - { - ISACenc_obj->frame_nb = 1; - return 0; - } - else if (frame_mode == 1 && ISACenc_obj->frame_nb == 1) - { - ISACenc_obj->frame_nb = 0; - /* also update the framelength for next packet, in Adaptive mode only */ - if (CodingMode == 0 && (ISACenc_obj->enforceFrameSize == 0)) { - ISACenc_obj->new_framelength = WebRtcIsacfix_GetNewFrameLength(ISACenc_obj->BottleNeck, - ISACenc_obj->current_framesamples); - } - } - - - /* complete arithmetic coding */ - stream_length = WebRtcIsacfix_EncTerminate(&ISACenc_obj->bitstr_obj); - /* can this be negative? */ - - if(CodingMode == 0) - { - - /* update rate model and get minimum number of bytes in this packet */ - MinBytes = WebRtcIsacfix_GetMinBytes(&ISACenc_obj->rate_data_obj, (int16_t) stream_length, - ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck, ISACenc_obj->MaxDelay); - - /* if bitstream is too short, add garbage at the end */ - - /* Store length of coded data */ - usefulstr_len = stream_length; - - /* Make sure MinBytes does not exceed packet size limit */ - if ((ISACenc_obj->frame_nb == 0) && (MinBytes > ISACenc_obj->payloadLimitBytes30)) { - MinBytes = ISACenc_obj->payloadLimitBytes30; - } else if ((ISACenc_obj->frame_nb == 1) && (MinBytes > ISACenc_obj->payloadLimitBytes60)) { - MinBytes = ISACenc_obj->payloadLimitBytes60; - } - - /* Make sure we don't allow more than 255 bytes of garbage data. - We store the length of the garbage data in 8 bits in the bitstream, - 255 is the max garbage lenght we can signal using 8 bits. 
*/ - if( MinBytes > usefulstr_len + 255 ) { - MinBytes = usefulstr_len + 255; - } - - /* Save data for creation of multiple bitstreams */ - if (ISACenc_obj->SaveEnc_ptr != NULL) { - (ISACenc_obj->SaveEnc_ptr)->minBytes = MinBytes; - } - - while (stream_length < MinBytes) - { - RTC_DCHECK_GE(stream_length, 0); - if (stream_length & 0x0001){ - ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed ); - ISACenc_obj->bitstr_obj.stream[stream_length / 2] |= - (uint16_t)(ISACenc_obj->bitstr_seed & 0xFF); - } else { - ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed ); - ISACenc_obj->bitstr_obj.stream[stream_length / 2] = - ((uint16_t)ISACenc_obj->bitstr_seed << 8); - } - stream_length++; - } - - /* to get the real stream_length, without garbage */ - if (usefulstr_len & 0x0001) { - ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0xFF00; - ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] += (MinBytes - usefulstr_len) & 0x00FF; - } - else { - ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0x00FF; - ISACenc_obj->bitstr_obj.stream[usefulstr_len >> 1] += - ((uint16_t)((MinBytes - usefulstr_len) & 0x00FF) << 8); - } - } - else - { - /* update rate model */ - WebRtcIsacfix_UpdateRateModel(&ISACenc_obj->rate_data_obj, (int16_t) stream_length, - ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck); - } - return stream_length; -} - -/* This function is used to create a new bitstream with new BWE. - The same data as previously encoded with the fucntion WebRtcIsacfix_EncodeImpl() - is used. The data needed is taken from the struct, where it was stored - when calling the encoder. 
*/ -int WebRtcIsacfix_EncodeStoredData(IsacFixEncoderInstance *ISACenc_obj, - int BWnumber, - float scale) -{ - int ii; - int status; - int16_t BWno = (int16_t)BWnumber; - int stream_length = 0; - - int16_t model; - const uint16_t *Q_PitchGain_cdf_ptr[1]; - const uint16_t **cdf; - const IsacSaveEncoderData *SaveEnc_str; - int32_t tmpLPCcoeffs_g[KLT_ORDER_GAIN<<1]; - int16_t tmpLPCindex_g[KLT_ORDER_GAIN<<1]; - int16_t tmp_fre[FRAMESAMPLES]; - int16_t tmp_fim[FRAMESAMPLES]; - - SaveEnc_str = ISACenc_obj->SaveEnc_ptr; - - /* Check if SaveEnc memory exists */ - if (SaveEnc_str == NULL) { - return (-1); - } - - /* Sanity Check - possible values for BWnumber is 0 - 23 */ - if ((BWnumber < 0) || (BWnumber > 23)) { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* reset bitstream */ - ISACenc_obj->bitstr_obj.W_upper = 0xFFFFFFFF; - ISACenc_obj->bitstr_obj.streamval = 0; - ISACenc_obj->bitstr_obj.stream_index = 0; - ISACenc_obj->bitstr_obj.full = 1; - - /* encode frame length */ - status = WebRtcIsacfix_EncodeFrameLen(SaveEnc_str->framelength, &ISACenc_obj->bitstr_obj); - if (status < 0) { - /* Wrong frame size */ - return status; - } - - /* encode bandwidth estimate */ - status = WebRtcIsacfix_EncodeReceiveBandwidth(&BWno, &ISACenc_obj->bitstr_obj); - if (status < 0) { - return status; - } - - /* Transcoding */ - /* If scale < 1, rescale data to produce lower bitrate signal */ - if ((0.0 < scale) && (scale < 1.0)) { - /* Compensate LPC gain */ - for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) { - tmpLPCcoeffs_g[ii] = (int32_t) ((scale) * (float) SaveEnc_str->LPCcoeffs_g[ii]); - } - - /* Scale DFT */ - for (ii = 0; ii < (FRAMESAMPLES_HALF*(1+SaveEnc_str->startIdx)); ii++) { - tmp_fre[ii] = (int16_t) ((scale) * (float) SaveEnc_str->fre[ii]) ; - tmp_fim[ii] = (int16_t) ((scale) * (float) SaveEnc_str->fim[ii]) ; - } - } else { - for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) { - tmpLPCindex_g[ii] = SaveEnc_str->LPCindex_g[ii]; - } - - 
for (ii = 0; ii < (FRAMESAMPLES_HALF*(1+SaveEnc_str->startIdx)); ii++) { - tmp_fre[ii] = SaveEnc_str->fre[ii]; - tmp_fim[ii] = SaveEnc_str->fim[ii]; - } - } - - /* Loop over number of 30 msec */ - for (ii = 0; ii <= SaveEnc_str->startIdx; ii++) - { - - /* encode pitch gains */ - *Q_PitchGain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf; - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &SaveEnc_str->pitchGain_index[ii], - Q_PitchGain_cdf_ptr, 1); - if (status < 0) { - return status; - } - - /* entropy coding of quantization pitch lags */ - /* voicing classificiation */ - if (SaveEnc_str->meanGain[ii] <= 819) { - cdf = WebRtcIsacfix_kPitchLagPtrLo; - } else if (SaveEnc_str->meanGain[ii] <= 1638) { - cdf = WebRtcIsacfix_kPitchLagPtrMid; - } else { - cdf = WebRtcIsacfix_kPitchLagPtrHi; - } - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, - &SaveEnc_str->pitchIndex[PITCH_SUBFRAMES*ii], cdf, PITCH_SUBFRAMES); - if (status < 0) { - return status; - } - - /* LPC */ - /* entropy coding of model number */ - model = 0; - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &model, - WebRtcIsacfix_kModelCdfPtr, 1); - if (status < 0) { - return status; - } - - /* entropy coding of quantization indices - LPC shape only */ - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &SaveEnc_str->LPCindex_s[KLT_ORDER_SHAPE*ii], - WebRtcIsacfix_kCdfShapePtr[0], KLT_ORDER_SHAPE); - if (status < 0) { - return status; - } - - /* If transcoding, get new LPC gain indices */ - if (scale < 1.0) { - WebRtcIsacfix_TranscodeLpcCoef(&tmpLPCcoeffs_g[KLT_ORDER_GAIN*ii], &tmpLPCindex_g[KLT_ORDER_GAIN*ii]); - } - - /* entropy coding of quantization indices - LPC gain */ - status = WebRtcIsacfix_EncHistMulti(&ISACenc_obj->bitstr_obj, &tmpLPCindex_g[KLT_ORDER_GAIN*ii], - WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN); - if (status < 0) { - return status; - } - - /* quantization and lossless coding */ - status = 
WebRtcIsacfix_EncodeSpec(&tmp_fre[ii*FRAMESAMPLES_HALF], &tmp_fim[ii*FRAMESAMPLES_HALF], - &ISACenc_obj->bitstr_obj, SaveEnc_str->AvgPitchGain[ii]); - if (status < 0) { - return status; - } - } - - /* complete arithmetic coding */ - stream_length = WebRtcIsacfix_EncTerminate(&ISACenc_obj->bitstr_obj); - - return stream_length; -} diff --git a/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c b/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c deleted file mode 100644 index 842e77f47e..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c +++ /dev/null @@ -1,2056 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * entropy_coding.c - * - * This file contains all functions used to arithmetically - * encode the iSAC bistream. - * - */ - -#include - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/arith_routins.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h" -#include "rtc_base/sanitizer.h" - -/* - * Eenumerations for arguments to functions WebRtcIsacfix_MatrixProduct1() - * and WebRtcIsacfix_MatrixProduct2(). 
-*/ - -enum matrix_index_factor { - kTIndexFactor1 = 1, - kTIndexFactor2 = 2, - kTIndexFactor3 = SUBFRAMES, - kTIndexFactor4 = LPC_SHAPE_ORDER -}; - -enum matrix_index_step { - kTIndexStep1 = 1, - kTIndexStep2 = SUBFRAMES, - kTIndexStep3 = LPC_SHAPE_ORDER -}; - -enum matrixprod_loop_count { - kTLoopCount1 = SUBFRAMES, - kTLoopCount2 = 2, - kTLoopCount3 = LPC_SHAPE_ORDER -}; - -enum matrix1_shift_value { - kTMatrix1_shift0 = 0, - kTMatrix1_shift1 = 1, - kTMatrix1_shift5 = 5 -}; - -enum matrixprod_init_case { - kTInitCase0 = 0, - kTInitCase1 = 1 -}; - -/* - This function implements the fix-point correspondant function to lrint. - - FLP: (int32_t)floor(flt+.499999999999) - FIP: (fixVal+roundVal)>>qDomain - - where roundVal = 2^(qDomain-1) = 1<<(qDomain-1) - -*/ -static __inline int32_t CalcLrIntQ(int32_t fixVal, int16_t qDomain) { - return (fixVal + (1 << (qDomain - 1))) >> qDomain; -} - -/* - __inline uint32_t stepwise(int32_t dinQ10) { - - int32_t ind, diQ10, dtQ10; - - diQ10 = dinQ10; - if (diQ10 < DPMIN_Q10) - diQ10 = DPMIN_Q10; - if (diQ10 >= DPMAX_Q10) - diQ10 = DPMAX_Q10 - 1; - - dtQ10 = diQ10 - DPMIN_Q10;*/ /* Q10 + Q10 = Q10 */ -/* ind = (dtQ10 * 5) >> 10; */ /* 2^10 / 5 = 0.2 in Q10 */ -/* Q10 -> Q0 */ - -/* return rpointsFIX_Q10[ind]; - - } -*/ - -/* logN(x) = logN(2)*log2(x) = 0.6931*log2(x). Output in Q8. */ -/* The input argument X to logN(X) is 2^17 times higher than the - input floating point argument Y to log(Y), since the X value - is a Q17 value. This can be compensated for after the call, by - subraction a value Z for each Q-step. One Q-step means that - X gets 2 thimes higher, i.e. Z = logN(2)*256 = 0.693147180559*256 = - 177.445678 should be subtracted (since logN() returns a Q8 value). 
- For a X value in Q17, the value 177.445678*17 = 3017 should be - subtracted */ -static int16_t CalcLogN(int32_t arg) { - int16_t zeros, log2, frac, logN; - - zeros=WebRtcSpl_NormU32(arg); - frac = (int16_t)((uint32_t)((arg << zeros) & 0x7FFFFFFF) >> 23); - log2 = (int16_t)(((31 - zeros) << 8) + frac); // log2(x) in Q8 - logN = (int16_t)(log2 * 22713 >> 15); // log(2) = 0.693147 = 22713 in Q15 - logN=logN+11; //Scalar compensation which minimizes the (log(x)-logN(x))^2 error over all x. - - return logN; -} - - -/* - expN(x) = 2^(a*x), where a = log2(e) ~= 1.442695 - - Input: Q8 (int16_t) - Output: Q17 (int32_t) - - a = log2(e) = log2(exp(1)) ~= 1.442695 ==> a = 23637 in Q14 (1.442688) - To this value, 700 is added or subtracted in order to get an average error - nearer zero, instead of always same-sign. -*/ - -static int32_t CalcExpN(int16_t x) { - int16_t axINT, axFRAC; - int16_t exp16; - int32_t exp; - int16_t ax = (int16_t)(x * 23637 >> 14); // Q8 - - if (x>=0) { - axINT = ax >> 8; //Q0 - axFRAC = ax&0x00FF; - exp16 = 1 << axINT; // Q0 - axFRAC = axFRAC+256; //Q8 - exp = exp16 * axFRAC; // Q0*Q8 = Q8 - exp <<= 9; // Q17 - } else { - ax = -ax; - axINT = 1 + (ax >> 8); //Q0 - axFRAC = 0x00FF - (ax&0x00FF); - exp16 = (int16_t)(32768 >> axINT); // Q15 - axFRAC = axFRAC+256; //Q8 - exp = exp16 * axFRAC; // Q15*Q8 = Q23 - exp >>= 6; // Q17 - } - - return exp; -} - - -/* compute correlation from power spectrum */ -static void CalcCorrelation(int32_t *PSpecQ12, int32_t *CorrQ7) -{ - int32_t summ[FRAMESAMPLES/8]; - int32_t diff[FRAMESAMPLES/8]; - int32_t sum; - int k, n; - - for (k = 0; k < FRAMESAMPLES/8; k++) { - summ[k] = (PSpecQ12[k] + PSpecQ12[FRAMESAMPLES / 4 - 1 - k] + 16) >> 5; - diff[k] = (PSpecQ12[k] - PSpecQ12[FRAMESAMPLES / 4 - 1 - k] + 16) >> 5; - } - - sum = 2; - for (n = 0; n < FRAMESAMPLES/8; n++) - sum += summ[n]; - CorrQ7[0] = sum; - - for (k = 0; k < AR_ORDER; k += 2) { - sum = 0; - for (n = 0; n < FRAMESAMPLES/8; n++) - sum += 
(WebRtcIsacfix_kCos[k][n] * diff[n] + 256) >> 9; - CorrQ7[k+1] = sum; - } - - for (k=1; k> 9; - CorrQ7[k+1] = sum; - } -} - -// Some arithmetic operations that are allowed to overflow. (It's still -// undefined behavior, so not a good idea; this just makes UBSan ignore the -// violations, so that our old code can continue to do what it's always been -// doing.) -static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingMulS16S32ToS32(int16_t a, int32_t b) { - return a * b; -} -static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingAddS32S32ToS32(int32_t a, int32_t b) { - return a + b; -} -static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingSubS32S32ToS32(int32_t a, int32_t b) { - return a - b; -} - -/* compute inverse AR power spectrum */ -static void CalcInvArSpec(const int16_t *ARCoefQ12, - const int32_t gainQ10, - int32_t *CurveQ16) -{ - int32_t CorrQ11[AR_ORDER+1]; - int32_t sum, tmpGain; - int32_t diffQ16[FRAMESAMPLES/8]; - const int16_t *CS_ptrQ9; - int k, n; - int16_t round, shftVal = 0, sh; - - sum = 0; - for (n = 0; n < AR_ORDER+1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n], ARCoefQ12[n]); /* Q24 */ - sum = ((sum >> 6) * 65 + 32768) >> 16; /* Result in Q8. */ - CorrQ11[0] = (sum * gainQ10 + 256) >> 9; - - /* To avoid overflow, we shift down gainQ10 if it is large. 
We will not lose any precision */ - if(gainQ10>400000){ - tmpGain = gainQ10 >> 3; - round = 32; - shftVal = 6; - } else { - tmpGain = gainQ10; - round = 256; - shftVal = 9; - } - - for (k = 1; k < AR_ORDER+1; k++) { - sum = 16384; - for (n = k; n < AR_ORDER+1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n-k], ARCoefQ12[n]); /* Q24 */ - sum >>= 15; - CorrQ11[k] = (sum * tmpGain + round) >> shftVal; - } - sum = CorrQ11[0] << 7; - for (n = 0; n < FRAMESAMPLES/8; n++) - CurveQ16[n] = sum; - - for (k = 1; k < AR_ORDER; k += 2) { - for (n = 0; n < FRAMESAMPLES/8; n++) - CurveQ16[n] += - (OverflowingMulS16S32ToS32(WebRtcIsacfix_kCos[k][n], CorrQ11[k + 1]) + - 2) >> - 2; - } - - CS_ptrQ9 = WebRtcIsacfix_kCos[0]; - - /* If CorrQ11[1] too large we avoid getting overflow in the calculation by shifting */ - sh=WebRtcSpl_NormW32(CorrQ11[1]); - if (CorrQ11[1]==0) /* Use next correlation */ - sh=WebRtcSpl_NormW32(CorrQ11[2]); - - if (sh<9) - shftVal = 9 - sh; - else - shftVal = 0; - - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] = (CS_ptrQ9[n] * (CorrQ11[1] >> shftVal) + 2) >> 2; - for (k = 2; k < AR_ORDER; k += 2) { - CS_ptrQ9 = WebRtcIsacfix_kCos[k]; - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] += (CS_ptrQ9[n] * (CorrQ11[k + 1] >> shftVal) + 2) >> 2; - } - - for (k=0; k> 6) * 65 + 32768) >> 16; /* Result in Q8. */ - CorrQ11[0] = (sum * gainQ10 + 256) >> 9; - - /* To avoid overflow, we shift down gainQ10 if it is large. 
We will not lose any precision */ - if(gainQ10>400000){ - tmpGain = gainQ10 >> 3; - round = 32; - shftVal = 6; - } else { - tmpGain = gainQ10; - round = 256; - shftVal = 9; - } - - for (k = 1; k < AR_ORDER+1; k++) { - sum = 16384; - for (n = k; n < AR_ORDER+1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n-k], ARCoefQ12[n]); /* Q24 */ - sum >>= 15; - CorrQ11[k] = (sum * tmpGain + round) >> shftVal; - } - sum = CorrQ11[0] << 7; - for (n = 0; n < FRAMESAMPLES/8; n++) - summQ16[n] = sum; - - for (k = 1; k < (AR_ORDER); k += 2) { - for (n = 0; n < FRAMESAMPLES/8; n++) - summQ16[n] += ((CorrQ11[k + 1] * WebRtcIsacfix_kCos[k][n]) + 2) >> 2; - } - - CS_ptrQ9 = WebRtcIsacfix_kCos[0]; - - /* If CorrQ11[1] too large we avoid getting overflow in the calculation by shifting */ - sh=WebRtcSpl_NormW32(CorrQ11[1]); - if (CorrQ11[1]==0) /* Use next correlation */ - sh=WebRtcSpl_NormW32(CorrQ11[2]); - - if (sh<9) - shftVal = 9 - sh; - else - shftVal = 0; - - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] = (CS_ptrQ9[n] * (CorrQ11[1] >> shftVal) + 2) >> 2; - for (k = 2; k < AR_ORDER; k += 2) { - CS_ptrQ9 = WebRtcIsacfix_kCos[k]; - for (n = 0; n < FRAMESAMPLES/8; n++) - diffQ16[n] += (CS_ptrQ9[n] * (CorrQ11[k + 1] >> shftVal) + 2) >> 2; - } - - in_sqrt = summQ16[0] + (diffQ16[0] << shftVal); - - /* convert to magnitude spectrum, by doing square-roots (modified from SPLIB) */ - res = 1 << (WebRtcSpl_GetSizeInBits(in_sqrt) >> 1); - - for (k = 0; k < FRAMESAMPLES/8; k++) - { - in_sqrt = summQ16[k] + (diffQ16[k] << shftVal); - i = 10; - - /* make in_sqrt positive to prohibit sqrt of negative values */ - if(in_sqrt<0) - in_sqrt=-in_sqrt; - - newRes = (in_sqrt / res + res) >> 1; - do - { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - CurveQ8[k] = (int16_t)newRes; - } - for (k = FRAMESAMPLES/8; k < FRAMESAMPLES/4; k++) { - - in_sqrt = summQ16[FRAMESAMPLES / 4 - 1 - k] - - (diffQ16[FRAMESAMPLES / 4 - 1 - k] << shftVal); - i = 10; - - /* make 
in_sqrt positive to prohibit sqrt of negative values */ - if(in_sqrt<0) - in_sqrt=-in_sqrt; - - newRes = (in_sqrt / res + res) >> 1; - do - { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - CurveQ8[k] = (int16_t)newRes; - } - -} - - - -/* generate array of dither samples in Q7 */ -static void GenerateDitherQ7(int16_t *bufQ7, - uint32_t seed, - int16_t length, - int16_t AvgPitchGain_Q12) -{ - int k; - int16_t dither1_Q7, dither2_Q7, dither_gain_Q14, shft; - - if (AvgPitchGain_Q12 < 614) /* this threshold should be equal to that in decode_spec() */ - { - for (k = 0; k < length-2; k += 3) - { - /* new random unsigned int32_t */ - seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515; - - /* fixed-point dither sample between -64 and 64 (Q7) */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* new random unsigned int32_t */ - seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515; - - /* fixed-point dither sample between -64 and 64 */ - dither2_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - shft = (int16_t)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 15); - if (shft < 5) - { - bufQ7[k] = dither1_Q7; - bufQ7[k+1] = dither2_Q7; - bufQ7[k+2] = 0; - } - else if (shft < 10) - { - bufQ7[k] = dither1_Q7; - bufQ7[k+1] = 0; - bufQ7[k+2] = dither2_Q7; - } - else - { - bufQ7[k] = 0; - bufQ7[k+1] = dither1_Q7; - bufQ7[k+2] = dither2_Q7; - } - } - } - else - { - dither_gain_Q14 = (int16_t)(22528 - WEBRTC_SPL_MUL(10, AvgPitchGain_Q12)); - - /* dither on half of the coefficients */ - for (k = 0; k < length-1; k += 2) - { - /* new random unsigned int32_t */ - seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515; - - /* fixed-point dither sample between -64 and 64 */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* dither sample is placed in either even or odd index */ - shft = (int16_t)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 1); /* either 0 or 1 */ - - bufQ7[k + shft] = (int16_t)((dither_gain_Q14 * dither1_Q7 + 
8192) >> 14); - bufQ7[k + 1 - shft] = 0; - } - } -} - - - - -/* - * function to decode the complex spectrum from the bitstream - * returns the total number of bytes in the stream - */ -int WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata, - int16_t *frQ7, - int16_t *fiQ7, - int16_t AvgPitchGain_Q12) -{ - int16_t data[FRAMESAMPLES]; - int32_t invARSpec2_Q16[FRAMESAMPLES/4]; - int16_t ARCoefQ12[AR_ORDER+1]; - int16_t RCQ15[AR_ORDER]; - int16_t gainQ10; - int32_t gain2_Q10; - int len; - int k; - - /* create dither signal */ - GenerateDitherQ7(data, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12); /* Dither is output in vector 'Data' */ - - /* decode model parameters */ - if (WebRtcIsacfix_DecodeRcCoef(streamdata, RCQ15) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - if (WebRtcIsacfix_DecodeGain2(streamdata, &gain2_Q10) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - /* compute inverse AR power spectrum */ - CalcInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16); - - /* arithmetic decoding of spectrum */ - /* 'data' input and output. 
Input = Dither */ - len = WebRtcIsacfix_DecLogisticMulti2(data, streamdata, invARSpec2_Q16, (int16_t)FRAMESAMPLES); - - if (len<1) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - /* subtract dither and scale down spectral samples with low SNR */ - if (AvgPitchGain_Q12 <= 614) - { - for (k = 0; k < FRAMESAMPLES; k += 4) - { - gainQ10 = WebRtcSpl_DivW32W16ResW16(30 << 10, - (int16_t)((uint32_t)(invARSpec2_Q16[k >> 2] + 2195456) >> 16)); - *frQ7++ = (int16_t)((data[k] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 1] * gainQ10 + 512) >> 10); - *frQ7++ = (int16_t)((data[k + 2] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 3] * gainQ10 + 512) >> 10); - } - } - else - { - for (k = 0; k < FRAMESAMPLES; k += 4) - { - gainQ10 = WebRtcSpl_DivW32W16ResW16(36 << 10, - (int16_t)((uint32_t)(invARSpec2_Q16[k >> 2] + 2654208) >> 16)); - *frQ7++ = (int16_t)((data[k] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 1] * gainQ10 + 512) >> 10); - *frQ7++ = (int16_t)((data[k + 2] * gainQ10 + 512) >> 10); - *fiQ7++ = (int16_t)((data[k + 3] * gainQ10 + 512) >> 10); - } - } - - return len; -} - - -int WebRtcIsacfix_EncodeSpec(const int16_t *fr, - const int16_t *fi, - Bitstr_enc *streamdata, - int16_t AvgPitchGain_Q12) -{ - int16_t dataQ7[FRAMESAMPLES]; - int32_t PSpec[FRAMESAMPLES/4]; - uint16_t invARSpecQ8[FRAMESAMPLES/4]; - int32_t CorrQ7[AR_ORDER+1]; - int32_t CorrQ7_norm[AR_ORDER+1]; - int16_t RCQ15[AR_ORDER]; - int16_t ARCoefQ12[AR_ORDER+1]; - int32_t gain2_Q10; - int16_t val; - int32_t nrg; - uint32_t sum; - int16_t lft_shft; - int16_t status; - int k, n, j; - - - /* create dither_float signal */ - GenerateDitherQ7(dataQ7, streamdata->W_upper, FRAMESAMPLES, AvgPitchGain_Q12); - - /* add dither and quantize, and compute power spectrum */ - /* Vector dataQ7 contains Dither in Q7 */ - for (k = 0; k < FRAMESAMPLES; k += 4) - { - val = ((*fr++ + dataQ7[k] + 64) & 0xFF80) - dataQ7[k]; /* Data = Dither */ - dataQ7[k] = val; /* New value in Data */ - sum = 
WEBRTC_SPL_UMUL(val, val); - - val = ((*fi++ + dataQ7[k+1] + 64) & 0xFF80) - dataQ7[k+1]; /* Data = Dither */ - dataQ7[k+1] = val; /* New value in Data */ - sum += WEBRTC_SPL_UMUL(val, val); - - val = ((*fr++ + dataQ7[k+2] + 64) & 0xFF80) - dataQ7[k+2]; /* Data = Dither */ - dataQ7[k+2] = val; /* New value in Data */ - sum += WEBRTC_SPL_UMUL(val, val); - - val = ((*fi++ + dataQ7[k+3] + 64) & 0xFF80) - dataQ7[k+3]; /* Data = Dither */ - dataQ7[k+3] = val; /* New value in Data */ - sum += WEBRTC_SPL_UMUL(val, val); - - PSpec[k>>2] = WEBRTC_SPL_RSHIFT_U32(sum, 2); - } - - /* compute correlation from power spectrum */ - CalcCorrelation(PSpec, CorrQ7); - - - /* find AR coefficients */ - /* number of bit shifts to 14-bit normalize CorrQ7[0] (leaving room for sign) */ - lft_shft = WebRtcSpl_NormW32(CorrQ7[0]) - 18; - - if (lft_shft > 0) { - for (k=0; k> -lft_shft; - } - - /* find RC coefficients */ - WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15); - - /* quantize & code RC Coef */ - status = WebRtcIsacfix_EncodeRcCoef(RCQ15, streamdata); - if (status < 0) { - return status; - } - - /* RC -> AR coefficients */ - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - /* compute ARCoef' * Corr * ARCoef in Q19 */ - nrg = 0; - for (j = 0; j <= AR_ORDER; j++) { - for (n = 0; n <= j; n++) - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[j - n] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - for (n = j+1; n <= AR_ORDER; n++) - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[n - j] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - } - - if (lft_shft > 0) - nrg >>= lft_shft; - else - nrg <<= -lft_shft; - - if(nrg>131072) - gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES >> 2, nrg); /* also shifts 31 bits to the left! 
*/ - else - gain2_Q10 = FRAMESAMPLES >> 2; - - /* quantize & code gain2_Q10 */ - if (WebRtcIsacfix_EncodeGain2(&gain2_Q10, streamdata)) - return -1; - - /* compute inverse AR magnitude spectrum */ - CalcRootInvArSpec(ARCoefQ12, gain2_Q10, invARSpecQ8); - - - /* arithmetic coding of spectrum */ - status = WebRtcIsacfix_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8, (int16_t)FRAMESAMPLES); - if ( status ) - return( status ); - - return 0; -} - - -/* Matlab's LAR definition */ -static void Rc2LarFix(const int16_t *rcQ15, int32_t *larQ17, int16_t order) { - - /* - - This is a piece-wise implemenetation of a rc2lar-function (all values in the comment - are Q15 values and are based on [0 24956/32768 30000/32768 32500/32768], i.e. - [0.76159667968750 0.91552734375000 0.99182128906250] - - x0 x1 a k x0(again) b - ================================================================================== - 0.00 0.76: 0 2.625997508581 0 0 - 0.76 0.91: 2.000012018559 7.284502668663 0.761596679688 -3.547841027073 - 0.91 0.99: 3.121320351712 31.115835041229 0.915527343750 -25.366077452148 - 0.99 1.00: 5.495270168700 686.663805654056 0.991821289063 -675.552510708011 - - The implementation is y(x)= a + (x-x0)*k, but this can be simplified to - - y(x) = a-x0*k + x*k = b + x*k, where b = a-x0*k - - akx=[0 2.625997508581 0 - 2.000012018559 7.284502668663 0.761596679688 - 3.121320351712 31.115835041229 0.915527343750 - 5.495270168700 686.663805654056 0.991821289063]; - - b = akx(:,1) - akx(:,3).*akx(:,2) - - [ 0.0 - -3.547841027073 - -25.366077452148 - -675.552510708011] - - */ - - int k; - int16_t rc; - int32_t larAbsQ17; - - for (k = 0; k < order; k++) { - - rc = WEBRTC_SPL_ABS_W16(rcQ15[k]); //Q15 - - /* Calculate larAbsQ17 in Q17 from rc in Q15 */ - - if (rc<24956) { //0.7615966 in Q15 - // (Q15*Q13)>>11 = Q17 - larAbsQ17 = rc * 21512 >> 11; - } else if (rc<30000) { //0.91552734375 in Q15 - // Q17 + (Q15*Q12)>>10 = Q17 - larAbsQ17 = -465024 + (rc * 29837 >> 10); - } else if (rc<32500) 
{ //0.99182128906250 in Q15 - // Q17 + (Q15*Q10)>>8 = Q17 - larAbsQ17 = -3324784 + (rc * 31863 >> 8); - } else { - // Q17 + (Q15*Q5)>>3 = Q17 - larAbsQ17 = -88546020 + (rc * 21973 >> 3); - } - - if (rcQ15[k]>0) { - larQ17[k] = larAbsQ17; - } else { - larQ17[k] = -larAbsQ17; - } - } -} - - -static void Lar2RcFix(const int32_t *larQ17, int16_t *rcQ15, int16_t order) { - - /* - This is a piece-wise implemenetation of a lar2rc-function - See comment in Rc2LarFix() about details. - */ - - int k; - int16_t larAbsQ11; - int32_t rc; - - for (k = 0; k < order; k++) { - - larAbsQ11 = (int16_t)WEBRTC_SPL_ABS_W32((larQ17[k] + 32) >> 6); // Q11 - - if (larAbsQ11<4097) { //2.000012018559 in Q11 - // Q11*Q16>>12 = Q15 - rc = larAbsQ11 * 24957 >> 12; - } else if (larAbsQ11<6393) { //3.121320351712 in Q11 - // (Q11*Q17 + Q13)>>13 = Q15 - rc = (larAbsQ11 * 17993 + 130738688) >> 13; - } else if (larAbsQ11<11255) { //5.495270168700 in Q11 - // (Q11*Q19 + Q30)>>15 = Q15 - rc = (larAbsQ11 * 16850 + 875329820) >> 15; - } else { - // (Q11*Q24>>16 + Q19)>>4 = Q15 - rc = (((larAbsQ11 * 24433) >> 16) + 515804) >> 4; - } - - if (larQ17[k]<=0) { - rc = -rc; - } - - rcQ15[k] = (int16_t) rc; // Q15 - } -} - -static void Poly2LarFix(int16_t *lowbandQ15, - int16_t orderLo, - int16_t *hibandQ15, - int16_t orderHi, - int16_t Nsub, - int32_t *larsQ17) { - - int k, n; - int32_t *outpQ17; - int16_t orderTot; - int32_t larQ17[MAX_ORDER]; // Size 7+6 is enough - - orderTot = (orderLo + orderHi); - outpQ17 = larsQ17; - for (k = 0; k < Nsub; k++) { - - Rc2LarFix(lowbandQ15, larQ17, orderLo); - - for (n = 0; n < orderLo; n++) - outpQ17[n] = larQ17[n]; //Q17 - - Rc2LarFix(hibandQ15, larQ17, orderHi); - - for (n = 0; n < orderHi; n++) - outpQ17[n + orderLo] = larQ17[n]; //Q17; - - outpQ17 += orderTot; - lowbandQ15 += orderLo; - hibandQ15 += orderHi; - } -} - - -static void Lar2polyFix(int32_t *larsQ17, - int16_t *lowbandQ15, - int16_t orderLo, - int16_t *hibandQ15, - int16_t orderHi, - int16_t Nsub) { - - int 
k, n; - int16_t orderTot; - int16_t *outplQ15, *outphQ15; - int32_t *inpQ17; - int16_t rcQ15[7+6]; - - orderTot = (orderLo + orderHi); - outplQ15 = lowbandQ15; - outphQ15 = hibandQ15; - inpQ17 = larsQ17; - for (k = 0; k < Nsub; k++) { - - /* gains not handled here as in the FLP version */ - - /* Low band */ - Lar2RcFix(&inpQ17[0], rcQ15, orderLo); - for (n = 0; n < orderLo; n++) - outplQ15[n] = rcQ15[n]; // Refl. coeffs - - /* High band */ - Lar2RcFix(&inpQ17[orderLo], rcQ15, orderHi); - for (n = 0; n < orderHi; n++) - outphQ15[n] = rcQ15[n]; // Refl. coeffs - - inpQ17 += orderTot; - outplQ15 += orderLo; - outphQ15 += orderHi; - } -} - -/* -Function WebRtcIsacfix_MatrixProduct1C() does one form of matrix multiplication. -It first shifts input data of one matrix, determines the right indexes for the -two matrixes, multiply them, and write the results into an output buffer. - -Note that two factors (or, multipliers) determine the initialization values of -the variable `matrix1_index` in the code. The relationship is -`matrix1_index` = `matrix1_index_factor1` * `matrix1_index_factor2`, where -`matrix1_index_factor1` is given by the argument while `matrix1_index_factor2` -is determined by the value of argument `matrix1_index_init_case`; -`matrix1_index_factor2` is the value of the outmost loop counter j (when -`matrix1_index_init_case` is 0), or the value of the middle loop counter k (when -`matrix1_index_init_case` is non-zero). - -`matrix0_index` is determined the same way. - -Arguments: - matrix0[]: matrix0 data in Q15 domain. - matrix1[]: matrix1 data. - matrix_product[]: output data (matrix product). - matrix1_index_factor1: The first of two factors determining the - initialization value of matrix1_index. - matrix0_index_factor1: The first of two factors determining the - initialization value of matrix0_index. - matrix1_index_init_case: Case number for selecting the second of two - factors determining the initialization value - of matrix1_index and matrix0_index. 
- matrix1_index_step: Incremental step for matrix1_index. - matrix0_index_step: Incremental step for matrix0_index. - inner_loop_count: Maximum count of the inner loop. - mid_loop_count: Maximum count of the intermediate loop. - shift: Left shift value for matrix1. -*/ -void WebRtcIsacfix_MatrixProduct1C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift) { - int j = 0, k = 0, n = 0; - int matrix0_index = 0, matrix1_index = 0, matrix_prod_index = 0; - int* matrix0_index_factor2 = &k; - int* matrix1_index_factor2 = &j; - if (matrix1_index_init_case != 0) { - matrix0_index_factor2 = &j; - matrix1_index_factor2 = &k; - } - - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < mid_loop_count; k++) { - int32_t sum32 = 0; - matrix0_index = matrix0_index_factor1 * (*matrix0_index_factor2); - matrix1_index = matrix1_index_factor1 * (*matrix1_index_factor2); - for (n = 0; n < inner_loop_count; n++) { - sum32 += WEBRTC_SPL_MUL_16_32_RSFT16( - matrix0[matrix0_index], matrix1[matrix1_index] * (1 << shift)); - matrix0_index += matrix0_index_step; - matrix1_index += matrix1_index_step; - } - matrix_product[matrix_prod_index] = sum32; - matrix_prod_index++; - } - } -} - -/* -Function WebRtcIsacfix_MatrixProduct2C() returns the product of two matrixes, -one of which has two columns. It first has to determine the correct index of -the first matrix before doing the actual element multiplication. - -Arguments: - matrix0[]: A matrix in Q15 domain. - matrix1[]: A matrix in Q21 domain. - matrix_product[]: Output data in Q17 domain. - matrix0_index_factor: A factor determining the initialization value - of matrix0_index. 
- matrix0_index_step: Incremental step for matrix0_index. -*/ -void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step) { - int j = 0, n = 0; - int matrix1_index = 0, matrix0_index = 0, matrix_prod_index = 0; - for (j = 0; j < SUBFRAMES; j++) { - int32_t sum32 = 0, sum32_2 = 0; - matrix1_index = 0; - matrix0_index = matrix0_index_factor * j; - for (n = SUBFRAMES; n > 0; n--) { - sum32 += (WEBRTC_SPL_MUL_16_32_RSFT16(matrix0[matrix0_index], - matrix1[matrix1_index])); - sum32_2 += (WEBRTC_SPL_MUL_16_32_RSFT16(matrix0[matrix0_index], - matrix1[matrix1_index + 1])); - matrix1_index += 2; - matrix0_index += matrix0_index_step; - } - matrix_product[matrix_prod_index] = sum32 >> 3; - matrix_product[matrix_prod_index + 1] = sum32_2 >> 3; - matrix_prod_index += 2; - } -} - -int WebRtcIsacfix_DecodeLpc(int32_t *gain_lo_hiQ17, - int16_t *LPCCoef_loQ15, - int16_t *LPCCoef_hiQ15, - Bitstr_dec *streamdata, - int16_t *outmodel) { - - int32_t larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_GAIN+KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES - int err; - - err = WebRtcIsacfix_DecodeLpcCoef(streamdata, larsQ17, gain_lo_hiQ17, outmodel); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_LPC; - - Lar2polyFix(larsQ17, LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES); - - return 0; -} - -/* decode & dequantize LPC Coef */ -int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec *streamdata, - int32_t *LPCCoefQ17, - int32_t *gain_lo_hiQ17, - int16_t *outmodel) -{ - int j, k, n; - int err; - int16_t pos, pos2, posg, poss; - int16_t gainpos; - int16_t model; - int16_t index_QQ[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int16_t tmpcoeffs_sQ10[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs_sQ17[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs2_sQ18[KLT_ORDER_SHAPE]; - int32_t sumQQ; - int16_t sumQQ16; - int32_t tmp32; - - - - 
/* entropy decoding of model number */ - err = WebRtcIsacfix_DecHistOneStepMulti(&model, streamdata, WebRtcIsacfix_kModelCdfPtr, WebRtcIsacfix_kModelInitIndex, 1); - if (err<0) // error check - return err; - - /* entropy decoding of quantization indices */ - err = WebRtcIsacfix_DecHistOneStepMulti(index_QQ, streamdata, WebRtcIsacfix_kCdfShapePtr[model], WebRtcIsacfix_kInitIndexShape[model], KLT_ORDER_SHAPE); - if (err<0) // error check - return err; - /* find quantization levels for coefficients */ - for (k=0; k> 7; // (Q10*Q15)>>7 = Q18 - pos++; - pos2++; - } - tmpcoeffs2_sQ18[poss] = sumQQ; //Q18 - poss++; - } - } - - /* right transform */ // Transpose matrix - WebRtcIsacfix_MatrixProduct2(WebRtcIsacfix_kT2GainQ15[0], tmpcoeffs2_gQ21, - tmpcoeffs_gQ17, kTIndexFactor1, kTIndexStep2); - WebRtcIsacfix_MatrixProduct1(WebRtcIsacfix_kT2ShapeQ15[model], - tmpcoeffs2_sQ18, tmpcoeffs_sQ17, kTIndexFactor1, kTIndexFactor1, - kTInitCase1, kTIndexStep3, kTIndexStep2, kTLoopCount1, kTLoopCount3, - kTMatrix1_shift0); - - /* scaling, mean addition, and gain restoration */ - gainpos = 0; - posg = 0;poss = 0;pos=0; - for (k=0; k> 11); - sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg]; - sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out - gain_lo_hiQ17[gainpos] = sumQQ; //Q17 - gainpos++; - posg++; - - // Divide by 4 and get Q17 to Q8, i.e. shift 2+9. 
- sumQQ16 = (int16_t)(tmpcoeffs_gQ17[posg] >> 11); - sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg]; - sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out - gain_lo_hiQ17[gainpos] = sumQQ; //Q17 - gainpos++; - posg++; - - /* lo band LAR coeffs */ - for (n=0; n>16 = Q17, with 1/2.1 = 0.47619047619 ~= 31208 in Q16 - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[model][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - - /* hi band LAR coeffs */ - for (n=0; n>16)<<3 = Q17, with 1/0.45 = 2.222222222222 ~= 18204 in Q13 - tmp32 = - WEBRTC_SPL_MUL_16_32_RSFT16(18204, tmpcoeffs_sQ17[poss]) * (1 << 3); - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[model][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - } - - - *outmodel=model; - - return 0; -} - -/* estimate codel length of LPC Coef */ -static int EstCodeLpcCoef(int32_t *LPCCoefQ17, - int32_t *gain_lo_hiQ17, - int16_t *model, - int32_t *sizeQ11, - Bitstr_enc *streamdata, - IsacSaveEncoderData* encData, - transcode_obj *transcodingParam) { - int j, k, n; - int16_t posQQ, pos2QQ, gainpos; - int16_t pos, poss, posg, offsg; - int16_t index_gQQ[KLT_ORDER_GAIN], index_sQQ[KLT_ORDER_SHAPE]; - int16_t index_ovr_gQQ[KLT_ORDER_GAIN], index_ovr_sQQ[KLT_ORDER_SHAPE]; - int32_t BitsQQ; - - int16_t tmpcoeffs_gQ6[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_sQ17[KLT_ORDER_SHAPE]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_sQ17[KLT_ORDER_SHAPE]; - int32_t sumQQ; - int32_t tmp32; - int16_t sumQQ16; - int status = 0; - - /* write LAR coefficients to statistics file */ - /* Save data for creation of multiple bitstreams (and transcoding) */ - if (encData != NULL) { - for (k=0; kLPCcoeffs_g[KLT_ORDER_GAIN*encData->startIdx + k] = gain_lo_hiQ17[k]; - } - } - - /* log gains, mean removal and scaling */ - posg = 0;poss = 0;pos=0; gainpos=0; - - for (k=0; k WebRtcIsacfix_kMaxIndGain[k]) { - index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k]; - } - index_ovr_gQQ[k] = 
WebRtcIsacfix_kOffsetGain[0][k]+index_gQQ[k]; - posQQ = WebRtcIsacfix_kOfLevelsGain[0] + index_ovr_gQQ[k]; - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->LPCindex_g[KLT_ORDER_GAIN*encData->startIdx + k] = index_gQQ[k]; - } - - /* determine number of bits */ - sumQQ = WebRtcIsacfix_kCodeLenGainQ11[posQQ]; //Q11 - BitsQQ += sumQQ; - } - - for (k=0; k WebRtcIsacfix_kMaxIndShape[k]) - index_sQQ[k] = WebRtcIsacfix_kMaxIndShape[k]; - index_ovr_sQQ[k] = WebRtcIsacfix_kOffsetShape[0][k]+index_sQQ[k]; - - posQQ = WebRtcIsacfix_kOfLevelsShape[0] + index_ovr_sQQ[k]; - sumQQ = WebRtcIsacfix_kCodeLenShapeQ11[posQQ]; //Q11 - BitsQQ += sumQQ; - } - - - - *model = 0; - *sizeQ11=BitsQQ; - - /* entropy coding of model number */ - status = WebRtcIsacfix_EncHistMulti(streamdata, model, WebRtcIsacfix_kModelCdfPtr, 1); - if (status < 0) { - return status; - } - - /* entropy coding of quantization indices - shape only */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index_sQQ, WebRtcIsacfix_kCdfShapePtr[0], KLT_ORDER_SHAPE); - if (status < 0) { - return status; - } - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - for (k=0; kLPCindex_s[KLT_ORDER_SHAPE*encData->startIdx + k] = index_sQQ[k]; - } - } - /* save the state of the bitstream object 'streamdata' for the possible bit-rate reduction */ - transcodingParam->full = streamdata->full; - transcodingParam->stream_index = streamdata->stream_index; - transcodingParam->streamval = streamdata->streamval; - transcodingParam->W_upper = streamdata->W_upper; - transcodingParam->beforeLastWord = streamdata->stream[streamdata->stream_index-1]; - transcodingParam->lastWord = streamdata->stream[streamdata->stream_index]; - - /* entropy coding of index */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index_gQQ, WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN); - if (status < 0) { - return status; - } - - /* find quantization levels for shape coefficients */ - for (k=0; 
k>16 = Q17, with 1/2.1 = 0.47619047619 ~= 31208 in Q16 - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[0][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - - /* hi band LAR coeffs */ - for (n=0; n>16)<<3 = Q17, with 1/0.45 = 2.222222222222 ~= 18204 in Q13 - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(18204, tmpcoeffs_sQ17[poss]) << 3; - tmp32 = tmp32 + WebRtcIsacfix_kMeansShapeQ17[0][poss]; // Q17+Q17 = Q17 - LPCCoefQ17[pos] = tmp32; - } - - } - - //to update tmpcoeffs_gQ17 to the proper state - for (k=0; k> (16 - 1) = Q17; Q17 << 4 = Q21. - sumQQ = (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][0], - tmpcoeffs_gQ17[offsg]) << 1); - sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][1], - tmpcoeffs_gQ17[offsg + 1]) << 1); - tmpcoeffs2_gQ21[posg] = sumQQ << 4; - posg++; - - sumQQ = (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][2], - tmpcoeffs_gQ17[offsg]) << 1); - sumQQ += (WEBRTC_SPL_MUL_16_32_RSFT16(WebRtcIsacfix_kT1GainQ15[0][3], - tmpcoeffs_gQ17[offsg + 1]) << 1); - tmpcoeffs2_gQ21[posg] = sumQQ << 4; - posg++; - offsg += 2; - } - - /* right transform */ // Transpose matrix - WebRtcIsacfix_MatrixProduct2(WebRtcIsacfix_kT2GainQ15[0], tmpcoeffs2_gQ21, - tmpcoeffs_gQ17, kTIndexFactor1, kTIndexStep2); - - /* scaling, mean addition, and gain restoration */ - posg = 0; - gainpos = 0; - for (k=0; k<2*SUBFRAMES; k++) { - - // Divide by 4 and get Q17 to Q8, i.e. shift 2+9. 
- sumQQ16 = (int16_t)(tmpcoeffs_gQ17[posg] >> 11); - sumQQ16 += WebRtcIsacfix_kMeansGainQ8[0][posg]; - sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out - gain_lo_hiQ17[gainpos] = sumQQ; //Q17 - - gainpos++; - pos++;posg++; - } - - return 0; -} - -int WebRtcIsacfix_EstCodeLpcGain(int32_t *gain_lo_hiQ17, - Bitstr_enc *streamdata, - IsacSaveEncoderData* encData) { - int j, k; - int16_t posQQ, pos2QQ, gainpos; - int16_t posg; - int16_t index_gQQ[KLT_ORDER_GAIN]; - - int16_t tmpcoeffs_gQ6[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int32_t sumQQ; - int status = 0; - - /* write LAR coefficients to statistics file */ - /* Save data for creation of multiple bitstreams (and transcoding) */ - if (encData != NULL) { - for (k=0; kLPCcoeffs_g[KLT_ORDER_GAIN*encData->startIdx + k] = gain_lo_hiQ17[k]; - } - } - - /* log gains, mean removal and scaling */ - posg = 0; gainpos = 0; - - for (k=0; k WebRtcIsacfix_kMaxIndGain[k]) { - index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k]; - } - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->LPCindex_g[KLT_ORDER_GAIN*encData->startIdx + k] = index_gQQ[k]; - } - } - - /* entropy coding of index */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index_gQQ, WebRtcIsacfix_kCdfGainPtr[0], KLT_ORDER_GAIN); - if (status < 0) { - return status; - } - - return 0; -} - - -int WebRtcIsacfix_EncodeLpc(int32_t *gain_lo_hiQ17, - int16_t *LPCCoef_loQ15, - int16_t *LPCCoef_hiQ15, - int16_t *model, - int32_t *sizeQ11, - Bitstr_enc *streamdata, - IsacSaveEncoderData* encData, - transcode_obj *transcodeParam) -{ - int status = 0; - int32_t larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES - // = (6+12)*6 == 108 - - Poly2LarFix(LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES, larsQ17); - - status = EstCodeLpcCoef(larsQ17, gain_lo_hiQ17, model, sizeQ11, - streamdata, encData, transcodeParam); - if (status < 0) { - return (status); 
- } - - Lar2polyFix(larsQ17, LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES); - - return 0; -} - - -/* decode & dequantize RC */ -int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec *streamdata, int16_t *RCQ15) -{ - int k, err; - int16_t index[AR_ORDER]; - - /* entropy decoding of quantization indices */ - err = WebRtcIsacfix_DecHistOneStepMulti(index, streamdata, WebRtcIsacfix_kRcCdfPtr, WebRtcIsacfix_kRcInitInd, AR_ORDER); - if (err<0) // error check - return err; - - /* find quantization levels for reflection coefficients */ - for (k=0; k WebRtcIsacfix_kRcBound[index[k]]) - { - while (RCQ15[k] > WebRtcIsacfix_kRcBound[index[k] + 1]) - index[k]++; - } - else - { - while (RCQ15[k] < WebRtcIsacfix_kRcBound[--index[k]]) ; - } - - RCQ15[k] = *(WebRtcIsacfix_kRcLevPtr[k] + index[k]); - } - - - /* entropy coding of quantization indices */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index, WebRtcIsacfix_kRcCdfPtr, AR_ORDER); - - /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */ - return status; -} - - -/* decode & dequantize squared Gain */ -int WebRtcIsacfix_DecodeGain2(Bitstr_dec *streamdata, int32_t *gainQ10) -{ - int err; - int16_t index; - - /* entropy decoding of quantization index */ - err = WebRtcIsacfix_DecHistOneStepMulti( - &index, - streamdata, - WebRtcIsacfix_kGainPtr, - WebRtcIsacfix_kGainInitInd, - 1); - /* error check */ - if (err<0) { - return err; - } - - /* find quantization level */ - *gainQ10 = WebRtcIsacfix_kGain2Lev[index]; - - return 0; -} - - - -/* quantize & code squared Gain */ -int WebRtcIsacfix_EncodeGain2(int32_t *gainQ10, Bitstr_enc *streamdata) -{ - int16_t index; - int status = 0; - - /* find quantization index */ - index = WebRtcIsacfix_kGainInitInd[0]; - if (*gainQ10 > WebRtcIsacfix_kGain2Bound[index]) - { - while (*gainQ10 > WebRtcIsacfix_kGain2Bound[index + 1]) - index++; - } - else - { - while (*gainQ10 < WebRtcIsacfix_kGain2Bound[--index]) ; - } - - /* dequantize */ - *gainQ10 = 
WebRtcIsacfix_kGain2Lev[index]; - - /* entropy coding of quantization index */ - status = WebRtcIsacfix_EncHistMulti(streamdata, &index, WebRtcIsacfix_kGainPtr, 1); - - /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */ - return status; -} - - -/* code and decode Pitch Gains and Lags functions */ - -/* decode & dequantize Pitch Gains */ -int WebRtcIsacfix_DecodePitchGain(Bitstr_dec *streamdata, int16_t *PitchGains_Q12) -{ - int err; - int16_t index_comb; - const uint16_t *pitch_gain_cdf_ptr[1]; - - /* entropy decoding of quantization indices */ - *pitch_gain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf; - err = WebRtcIsacfix_DecHistBisectMulti(&index_comb, streamdata, pitch_gain_cdf_ptr, WebRtcIsacfix_kCdfTableSizeGain, 1); - /* error check, Q_mean_Gain.. tables are of size 144 */ - if ((err < 0) || (index_comb < 0) || (index_comb >= 144)) - return -ISAC_RANGE_ERROR_DECODE_PITCH_GAIN; - - /* unquantize back to pitch gains by table look-up */ - PitchGains_Q12[0] = WebRtcIsacfix_kPitchGain1[index_comb]; - PitchGains_Q12[1] = WebRtcIsacfix_kPitchGain2[index_comb]; - PitchGains_Q12[2] = WebRtcIsacfix_kPitchGain3[index_comb]; - PitchGains_Q12[3] = WebRtcIsacfix_kPitchGain4[index_comb]; - - return 0; -} - - -/* quantize & code Pitch Gains */ -int WebRtcIsacfix_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData) { - int k,j; - int16_t SQ15[PITCH_SUBFRAMES]; - int16_t index[3]; - int16_t index_comb; - const uint16_t *pitch_gain_cdf_ptr[1]; - int32_t CQ17; - int status = 0; - - - /* get the approximate arcsine (almost linear)*/ - for (k=0; k> 2); // Q15 - - - /* find quantization index; only for the first three transform coefficients */ - for (k=0; k<3; k++) - { - /* transform */ - CQ17=0; - for (j=0; j> 10; // Q17 - } - - index[k] = (int16_t)((CQ17 + 8192)>>14); // Rounding and scaling with stepsize (=1/0.125=8) - - /* check that the index is not outside the boundaries of the table */ - if (index[k] 
< WebRtcIsacfix_kLowerlimiGain[k]) index[k] = WebRtcIsacfix_kLowerlimiGain[k]; - else if (index[k] > WebRtcIsacfix_kUpperlimitGain[k]) index[k] = WebRtcIsacfix_kUpperlimitGain[k]; - index[k] -= WebRtcIsacfix_kLowerlimiGain[k]; - } - - /* calculate unique overall index */ - index_comb = (int16_t)(WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[0], index[0]) + - WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[1], index[1]) + index[2]); - - /* unquantize back to pitch gains by table look-up */ - // (Y) - PitchGains_Q12[0] = WebRtcIsacfix_kPitchGain1[index_comb]; - PitchGains_Q12[1] = WebRtcIsacfix_kPitchGain2[index_comb]; - PitchGains_Q12[2] = WebRtcIsacfix_kPitchGain3[index_comb]; - PitchGains_Q12[3] = WebRtcIsacfix_kPitchGain4[index_comb]; - - - /* entropy coding of quantization pitch gains */ - *pitch_gain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf; - status = WebRtcIsacfix_EncHistMulti(streamdata, &index_comb, pitch_gain_cdf_ptr, 1); - if (status < 0) { - return status; - } - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->pitchGain_index[encData->startIdx] = index_comb; - } - - return 0; -} - - - -/* Pitch LAG */ - - -/* decode & dequantize Pitch Lags */ -int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata, - int16_t *PitchGain_Q12, - int16_t *PitchLags_Q7) -{ - int k, err; - int16_t index[PITCH_SUBFRAMES]; - const int16_t *mean_val2Q10, *mean_val4Q10; - - const int16_t *lower_limit; - const uint16_t *init_index; - const uint16_t *cdf_size; - const uint16_t **cdf; - - int32_t meangainQ12; - int32_t CQ11, CQ10,tmp32a,tmp32b; - int16_t shft; - - meangainQ12=0; - for (k = 0; k < 4; k++) - meangainQ12 += PitchGain_Q12[k]; - - meangainQ12 >>= 2; // Get average. 
- - /* voicing classificiation */ - if (meangainQ12 <= 819) { // mean_gain < 0.2 - shft = -1; // StepSize=2.0; - cdf = WebRtcIsacfix_kPitchLagPtrLo; - cdf_size = WebRtcIsacfix_kPitchLagSizeLo; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Lo; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Lo; - lower_limit = WebRtcIsacfix_kLowerLimitLo; - init_index = WebRtcIsacfix_kInitIndLo; - } else if (meangainQ12 <= 1638) { // mean_gain < 0.4 - shft = 0; // StepSize=1.0; - cdf = WebRtcIsacfix_kPitchLagPtrMid; - cdf_size = WebRtcIsacfix_kPitchLagSizeMid; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Mid; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Mid; - lower_limit = WebRtcIsacfix_kLowerLimitMid; - init_index = WebRtcIsacfix_kInitIndMid; - } else { - shft = 1; // StepSize=0.5; - cdf = WebRtcIsacfix_kPitchLagPtrHi; - cdf_size = WebRtcIsacfix_kPitchLagSizeHi; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Hi; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Hi; - lower_limit = WebRtcIsacfix_kLowerLimitHi; - init_index = WebRtcIsacfix_kInitIndHi; - } - - /* entropy decoding of quantization indices */ - err = WebRtcIsacfix_DecHistBisectMulti(index, streamdata, cdf, cdf_size, 1); - if ((err<0) || (index[0]<0)) // error check - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - - err = WebRtcIsacfix_DecHistOneStepMulti(index+1, streamdata, cdf+1, init_index, 3); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - - - /* unquantize back to transform coefficients and do the inverse transform: S = T'*C */ - CQ11 = ((int32_t)index[0] + lower_limit[0]); // Q0 - CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11 - for (k=0; k> 5); - } - - CQ10 = mean_val2Q10[index[1]]; - for (k=0; k> 10; - PitchLags_Q7[k] += (int16_t)(tmp32b >> 5); - } - - CQ10 = mean_val4Q10[index[3]]; - for (k=0; k> 10; - PitchLags_Q7[k] += (int16_t)(tmp32b >> 5); - } - - return 0; -} - - - -/* quantize & code Pitch Lags */ -int WebRtcIsacfix_EncodePitchLag(int16_t* PitchLagsQ7, - int16_t* PitchGain_Q12, - Bitstr_enc* 
streamdata, - IsacSaveEncoderData* encData) { - int k, j; - int16_t index[PITCH_SUBFRAMES]; - int32_t meangainQ12, CQ17; - int32_t CQ11, CQ10,tmp32a; - - const int16_t *mean_val2Q10,*mean_val4Q10; - const int16_t *lower_limit, *upper_limit; - const uint16_t **cdf; - int16_t shft, tmp16b; - int32_t tmp32b; - int status = 0; - - /* compute mean pitch gain */ - meangainQ12=0; - for (k = 0; k < 4; k++) - meangainQ12 += PitchGain_Q12[k]; - - meangainQ12 >>= 2; - - /* Save data for creation of multiple bitstreams */ - if (encData != NULL) { - encData->meanGain[encData->startIdx] = meangainQ12; - } - - /* voicing classificiation */ - if (meangainQ12 <= 819) { // mean_gain < 0.2 - shft = -1; // StepSize=2.0; - cdf = WebRtcIsacfix_kPitchLagPtrLo; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Lo; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Lo; - lower_limit = WebRtcIsacfix_kLowerLimitLo; - upper_limit = WebRtcIsacfix_kUpperLimitLo; - } else if (meangainQ12 <= 1638) { // mean_gain < 0.4 - shft = 0; // StepSize=1.0; - cdf = WebRtcIsacfix_kPitchLagPtrMid; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Mid; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Mid; - lower_limit = WebRtcIsacfix_kLowerLimitMid; - upper_limit = WebRtcIsacfix_kUpperLimitMid; - } else { - shft = 1; // StepSize=0.5; - cdf = WebRtcIsacfix_kPitchLagPtrHi; - mean_val2Q10 = WebRtcIsacfix_kMeanLag2Hi; - mean_val4Q10 = WebRtcIsacfix_kMeanLag4Hi; - lower_limit = WebRtcIsacfix_kLowerLimitHi; - upper_limit = WebRtcIsacfix_kUpperLimitHi; - } - - /* find quantization index */ - for (k=0; k<4; k++) - { - /* transform */ - CQ17=0; - for (j=0; j> 2; // Q17 - - CQ17 = WEBRTC_SPL_SHIFT_W32(CQ17,shft); // Scale with StepSize - - /* quantize */ - tmp16b = (int16_t)((CQ17 + 65536) >> 17); - index[k] = tmp16b; - - /* check that the index is not outside the boundaries of the table */ - if (index[k] < lower_limit[k]) index[k] = lower_limit[k]; - else if (index[k] > upper_limit[k]) index[k] = upper_limit[k]; - index[k] -= lower_limit[k]; - - /* Save 
data for creation of multiple bitstreams */ - if(encData != NULL) { - encData->pitchIndex[PITCH_SUBFRAMES*encData->startIdx + k] = index[k]; - } - } - - /* unquantize back to transform coefficients and do the inverse transform: S = T'*C */ - CQ11 = (index[0] + lower_limit[0]); // Q0 - CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11 - - for (k=0; k> 5); // Q7. - } - - CQ10 = mean_val2Q10[index[1]]; - for (k=0; k> 10; - PitchLagsQ7[k] += (int16_t)(tmp32b >> 5); // Q7. - } - - CQ10 = mean_val4Q10[index[3]]; - for (k=0; k> 10; - PitchLagsQ7[k] += (int16_t)(tmp32b >> 5); // Q7. - } - - /* entropy coding of quantization pitch lags */ - status = WebRtcIsacfix_EncHistMulti(streamdata, index, cdf, PITCH_SUBFRAMES); - - /* If error in WebRtcIsacfix_EncHistMulti(), status will be negative, otherwise 0 */ - return status; -} - - - -/* Routines for inband signaling of bandwitdh estimation */ -/* Histograms based on uniform distribution of indices */ -/* Move global variables later! 
*/ - - -/* cdf array for frame length indicator */ -const uint16_t kFrameLenCdf[4] = { - 0, 21845, 43690, 65535}; - -/* pointer to cdf array for frame length indicator */ -const uint16_t * const kFrameLenCdfPtr[1] = {kFrameLenCdf}; - -/* initial cdf index for decoder of frame length indicator */ -const uint16_t kFrameLenInitIndex[1] = {1}; - - -int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec *streamdata, - size_t *framesamples) -{ - - int err; - int16_t frame_mode; - - err = 0; - /* entropy decoding of frame length [1:30ms,2:60ms] */ - err = WebRtcIsacfix_DecHistOneStepMulti(&frame_mode, streamdata, kFrameLenCdfPtr, kFrameLenInitIndex, 1); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH; - - switch(frame_mode) { - case 1: - *framesamples = 480; /* 30ms */ - break; - case 2: - *framesamples = 960; /* 60ms */ - break; - default: - err = -ISAC_DISALLOWED_FRAME_MODE_DECODER; - } - - return err; -} - - -int WebRtcIsacfix_EncodeFrameLen(int16_t framesamples, Bitstr_enc *streamdata) { - - int status; - int16_t frame_mode; - - status = 0; - frame_mode = 0; - /* entropy coding of frame length [1:480 samples,2:960 samples] */ - switch(framesamples) { - case 480: - frame_mode = 1; - break; - case 960: - frame_mode = 2; - break; - default: - status = - ISAC_DISALLOWED_FRAME_MODE_ENCODER; - } - - if (status < 0) - return status; - - status = WebRtcIsacfix_EncHistMulti(streamdata, &frame_mode, kFrameLenCdfPtr, 1); - - return status; -} - -/* cdf array for estimated bandwidth */ -const uint16_t kBwCdf[25] = { - 0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037, - 32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074, - 62804, 65535}; - -/* pointer to cdf array for estimated bandwidth */ -const uint16_t * const kBwCdfPtr[1] = {kBwCdf}; - -/* initial cdf index for decoder of estimated bandwidth*/ -const uint16_t kBwInitIndex[1] = {7}; - - -int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec *streamdata, int16_t 
*BWno) { - - int err; - int16_t BWno32; - - /* entropy decoding of sender's BW estimation [0..23] */ - err = WebRtcIsacfix_DecHistOneStepMulti(&BWno32, streamdata, kBwCdfPtr, kBwInitIndex, 1); - if (err<0) // error check - return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH; - *BWno = (int16_t)BWno32; - return err; - -} - - -int WebRtcIsacfix_EncodeReceiveBandwidth(int16_t *BWno, Bitstr_enc *streamdata) -{ - int status = 0; - /* entropy encoding of receiver's BW estimation [0..23] */ - status = WebRtcIsacfix_EncHistMulti(streamdata, BWno, kBwCdfPtr, 1); - - return status; -} - -/* estimate codel length of LPC Coef */ -void WebRtcIsacfix_TranscodeLpcCoef(int32_t *gain_lo_hiQ17, - int16_t *index_gQQ) { - int j, k; - int16_t posQQ, pos2QQ; - int16_t posg, offsg, gainpos; - int32_t tmpcoeffs_gQ6[KLT_ORDER_GAIN]; - int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN]; - int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN]; - int32_t sumQQ; - - - /* log gains, mean removal and scaling */ - posg = 0; gainpos=0; - - for (k=0; k WebRtcIsacfix_kMaxIndGain[k]) { - index_gQQ[k] = WebRtcIsacfix_kMaxIndGain[k]; - } - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h b/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h deleted file mode 100644 index ae11394f7c..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * entropy_coding.h - * - * This header file contains all of the functions used to arithmetically - * encode the iSAC bistream - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -/* decode complex spectrum (return number of bytes in stream) */ -int WebRtcIsacfix_DecodeSpec(Bitstr_dec* streamdata, - int16_t* frQ7, - int16_t* fiQ7, - int16_t AvgPitchGain_Q12); - -/* encode complex spectrum */ -int WebRtcIsacfix_EncodeSpec(const int16_t* fr, - const int16_t* fi, - Bitstr_enc* streamdata, - int16_t AvgPitchGain_Q12); - -/* decode & dequantize LPC Coef */ -int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec* streamdata, - int32_t* LPCCoefQ17, - int32_t* gain_lo_hiQ17, - int16_t* outmodel); - -int WebRtcIsacfix_DecodeLpc(int32_t* gain_lo_hiQ17, - int16_t* LPCCoef_loQ15, - int16_t* LPCCoef_hiQ15, - Bitstr_dec* streamdata, - int16_t* outmodel); - -/* quantize & code LPC Coef */ -int WebRtcIsacfix_EncodeLpc(int32_t* gain_lo_hiQ17, - int16_t* LPCCoef_loQ15, - int16_t* LPCCoef_hiQ15, - int16_t* model, - int32_t* sizeQ11, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData, - transcode_obj* transcodeParam); - -int WebRtcIsacfix_EstCodeLpcGain(int32_t* gain_lo_hiQ17, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData); -/* decode & dequantize RC */ -int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec* streamdata, int16_t* RCQ15); - -/* quantize & code RC */ -int WebRtcIsacfix_EncodeRcCoef(int16_t* RCQ15, Bitstr_enc* streamdata); - -/* decode & dequantize squared Gain */ -int WebRtcIsacfix_DecodeGain2(Bitstr_dec* streamdata, int32_t* Gain2); - -/* quantize & code squared Gain (input is squared gain) */ -int WebRtcIsacfix_EncodeGain2(int32_t* gain2, Bitstr_enc* streamdata); - -int WebRtcIsacfix_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData); - -int 
WebRtcIsacfix_EncodePitchLag(int16_t* PitchLagQ7, - int16_t* PitchGain_Q12, - Bitstr_enc* streamdata, - IsacSaveEncoderData* encData); - -int WebRtcIsacfix_DecodePitchGain(Bitstr_dec* streamdata, - int16_t* PitchGain_Q12); - -int WebRtcIsacfix_DecodePitchLag(Bitstr_dec* streamdata, - int16_t* PitchGain_Q12, - int16_t* PitchLagQ7); - -int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec* streamdata, size_t* framelength); - -int WebRtcIsacfix_EncodeFrameLen(int16_t framelength, Bitstr_enc* streamdata); - -int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec* streamdata, int16_t* BWno); - -int WebRtcIsacfix_EncodeReceiveBandwidth(int16_t* BWno, Bitstr_enc* streamdata); - -void WebRtcIsacfix_TranscodeLpcCoef(int32_t* tmpcoeffs_gQ6, int16_t* index_gQQ); - -// Pointer functions for LPC transforms. - -typedef void (*MatrixProduct1)(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); -typedef void (*MatrixProduct2)(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); - -extern MatrixProduct1 WebRtcIsacfix_MatrixProduct1; -extern MatrixProduct2 WebRtcIsacfix_MatrixProduct2; - -void WebRtcIsacfix_MatrixProduct1C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); -void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); - -#if defined(WEBRTC_HAS_NEON) -void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[], - const int32_t matrix1[], - 
int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); -void WebRtcIsacfix_MatrixProduct2Neon(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); -#endif - -#if defined(MIPS32_LE) -void WebRtcIsacfix_MatrixProduct1MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); - -void WebRtcIsacfix_MatrixProduct2MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); -#endif - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ diff --git a/modules/audio_coding/codecs/isac/fix/source/entropy_coding_mips.c b/modules/audio_coding/codecs/isac/fix/source/entropy_coding_mips.c deleted file mode 100644 index a66a43ef99..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/entropy_coding_mips.c +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// MIPS optimization of the function WebRtcIsacfix_MatrixProduct1. 
-// Bit-exact with the function WebRtcIsacfix_MatrixProduct1C from -// entropy_coding.c file. -void WebRtcIsacfix_MatrixProduct1MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift) { - if (matrix1_index_init_case != 0) { - int j = SUBFRAMES, k = 0, n = 0; - int32_t r0, r1, r2, sum32; - int32_t* product_start = matrix_product; - int32_t* product_ptr; - const uint32_t product_step = 4 * mid_loop_count; - const uint32_t matrix0_step = 2 * matrix0_index_step; - const uint32_t matrix1_step = 4 * matrix1_index_step; - const uint32_t matrix0_step2 = 2 * matrix0_index_factor1; - const uint32_t matrix1_step2 = 4 * matrix1_index_factor1; - const int16_t* matrix0_start = matrix0; - const int32_t* matrix1_start = matrix1; - int16_t* matrix0_ptr; - int32_t* matrix1_ptr; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "addu %[product_ptr], %[product_start], $0 \n\t" - "addu %[k], %[product_step], $0 \n\t" - "addiu %[j], %[j], -1 \n\t" - "addu %[matrix1_start], %[matrix1], $0 \n\t" - "2: \n\t" - "addu %[matrix1_ptr], %[matrix1_start], $0 \n\t" - "addu %[matrix0_ptr], %[matrix0_start], $0 \n\t" - "addu %[n], %[inner_loop_count], $0 \n\t" - "mul %[sum32], $0, $0 \n\t" - "3: \n\t" - "lw %[r0], 0(%[matrix1_ptr]) \n\t" - "lh %[r1], 0(%[matrix0_ptr]) \n\t" - "addu %[matrix1_ptr], %[matrix1_ptr], %[matrix1_step] \n\t" - "sllv %[r0], %[r0], %[shift] \n\t" - "andi %[r2], %[r0], 0xffff \n\t" - "sra %[r2], %[r2], 1 \n\t" - "mul %[r2], %[r2], %[r1] \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r0], %[r0], %[r1] \n\t" - "addu %[matrix0_ptr], %[matrix0_ptr], %[matrix0_step] \n\t" - "addiu %[n], %[n], -1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r2], %[r2], 15 \n\t" -#else - "addiu 
%[r2], %[r2], 0x4000 \n\t" - "sra %[r2], %[r2], 15 \n\t" -#endif - "addu %[sum32], %[sum32], %[r2] \n\t" - "bgtz %[n], 3b \n\t" - " addu %[sum32], %[sum32], %[r0] \n\t" - "addiu %[k], %[k], -4 \n\t" - "addu %[matrix1_start], %[matrix1_start], %[matrix1_step2] \n\t" - "sw %[sum32], 0(%[product_ptr]) \n\t" - "bgtz %[k], 2b \n\t" - " addiu %[product_ptr], %[product_ptr], 4 \n\t" - "addu %[matrix0_start], %[matrix0_start], %[matrix0_step2] \n\t" - "bgtz %[j], 1b \n\t" - " addu %[product_start], %[product_start], %[product_step] \n\t" - ".set pop \n\t" - : [product_ptr] "=&r" (product_ptr), [product_start] "+r" (product_start), - [k] "=&r" (k), [j] "+r" (j), [matrix1_start] "=&r"(matrix1_start), - [matrix1_ptr] "=&r" (matrix1_ptr), [matrix0_ptr] "=&r" (matrix0_ptr), - [matrix0_start] "+r" (matrix0_start), [n] "=&r" (n), [r0] "=&r" (r0), - [sum32] "=&r" (sum32), [r1] "=&r" (r1),[r2] "=&r" (r2) - : [product_step] "r" (product_step), [matrix1] "r" (matrix1), - [inner_loop_count] "r" (inner_loop_count), - [matrix1_step] "r" (matrix1_step), [shift] "r" (shift), - [matrix0_step] "r" (matrix0_step), [matrix1_step2] "r" (matrix1_step2), - [matrix0_step2] "r" (matrix0_step2) - : "hi", "lo", "memory" - ); - } else { - int j = SUBFRAMES, k = 0, n = 0; - int32_t r0, r1, r2, sum32; - int32_t* product_start = matrix_product; - int32_t* product_ptr; - const uint32_t product_step = 4 * mid_loop_count; - const uint32_t matrix0_step = 2 * matrix0_index_step; - const uint32_t matrix1_step = 4 * matrix1_index_step; - const uint32_t matrix0_step2 = 2 * matrix0_index_factor1; - const uint32_t matrix1_step2 = 4 * matrix1_index_factor1; - const int16_t* matrix0_start = matrix0; - const int32_t* matrix1_start = matrix1; - int16_t* matrix0_ptr; - int32_t* matrix1_ptr; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "addu %[product_ptr], %[product_start], $0 \n\t" - "addu %[k], %[product_step], $0 \n\t" - "addiu %[j], %[j], -1 \n\t" - "addu %[matrix0_start], 
%[matrix0], $0 \n\t" - "2: \n\t" - "addu %[matrix1_ptr], %[matrix1_start], $0 \n\t" - "addu %[matrix0_ptr], %[matrix0_start], $0 \n\t" - "addu %[n], %[inner_loop_count], $0 \n\t" - "mul %[sum32], $0, $0 \n\t" - "3: \n\t" - "lw %[r0], 0(%[matrix1_ptr]) \n\t" - "lh %[r1], 0(%[matrix0_ptr]) \n\t" - "addu %[matrix1_ptr], %[matrix1_ptr], %[matrix1_step] \n\t" - "sllv %[r0], %[r0], %[shift] \n\t" - "andi %[r2], %[r0], 0xffff \n\t" - "sra %[r2], %[r2], 1 \n\t" - "mul %[r2], %[r2], %[r1] \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r0], %[r0], %[r1] \n\t" - "addu %[matrix0_ptr], %[matrix0_ptr], %[matrix0_step] \n\t" - "addiu %[n], %[n], -1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r2], %[r2], 15 \n\t" -#else - "addiu %[r2], %[r2], 0x4000 \n\t" - "sra %[r2], %[r2], 15 \n\t" -#endif - "addu %[sum32], %[sum32], %[r2] \n\t" - "bgtz %[n], 3b \n\t" - " addu %[sum32], %[sum32], %[r0] \n\t" - "addiu %[k], %[k], -4 \n\t" - "addu %[matrix0_start], %[matrix0_start], %[matrix0_step2] \n\t" - "sw %[sum32], 0(%[product_ptr]) \n\t" - "bgtz %[k], 2b \n\t" - " addiu %[product_ptr], %[product_ptr], 4 \n\t" - "addu %[matrix1_start], %[matrix1_start], %[matrix1_step2] \n\t" - "bgtz %[j], 1b \n\t" - " addu %[product_start], %[product_start], %[product_step] \n\t" - ".set pop \n\t" - : [product_ptr] "=&r" (product_ptr), [product_start] "+r" (product_start), - [k] "=&r" (k), [j] "+r" (j), [matrix1_start] "+r"(matrix1_start), - [matrix1_ptr] "=&r" (matrix1_ptr), [matrix0_ptr] "=&r" (matrix0_ptr), - [matrix0_start] "=&r" (matrix0_start), [n] "=&r" (n), [r0] "=&r" (r0), - [sum32] "=&r" (sum32), [r1] "=&r" (r1),[r2] "=&r" (r2) - : [product_step] "r" (product_step), [matrix0] "r" (matrix0), - [inner_loop_count] "r" (inner_loop_count), - [matrix1_step] "r" (matrix1_step), [shift] "r" (shift), - [matrix0_step] "r" (matrix0_step), [matrix1_step2] "r" (matrix1_step2), - [matrix0_step2] "r" (matrix0_step2) - : "hi", "lo", "memory" - ); - } -} - -// MIPS optimization of the function 
WebRtcIsacfix_MatrixProduct2. -// Bit-exact with the function WebRtcIsacfix_MatrixProduct2C from -// entropy_coding.c file. -void WebRtcIsacfix_MatrixProduct2MIPS(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step) { - int j = 0, n = 0; - int loop_count = SUBFRAMES; - const int16_t* matrix0_ptr; - const int32_t* matrix1_ptr; - const int16_t* matrix0_start = matrix0; - const int matrix0_step = 2 * matrix0_index_step; - const int matrix0_step2 = 2 * matrix0_index_factor; - int32_t r0, r1, r2, r3, r4, sum32, sum32_2; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addu %[j], %[loop_count], $0 \n\t" - "addu %[matrix0_start], %[matrix0], $0 \n\t" - "1: \n\t" - "addu %[matrix1_ptr], %[matrix1], $0 \n\t" - "addu %[matrix0_ptr], %[matrix0_start], $0 \n\t" - "addu %[n], %[loop_count], $0 \n\t" - "mul %[sum32], $0, $0 \n\t" - "mul %[sum32_2], $0, $0 \n\t" - "2: \n\t" - "lw %[r0], 0(%[matrix1_ptr]) \n\t" - "lw %[r1], 4(%[matrix1_ptr]) \n\t" - "lh %[r2], 0(%[matrix0_ptr]) \n\t" - "andi %[r3], %[r0], 0xffff \n\t" - "sra %[r3], %[r3], 1 \n\t" - "mul %[r3], %[r3], %[r2] \n\t" - "andi %[r4], %[r1], 0xffff \n\t" - "sra %[r4], %[r4], 1 \n\t" - "mul %[r4], %[r4], %[r2] \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r0], %[r0], %[r2] \n\t" - "sra %[r1], %[r1], 16 \n\t" - "mul %[r1], %[r1], %[r2] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r3], %[r3], 15 \n\t" - "shra_r.w %[r4], %[r4], 15 \n\t" -#else - "addiu %[r3], %[r3], 0x4000 \n\t" - "sra %[r3], %[r3], 15 \n\t" - "addiu %[r4], %[r4], 0x4000 \n\t" - "sra %[r4], %[r4], 15 \n\t" -#endif - "addiu %[matrix1_ptr], %[matrix1_ptr], 8 \n\t" - "addu %[matrix0_ptr], %[matrix0_ptr], %[matrix0_step] \n\t" - "addiu %[n], %[n], -1 \n\t" - "addu %[sum32], %[sum32], %[r3] \n\t" - "addu %[sum32_2], %[sum32_2], %[r4] \n\t" - "addu %[sum32], %[sum32], %[r0] \n\t" - "bgtz %[n], 2b \n\t" - " addu %[sum32_2], %[sum32_2], %[r1] \n\t" - 
"sra %[sum32], %[sum32], 3 \n\t" - "sra %[sum32_2], %[sum32_2], 3 \n\t" - "addiu %[j], %[j], -1 \n\t" - "addu %[matrix0_start], %[matrix0_start], %[matrix0_step2] \n\t" - "sw %[sum32], 0(%[matrix_product]) \n\t" - "sw %[sum32_2], 4(%[matrix_product]) \n\t" - "bgtz %[j], 1b \n\t" - " addiu %[matrix_product], %[matrix_product], 8 \n\t" - ".set pop \n\t" - : [j] "=&r" (j), [matrix0_start] "=&r" (matrix0_start), - [matrix1_ptr] "=&r" (matrix1_ptr), [matrix0_ptr] "=&r" (matrix0_ptr), - [n] "=&r" (n), [sum32] "=&r" (sum32), [sum32_2] "=&r" (sum32_2), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [matrix_product] "+r" (matrix_product) - : [loop_count] "r" (loop_count), [matrix0] "r" (matrix0), - [matrix1] "r" (matrix1), [matrix0_step] "r" (matrix0_step), - [matrix0_step2] "r" (matrix0_step2) - : "hi", "lo", "memory" - ); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/entropy_coding_neon.c b/modules/audio_coding/codecs/isac/fix/source/entropy_coding_neon.c deleted file mode 100644 index 0200567880..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/entropy_coding_neon.c +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* This file contains WebRtcIsacfix_MatrixProduct1Neon() and - * WebRtcIsacfix_MatrixProduct2Neon() for ARM Neon platform. API's are in - * entropy_coding.c. Results are bit exact with the c code for - * generic platforms. 
- */ - -#include -#include - -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/checks.h" - -void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift) { - int j = 0, k = 0, n = 0; - int matrix1_index = 0, matrix0_index = 0, matrix_prod_index = 0; - int* matrix1_index_factor2 = &j; - int* matrix0_index_factor2 = &k; - if (matrix1_index_init_case != 0) { - matrix1_index_factor2 = &k; - matrix0_index_factor2 = &j; - } - int32x4_t shift32x4 = vdupq_n_s32(shift); - int32x2_t shift32x2 = vdup_n_s32(shift); - int32x4_t sum_32x4 = vdupq_n_s32(0); - int32x2_t sum_32x2 = vdup_n_s32(0); - - RTC_DCHECK_EQ(0, inner_loop_count % 2); - RTC_DCHECK_EQ(0, mid_loop_count % 2); - - if (matrix1_index_init_case != 0 && matrix1_index_factor1 == 1) { - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < (mid_loop_count >> 2) << 2; k += 4) { - sum_32x4 = veorq_s32(sum_32x4, sum_32x4); // Initialize to zeros. 
- matrix1_index = k; - matrix0_index = matrix0_index_factor1 * j; - for (n = 0; n < inner_loop_count; n++) { - int32x4_t matrix0_32x4 = - vdupq_n_s32((int32_t)(matrix0[matrix0_index]) << 15); - int32x4_t matrix1_32x4 = - vshlq_s32(vld1q_s32(&matrix1[matrix1_index]), shift32x4); - int32x4_t multi_32x4 = vqdmulhq_s32(matrix0_32x4, matrix1_32x4); - sum_32x4 = vqaddq_s32(sum_32x4, multi_32x4); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1q_s32(&matrix_product[matrix_prod_index], sum_32x4); - matrix_prod_index += 4; - } - if (mid_loop_count % 4 > 1) { - sum_32x2 = veor_s32(sum_32x2, sum_32x2); // Initialize to zeros. - matrix1_index = k; - k += 2; - matrix0_index = matrix0_index_factor1 * j; - for (n = 0; n < inner_loop_count; n++) { - int32x2_t matrix0_32x2 = - vdup_n_s32((int32_t)(matrix0[matrix0_index]) << 15); - int32x2_t matrix1_32x2 = - vshl_s32(vld1_s32(&matrix1[matrix1_index]), shift32x2); - int32x2_t multi_32x2 = vqdmulh_s32(matrix0_32x2, matrix1_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1_s32(&matrix_product[matrix_prod_index], sum_32x2); - matrix_prod_index += 2; - } - } - } - else if (matrix1_index_init_case == 0 && matrix0_index_factor1 == 1) { - int32x2_t multi_32x2 = vdup_n_s32(0); - int32x2_t matrix0_32x2 = vdup_n_s32(0); - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < (mid_loop_count >> 2) << 2; k += 4) { - sum_32x4 = veorq_s32(sum_32x4, sum_32x4); // Initialize to zeros. 
- matrix1_index = matrix1_index_factor1 * j; - matrix0_index = k; - for (n = 0; n < inner_loop_count; n++) { - int32x4_t matrix1_32x4 = vdupq_n_s32(matrix1[matrix1_index] << shift); - int32x4_t matrix0_32x4 = - vshll_n_s16(vld1_s16(&matrix0[matrix0_index]), 15); - int32x4_t multi_32x4 = vqdmulhq_s32(matrix0_32x4, matrix1_32x4); - sum_32x4 = vqaddq_s32(sum_32x4, multi_32x4); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1q_s32(&matrix_product[matrix_prod_index], sum_32x4); - matrix_prod_index += 4; - } - if (mid_loop_count % 4 > 1) { - sum_32x2 = veor_s32(sum_32x2, sum_32x2); // Initialize to zeros. - matrix1_index = matrix1_index_factor1 * j; - matrix0_index = k; - for (n = 0; n < inner_loop_count; n++) { - int32x2_t matrix1_32x2 = vdup_n_s32(matrix1[matrix1_index] << shift); - matrix0_32x2 = - vset_lane_s32((int32_t)matrix0[matrix0_index], matrix0_32x2, 0); - matrix0_32x2 = vset_lane_s32((int32_t)matrix0[matrix0_index + 1], - matrix0_32x2, 1); - matrix0_32x2 = vshl_n_s32(matrix0_32x2, 15); - multi_32x2 = vqdmulh_s32(matrix1_32x2, matrix0_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - vst1_s32(&matrix_product[matrix_prod_index], sum_32x2); - matrix_prod_index += 2; - } - } - } - else if (matrix1_index_init_case == 0 && - matrix1_index_step == 1 && - matrix0_index_step == 1) { - int32x2_t multi_32x2 = vdup_n_s32(0); - int32x2_t matrix0_32x2 = vdup_n_s32(0); - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k = 0; k < mid_loop_count; k++) { - sum_32x4 = veorq_s32(sum_32x4, sum_32x4); // Initialize to zeros. 
- matrix1_index = matrix1_index_factor1 * j; - matrix0_index = matrix0_index_factor1 * k; - for (n = 0; n < (inner_loop_count >> 2) << 2; n += 4) { - int32x4_t matrix1_32x4 = - vshlq_s32(vld1q_s32(&matrix1[matrix1_index]), shift32x4); - int32x4_t matrix0_32x4 = - vshll_n_s16(vld1_s16(&matrix0[matrix0_index]), 15); - int32x4_t multi_32x4 = vqdmulhq_s32(matrix0_32x4, matrix1_32x4); - sum_32x4 = vqaddq_s32(sum_32x4, multi_32x4); - matrix1_index += 4; - matrix0_index += 4; - } - sum_32x2 = vqadd_s32(vget_low_s32(sum_32x4), vget_high_s32(sum_32x4)); - if (inner_loop_count % 4 > 1) { - int32x2_t matrix1_32x2 = - vshl_s32(vld1_s32(&matrix1[matrix1_index]), shift32x2); - matrix0_32x2 = - vset_lane_s32((int32_t)matrix0[matrix0_index], matrix0_32x2, 0); - matrix0_32x2 = vset_lane_s32((int32_t)matrix0[matrix0_index + 1], - matrix0_32x2, 1); - matrix0_32x2 = vshl_n_s32(matrix0_32x2, 15); - multi_32x2 = vqdmulh_s32(matrix1_32x2, matrix0_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - } - sum_32x2 = vpadd_s32(sum_32x2, sum_32x2); - vst1_lane_s32(&matrix_product[matrix_prod_index], sum_32x2, 0); - matrix_prod_index++; - } - } - } - else { - for (j = 0; j < SUBFRAMES; j++) { - matrix_prod_index = mid_loop_count * j; - for (k=0; k < mid_loop_count; k++) { - int32_t sum32 = 0; - matrix1_index = matrix1_index_factor1 * (*matrix1_index_factor2); - matrix0_index = matrix0_index_factor1 * (*matrix0_index_factor2); - for (n = 0; n < inner_loop_count; n++) { - sum32 += (WEBRTC_SPL_MUL_16_32_RSFT16(matrix0[matrix0_index], - matrix1[matrix1_index] << shift)); - matrix1_index += matrix1_index_step; - matrix0_index += matrix0_index_step; - } - matrix_product[matrix_prod_index] = sum32; - matrix_prod_index++; - } - } - } -} - -void WebRtcIsacfix_MatrixProduct2Neon(const int16_t matrix0[], - const int32_t matrix1[], - int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step) { - int j = 0, n = 0; - int matrix1_index = 0, matrix0_index = 0, 
matrix_prod_index = 0; - int32x2_t sum_32x2 = vdup_n_s32(0); - for (j = 0; j < SUBFRAMES; j++) { - sum_32x2 = veor_s32(sum_32x2, sum_32x2); // Initialize to zeros. - matrix1_index = 0; - matrix0_index = matrix0_index_factor * j; - for (n = SUBFRAMES; n > 0; n--) { - int32x2_t matrix0_32x2 = - vdup_n_s32((int32_t)(matrix0[matrix0_index]) << 15); - int32x2_t matrix1_32x2 = vld1_s32(&matrix1[matrix1_index]); - int32x2_t multi_32x2 = vqdmulh_s32(matrix0_32x2, matrix1_32x2); - sum_32x2 = vqadd_s32(sum_32x2, multi_32x2); - matrix1_index += 2; - matrix0_index += matrix0_index_step; - } - sum_32x2 = vshr_n_s32(sum_32x2, 3); - vst1_s32(&matrix_product[matrix_prod_index], sum_32x2); - matrix_prod_index += 2; - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/fft.c b/modules/audio_coding/codecs/isac/fix/source/fft.c deleted file mode 100644 index a0ed3f83ce..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/fft.c +++ /dev/null @@ -1,415 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * fft.c - * - * Fast Fourier Transform - * - */ - - -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" - -static const int16_t kSortTabFft[240] = { - 0, 60, 120, 180, 20, 80, 140, 200, 40, 100, 160, 220, - 4, 64, 124, 184, 24, 84, 144, 204, 44, 104, 164, 224, - 8, 68, 128, 188, 28, 88, 148, 208, 48, 108, 168, 228, - 12, 72, 132, 192, 32, 92, 152, 212, 52, 112, 172, 232, - 16, 76, 136, 196, 36, 96, 156, 216, 56, 116, 176, 236, - 1, 61, 121, 181, 21, 81, 141, 201, 41, 101, 161, 221, - 5, 65, 125, 185, 25, 85, 145, 205, 45, 105, 165, 225, - 9, 69, 129, 189, 29, 89, 149, 209, 49, 109, 169, 229, - 13, 73, 133, 193, 33, 93, 153, 213, 53, 113, 173, 233, - 17, 77, 137, 197, 37, 97, 157, 217, 57, 117, 177, 237, - 2, 62, 122, 182, 22, 82, 142, 202, 42, 102, 162, 222, - 6, 66, 126, 186, 26, 86, 146, 206, 46, 106, 166, 226, - 10, 70, 130, 190, 30, 90, 150, 210, 50, 110, 170, 230, - 14, 74, 134, 194, 34, 94, 154, 214, 54, 114, 174, 234, - 18, 78, 138, 198, 38, 98, 158, 218, 58, 118, 178, 238, - 3, 63, 123, 183, 23, 83, 143, 203, 43, 103, 163, 223, - 7, 67, 127, 187, 27, 87, 147, 207, 47, 107, 167, 227, - 11, 71, 131, 191, 31, 91, 151, 211, 51, 111, 171, 231, - 15, 75, 135, 195, 35, 95, 155, 215, 55, 115, 175, 235, - 19, 79, 139, 199, 39, 99, 159, 219, 59, 119, 179, 239 -}; - -/* Cosine table in Q14 */ -static const int16_t kCosTabFfftQ14[240] = { - 16384, 16378, 16362, 16333, 16294, 16244, 16182, 16110, 16026, 15931, 15826, 15709, - 15582, 15444, 15296, 15137, 14968, 14788, 14598, 14399, 14189, 13970, 13741, 13502, - 13255, 12998, 12733, 12458, 12176, 11885, 11585, 11278, 10963, 10641, 10311, 9974, - 9630, 9280, 8923, 8561, 8192, 7818, 7438, 7053, 6664, 6270, 5872, 5469, - 5063, 4653, 4240, 3825, 3406, 2986, 2563, 2139, 1713, 1285, 857, 429, - 0, -429, -857, -1285, -1713, -2139, -2563, -2986, -3406, -3825, -4240, -4653, - -5063, -5469, -5872, -6270, -6664, -7053, -7438, -7818, -8192, -8561, -8923, -9280, - -9630, -9974, -10311, -10641, -10963, -11278, 
-11585, -11885, -12176, -12458, -12733, -12998, - -13255, -13502, -13741, -13970, -14189, -14399, -14598, -14788, -14968, -15137, -15296, -15444, - -15582, -15709, -15826, -15931, -16026, -16110, -16182, -16244, -16294, -16333, -16362, -16378, - -16384, -16378, -16362, -16333, -16294, -16244, -16182, -16110, -16026, -15931, -15826, -15709, - -15582, -15444, -15296, -15137, -14968, -14788, -14598, -14399, -14189, -13970, -13741, -13502, - -13255, -12998, -12733, -12458, -12176, -11885, -11585, -11278, -10963, -10641, -10311, -9974, - -9630, -9280, -8923, -8561, -8192, -7818, -7438, -7053, -6664, -6270, -5872, -5469, - -5063, -4653, -4240, -3825, -3406, -2986, -2563, -2139, -1713, -1285, -857, -429, - 0, 429, 857, 1285, 1713, 2139, 2563, 2986, 3406, 3825, 4240, 4653, - 5063, 5469, 5872, 6270, 6664, 7053, 7438, 7818, 8192, 8561, 8923, 9280, - 9630, 9974, 10311, 10641, 10963, 11278, 11585, 11885, 12176, 12458, 12733, 12998, - 13255, 13502, 13741, 13970, 14189, 14399, 14598, 14788, 14968, 15137, 15296, 15444, - 15582, 15709, 15826, 15931, 16026, 16110, 16182, 16244, 16294, 16333, 16362, 16378 -}; - - - -/* Uses 16x16 mul, without rounding, which is faster. 
Uses WEBRTC_SPL_MUL_16_16_RSFT */ -int16_t WebRtcIsacfix_FftRadix16Fastest(int16_t RexQx[], int16_t ImxQx[], int16_t iSign) { - - int16_t dd, ee, ff, gg, hh, ii; - int16_t k0, k1, k2, k3, k4, kk; - int16_t tmp116, tmp216; - - int16_t ccc1Q14, ccc2Q14, ccc3Q14, sss1Q14, sss2Q14, sss3Q14; - int16_t sss60Q14, ccc72Q14, sss72Q14; - int16_t aaQx, ajQx, akQx, ajmQx, ajpQx, akmQx, akpQx; - int16_t bbQx, bjQx, bkQx, bjmQx, bjpQx, bkmQx, bkpQx; - - int16_t ReDATAQx[240], ImDATAQx[240]; - - sss60Q14 = kCosTabFfftQ14[20]; - ccc72Q14 = kCosTabFfftQ14[48]; - sss72Q14 = kCosTabFfftQ14[12]; - - if (iSign < 0) { - sss72Q14 = -sss72Q14; - sss60Q14 = -sss60Q14; - } - /* Complexity is: 10 cycles */ - - /* compute fourier transform */ - - // transform for factor of 4 - for (kk=0; kk<60; kk++) { - k0 = kk; - k1 = k0 + 60; - k2 = k1 + 60; - k3 = k2 + 60; - - akpQx = RexQx[k0] + RexQx[k2]; - akmQx = RexQx[k0] - RexQx[k2]; - ajpQx = RexQx[k1] + RexQx[k3]; - ajmQx = RexQx[k1] - RexQx[k3]; - bkpQx = ImxQx[k0] + ImxQx[k2]; - bkmQx = ImxQx[k0] - ImxQx[k2]; - bjpQx = ImxQx[k1] + ImxQx[k3]; - bjmQx = ImxQx[k1] - ImxQx[k3]; - - RexQx[k0] = akpQx + ajpQx; - ImxQx[k0] = bkpQx + bjpQx; - ajpQx = akpQx - ajpQx; - bjpQx = bkpQx - bjpQx; - if (iSign < 0) { - akpQx = akmQx + bjmQx; - bkpQx = bkmQx - ajmQx; - akmQx -= bjmQx; - bkmQx += ajmQx; - } else { - akpQx = akmQx - bjmQx; - bkpQx = bkmQx + ajmQx; - akmQx += bjmQx; - bkmQx -= ajmQx; - } - - ccc1Q14 = kCosTabFfftQ14[kk]; - ccc2Q14 = kCosTabFfftQ14[2 * kk]; - ccc3Q14 = kCosTabFfftQ14[3 * kk]; - sss1Q14 = kCosTabFfftQ14[kk + 60]; - sss2Q14 = kCosTabFfftQ14[2 * kk + 60]; - sss3Q14 = kCosTabFfftQ14[3 * kk + 60]; - if (iSign==1) { - sss1Q14 = -sss1Q14; - sss2Q14 = -sss2Q14; - sss3Q14 = -sss3Q14; - } - - //Do several multiplications like Q14*Q16>>14 = Q16 - // RexQ16[k1] = akpQ16 * ccc1Q14 - bkpQ16 * sss1Q14; - // RexQ16[k2] = ajpQ16 * ccc2Q14 - bjpQ16 * sss2Q14; - // RexQ16[k3] = akmQ16 * ccc3Q14 - bkmQ16 * sss3Q14; - // ImxQ16[k1] = akpQ16 * sss1Q14 + 
bkpQ16 * ccc1Q14; - // ImxQ16[k2] = ajpQ16 * sss2Q14 + bjpQ16 * ccc2Q14; - // ImxQ16[k3] = akmQ16 * sss3Q14 + bkmQ16 * ccc3Q14; - - RexQx[k1] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, akpQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, bkpQx, 14); // 6 non-mul + 2 mul cycles, i.e. 8 cycles (6+2*7=20 cycles if 16x32mul) - RexQx[k2] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjpQx, 14); - RexQx[k3] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, akmQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, bkmQx, 14); - ImxQx[k1] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, akpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, bkpQx, 14); - ImxQx[k2] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14); - ImxQx[k3] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, akmQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, bkmQx, 14); - //This mul segment needs 6*8 = 48 cycles for 16x16 muls, but 6*20 = 120 cycles for 16x32 muls - - - } - /* Complexity is: 51+48 = 99 cycles for 16x16 muls, but 51+120 = 171 cycles for 16x32 muls*/ - - // transform for factor of 3 - kk=0; - k1=20; - k2=40; - - for (hh=0; hh<4; hh++) { - for (ii=0; ii<20; ii++) { - akQx = RexQx[kk]; - bkQx = ImxQx[kk]; - ajQx = RexQx[k1] + RexQx[k2]; - bjQx = ImxQx[k1] + ImxQx[k2]; - RexQx[kk] = akQx + ajQx; - ImxQx[kk] = bkQx + bjQx; - tmp116 = ajQx >> 1; - tmp216 = bjQx >> 1; - akQx = akQx - tmp116; - bkQx = bkQx - tmp216; - tmp116 = RexQx[k1] - RexQx[k2]; - tmp216 = ImxQx[k1] - ImxQx[k2]; - - ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp116, 14); // Q14*Qx>>14 = Qx - bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp216, 14); // Q14*Qx>>14 = Qx - RexQx[k1] = akQx - bjQx; - RexQx[k2] = akQx + bjQx; - ImxQx[k1] = bkQx + ajQx; - ImxQx[k2] = bkQx - ajQx; - - kk++; - k1++; - k2++; - } - /* Complexity : (31+6)*20 = 740 cycles for 16x16 muls, but (31+18)*20 = 980 
cycles for 16x32 muls*/ - kk=kk+40; - k1=k1+40; - k2=k2+40; - } - /* Complexity : 4*(740+3) = 2972 cycles for 16x16 muls, but 4*(980+3) = 3932 cycles for 16x32 muls*/ - - /* multiply by rotation factor for odd factor 3 or 5 (not for 4) - Same code (duplicated) for both ii=2 and ii=3 */ - kk = 1; - ee = 0; - ff = 0; - - for (gg=0; gg<19; gg++) { - kk += 20; - ff = ff+4; - for (hh=0; hh<2; hh++) { - ee = ff + hh * ff; - dd = ee + 60; - ccc2Q14 = kCosTabFfftQ14[ee]; - sss2Q14 = kCosTabFfftQ14[dd]; - if (iSign==1) { - sss2Q14 = -sss2Q14; - } - for (ii=0; ii<4; ii++) { - akQx = RexQx[kk]; - bkQx = ImxQx[kk]; - RexQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) - // Q14*Qx>>14 = Qx - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14); - ImxQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) + // Q14*Qx>>14 = Qx - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14); - - - kk += 60; - } - kk = kk - 220; - } - // Complexity: 2*(13+5+4*13+2) = 144 for 16x16 muls, but 2*(13+5+4*33+2) = 304 cycles for 16x32 muls - kk = kk - 59; - } - // Complexity: 19*144 = 2736 for 16x16 muls, but 19*304 = 5776 cycles for 16x32 muls - - // transform for factor of 5 - kk = 0; - ccc2Q14 = kCosTabFfftQ14[96]; - sss2Q14 = kCosTabFfftQ14[84]; - if (iSign==1) { - sss2Q14 = -sss2Q14; - } - - for (hh=0; hh<4; hh++) { - for (ii=0; ii<12; ii++) { - k1 = kk + 4; - k2 = k1 + 4; - k3 = k2 + 4; - k4 = k3 + 4; - - akpQx = RexQx[k1] + RexQx[k4]; - akmQx = RexQx[k1] - RexQx[k4]; - bkpQx = ImxQx[k1] + ImxQx[k4]; - bkmQx = ImxQx[k1] - ImxQx[k4]; - ajpQx = RexQx[k2] + RexQx[k3]; - ajmQx = RexQx[k2] - RexQx[k3]; - bjpQx = ImxQx[k2] + ImxQx[k3]; - bjmQx = ImxQx[k2] - ImxQx[k3]; - aaQx = RexQx[kk]; - bbQx = ImxQx[kk]; - RexQx[kk] = aaQx + akpQx + ajpQx; - ImxQx[kk] = bbQx + bkpQx + bjpQx; - - akQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, akpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) + aaQx; - bkQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bkpQx, 14) 
+ - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14) + bbQx; - ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, akmQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajmQx, 14); - bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bkmQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjmQx, 14); - // 32+4*8=64 or 32+4*20=112 - - RexQx[k1] = akQx - bjQx; - RexQx[k4] = akQx + bjQx; - ImxQx[k1] = bkQx + ajQx; - ImxQx[k4] = bkQx - ajQx; - - akQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, ajpQx, 14) + aaQx; - bkQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkpQx, 14) + - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bjpQx, 14) + bbQx; - ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akmQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, ajmQx, 14); - bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkmQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bjmQx, 14); - // 8+4*8=40 or 8+4*20=88 - - RexQx[k2] = akQx - bjQx; - RexQx[k3] = akQx + bjQx; - ImxQx[k2] = bkQx + ajQx; - ImxQx[k3] = bkQx - ajQx; - - kk = k4 + 4; - } - // Complexity: 12*(64+40+10) = 1368 for 16x16 muls, but 12*(112+88+10) = 2520 cycles for 16x32 muls - kk -= 239; - } - // Complexity: 4*1368 = 5472 for 16x16 muls, but 4*2520 = 10080 cycles for 16x32 muls - - /* multiply by rotation factor for odd factor 3 or 5 (not for 4) - Same code (duplicated) for both ii=2 and ii=3 */ - kk = 1; - ee=0; - - for (gg=0; gg<3; gg++) { - kk += 4; - dd = 12 + 12 * gg; - ff = 0; - for (hh=0; hh<4; hh++) { - ff = ff+dd; - ee = ff+60; - for (ii=0; ii<12; ii++) { - akQx = RexQx[kk]; - bkQx = ImxQx[kk]; - - ccc2Q14 = kCosTabFfftQ14[ff]; - sss2Q14 = kCosTabFfftQ14[ee]; - - if (iSign==1) { - sss2Q14 = -sss2Q14; - } - - RexQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) - - (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14); - ImxQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) + - 
(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14); - - kk += 20; - } - kk = kk - 236; - // Complexity: 12*(12+12) = 288 for 16x16 muls, but 12*(12+32) = 528 cycles for 16x32 muls - } - kk = kk - 19; - // Complexity: 4*288+6 for 16x16 muls, but 4*528+6 cycles for 16x32 muls - } - // Complexity: 3*4*288+6 = 3462 for 16x16 muls, but 3*4*528+6 = 6342 cycles for 16x32 muls - - - // last transform for factor of 4 */ - for (kk=0; kk<240; kk=kk+4) { - k1 = kk + 1; - k2 = k1 + 1; - k3 = k2 + 1; - - akpQx = RexQx[kk] + RexQx[k2]; - akmQx = RexQx[kk] - RexQx[k2]; - ajpQx = RexQx[k1] + RexQx[k3]; - ajmQx = RexQx[k1] - RexQx[k3]; - bkpQx = ImxQx[kk] + ImxQx[k2]; - bkmQx = ImxQx[kk] - ImxQx[k2]; - bjpQx = ImxQx[k1] + ImxQx[k3]; - bjmQx = ImxQx[k1] - ImxQx[k3]; - RexQx[kk] = akpQx + ajpQx; - ImxQx[kk] = bkpQx + bjpQx; - ajpQx = akpQx - ajpQx; - bjpQx = bkpQx - bjpQx; - if (iSign < 0) { - akpQx = akmQx + bjmQx; - bkpQx = bkmQx - ajmQx; - akmQx -= bjmQx; - bkmQx += ajmQx; - } else { - akpQx = akmQx - bjmQx; - bkpQx = bkmQx + ajmQx; - akmQx += bjmQx; - bkmQx -= ajmQx; - } - RexQx[k1] = akpQx; - RexQx[k2] = ajpQx; - RexQx[k3] = akmQx; - ImxQx[k1] = bkpQx; - ImxQx[k2] = bjpQx; - ImxQx[k3] = bkmQx; - } - // Complexity: 60*45 = 2700 for 16x16 muls, but 60*45 = 2700 cycles for 16x32 muls - - /* permute the results to normal order */ - for (ii=0; ii<240; ii++) { - ReDATAQx[ii]=RexQx[ii]; - ImDATAQx[ii]=ImxQx[ii]; - } - // Complexity: 240*2=480 cycles - - for (ii=0; ii<240; ii++) { - RexQx[ii]=ReDATAQx[kSortTabFft[ii]]; - ImxQx[ii]=ImDATAQx[kSortTabFft[ii]]; - } - // Complexity: 240*2*2=960 cycles - - // Total complexity: - // 16x16 16x32 - // Complexity: 10 10 - // Complexity: 99 171 - // Complexity: 2972 3932 - // Complexity: 2736 5776 - // Complexity: 5472 10080 - // Complexity: 3462 6342 - // Complexity: 2700 2700 - // Complexity: 480 480 - // Complexity: 960 960 - // ======================= - // 18891 30451 - // - // If this FFT is called 2 time each frame, i.e. 
67 times per second, it will correspond to - // a C54 complexity of 67*18891/1000000 = 1.27 MIPS with 16x16-muls, and 67*30451/1000000 = - // = 2.04 MIPS with 16x32-muls. Note that this routine somtimes is called 6 times during the - // encoding of a frame, i.e. the max complexity would be 7/2*1.27 = 4.4 MIPS for the 16x16 mul case. - - - return 0; -} diff --git a/modules/audio_coding/codecs/isac/fix/source/fft.h b/modules/audio_coding/codecs/isac/fix/source/fft.h deleted file mode 100644 index 4fe9b96be4..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/fft.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/*--------------------------------*-C-*---------------------------------* - * File: - * fft.h - * ---------------------------------------------------------------------* - * Re[]: real value array - * Im[]: imaginary value array - * nTotal: total number of complex values - * nPass: number of elements involved in this pass of transform - * nSpan: nspan/nPass = number of bytes to increment pointer - * in Re[] and Im[] - * isign: exponent: +1 = forward -1 = reverse - * scaling: normalizing constant by which the final result is *divided* - * scaling == -1, normalize by total dimension of the transform - * scaling < -1, normalize by the square-root of the total dimension - * - * ---------------------------------------------------------------------- - * See the comments in the code for correct usage! 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -int16_t WebRtcIsacfix_FftRadix16Fastest(int16_t RexQx[], - int16_t ImxQx[], - int16_t iSign); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FFT_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h deleted file mode 100644 index f741e6f677..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_INTERNAL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_INTERNAL_H_ - -#include - -#if defined(__cplusplus) || defined(c_plusplus) -extern "C" { -#endif - -/* Arguments: - * io: Input/output, in Q0. - * len: Input, sample length. - * coefficient: Input. - * state: Input/output, filter state, in Q4. 
- */ -typedef void (*HighpassFilterFixDec32)(int16_t* io, - int16_t len, - const int16_t* coefficient, - int32_t* state); -extern HighpassFilterFixDec32 WebRtcIsacfix_HighpassFilterFixDec32; - -void WebRtcIsacfix_HighpassFilterFixDec32C(int16_t* io, - int16_t len, - const int16_t* coefficient, - int32_t* state); - -#if defined(MIPS_DSP_R1_LE) -void WebRtcIsacfix_HighpassFilterFixDec32MIPS(int16_t* io, - int16_t len, - const int16_t* coefficient, - int32_t* state); -#endif - -typedef void (*AllpassFilter2FixDec16)( - int16_t* data_ch1, // Input and output in channel 1, in Q0 - int16_t* data_ch2, // Input and output in channel 2, in Q0 - const int16_t* factor_ch1, // Scaling factor for channel 1, in Q15 - const int16_t* factor_ch2, // Scaling factor for channel 2, in Q15 - int length, // Length of the data buffers - int32_t* filter_state_ch1, // Filter state for channel 1, in Q16 - int32_t* filter_state_ch2); // Filter state for channel 2, in Q16 -extern AllpassFilter2FixDec16 WebRtcIsacfix_AllpassFilter2FixDec16; - -void WebRtcIsacfix_AllpassFilter2FixDec16C(int16_t* data_ch1, - int16_t* data_ch2, - const int16_t* factor_ch1, - const int16_t* factor_ch2, - int length, - int32_t* filter_state_ch1, - int32_t* filter_state_ch2); - -#if defined(WEBRTC_HAS_NEON) -void WebRtcIsacfix_AllpassFilter2FixDec16Neon(int16_t* data_ch1, - int16_t* data_ch2, - const int16_t* factor_ch1, - const int16_t* factor_ch2, - int length, - int32_t* filter_state_ch1, - int32_t* filter_state_ch2); -#endif - -#if defined(MIPS_DSP_R1_LE) -void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(int16_t* data_ch1, - int16_t* data_ch2, - const int16_t* factor_ch1, - const int16_t* factor_ch2, - int length, - int32_t* filter_state_ch1, - int32_t* filter_state_ch2); -#endif - -#if defined(__cplusplus) || defined(c_plusplus) -} -#endif - -#endif -/* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_INTERNAL_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.c 
b/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.c deleted file mode 100644 index f2dec79c2d..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.c +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * filterbank_tables.c - * - * This file contains variables that are used in - * filterbanks.c - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h" - -/* HPstcoeff_in_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; - * In float, they are: {-1.94895953203325f, 0.94984516000000f, - * -0.05101826139794f, 0.05015484000000f}; - */ -const int16_t WebRtcIsacfix_kHpStCoeffInQ30[8] = { - 16189, -31932, /* Q30 lo/hi pair */ - 17243, 15562, /* Q30 lo/hi pair */ - -17186, -26748, /* Q35 lo/hi pair */ - -27476, 26296 /* Q35 lo/hi pair */ -}; - -/* HPstcoeff_out_1_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; - * In float, they are: {-1.99701049409000f, 0.99714204490000f, - * 0.01701049409000f, -0.01704204490000f}; - */ -const int16_t WebRtcIsacfix_kHPStCoeffOut1Q30[8] = { - -1306, -32719, /* Q30 lo/hi pair */ - 11486, 16337, /* Q30 lo/hi pair */ - 26078, 8918, /* Q35 lo/hi pair */ - 3956, -8935 /* Q35 lo/hi pair */ -}; - -/* HPstcoeff_out_2_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; - * In float, they are: {-1.98645294509837f, 0.98672435560000f, - * 0.00645294509837f, -0.00662435560000f}; - */ -const int16_t WebRtcIsacfix_kHPStCoeffOut2Q30[8] = { - -2953, -32546, /* Q30 lo/hi pair */ - 32233, 16166, /* Q30 lo/hi pair */ - 13217, 3383, /* Q35 lo/hi pair */ - -4597, -3473 /* Q35 lo/hi pair */ -}; - -/* The upper channel 
all-pass filter factors */ -const int16_t WebRtcIsacfix_kUpperApFactorsQ15[2] = { - 1137, 12537 -}; - -/* The lower channel all-pass filter factors */ -const int16_t WebRtcIsacfix_kLowerApFactorsQ15[2] = { - 5059, 24379 -}; diff --git a/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h b/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h deleted file mode 100644 index 01e5a7ba85..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * filterbank_tables.h - * - * Header file for variables that are defined in - * filterbank_tables.c. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_ - -#include - -#if defined(__cplusplus) || defined(c_plusplus) -extern "C" { -#endif - -/********************* Coefficient Tables ************************/ - -/* HPstcoeff_in_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -/* [Q30lo Q30hi Q30lo Q30hi Q35lo Q35hi Q35lo Q35hi] */ -extern const int16_t WebRtcIsacfix_kHpStCoeffInQ30[8]; - -/* HPstcoeff_out_1_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -/* [Q30lo Q30hi Q30lo Q30hi Q35lo Q35hi Q35lo Q35hi] */ -extern const int16_t WebRtcIsacfix_kHPStCoeffOut1Q30[8]; - -/* HPstcoeff_out_2_Q14 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -/* [Q30lo Q30hi Q30lo Q30hi Q35lo Q35hi Q35lo Q35hi] */ -extern const int16_t WebRtcIsacfix_kHPStCoeffOut2Q30[8]; - -/* The upper channel all-pass filter factors */ -extern const int16_t WebRtcIsacfix_kUpperApFactorsQ15[2]; - -/* The lower channel all-pass filter factors */ -extern const int16_t WebRtcIsacfix_kLowerApFactorsQ15[2]; - -#if defined(__cplusplus) || defined(c_plusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_FILTERBANK_TABLES_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/filterbanks.c b/modules/audio_coding/codecs/isac/fix/source/filterbanks.c deleted file mode 100644 index 57b3e70b89..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filterbanks.c +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * filterbanks.c - * - * This file contains function - * WebRtcIsacfix_SplitAndFilter, and WebRtcIsacfix_FilterAndCombine - * which implement filterbanks that produce decimated lowpass and - * highpass versions of a signal, and performs reconstruction. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/checks.h" - -// Declare a function pointer. -AllpassFilter2FixDec16 WebRtcIsacfix_AllpassFilter2FixDec16; - -void WebRtcIsacfix_AllpassFilter2FixDec16C( - int16_t *data_ch1, // Input and output in channel 1, in Q0 - int16_t *data_ch2, // Input and output in channel 2, in Q0 - const int16_t *factor_ch1, // Scaling factor for channel 1, in Q15 - const int16_t *factor_ch2, // Scaling factor for channel 2, in Q15 - const int length, // Length of the data buffers - int32_t *filter_state_ch1, // Filter state for channel 1, in Q16 - int32_t *filter_state_ch2) { // Filter state for channel 2, in Q16 - int n = 0; - int32_t state0_ch1 = filter_state_ch1[0], state1_ch1 = filter_state_ch1[1]; - int32_t state0_ch2 = filter_state_ch2[0], state1_ch2 = filter_state_ch2[1]; - int16_t in_out = 0; - int32_t a = 0, b = 0; - - // Assembly file assumption. 
- RTC_DCHECK_EQ(0, length % 2); - - for (n = 0; n < length; n++) { - // Process channel 1: - in_out = data_ch1[n]; - a = factor_ch1[0] * in_out; // Q15 * Q0 = Q15 - a *= 1 << 1; // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state0_ch1); - a = -factor_ch1[0] * (int16_t)(b >> 16); // Q15 - state0_ch1 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - in_out = (int16_t) (b >> 16); // Save as Q0 - - a = factor_ch1[1] * in_out; // Q15 * Q0 = Q15 - a *= 1 << 1; // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state1_ch1); // Q16 - a = -factor_ch1[1] * (int16_t)(b >> 16); // Q15 - state1_ch1 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - data_ch1[n] = (int16_t) (b >> 16); // Save as Q0 - - // Process channel 2: - in_out = data_ch2[n]; - a = factor_ch2[0] * in_out; // Q15 * Q0 = Q15 - a *= 1 << 1; // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state0_ch2); // Q16 - a = -factor_ch2[0] * (int16_t)(b >> 16); // Q15 - state0_ch2 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - in_out = (int16_t) (b >> 16); // Save as Q0 - - a = factor_ch2[1] * in_out; // Q15 * Q0 = Q15 - a *= (1 << 1); // Q15 -> Q16 - b = WebRtcSpl_AddSatW32(a, state1_ch2); // Q16 - a = -factor_ch2[1] * (int16_t)(b >> 16); // Q15 - state1_ch2 = - WebRtcSpl_AddSatW32(a * (1 << 1), (int32_t)in_out * (1 << 16)); // Q16 - data_ch2[n] = (int16_t) (b >> 16); // Save as Q0 - } - - filter_state_ch1[0] = state0_ch1; - filter_state_ch1[1] = state1_ch1; - filter_state_ch2[0] = state0_ch2; - filter_state_ch2[1] = state1_ch2; -} - -// Declare a function pointer. 
-HighpassFilterFixDec32 WebRtcIsacfix_HighpassFilterFixDec32; - -void WebRtcIsacfix_HighpassFilterFixDec32C(int16_t *io, - int16_t len, - const int16_t *coefficient, - int32_t *state) -{ - int k; - int32_t a1 = 0, b1 = 0, c = 0, in = 0; - int32_t a2 = 0, b2 = 0; - int32_t state0 = state[0]; - int32_t state1 = state[1]; - - for (k=0; k Q7 */ - a1 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[5], state0) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[4], state0) >> 16); - b1 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[7], state1) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[6], state1) >> 16); - - /* Q30 * Q4 = Q34 ; shift 32 bit => Q2 */ - a2 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[1], state0) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[0], state0) >> 16); - b2 = WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[3], state1) + - (WEBRTC_SPL_MUL_16_32_RSFT16(coefficient[2], state1) >> 16); -#endif - - c = in + ((a1 + b1) >> 7); // Q0. - io[k] = (int16_t)WebRtcSpl_SatW32ToW16(c); // Write output as Q0. - - c = in * (1 << 2) - a2 - b2; // In Q2. - c = (int32_t)WEBRTC_SPL_SAT(536870911, c, -536870912); - - state1 = state0; - state0 = c * (1 << 2); // Write state as Q4 - } - state[0] = state0; - state[1] = state1; -} - - -void WebRtcIsacfix_SplitAndFilter1(int16_t *pin, - int16_t *LP16, - int16_t *HP16, - PreFiltBankstr *prefiltdata) -{ - /* Function WebRtcIsacfix_SplitAndFilter */ - /* This function creates low-pass and high-pass decimated versions of part of - the input signal, and part of the signal in the input 'lookahead buffer'. 
*/ - - int k; - - int16_t tempin_ch1[FRAMESAMPLES/2 + QLOOKAHEAD]; - int16_t tempin_ch2[FRAMESAMPLES/2 + QLOOKAHEAD]; - int32_t tmpState_ch1[2 * (QORDER-1)]; /* 4 */ - int32_t tmpState_ch2[2 * (QORDER-1)]; /* 4 */ - - /* High pass filter */ - WebRtcIsacfix_HighpassFilterFixDec32(pin, FRAMESAMPLES, WebRtcIsacfix_kHpStCoeffInQ30, prefiltdata->HPstates_fix); - - - /* First Channel */ - for (k=0;kINLABUF1_fix[k]; - prefiltdata->INLABUF1_fix[k] = pin[FRAMESAMPLES + 1 - 2 * (QLOOKAHEAD - k)]; - } - - /* Second Channel. This is exactly like the first channel, except that the - even samples are now filtered instead (lower channel). */ - for (k=0;kINLABUF2_fix[k]; - prefiltdata->INLABUF2_fix[k] = pin[FRAMESAMPLES - 2 * (QLOOKAHEAD - k)]; - } - - - /*obtain polyphase components by forward all-pass filtering through each channel */ - /* The all pass filtering automatically updates the filter states which are exported in the - prefiltdata structure */ - WebRtcIsacfix_AllpassFilter2FixDec16(tempin_ch1, - tempin_ch2, - WebRtcIsacfix_kUpperApFactorsQ15, - WebRtcIsacfix_kLowerApFactorsQ15, - FRAMESAMPLES/2, - prefiltdata->INSTAT1_fix, - prefiltdata->INSTAT2_fix); - - for (k = 0; k < 2 * (QORDER - 1); k++) { - tmpState_ch1[k] = prefiltdata->INSTAT1_fix[k]; - tmpState_ch2[k] = prefiltdata->INSTAT2_fix[k]; - } - WebRtcIsacfix_AllpassFilter2FixDec16(tempin_ch1 + FRAMESAMPLES/2, - tempin_ch2 + FRAMESAMPLES/2, - WebRtcIsacfix_kUpperApFactorsQ15, - WebRtcIsacfix_kLowerApFactorsQ15, - QLOOKAHEAD, - tmpState_ch1, - tmpState_ch2); - - /* Now Construct low-pass and high-pass signals as combinations of polyphase components */ - for (k=0; k Q0 - tmp2 = (int32_t)tempin_ch2[k]; // Q0 -> Q0 - tmp3 = (tmp1 + tmp2) >> 1; /* Low pass signal. */ - LP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */ - tmp3 = (tmp1 - tmp2) >> 1; /* High pass signal. 
*/ - HP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */ - } - -}/*end of WebRtcIsacfix_SplitAndFilter */ - - - -////////////////////////////////////////////////////////// -////////// Combining -/* Function WebRtcIsacfix_FilterAndCombine */ -/* This is a decoder function that takes the decimated - length FRAMESAMPLES/2 input low-pass and - high-pass signals and creates a reconstructed fullband - output signal of length FRAMESAMPLES. WebRtcIsacfix_FilterAndCombine - is the sibling function of WebRtcIsacfix_SplitAndFilter */ -/* INPUTS: - inLP: a length FRAMESAMPLES/2 array of input low-pass - samples. - inHP: a length FRAMESAMPLES/2 array of input high-pass - samples. - postfiltdata: input data structure containing the filterbank - states from the previous decoding iteration. - OUTPUTS: - Out: a length FRAMESAMPLES array of output reconstructed - samples (fullband) based on the input low-pass and - high-pass signals. - postfiltdata: the input data structure containing the filterbank - states is updated for the next decoding iteration */ -void WebRtcIsacfix_FilterAndCombine1(int16_t *tempin_ch1, - int16_t *tempin_ch2, - int16_t *out16, - PostFiltBankstr *postfiltdata) -{ - int k; - int16_t in[FRAMESAMPLES]; - - /* all-pass filter the new upper and lower channel signal. - For upper channel, use the all-pass filter factors that were used as a - lower channel at the encoding side. So at the decoder, the corresponding - all-pass filter factors for each channel are swapped. - For lower channel signal, since all-pass filter factors at the decoder are - swapped from the ones at the encoder, the 'upper' channel all-pass filter - factors (kUpperApFactors) are used to filter this new lower channel signal. 
- */ - WebRtcIsacfix_AllpassFilter2FixDec16(tempin_ch1, - tempin_ch2, - WebRtcIsacfix_kLowerApFactorsQ15, - WebRtcIsacfix_kUpperApFactorsQ15, - FRAMESAMPLES/2, - postfiltdata->STATE_0_UPPER_fix, - postfiltdata->STATE_0_LOWER_fix); - - /* Merge outputs to form the full length output signal.*/ - for (k=0;kHPstates1_fix); - WebRtcIsacfix_HighpassFilterFixDec32(in, FRAMESAMPLES, WebRtcIsacfix_kHPStCoeffOut2Q30, postfiltdata->HPstates2_fix); - - for (k=0;k - -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" -#include "rtc_base/checks.h" - -void WebRtcIsacfix_AllpassFilter2FixDec16Neon( - int16_t* data_ch1, // Input and output in channel 1, in Q0 - int16_t* data_ch2, // Input and output in channel 2, in Q0 - const int16_t* factor_ch1, // Scaling factor for channel 1, in Q15 - const int16_t* factor_ch2, // Scaling factor for channel 2, in Q15 - const int length, // Length of the data buffers - int32_t* filter_state_ch1, // Filter state for channel 1, in Q16 - int32_t* filter_state_ch2) { // Filter state for channel 2, in Q16 - RTC_DCHECK_EQ(0, length % 2); - int n = 0; - int16x4_t factorv; - int16x4_t datav; - int32x4_t statev; - - // Load factor_ch1 and factor_ch2. - factorv = vld1_dup_s16(factor_ch1); - factorv = vld1_lane_s16(factor_ch1 + 1, factorv, 1); - factorv = vld1_lane_s16(factor_ch2, factorv, 2); - factorv = vld1_lane_s16(factor_ch2 + 1, factorv, 3); - - // Load filter_state_ch1[0] and filter_state_ch2[0]. - statev = vld1q_dup_s32(filter_state_ch1); - statev = vld1q_lane_s32(filter_state_ch2, statev, 2); - - // Loop unrolling preprocessing. - int32x4_t a; - int16x4_t tmp1, tmp2; - - // Load data_ch1[0] and data_ch2[0]. - datav = vld1_dup_s16(data_ch1); - datav = vld1_lane_s16(data_ch2, datav, 2); - - a = vqdmlal_s16(statev, datav, factorv); - tmp1 = vshrn_n_s32(a, 16); - - // Update filter_state_ch1[0] and filter_state_ch2[0]. 
- statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp1, factorv); - - // Load filter_state_ch1[1] and filter_state_ch2[1]. - statev = vld1q_lane_s32(filter_state_ch1 + 1, statev, 1); - statev = vld1q_lane_s32(filter_state_ch2 + 1, statev, 3); - - // Load data_ch1[1] and data_ch2[1]. - tmp1 = vld1_lane_s16(data_ch1 + 1, tmp1, 1); - tmp1 = vld1_lane_s16(data_ch2 + 1, tmp1, 3); - datav = vrev32_s16(tmp1); - - // Loop unrolling processing. - for (n = 0; n < length - 2; n += 2) { - a = vqdmlal_s16(statev, datav, factorv); - tmp1 = vshrn_n_s32(a, 16); - // Store data_ch1[n] and data_ch2[n]. - vst1_lane_s16(data_ch1 + n, tmp1, 1); - vst1_lane_s16(data_ch2 + n, tmp1, 3); - - // Update filter_state_ch1[0], filter_state_ch1[1] - // and filter_state_ch2[0], filter_state_ch2[1]. - statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp1, factorv); - - // Load data_ch1[n + 2] and data_ch2[n + 2]. - tmp1 = vld1_lane_s16(data_ch1 + n + 2, tmp1, 1); - tmp1 = vld1_lane_s16(data_ch2 + n + 2, tmp1, 3); - datav = vrev32_s16(tmp1); - - a = vqdmlal_s16(statev, datav, factorv); - tmp2 = vshrn_n_s32(a, 16); - // Store data_ch1[n + 1] and data_ch2[n + 1]. - vst1_lane_s16(data_ch1 + n + 1, tmp2, 1); - vst1_lane_s16(data_ch2 + n + 1, tmp2, 3); - - // Update filter_state_ch1[0], filter_state_ch1[1] - // and filter_state_ch2[0], filter_state_ch2[1]. - statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp2, factorv); - - // Load data_ch1[n + 3] and data_ch2[n + 3]. - tmp2 = vld1_lane_s16(data_ch1 + n + 3, tmp2, 1); - tmp2 = vld1_lane_s16(data_ch2 + n + 3, tmp2, 3); - datav = vrev32_s16(tmp2); - } - - // Loop unrolling post-processing. - a = vqdmlal_s16(statev, datav, factorv); - tmp1 = vshrn_n_s32(a, 16); - // Store data_ch1[n] and data_ch2[n]. - vst1_lane_s16(data_ch1 + n, tmp1, 1); - vst1_lane_s16(data_ch2 + n, tmp1, 3); - - // Update filter_state_ch1[0], filter_state_ch1[1] - // and filter_state_ch2[0], filter_state_ch2[1]. 
- statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp1, factorv); - // Store filter_state_ch1[0] and filter_state_ch2[0]. - vst1q_lane_s32(filter_state_ch1, statev, 0); - vst1q_lane_s32(filter_state_ch2, statev, 2); - - datav = vrev32_s16(tmp1); - a = vqdmlal_s16(statev, datav, factorv); - tmp2 = vshrn_n_s32(a, 16); - // Store data_ch1[n + 1] and data_ch2[n + 1]. - vst1_lane_s16(data_ch1 + n + 1, tmp2, 1); - vst1_lane_s16(data_ch2 + n + 1, tmp2, 3); - - // Update filter_state_ch1[1] and filter_state_ch2[1]. - statev = vqdmlsl_s16(vshll_n_s16(datav, 16), tmp2, factorv); - // Store filter_state_ch1[1] and filter_state_ch2[1]. - vst1q_lane_s32(filter_state_ch1 + 1, statev, 1); - vst1q_lane_s32(filter_state_ch2 + 1, statev, 3); -} - -// This function is the prototype for above neon optimized function. -//void AllpassFilter2FixDec16BothChannels( -// int16_t *data_ch1, // Input and output in channel 1, in Q0 -// int16_t *data_ch2, // Input and output in channel 2, in Q0 -// const int16_t *factor_ch1, // Scaling factor for channel 1, in Q15 -// const int16_t *factor_ch2, // Scaling factor for channel 2, in Q15 -// const int length, // Length of the data buffers -// int32_t *filter_state_ch1, // Filter state for channel 1, in Q16 -// int32_t *filter_state_ch2) { // Filter state for channel 2, in Q16 -// int n = 0; -// int32_t state0_ch1 = filter_state_ch1[0], state1_ch1 = filter_state_ch1[1]; -// int32_t state0_ch2 = filter_state_ch2[0], state1_ch2 = filter_state_ch2[1]; -// int16_t sample0_ch1 = 0, sample0_ch2 = 0; -// int16_t sample1_ch1 = 0, sample1_ch2 = 0; -// int32_t a0_ch1 = 0, a0_ch2 = 0; -// int32_t b0_ch1 = 0, b0_ch2 = 0; -// -// int32_t a1_ch1 = 0, a1_ch2 = 0; -// int32_t b1_ch1 = 0, b1_ch2 = 0; -// int32_t b2_ch1 = 0, b2_ch2 = 0; -// -// // Loop unrolling preprocessing. 
-// -// sample0_ch1 = data_ch1[n]; -// sample0_ch2 = data_ch2[n]; -// -// a0_ch1 = (factor_ch1[0] * sample0_ch1) << 1; -// a0_ch2 = (factor_ch2[0] * sample0_ch2) << 1; -// -// b0_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state0_ch1); -// b0_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state0_ch2); //Q16+Q16=Q16 -// -// a0_ch1 = -factor_ch1[0] * (int16_t)(b0_ch1 >> 16); -// a0_ch2 = -factor_ch2[0] * (int16_t)(b0_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a0_ch1 <<1, (uint32_t)sample0_ch1 << 16); -// state0_ch2 = WebRtcSpl_AddSatW32(a0_ch2 <<1, (uint32_t)sample0_ch2 << 16); -// -// sample1_ch1 = data_ch1[n + 1]; -// sample0_ch1 = (int16_t) (b0_ch1 >> 16); //Save as Q0 -// sample1_ch2 = data_ch2[n + 1]; -// sample0_ch2 = (int16_t) (b0_ch2 >> 16); //Save as Q0 -// -// -// for (n = 0; n < length - 2; n += 2) { -// a1_ch1 = (factor_ch1[0] * sample1_ch1) << 1; -// a0_ch1 = (factor_ch1[1] * sample0_ch1) << 1; -// a1_ch2 = (factor_ch2[0] * sample1_ch2) << 1; -// a0_ch2 = (factor_ch2[1] * sample0_ch2) << 1; -// -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state0_ch1); -// b0_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state1_ch1); //Q16+Q16=Q16 -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state0_ch2); //Q16+Q16=Q16 -// b0_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state1_ch2); //Q16+Q16=Q16 -// -// a1_ch1 = -factor_ch1[0] * (int16_t)(b1_ch1 >> 16); -// a0_ch1 = -factor_ch1[1] * (int16_t)(b0_ch1 >> 16); -// a1_ch2 = -factor_ch2[0] * (int16_t)(b1_ch2 >> 16); -// a0_ch2 = -factor_ch2[1] * (int16_t)(b0_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1 <<16); -// state1_ch1 = WebRtcSpl_AddSatW32(a0_ch1<<1, (uint32_t)sample0_ch1 <<16); -// state0_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2 <<16); -// state1_ch2 = WebRtcSpl_AddSatW32(a0_ch2<<1, (uint32_t)sample0_ch2 <<16); -// -// sample0_ch1 = data_ch1[n + 2]; -// sample1_ch1 = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// sample0_ch2 = data_ch2[n + 2]; -// sample1_ch2 = (int16_t) (b1_ch2 >> 16); //Save as Q0 -// -// 
a0_ch1 = (factor_ch1[0] * sample0_ch1) << 1; -// a1_ch1 = (factor_ch1[1] * sample1_ch1) << 1; -// a0_ch2 = (factor_ch2[0] * sample0_ch2) << 1; -// a1_ch2 = (factor_ch2[1] * sample1_ch2) << 1; -// -// b2_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state0_ch1); -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state1_ch1); //Q16+Q16=Q16 -// b2_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state0_ch2); //Q16+Q16=Q16 -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state1_ch2); //Q16+Q16=Q16 -// -// a0_ch1 = -factor_ch1[0] * (int16_t)(b2_ch1 >> 16); -// a1_ch1 = -factor_ch1[1] * (int16_t)(b1_ch1 >> 16); -// a0_ch2 = -factor_ch2[0] * (int16_t)(b2_ch2 >> 16); -// a1_ch2 = -factor_ch2[1] * (int16_t)(b1_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a0_ch1<<1, (uint32_t)sample0_ch1<<16); -// state1_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1<<16); -// state0_ch2 = WebRtcSpl_AddSatW32(a0_ch2<<1, (uint32_t)sample0_ch2<<16); -// state1_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2<<16); -// -// -// sample1_ch1 = data_ch1[n + 3]; -// sample0_ch1 = (int16_t) (b2_ch1 >> 16); //Save as Q0 -// sample1_ch2 = data_ch2[n + 3]; -// sample0_ch2 = (int16_t) (b2_ch2 >> 16); //Save as Q0 -// -// data_ch1[n] = (int16_t) (b0_ch1 >> 16); //Save as Q0 -// data_ch1[n + 1] = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// data_ch2[n] = (int16_t) (b0_ch2 >> 16); -// data_ch2[n + 1] = (int16_t) (b1_ch2 >> 16); -// } -// -// // Loop unrolling post-processing. 
-// -// a1_ch1 = (factor_ch1[0] * sample1_ch1) << 1; -// a0_ch1 = (factor_ch1[1] * sample0_ch1) << 1; -// a1_ch2 = (factor_ch2[0] * sample1_ch2) << 1; -// a0_ch2 = (factor_ch2[1] * sample0_ch2) << 1; -// -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state0_ch1); -// b0_ch1 = WebRtcSpl_AddSatW32(a0_ch1, state1_ch1); -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state0_ch2); -// b0_ch2 = WebRtcSpl_AddSatW32(a0_ch2, state1_ch2); -// -// a1_ch1 = -factor_ch1[0] * (int16_t)(b1_ch1 >> 16); -// a0_ch1 = -factor_ch1[1] * (int16_t)(b0_ch1 >> 16); -// a1_ch2 = -factor_ch2[0] * (int16_t)(b1_ch2 >> 16); -// a0_ch2 = -factor_ch2[1] * (int16_t)(b0_ch2 >> 16); -// -// state0_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1 << 16); -// state1_ch1 = WebRtcSpl_AddSatW32(a0_ch1<<1, (uint32_t)sample0_ch1 << 16); -// state0_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2 << 16); -// state1_ch2 = WebRtcSpl_AddSatW32(a0_ch2<<1, (uint32_t)sample0_ch2 << 16); -// -// data_ch1[n] = (int16_t) (b0_ch1 >> 16); //Save as Q0 -// data_ch2[n] = (int16_t) (b0_ch2 >> 16); -// -// sample1_ch1 = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// sample1_ch2 = (int16_t) (b1_ch2 >> 16); //Save as Q0 -// -// a1_ch1 = (factor_ch1[1] * sample1_ch1) << 1; -// a1_ch2 = (factor_ch2[1] * sample1_ch2) << 1; -// -// b1_ch1 = WebRtcSpl_AddSatW32(a1_ch1, state1_ch1); //Q16+Q16=Q16 -// b1_ch2 = WebRtcSpl_AddSatW32(a1_ch2, state1_ch2); //Q16+Q16=Q16 -// -// a1_ch1 = -factor_ch1[1] * (int16_t)(b1_ch1 >> 16); -// a1_ch2 = -factor_ch2[1] * (int16_t)(b1_ch2 >> 16); -// -// state1_ch1 = WebRtcSpl_AddSatW32(a1_ch1<<1, (uint32_t)sample1_ch1<<16); -// state1_ch2 = WebRtcSpl_AddSatW32(a1_ch2<<1, (uint32_t)sample1_ch2<<16); -// -// data_ch1[n + 1] = (int16_t) (b1_ch1 >> 16); //Save as Q0 -// data_ch2[n + 1] = (int16_t) (b1_ch2 >> 16); -// -// filter_state_ch1[0] = state0_ch1; -// filter_state_ch1[1] = state1_ch1; -// filter_state_ch2[0] = state0_ch2; -// filter_state_ch2[1] = state1_ch2; -//} diff --git 
a/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc b/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc deleted file mode 100644 index 4a3db2324a..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/sanitizer.h" -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -class FilterBanksTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. 
- void RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5513 - CalculateResidualEnergyTester( - AllpassFilter2FixDec16 AllpassFilter2FixDec16Function) { - const int kSamples = QLOOKAHEAD; - const int kState = 2; - int16_t data_ch1[kSamples] = {0}; - int16_t data_ch2[kSamples] = {0}; - int32_t state_ch1[kState] = {0}; - int32_t state_ch2[kState] = {0}; - const int32_t out_state_ch1[kState] = {-809122714, 1645972152}; - const int32_t out_state_ch2[kState] = {428019288, 1057309936}; - const int32_t out_data_ch1[kSamples] = { - 0, 0, 347, 10618, 16718, -7089, 32767, 16913, - 27042, 8377, -22973, -28372, -27603, -14804, 398, -25332, - -11200, 18044, 25223, -6839, 1116, -23984, 32717, 7364}; - const int32_t out_data_ch2[kSamples] = { - 0, 0, 3010, 22351, 21106, 16969, -2095, -664, - 3513, -30980, 32767, -23839, 13335, 20289, -6831, 339, - -17207, 32767, 4959, 6177, 32767, 16599, -4747, 20504}; - int sign = 1; - - for (int i = 0; i < kSamples; i++) { - sign *= -1; - data_ch1[i] = sign * WEBRTC_SPL_WORD32_MAX / (i * i + 1); - data_ch2[i] = sign * WEBRTC_SPL_WORD32_MIN / (i * i + 1); - // UBSan: -1 * -2147483648 cannot be represented in type 'int' - }; - - AllpassFilter2FixDec16Function( - data_ch1, data_ch2, WebRtcIsacfix_kUpperApFactorsQ15, - WebRtcIsacfix_kLowerApFactorsQ15, kSamples, state_ch1, state_ch2); - - for (int i = 0; i < kSamples; i++) { - EXPECT_EQ(out_data_ch1[i], data_ch1[i]); - EXPECT_EQ(out_data_ch2[i], data_ch2[i]); - } - for (int i = 0; i < kState; i++) { - EXPECT_EQ(out_state_ch1[i], state_ch1[i]); - EXPECT_EQ(out_state_ch2[i], state_ch2[i]); - } - } -}; - -TEST_F(FilterBanksTest, AllpassFilter2FixDec16Test) { - CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16C); -#if defined(WEBRTC_HAS_NEON) - CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16Neon); -#endif -} - -TEST_F(FilterBanksTest, HighpassFilterFixDec32Test) { - const int kSamples = 20; - int16_t in[kSamples]; - int32_t state[2] = {12345, 987654}; 
-#ifdef WEBRTC_ARCH_ARM_V7 - int32_t out[kSamples] = {-1040, -1035, -22875, -1397, -27604, 20018, 7917, - -1279, -8552, -14494, -7558, -23537, -27258, -30554, - -32768, -3432, -32768, 25215, -27536, 22436}; -#else - int32_t out[kSamples] = {-1040, -1035, -22875, -1397, -27604, 20017, 7915, - -1280, -8554, -14496, -7561, -23541, -27263, -30560, - -32768, -3441, -32768, 25203, -27550, 22419}; -#endif - HighpassFilterFixDec32 WebRtcIsacfix_HighpassFilterFixDec32; -#if defined(MIPS_DSP_R1_LE) - WebRtcIsacfix_HighpassFilterFixDec32 = - WebRtcIsacfix_HighpassFilterFixDec32MIPS; -#else - WebRtcIsacfix_HighpassFilterFixDec32 = WebRtcIsacfix_HighpassFilterFixDec32C; -#endif - - for (int i = 0; i < kSamples; i++) { - in[i] = WEBRTC_SPL_WORD32_MAX / (i + 1); - } - - WebRtcIsacfix_HighpassFilterFixDec32(in, kSamples, - WebRtcIsacfix_kHPStCoeffOut1Q30, state); - - for (int i = 0; i < kSamples; i++) { - EXPECT_EQ(out[i], in[i]); - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/filters.c b/modules/audio_coding/codecs/isac/fix/source/filters.c deleted file mode 100644 index 838ba4b3e8..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filters.c +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - -// Autocorrelation function in fixed point. -// NOTE! Different from SPLIB-version in how it scales the signal. 
-int WebRtcIsacfix_AutocorrC(int32_t* __restrict r, - const int16_t* __restrict x, - int16_t N, - int16_t order, - int16_t* __restrict scale) { - int i = 0; - int j = 0; - int16_t scaling = 0; - int32_t sum = 0; - uint32_t temp = 0; - int64_t prod = 0; - - // The ARM assembly code assumptoins. - RTC_DCHECK_EQ(0, N % 4); - RTC_DCHECK_GE(N, 8); - - // Calculate r[0]. - for (i = 0; i < N; i++) { - prod += x[i] * x[i]; - } - - // Calculate scaling (the value of shifting). - temp = (uint32_t)(prod >> 31); - if(temp == 0) { - scaling = 0; - } else { - scaling = 32 - WebRtcSpl_NormU32(temp); - } - r[0] = (int32_t)(prod >> scaling); - - // Perform the actual correlation calculation. - for (i = 1; i < order + 1; i++) { - prod = 0; - for (j = 0; j < N - i; j++) { - prod += x[j] * x[i + j]; - } - sum = (int32_t)(prod >> scaling); - r[i] = sum; - } - - *scale = scaling; - - return(order + 1); -} - -static const int32_t kApUpperQ15[ALLPASSSECTIONS] = { 1137, 12537 }; -static const int32_t kApLowerQ15[ALLPASSSECTIONS] = { 5059, 24379 }; - - -static void AllpassFilterForDec32(int16_t *InOut16, //Q0 - const int32_t *APSectionFactors, //Q15 - int16_t lengthInOut, - int32_t *FilterState) //Q16 -{ - int n, j; - int32_t a, b; - - for (j=0; j Q16 - b = WebRtcSpl_AddSatW32(a, FilterState[j]); //Q16+Q16=Q16 - // `a` in Q15 (Q0*Q31=Q31 shifted 16 gives Q15). - a = WEBRTC_SPL_MUL_16_32_RSFT16(b >> 16, -APSectionFactors[j]); - // FilterState[j]: Q15<<1 + Q0<<16 = Q16 + Q16 = Q16 - FilterState[j] = WebRtcSpl_AddSatW32(a << 1, (uint32_t)InOut16[n] << 16); - InOut16[n] = (int16_t)(b >> 16); // Save as Q0. 
- } - } -} - - - - -void WebRtcIsacfix_DecimateAllpass32(const int16_t *in, - int32_t *state_in, /* array of size: 2*ALLPASSSECTIONS+1 */ - int16_t N, /* number of input samples */ - int16_t *out) /* array of size N/2 */ -{ - int n; - int16_t data_vec[PITCH_FRAME_LEN]; - - /* copy input */ - memcpy(data_vec + 1, in, sizeof(int16_t) * (N - 1)); - - data_vec[0] = (int16_t)(state_in[2 * ALLPASSSECTIONS] >> 16); // z^-1 state. - state_in[2 * ALLPASSSECTIONS] = (uint32_t)in[N - 1] << 16; - - - - AllpassFilterForDec32(data_vec+1, kApUpperQ15, N, state_in); - AllpassFilterForDec32(data_vec, kApLowerQ15, N, state_in+ALLPASSSECTIONS); - - for (n=0;n> 3); - int count = (int)(N & 7); - // Declare temporary variables used as registry values. - int32_t r0, r1, r2, r3; -#if !defined(MIPS_DSP_R2_LE) - // For non-DSPR2 optimizations 4 more registers are used. - int32_t r4, r5, r6, r7; -#endif - - // Calculate r[0] and scaling needed. - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - // Loop is unrolled 8 times, set accumulator to zero in branch delay slot. - "beqz %[loop_size], 2f \n\t" - " mult $0, $0 \n\t" - "1: \n\t" - // Load 8 samples per loop iteration. -#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 4(%[in]) \n\t" - "ulw %[r2], 8(%[in]) \n\t" - "ulw %[r3], 12(%[in]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 2(%[in]) \n\t" - "lh %[r2], 4(%[in]) \n\t" - "lh %[r3], 6(%[in]) \n\t" - "lh %[r4], 8(%[in]) \n\t" - "lh %[r5], 10(%[in]) \n\t" - "lh %[r6], 12(%[in]) \n\t" - "lh %[r7], 14(%[in]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" - // Multiply and accumulate. 
-#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r0] \n\t" - "dpa.w.ph $ac0, %[r1], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r2] \n\t" - "dpa.w.ph $ac0, %[r3], %[r3] \n\t" -#else - "madd %[r0], %[r0] \n\t" - "madd %[r1], %[r1] \n\t" - "madd %[r2], %[r2] \n\t" - "madd %[r3], %[r3] \n\t" - "madd %[r4], %[r4] \n\t" - "madd %[r5], %[r5] \n\t" - "madd %[r6], %[r6] \n\t" - "madd %[r7], %[r7] \n\t" -#endif - "bnez %[loop_size], 1b \n\t" - " addiu %[in], %[in], 16 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" -#if defined(MIPS_DSP_R1_LE) - " extr.w %[r0], $ac0, 31 \n\t" -#else - " mfhi %[r2] \n\t" -#endif - // Process remaining samples (if any). - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "madd %[r0], %[r0] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in], %[in], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "extr.w %[r0], $ac0, 31 \n\t" -#else - "mfhi %[r2] \n\t" -#endif - "4: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "mflo %[r3] \n\t" - "sll %[r0], %[r2], 1 \n\t" - "srl %[r1], %[r3], 31 \n\t" - "addu %[r0], %[r0], %[r1] \n\t" -#endif - // Calculate scaling (the value of shifting). 
- "clz %[r1], %[r0] \n\t" - "addiu %[r1], %[r1], -32 \n\t" - "subu %[scaling], $0, %[r1] \n\t" - "slti %[r1], %[r0], 0x1 \n\t" - "movn %[scaling], $0, %[r1] \n\t" -#if defined(MIPS_DSP_R1_LE) - "extrv.w %[r0], $ac0, %[scaling] \n\t" - "mfhi %[r2], $ac0 \n\t" -#else - "addiu %[r1], %[scaling], -32 \n\t" - "subu %[r1], $0, %[r1] \n\t" - "sllv %[r1], %[r2], %[r1] \n\t" - "srlv %[r0], %[r3], %[scaling] \n\t" - "addu %[r0], %[r0], %[r1] \n\t" -#endif - "slti %[r1], %[scaling], 32 \n\t" - "movz %[r0], %[r2], %[r1] \n\t" - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [r0] "=&r" (r0), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [count] "+r" (count), [scaling] "=r" (scaling) - : [N] "r" (N) - : "memory", "hi", "lo" - ); - r[0] = r0; - - // Correlation calculation is divided in 3 cases depending on the scaling - // value (different accumulator manipulation needed). Three slightly different - // loops are written in order to avoid branches inside the loop. - if (scaling == 0) { - // In this case, the result will be in low part of the accumulator. - for (i = 1; i < order + 1; i++) { - in = (int16_t*)x; - int16_t* in1 = (int16_t*)x + i; - count = N - i; - loop_size = (count) >> 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - "beqz %[loop_size], 2f \n\t" - " andi %[count], %[count], 0x3 \n\t" - // Loop processing 4 pairs of samples per iteration. 
- "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 0(%[in1]) \n\t" - "ulw %[r2], 4(%[in]) \n\t" - "ulw %[r3], 4(%[in1]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "lh %[r2], 2(%[in]) \n\t" - "lh %[r3], 2(%[in1]) \n\t" - "lh %[r4], 4(%[in]) \n\t" - "lh %[r5], 4(%[in1]) \n\t" - "lh %[r6], 6(%[in]) \n\t" - "lh %[r7], 6(%[in1]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" -#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r3] \n\t" -#else - "madd %[r0], %[r1] \n\t" - "madd %[r2], %[r3] \n\t" - "madd %[r4], %[r5] \n\t" - "madd %[r6], %[r7] \n\t" -#endif - "addiu %[in], %[in], 8 \n\t" - "bnez %[loop_size], 1b \n\t" - " addiu %[in1], %[in1], 8 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" - " mflo %[r0] \n\t" - // Process remaining samples (if any). - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "addiu %[in], %[in], 2 \n\t" - "madd %[r0], %[r1] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in1], %[in1], 2 \n\t" - "mflo %[r0] \n\t" - "4: \n\t" - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [count] "+r" (count) - : - : "memory", "hi", "lo" - ); - r[i] = r0; - } - } else if (scaling == 32) { - // In this case, the result will be high part of the accumulator. - for (i = 1; i < order + 1; i++) { - in = (int16_t*)x; - int16_t* in1 = (int16_t*)x + i; - count = N - i; - loop_size = (count) >> 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - "beqz %[loop_size], 2f \n\t" - " andi %[count], %[count], 0x3 \n\t" - // Loop processing 4 pairs of samples per iteration. 
- "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 0(%[in1]) \n\t" - "ulw %[r2], 4(%[in]) \n\t" - "ulw %[r3], 4(%[in1]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "lh %[r2], 2(%[in]) \n\t" - "lh %[r3], 2(%[in1]) \n\t" - "lh %[r4], 4(%[in]) \n\t" - "lh %[r5], 4(%[in1]) \n\t" - "lh %[r6], 6(%[in]) \n\t" - "lh %[r7], 6(%[in1]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" -#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r3] \n\t" -#else - "madd %[r0], %[r1] \n\t" - "madd %[r2], %[r3] \n\t" - "madd %[r4], %[r5] \n\t" - "madd %[r6], %[r7] \n\t" -#endif - "addiu %[in], %[in], 8 \n\t" - "bnez %[loop_size], 1b \n\t" - " addiu %[in1], %[in1], 8 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" - " mfhi %[r0] \n\t" - // Process remaining samples (if any). - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "addiu %[in], %[in], 2 \n\t" - "madd %[r0], %[r1] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in1], %[in1], 2 \n\t" - "mfhi %[r0] \n\t" - "4: \n\t" - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [count] "+r" (count) - : - : "memory", "hi", "lo" - ); - r[i] = r0; - } - } else { - // In this case, the result is obtained by combining low and high parts - // of the accumulator. 
-#if !defined(MIPS_DSP_R1_LE) - int32_t tmp_shift = 32 - scaling; -#endif - for (i = 1; i < order + 1; i++) { - in = (int16_t*)x; - int16_t* in1 = (int16_t*)x + i; - count = N - i; - loop_size = (count) >> 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "mult $0, $0 \n\t" - "beqz %[loop_size], 2f \n\t" - " andi %[count], %[count], 0x3 \n\t" - "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "ulw %[r0], 0(%[in]) \n\t" - "ulw %[r1], 0(%[in1]) \n\t" - "ulw %[r2], 4(%[in]) \n\t" - "ulw %[r3], 4(%[in1]) \n\t" -#else - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "lh %[r2], 2(%[in]) \n\t" - "lh %[r3], 2(%[in1]) \n\t" - "lh %[r4], 4(%[in]) \n\t" - "lh %[r5], 4(%[in1]) \n\t" - "lh %[r6], 6(%[in]) \n\t" - "lh %[r7], 6(%[in1]) \n\t" -#endif - "addiu %[loop_size], %[loop_size], -1 \n\t" -#if defined(MIPS_DSP_R2_LE) - "dpa.w.ph $ac0, %[r0], %[r1] \n\t" - "dpa.w.ph $ac0, %[r2], %[r3] \n\t" -#else - "madd %[r0], %[r1] \n\t" - "madd %[r2], %[r3] \n\t" - "madd %[r4], %[r5] \n\t" - "madd %[r6], %[r7] \n\t" -#endif - "addiu %[in], %[in], 8 \n\t" - "bnez %[loop_size], 1b \n\t" - " addiu %[in1], %[in1], 8 \n\t" - "2: \n\t" - "beqz %[count], 4f \n\t" -#if defined(MIPS_DSP_R1_LE) - " extrv.w %[r0], $ac0, %[scaling] \n\t" -#else - " mfhi %[r0] \n\t" -#endif - "3: \n\t" - "lh %[r0], 0(%[in]) \n\t" - "lh %[r1], 0(%[in1]) \n\t" - "addiu %[count], %[count], -1 \n\t" - "addiu %[in], %[in], 2 \n\t" - "madd %[r0], %[r1] \n\t" - "bnez %[count], 3b \n\t" - " addiu %[in1], %[in1], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "extrv.w %[r0], $ac0, %[scaling] \n\t" -#else - "mfhi %[r0] \n\t" -#endif - "4: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "mflo %[r1] \n\t" - "sllv %[r0], %[r0], %[tmp_shift] \n\t" - "srlv %[r1], %[r1], %[scaling] \n\t" - "addu %[r0], %[r0], %[r1] \n\t" -#endif - ".set pop \n\t" - : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1), -#if !defined(MIPS_DSP_R2_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), -#endif - [r0] 
"=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [count] "+r" (count) - : [scaling] "r" (scaling) -#if !defined(MIPS_DSP_R1_LE) - , [tmp_shift] "r" (tmp_shift) -#endif - : "memory", "hi", "lo" - ); - r[i] = r0; - } - } - *scale = scaling; - - return (order + 1); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/filters_neon.c b/modules/audio_coding/codecs/isac/fix/source/filters_neon.c deleted file mode 100644 index 1734a969cb..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filters_neon.c +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" - -// Autocorrelation function in fixed point. -// NOTE! Different from SPLIB-version in how it scales the signal. -int WebRtcIsacfix_AutocorrNeon(int32_t* __restrict r, - const int16_t* x, - int16_t n, - int16_t order, - int16_t* __restrict scale) { - int i = 0; - int16_t scaling = 0; - uint32_t temp = 0; - int64_t prod = 0; - int64_t prod_tail = 0; - - RTC_DCHECK_EQ(0, n % 4); - RTC_DCHECK_GE(n, 8); - - // Calculate r[0]. 
- int16x4_t x0_v; - int32x4_t tmpa0_v; - int64x2_t tmpb_v; - - tmpb_v = vdupq_n_s64(0); - const int16_t* x_start = x; - const int16_t* x_end0 = x_start + n; - while (x_start < x_end0) { - x0_v = vld1_s16(x_start); - tmpa0_v = vmull_s16(x0_v, x0_v); - tmpb_v = vpadalq_s32(tmpb_v, tmpa0_v); - x_start += 4; - } - -#ifdef WEBRTC_ARCH_ARM64 - prod = vaddvq_s64(tmpb_v); -#else - prod = vget_lane_s64(vadd_s64(vget_low_s64(tmpb_v), vget_high_s64(tmpb_v)), - 0); -#endif - // Calculate scaling (the value of shifting). - temp = (uint32_t)(prod >> 31); - - scaling = temp ? 32 - WebRtcSpl_NormU32(temp) : 0; - r[0] = (int32_t)(prod >> scaling); - - int16x8_t x1_v; - int16x8_t y_v; - int32x4_t tmpa1_v; - // Perform the actual correlation calculation. - for (i = 1; i < order + 1; i++) { - tmpb_v = vdupq_n_s64(0); - int rest = (n - i) % 8; - x_start = x; - x_end0 = x_start + n - i - rest; - const int16_t* y_start = x_start + i; - while (x_start < x_end0) { - x1_v = vld1q_s16(x_start); - y_v = vld1q_s16(y_start); - tmpa0_v = vmull_s16(vget_low_s16(x1_v), vget_low_s16(y_v)); -#ifdef WEBRTC_ARCH_ARM64 - tmpa1_v = vmull_high_s16(x1_v, y_v); -#else - tmpa1_v = vmull_s16(vget_high_s16(x1_v), vget_high_s16(y_v)); -#endif - tmpb_v = vpadalq_s32(tmpb_v, tmpa0_v); - tmpb_v = vpadalq_s32(tmpb_v, tmpa1_v); - x_start += 8; - y_start += 8; - } - // The remaining calculation. 
- const int16_t* x_end1 = x + n - i; - if (rest >= 4) { - int16x4_t x2_v = vld1_s16(x_start); - int16x4_t y2_v = vld1_s16(y_start); - tmpa0_v = vmull_s16(x2_v, y2_v); - tmpb_v = vpadalq_s32(tmpb_v, tmpa0_v); - x_start += 4; - y_start += 4; - } -#ifdef WEBRTC_ARCH_ARM64 - prod = vaddvq_s64(tmpb_v); -#else - prod = vget_lane_s64(vadd_s64(vget_low_s64(tmpb_v), vget_high_s64(tmpb_v)), - 0); -#endif - - prod_tail = 0; - while (x_start < x_end1) { - prod_tail += *x_start * *y_start; - ++x_start; - ++y_start; - } - - r[i] = (int32_t)((prod + prod_tail) >> scaling); - } - - *scale = scaling; - - return order + 1; -} - diff --git a/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc b/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc deleted file mode 100644 index 192ef89f9f..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -class FiltersTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. - void FiltersTester(AutocorrFix WebRtcIsacfix_AutocorrFixFunction) { - const int kOrder = 12; - const int kBuffer = 40; - int16_t scale = 0; - int32_t r_buffer[kOrder + 2] = {0}; - - // Test an overflow case. 
- const int16_t x_buffer_0[kBuffer] = { - 0, 0, 3010, 22351, 21106, 16969, -2095, -664, - 3513, -30980, 32767, -23839, 13335, 20289, -6831, 339, - -17207, 32767, 4959, 6177, 32767, 16599, -4747, 20504, - 3513, -30980, 32767, -23839, 13335, 20289, 0, -16969, - -2095, -664, 3513, 31981, 32767, -13839, 23336, 30281}; - const int32_t r_expected_0[kOrder + 2] = { - 1872498461, -224288754, 203789985, 483400487, -208272635, - 2436500, 137785322, 266600814, -208486262, 329510080, - 137949184, -161738972, -26894267, 237630192}; - - WebRtcIsacfix_AutocorrFixFunction(r_buffer, x_buffer_0, kBuffer, kOrder + 1, - &scale); - for (int i = 0; i < kOrder + 2; i++) { - EXPECT_EQ(r_expected_0[i], r_buffer[i]); - } - EXPECT_EQ(3, scale); - - // Test a no-overflow case. - const int16_t x_buffer_1[kBuffer] = { - 0, 0, 300, 21, 206, 169, -295, -664, 3513, -300, - 327, -29, 15, 289, -6831, 339, -107, 37, 59, 6177, - 327, 169, -4747, 204, 313, -980, 767, -9, 135, 289, - 0, -6969, -2095, -664, 0, 1, 7, -39, 236, 281}; - const int32_t r_expected_1[kOrder + 2] = { - 176253864, 8126617, 1983287, -26196788, -3487363, - -42839676, -24644043, 3469813, 30559879, 31905045, - 5101567, 29328896, -55787438, -13163978}; - - WebRtcIsacfix_AutocorrFixFunction(r_buffer, x_buffer_1, kBuffer, kOrder + 1, - &scale); - for (int i = 0; i < kOrder + 2; i++) { - EXPECT_EQ(r_expected_1[i], r_buffer[i]); - } - EXPECT_EQ(0, scale); - } -}; - -TEST_F(FiltersTest, AutocorrFixTest) { - FiltersTester(WebRtcIsacfix_AutocorrC); -#if defined(WEBRTC_HAS_NEON) - FiltersTester(WebRtcIsacfix_AutocorrNeon); -#endif -} diff --git a/modules/audio_coding/codecs/isac/fix/source/initialize.c b/modules/audio_coding/codecs/isac/fix/source/initialize.c deleted file mode 100644 index 1b82958883..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/initialize.c +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * initialize.c - * - * Internal initfunctions - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - - -void WebRtcIsacfix_InitMaskingEnc(MaskFiltstr_enc *maskdata) { - - int k; - - for (k = 0; k < WINLEN; k++) { - maskdata->DataBufferLoQ0[k] = (int16_t) 0; - maskdata->DataBufferHiQ0[k] = (int16_t) 0; - } - for (k = 0; k < ORDERLO+1; k++) { - maskdata->CorrBufLoQQ[k] = (int32_t) 0; - maskdata->CorrBufLoQdom[k] = 0; - - maskdata->PreStateLoGQ15[k] = 0; - - } - for (k = 0; k < ORDERHI+1; k++) { - maskdata->CorrBufHiQQ[k] = (int32_t) 0; - maskdata->CorrBufHiQdom[k] = 0; - maskdata->PreStateHiGQ15[k] = 0; - } - - maskdata->OldEnergy = 10; - - return; -} - -void WebRtcIsacfix_InitMaskingDec(MaskFiltstr_dec *maskdata) { - - int k; - - for (k = 0; k < ORDERLO+1; k++) - { - maskdata->PostStateLoGQ0[k] = 0; - } - for (k = 0; k < ORDERHI+1; k++) - { - maskdata->PostStateHiGQ0[k] = 0; - } - - maskdata->OldEnergy = 10; - - return; -} - - - - - - - -void WebRtcIsacfix_InitPreFilterbank(PreFiltBankstr *prefiltdata) -{ - int k; - - for (k = 0; k < QLOOKAHEAD; k++) { - prefiltdata->INLABUF1_fix[k] = 0; - prefiltdata->INLABUF2_fix[k] = 0; - } - for (k = 0; k < 2 * (QORDER - 1); k++) { - prefiltdata->INSTAT1_fix[k] = 0; - prefiltdata->INSTAT2_fix[k] = 0; - } - - /* High pass filter states */ - prefiltdata->HPstates_fix[0] = 0; - prefiltdata->HPstates_fix[1] = 0; - - return; -} - -void WebRtcIsacfix_InitPostFilterbank(PostFiltBankstr *postfiltdata) -{ - int k; - - for (k = 0; k < 2 * 
POSTQORDER; k++) { - postfiltdata->STATE_0_LOWER_fix[k] = 0; - postfiltdata->STATE_0_UPPER_fix[k] = 0; - } - - /* High pass filter states */ - - postfiltdata->HPstates1_fix[0] = 0; - postfiltdata->HPstates1_fix[1] = 0; - - postfiltdata->HPstates2_fix[0] = 0; - postfiltdata->HPstates2_fix[1] = 0; - - return; -} - - -void WebRtcIsacfix_InitPitchFilter(PitchFiltstr *pitchfiltdata) -{ - int k; - - for (k = 0; k < PITCH_BUFFSIZE; k++) - pitchfiltdata->ubufQQ[k] = 0; - for (k = 0; k < (PITCH_DAMPORDER); k++) - pitchfiltdata->ystateQQ[k] = 0; - - pitchfiltdata->oldlagQ7 = 6400; /* 50.0 in Q7 */ - pitchfiltdata->oldgainQ12 = 0; -} - -void WebRtcIsacfix_InitPitchAnalysis(PitchAnalysisStruct *State) -{ - int k; - - for (k = 0; k < PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; k++) { - State->dec_buffer16[k] = 0; - } - for (k = 0; k < 2 * ALLPASSSECTIONS + 1; k++) { - State->decimator_state32[k] = 0; - } - - for (k = 0; k < QLOOKAHEAD; k++) - State->inbuf[k] = 0; - - WebRtcIsacfix_InitPitchFilter(&(State->PFstr_wght)); - - WebRtcIsacfix_InitPitchFilter(&(State->PFstr)); -} - - -void WebRtcIsacfix_InitPlc( PLCstr *State ) -{ - State->decayCoeffPriodic = WEBRTC_SPL_WORD16_MAX; - State->decayCoeffNoise = WEBRTC_SPL_WORD16_MAX; - - State->used = PLC_WAS_USED; - - WebRtcSpl_ZerosArrayW16(State->overlapLP, RECOVERY_OVERLAP); - WebRtcSpl_ZerosArrayW16(State->lofilt_coefQ15, ORDERLO); - WebRtcSpl_ZerosArrayW16(State->hifilt_coefQ15, ORDERHI ); - - State->AvgPitchGain_Q12 = 0; - State->lastPitchGain_Q12 = 0; - State->lastPitchLag_Q7 = 0; - State->gain_lo_hiQ17[0]=State->gain_lo_hiQ17[1] = 0; - WebRtcSpl_ZerosArrayW16(State->prevPitchInvIn, FRAMESAMPLES/2); - WebRtcSpl_ZerosArrayW16(State->prevPitchInvOut, PITCH_MAX_LAG + 10 ); - WebRtcSpl_ZerosArrayW32(State->prevHP, PITCH_MAX_LAG + 10 ); - State->pitchCycles = 0; - State->A = 0; - State->B = 0; - State->pitchIndex = 0; - State->stretchLag = 240; - State->seed = 4447; - - -} diff --git 
a/modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h b/modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h deleted file mode 100644 index 512911a8bb..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ISAC_FIX_TYPE_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ISAC_FIX_TYPE_H_ - -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -class IsacFix { - public: - using instance_type = ISACFIX_MainStruct; - static const bool has_swb = false; - static inline int16_t Control(instance_type* inst, - int32_t rate, - int framesize) { - return WebRtcIsacfix_Control(inst, rate, framesize); - } - static inline int16_t ControlBwe(instance_type* inst, - int32_t rate_bps, - int frame_size_ms, - int16_t enforce_frame_size) { - return WebRtcIsacfix_ControlBwe(inst, rate_bps, frame_size_ms, - enforce_frame_size); - } - static inline int16_t Create(instance_type** inst) { - return WebRtcIsacfix_Create(inst); - } - static inline int DecodeInternal(instance_type* inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speech_type) { - return WebRtcIsacfix_Decode(inst, encoded, len, decoded, speech_type); - } - static inline size_t DecodePlc(instance_type* inst, - int16_t* decoded, - size_t num_lost_frames) { - return WebRtcIsacfix_DecodePlc(inst, decoded, num_lost_frames); - } - static inline void DecoderInit(instance_type* inst) { - 
WebRtcIsacfix_DecoderInit(inst); - } - static inline int Encode(instance_type* inst, - const int16_t* speech_in, - uint8_t* encoded) { - return WebRtcIsacfix_Encode(inst, speech_in, encoded); - } - static inline int16_t EncoderInit(instance_type* inst, int16_t coding_mode) { - return WebRtcIsacfix_EncoderInit(inst, coding_mode); - } - static inline uint16_t EncSampRate(instance_type* inst) { - return kFixSampleRate; - } - - static inline int16_t Free(instance_type* inst) { - return WebRtcIsacfix_Free(inst); - } - static inline int16_t GetErrorCode(instance_type* inst) { - return WebRtcIsacfix_GetErrorCode(inst); - } - - static inline int16_t GetNewFrameLen(instance_type* inst) { - return WebRtcIsacfix_GetNewFrameLen(inst); - } - static inline int16_t SetDecSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - RTC_DCHECK_EQ(sample_rate_hz, kFixSampleRate); - return 0; - } - static inline int16_t SetEncSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - RTC_DCHECK_EQ(sample_rate_hz, kFixSampleRate); - return 0; - } - static inline void SetEncSampRateInDecoder(instance_type* inst, - uint16_t sample_rate_hz) { - RTC_DCHECK_EQ(sample_rate_hz, kFixSampleRate); - } - static inline void SetInitialBweBottleneck(instance_type* inst, - int bottleneck_bits_per_second) { - WebRtcIsacfix_SetInitialBweBottleneck(inst, bottleneck_bits_per_second); - } - static inline int16_t SetMaxPayloadSize(instance_type* inst, - int16_t max_payload_size_bytes) { - return WebRtcIsacfix_SetMaxPayloadSize(inst, max_payload_size_bytes); - } - static inline int16_t SetMaxRate(instance_type* inst, int32_t max_bit_rate) { - return WebRtcIsacfix_SetMaxRate(inst, max_bit_rate); - } - - private: - enum { kFixSampleRate = 16000 }; -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ISAC_FIX_TYPE_H_ diff --git a/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/modules/audio_coding/codecs/isac/fix/source/isacfix.c deleted file mode 100644 index 
a7d44e883d..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/isacfix.c +++ /dev/null @@ -1,1230 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * isacfix.c - * - * This C file contains the functions for the ISAC API - * - */ - -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" - -#include - -#include "rtc_base/checks.h" -#include "modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -// Declare function pointers. -FilterMaLoopFix WebRtcIsacfix_FilterMaLoopFix; -Spec2Time WebRtcIsacfix_Spec2Time; -Time2Spec WebRtcIsacfix_Time2Spec; -MatrixProduct1 WebRtcIsacfix_MatrixProduct1; -MatrixProduct2 WebRtcIsacfix_MatrixProduct2; - -/* This method assumes that `stream_size_bytes` is in valid range, - * i.e. >= 0 && <= STREAM_MAXW16_60MS - */ -static void InitializeDecoderBitstream(size_t stream_size_bytes, - Bitstr_dec* bitstream) { - bitstream->W_upper = 0xFFFFFFFF; - bitstream->streamval = 0; - bitstream->stream_index = 0; - bitstream->full = 1; - bitstream->stream_size = (stream_size_bytes + 1) >> 1; - memset(bitstream->stream, 0, sizeof(bitstream->stream)); -} - -/**************************************************************************** - * WebRtcIsacfix_Create(...) 
- * - * This function creates a ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst) -{ - ISACFIX_SubStruct *tempo; - tempo = malloc(1 * sizeof(ISACFIX_SubStruct)); - *ISAC_main_inst = (ISACFIX_MainStruct *)tempo; - if (*ISAC_main_inst!=NULL) { - (*(ISACFIX_SubStruct**)ISAC_main_inst)->errorcode = 0; - (*(ISACFIX_SubStruct**)ISAC_main_inst)->initflag = 0; - (*(ISACFIX_SubStruct**)ISAC_main_inst)->ISACenc_obj.SaveEnc_ptr = NULL; - WebRtcIsacfix_InitBandwidthEstimator(&tempo->bwestimator_obj); - return(0); - } else { - return(-1); - } -} - - -/**************************************************************************** - * WebRtcIsacfix_CreateInternal(...) - * - * This function creates the memory that is used to store data in the encoder - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Allocate memory for storing encoder data */ - ISAC_inst->ISACenc_obj.SaveEnc_ptr = malloc(1 * sizeof(IsacSaveEncoderData)); - - if (ISAC_inst->ISACenc_obj.SaveEnc_ptr!=NULL) { - return(0); - } else { - return(-1); - } -} - - -/**************************************************************************** - * WebRtcIsacfix_Free(...) - * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : a ISAC instance. 
- * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst) -{ - free(ISAC_main_inst); - return(0); -} - -/**************************************************************************** - * WebRtcIsacfix_FreeInternal(...) - * - * This function frees the internal memory for storing encoder data. - * - * Input: - * - ISAC_main_inst : a ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Release memory */ - free(ISAC_inst->ISACenc_obj.SaveEnc_ptr); - - return(0); -} - -/**************************************************************************** - * WebRtcIsacfix_InitNeon(...) - * - * This function initializes function pointers for ARM Neon platform. - */ - -#if defined(WEBRTC_HAS_NEON) -static void WebRtcIsacfix_InitNeon(void) { - WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrNeon; - WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopNeon; - WebRtcIsacfix_Spec2Time = WebRtcIsacfix_Spec2TimeNeon; - WebRtcIsacfix_Time2Spec = WebRtcIsacfix_Time2SpecNeon; - WebRtcIsacfix_AllpassFilter2FixDec16 = - WebRtcIsacfix_AllpassFilter2FixDec16Neon; - WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1Neon; - WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2Neon; -} -#endif - -/**************************************************************************** - * WebRtcIsacfix_InitMIPS(...) - * - * This function initializes function pointers for MIPS platform. 
- */ - -#if defined(MIPS32_LE) -static void WebRtcIsacfix_InitMIPS(void) { - WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrMIPS; - WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopMIPS; - WebRtcIsacfix_Spec2Time = WebRtcIsacfix_Spec2TimeMIPS; - WebRtcIsacfix_Time2Spec = WebRtcIsacfix_Time2SpecMIPS; - WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1MIPS; - WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2MIPS; -#if defined(MIPS_DSP_R1_LE) - WebRtcIsacfix_AllpassFilter2FixDec16 = - WebRtcIsacfix_AllpassFilter2FixDec16MIPS; - WebRtcIsacfix_HighpassFilterFixDec32 = - WebRtcIsacfix_HighpassFilterFixDec32MIPS; -#endif -#if defined(MIPS_DSP_R2_LE) - WebRtcIsacfix_CalculateResidualEnergy = - WebRtcIsacfix_CalculateResidualEnergyMIPS; -#endif -} -#endif - -static void InitFunctionPointers(void) { - WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrC; - WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopC; - WebRtcIsacfix_CalculateResidualEnergy = - WebRtcIsacfix_CalculateResidualEnergyC; - WebRtcIsacfix_AllpassFilter2FixDec16 = WebRtcIsacfix_AllpassFilter2FixDec16C; - WebRtcIsacfix_HighpassFilterFixDec32 = WebRtcIsacfix_HighpassFilterFixDec32C; - WebRtcIsacfix_Time2Spec = WebRtcIsacfix_Time2SpecC; - WebRtcIsacfix_Spec2Time = WebRtcIsacfix_Spec2TimeC; - WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1C; - WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2C; - -#if defined(WEBRTC_HAS_NEON) - WebRtcIsacfix_InitNeon(); -#endif - -#if defined(MIPS32_LE) - WebRtcIsacfix_InitMIPS(); -#endif -} - -/**************************************************************************** - * WebRtcIsacfix_EncoderInit(...) - * - * This function initializes a ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 -> Bit rate and frame length are automatically - * adjusted to available bandwidth on - * transmission channel. 
- * 1 -> User sets a frame length and a target bit - * rate which is taken as the maximum short-term - * average bit rate. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst, - int16_t CodingMode) -{ - int k; - int16_t statusInit; - ISACFIX_SubStruct *ISAC_inst; - - statusInit = 0; - /* typecast pointer to rela structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* flag encoder init */ - ISAC_inst->initflag |= 2; - - if (CodingMode == 0) - /* Adaptive mode */ - ISAC_inst->ISACenc_obj.new_framelength = INITIAL_FRAMESAMPLES; - else if (CodingMode == 1) - /* Instantaneous mode */ - ISAC_inst->ISACenc_obj.new_framelength = 480; /* default for I-mode */ - else { - ISAC_inst->errorcode = ISAC_DISALLOWED_CODING_MODE; - statusInit = -1; - } - - ISAC_inst->CodingMode = CodingMode; - - WebRtcIsacfix_InitMaskingEnc(&ISAC_inst->ISACenc_obj.maskfiltstr_obj); - WebRtcIsacfix_InitPreFilterbank(&ISAC_inst->ISACenc_obj.prefiltbankstr_obj); - WebRtcIsacfix_InitPitchFilter(&ISAC_inst->ISACenc_obj.pitchfiltstr_obj); - WebRtcIsacfix_InitPitchAnalysis(&ISAC_inst->ISACenc_obj.pitchanalysisstr_obj); - - WebRtcIsacfix_InitRateModel(&ISAC_inst->ISACenc_obj.rate_data_obj); - - - ISAC_inst->ISACenc_obj.buffer_index = 0; - ISAC_inst->ISACenc_obj.frame_nb = 0; - ISAC_inst->ISACenc_obj.BottleNeck = 32000; /* default for I-mode */ - ISAC_inst->ISACenc_obj.MaxDelay = 10; /* default for I-mode */ - ISAC_inst->ISACenc_obj.current_framesamples = 0; - ISAC_inst->ISACenc_obj.s2nr = 0; - ISAC_inst->ISACenc_obj.MaxBits = 0; - ISAC_inst->ISACenc_obj.bitstr_seed = 4447; - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = STREAM_MAXW16_30MS << 1; - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = STREAM_MAXW16_60MS << 1; - ISAC_inst->ISACenc_obj.maxPayloadBytes = STREAM_MAXW16_60MS << 1; - ISAC_inst->ISACenc_obj.maxRateInBytes = STREAM_MAXW16_30MS << 1; - ISAC_inst->ISACenc_obj.enforceFrameSize = 0; - - /* Init the bistream data 
area to zero */ - for (k=0; kISACenc_obj.bitstr_obj.stream[k] = 0; - } - - InitFunctionPointers(); - - return statusInit; -} - -/* Read the given number of bytes of big-endian 16-bit integers from `src` and - write them to `dest` in host endian. If `nbytes` is odd, the number of - output elements is rounded up, and the least significant byte of the last - element is set to 0. */ -static void read_be16(const uint8_t* src, size_t nbytes, uint16_t* dest) { - size_t i; - for (i = 0; i < nbytes / 2; ++i) - dest[i] = src[2 * i] << 8 | src[2 * i + 1]; - if (nbytes % 2 == 1) - dest[nbytes / 2] = src[nbytes - 1] << 8; -} - -/* Read the given number of bytes of host-endian 16-bit integers from `src` and - write them to `dest` in big endian. If `nbytes` is odd, the number of source - elements is rounded up (but only the most significant byte of the last - element is used), and the number of output bytes written will be - nbytes + 1. */ -static void write_be16(const uint16_t* src, size_t nbytes, uint8_t* dest) { - size_t i; - for (i = 0; i < nbytes / 2; ++i) { - dest[2 * i] = src[i] >> 8; - dest[2 * i + 1] = src[i]; - } - if (nbytes % 2 == 1) { - dest[nbytes - 1] = src[nbytes / 2] >> 8; - dest[nbytes] = 0; - } -} - -/**************************************************************************** - * WebRtcIsacfix_Encode(...) - * - * This function encodes 10ms frame(s) and inserts it into a package. - * Input speech length has to be 160 samples (10ms). The encoder buffers those - * 10ms frames until it reaches the chosen Framesize (480 or 960 samples - * corresponding to 30 or 60 ms frames), and then proceeds to the encoding. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. - * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : 0 - The buffer didn't reach the chosen framesize - * so it keeps buffering speech samples. 
- * : -1 - Error - */ - -int WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst, - const int16_t *speechIn, - uint8_t* encoded) -{ - ISACFIX_SubStruct *ISAC_inst; - int stream_len; - - /* typecast pointer to rela structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - - /* check if encoder initiated */ - if ((ISAC_inst->initflag & 2) != 2) { - ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED; - return (-1); - } - - stream_len = WebRtcIsacfix_EncodeImpl((int16_t*)speechIn, - &ISAC_inst->ISACenc_obj, - &ISAC_inst->bwestimator_obj, - ISAC_inst->CodingMode); - if (stream_len<0) { - ISAC_inst->errorcode = -(int16_t)stream_len; - return -1; - } - - write_be16(ISAC_inst->ISACenc_obj.bitstr_obj.stream, (size_t)stream_len, - encoded); - return stream_len; - -} - - -/**************************************************************************** - * WebRtcIsacfix_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. It should always return a complete packet, i.e. only called once - * even for 60 msec frames - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - bweIndex : index of bandwidth estimate to put in new bitstream - * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : -1 - Error - */ - -int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst, - int16_t bweIndex, - float scale, - uint8_t* encoded) -{ - ISACFIX_SubStruct *ISAC_inst; - int16_t stream_len; - - /* typecast pointer to rela structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - - /* check if encoder initiated */ - if ((ISAC_inst->initflag & 2) != 2) { - ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED; - return (-1); - } - - stream_len = WebRtcIsacfix_EncodeStoredData(&ISAC_inst->ISACenc_obj, - bweIndex, - scale); - if (stream_len<0) { - ISAC_inst->errorcode = - stream_len; - return -1; - } - - write_be16(ISAC_inst->ISACenc_obj.bitstr_obj.stream, stream_len, encoded); - return stream_len; -} - - - -/**************************************************************************** - * WebRtcIsacfix_DecoderInit(...) - * - * This function initializes a ISAC instance prior to the decoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - */ - -void WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - - InitFunctionPointers(); - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* flag decoder init */ - ISAC_inst->initflag |= 1; - - WebRtcIsacfix_InitMaskingDec(&ISAC_inst->ISACdec_obj.maskfiltstr_obj); - WebRtcIsacfix_InitPostFilterbank(&ISAC_inst->ISACdec_obj.postfiltbankstr_obj); - WebRtcIsacfix_InitPitchFilter(&ISAC_inst->ISACdec_obj.pitchfiltstr_obj); - - /* TS */ - WebRtcIsacfix_InitPlc( &ISAC_inst->ISACdec_obj.plcstr_obj ); -} - - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate1(...) - * - * This function updates the estimate of the bandwidth. 
- * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet. - * - rtp_seq_number : the RTP number of the packet. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t arr_ts) -{ - ISACFIX_SubStruct *ISAC_inst; - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Sanity check of packet length */ - if (packet_size == 0) { - /* return error code if the packet length is null or less */ - ISAC_inst->errorcode = ISAC_EMPTY_PACKET; - return -1; - } else if (packet_size > (STREAM_MAXW16<<1)) { - /* return error code if length of stream is too long */ - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* check if decoder initiated */ - if ((ISAC_inst->initflag & 1) != 1) { - ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED; - return (-1); - } - - InitializeDecoderBitstream(packet_size, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - err = WebRtcIsacfix_EstimateBandwidth(&ISAC_inst->bwestimator_obj, - &streamdata, - packet_size, - rtp_seq_number, - 0, - arr_ts); - - - if (err < 0) - { - /* return error code if something went wrong */ - ISAC_inst->errorcode = -err; - return -1; - } - - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet. - * - rtp_seq_number : the RTP number of the packet. 
- * - send_ts : Send Time Stamp from RTP header - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts) -{ - ISACFIX_SubStruct *ISAC_inst; - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Sanity check of packet length */ - if (packet_size == 0) { - /* return error code if the packet length is null or less */ - ISAC_inst->errorcode = ISAC_EMPTY_PACKET; - return -1; - } else if (packet_size < kRequiredEncodedLenBytes) { - ISAC_inst->errorcode = ISAC_PACKET_TOO_SHORT; - return -1; - } else if (packet_size > (STREAM_MAXW16<<1)) { - /* return error code if length of stream is too long */ - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* check if decoder initiated */ - if ((ISAC_inst->initflag & 1) != 1) { - ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED; - return (-1); - } - - InitializeDecoderBitstream(packet_size, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - err = WebRtcIsacfix_EstimateBandwidth(&ISAC_inst->bwestimator_obj, - &streamdata, - packet_size, - rtp_seq_number, - send_ts, - arr_ts); - - if (err < 0) - { - /* return error code if something went wrong */ - ISAC_inst->errorcode = -err; - return -1; - } - - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_Decode(...) - * - * This function decodes a ISAC frame. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - encoded : encoded ISAC frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - - -int WebRtcIsacfix_Decode(ISACFIX_MainStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType) -{ - ISACFIX_SubStruct *ISAC_inst; - /* number of samples (480 or 960), output from decoder */ - /* that were actually used in the encoder/decoder (determined on the fly) */ - size_t number_of_samples; - int declen_int = 0; - size_t declen; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* check if decoder initiated */ - if ((ISAC_inst->initflag & 1) != 1) { - ISAC_inst->errorcode = ISAC_DECODER_NOT_INITIATED; - return (-1); - } - - /* Sanity check of packet length */ - if (len == 0) { - /* return error code if the packet length is null or less */ - ISAC_inst->errorcode = ISAC_EMPTY_PACKET; - return -1; - } else if (len > (STREAM_MAXW16<<1)) { - /* return error code if length of stream is too long */ - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - return -1; - } - - InitializeDecoderBitstream(len, &ISAC_inst->ISACdec_obj.bitstr_obj); - - read_be16(encoded, len, ISAC_inst->ISACdec_obj.bitstr_obj.stream); - - /* added for NetEq purposes (VAD/DTX related) */ - *speechType=1; - - declen_int = WebRtcIsacfix_DecodeImpl(decoded, &ISAC_inst->ISACdec_obj, - &number_of_samples); - if (declen_int < 0) { - /* Some error inside the decoder */ - ISAC_inst->errorcode = -(int16_t)declen_int; - memset(decoded, 0, sizeof(int16_t) * MAX_FRAMESAMPLES); - return -1; - } - declen = (size_t)declen_int; - - /* error check */ - - if (declen & 1) { - if (len != declen && - len != declen + - ((ISAC_inst->ISACdec_obj.bitstr_obj.stream[declen >> 1]) & 0xFF)) { - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - memset(decoded, 0, sizeof(int16_t) * number_of_samples); - return -1; - } - } else 
{ - if (len != declen && - len != declen + - ((ISAC_inst->ISACdec_obj.bitstr_obj.stream[declen >> 1]) >> 8)) { - ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH; - memset(decoded, 0, sizeof(int16_t) * number_of_samples); - return -1; - } - } - - return (int)number_of_samples; -} - - -/**************************************************************************** - * WebRtcIsacfix_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s) in wide-band (16kHz sampling). - * Output speech length will be "480*noOfLostFrames" samples - * that is equevalent of "30*noOfLostFrames" millisecond. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - noOfLostFrames : Number of PLC frames (480sample = 30ms) - * to produce - * - * Output: - * - decoded : The decoded vector - * - * Return value : Number of samples in decoded PLC vector - */ - -size_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames) -{ - - size_t no_of_samples, declen, k; - int16_t outframe16[MAX_FRAMESAMPLES]; - - ISACFIX_SubStruct *ISAC_inst; - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Limit number of frames to two = 60 msec. 
Otherwise we exceed data vectors */ - if (noOfLostFrames > 2) { - noOfLostFrames = 2; - } - k = 0; - declen = 0; - while( noOfLostFrames > 0 ) - { - WebRtcIsacfix_DecodePlcImpl(&(outframe16[k*480]), &ISAC_inst->ISACdec_obj, - &no_of_samples); - declen += no_of_samples; - noOfLostFrames--; - k++; - } - - for (k=0;kCodingMode == 0) - { - /* in adaptive mode */ - ISAC_inst->errorcode = ISAC_MODE_MISMATCH; - return -1; - } - - - if (rate >= 10000 && rate <= 32000) - ISAC_inst->ISACenc_obj.BottleNeck = rate; - else { - ISAC_inst->errorcode = ISAC_DISALLOWED_BOTTLENECK; - return -1; - } - - - - if (framesize == 30 || framesize == 60) - ISAC_inst->ISACenc_obj.new_framelength = (int16_t)((FS/1000) * framesize); - else { - ISAC_inst->errorcode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - - return 0; -} - -void WebRtcIsacfix_SetInitialBweBottleneck(ISACFIX_MainStruct* ISAC_main_inst, - int bottleneck_bits_per_second) { - ISACFIX_SubStruct* inst = (ISACFIX_SubStruct*)ISAC_main_inst; - RTC_DCHECK_GE(bottleneck_bits_per_second, 10000); - RTC_DCHECK_LE(bottleneck_bits_per_second, 32000); - inst->bwestimator_obj.sendBwAvg = ((uint32_t)bottleneck_bits_per_second) << 7; -} - -/**************************************************************************** - * WebRtcIsacfix_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. Through this API, users can - * enforce a frame-size for all values of bottleneck. Then iSAC will not - * automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 32000 is accepted - * For default bottleneck set rateBPS = 0 - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through out - * the adaptation process, 0 to let iSAC change - * the frame-size if required. 
- * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst, - int16_t rateBPS, - int frameSizeMs, - int16_t enforceFrameSize) -{ - ISACFIX_SubStruct *ISAC_inst; - /* Typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* check if encoder initiated */ - if ((ISAC_inst->initflag & 2) != 2) { - ISAC_inst->errorcode = ISAC_ENCODER_NOT_INITIATED; - return (-1); - } - - /* Check that we are in channel-adaptive mode, otherwise, return -1 */ - if (ISAC_inst->CodingMode != 0) { - ISAC_inst->errorcode = ISAC_MODE_MISMATCH; - return (-1); - } - - /* Set struct variable if enforceFrameSize is set. ISAC will then keep the */ - /* chosen frame size. */ - ISAC_inst->ISACenc_obj.enforceFrameSize = (enforceFrameSize != 0)? 1:0; - - /* Set initial rate, if value between 10000 and 32000, */ - /* if rateBPS is 0, keep the default initial bottleneck value (15000) */ - if ((rateBPS >= 10000) && (rateBPS <= 32000)) { - ISAC_inst->bwestimator_obj.sendBwAvg = (((uint32_t)rateBPS) << 7); - } else if (rateBPS != 0) { - ISAC_inst->errorcode = ISAC_DISALLOWED_BOTTLENECK; - return -1; - } - - /* Set initial framesize. If enforceFrameSize is set the frame size will not change */ - if ((frameSizeMs == 30) || (frameSizeMs == 60)) { - ISAC_inst->ISACenc_obj.new_framelength = (int16_t)((FS/1000) * frameSizeMs); - } else { - ISAC_inst->errorcode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - - return 0; -} - - - - - -/**************************************************************************** - * WebRtcIsacfix_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * other side to this side. - * - * Input: - * - ISAC_main_inst: iSAC struct - * - * Output: - * - rateIndex : Bandwidth estimate to transmit to other side. 
- * - */ - -int16_t WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst, - int16_t* rateIndex) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Call function to get Bandwidth Estimate */ - *rateIndex = WebRtcIsacfix_GetDownlinkBwIndexImpl(&ISAC_inst->bwestimator_obj); - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst: iSAC struct - * - rateIndex : Bandwidth estimate from other side. - * - */ - -int16_t WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst, - int16_t rateIndex) -{ - int16_t err = 0; - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - /* Call function to update BWE with received Bandwidth Estimate */ - err = WebRtcIsacfix_UpdateUplinkBwRec(&ISAC_inst->bwestimator_obj, rateIndex); - if (err < 0) { - ISAC_inst->errorcode = -err; - return (-1); - } - - return 0; -} - -/**************************************************************************** - * WebRtcIsacfix_ReadFrameLen(...) - * - * This function returns the length of the frame represented in the packet. 
- * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ - -int16_t WebRtcIsacfix_ReadFrameLen(const uint8_t* encoded, - size_t encoded_len_bytes, - size_t* frameLength) -{ - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - if (encoded_len_bytes < kRequiredEncodedLenBytes) { - return -1; - } - - InitializeDecoderBitstream(encoded_len_bytes, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - /* decode frame length */ - err = WebRtcIsacfix_DecodeFrameLen(&streamdata, frameLength); - if (err<0) // error check - return err; - - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the bitstream. - * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - rateIndex : Bandwidth estimate in bitstream - * - */ - -int16_t WebRtcIsacfix_ReadBwIndex(const uint8_t* encoded, - size_t encoded_len_bytes, - int16_t* rateIndex) -{ - Bitstr_dec streamdata; - int16_t err; - const size_t kRequiredEncodedLenBytes = 10; - - if (encoded_len_bytes < kRequiredEncodedLenBytes) { - return -1; - } - - InitializeDecoderBitstream(encoded_len_bytes, &streamdata); - - read_be16(encoded, kRequiredEncodedLenBytes, streamdata.stream); - - /* decode frame length, needed to get to the rateIndex in the bitstream */ - size_t frameLength; - err = WebRtcIsacfix_DecodeFrameLen(&streamdata, &frameLength); - if (err<0) // error check - return err; - - /* decode BW estimation */ - err = WebRtcIsacfix_DecodeSendBandwidth(&streamdata, rateIndex); - if (err<0) // error check - return err; - - return 0; -} - - - - -/**************************************************************************** - * WebRtcIsacfix_GetErrorCode(...) 
- * - * This function can be used to check the error code of an iSAC instance. When - * a function returns -1 a error code will be set for that instance. The - * function below extract the code of the last error that occured in the - * specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ - -int16_t WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst; - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - return ISAC_inst->errorcode; -} - - - -/**************************************************************************** - * WebRtcIsacfix_GetUplinkBw(...) - * - * This function returns the inst quantized iSAC send bitrate - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : bitrate - */ - -int32_t WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - BwEstimatorstr * bw = (BwEstimatorstr*)&(ISAC_inst->bwestimator_obj); - - return (int32_t) WebRtcIsacfix_GetUplinkBandwidth(bw); -} - -/**************************************************************************** - * WebRtcIsacfix_GetNewFrameLen(...) - * - * This function return the next frame length (in samples) of iSAC. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : frame lenght in samples - */ - -int16_t WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst) -{ - ISACFIX_SubStruct *ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - return ISAC_inst->ISACenc_obj.new_framelength; -} - - -/**************************************************************************** - * WebRtcIsacfix_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 msec packets. - * The absolute max will be valid until next time the function is called. - * NOTE! 
This function may override the function WebRtcIsacfix_SetMaxRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 100 and 400 bytes - * - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst, - int16_t maxPayloadBytes) -{ - ISACFIX_SubStruct *ISAC_inst; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - if((maxPayloadBytes < 100) || (maxPayloadBytes > 400)) - { - /* maxPayloadBytes is out of valid range */ - return -1; - } - else - { - /* Set new absolute max, which will not change unless this function - is called again with a new value */ - ISAC_inst->ISACenc_obj.maxPayloadBytes = maxPayloadBytes; - - /* Set new maximum values for 30 and 60 msec packets */ - if (maxPayloadBytes < ISAC_inst->ISACenc_obj.maxRateInBytes) { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = maxPayloadBytes; - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = ISAC_inst->ISACenc_obj.maxRateInBytes; - } - - if ( maxPayloadBytes < (ISAC_inst->ISACenc_obj.maxRateInBytes << 1)) { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = maxPayloadBytes; - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = (ISAC_inst->ISACenc_obj.maxRateInBytes << 1); - } - } - return 0; -} - - -/**************************************************************************** - * WebRtcIsacfix_SetMaxRate(...) - * - * This function sets the maximum rate which the codec may not exceed for a - * singel packet. The maximum rate is set in bits per second. - * The codec has an absolute maximum rate of 53400 bits per second (200 bytes - * per 30 msec). - * It is possible to set a maximum rate between 32000 and 53400 bits per second. - * - * The rate limit is valid until next time the function is called. - * - * NOTE! 
Packet size will never go above the value set if calling - * WebRtcIsacfix_SetMaxPayloadSize() (default max packet size is 400 bytes). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRateInBytes : maximum rate in bits per second, - * valid values are 32000 to 53400 bits - * - * Return value : 0 if sucessful - * -1 if error happens - */ - -int16_t WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst, - int32_t maxRate) -{ - ISACFIX_SubStruct *ISAC_inst; - int16_t maxRateInBytes; - - /* typecast pointer to real structure */ - ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst; - - if((maxRate < 32000) || (maxRate > 53400)) - { - /* maxRate is out of valid range */ - return -1; - } - else - { - /* Calculate maximum number of bytes per 30 msec packets for the given - maximum rate. Multiply with 30/1000 to get number of bits per 30 msec, - divide by 8 to get number of bytes per 30 msec: - maxRateInBytes = floor((maxRate * 30/1000) / 8); */ - maxRateInBytes = (int16_t)( WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_MUL(maxRate, 3), 800) ); - - /* Store the value for usage in the WebRtcIsacfix_SetMaxPayloadSize-function */ - ISAC_inst->ISACenc_obj.maxRateInBytes = maxRateInBytes; - - /* For 30 msec packets: if the new limit is below the maximum - payload size, set a new limit */ - if (maxRateInBytes < ISAC_inst->ISACenc_obj.maxPayloadBytes) { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = maxRateInBytes; - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes30 = ISAC_inst->ISACenc_obj.maxPayloadBytes; - } - - /* For 60 msec packets: if the new limit (times 2) is below the - maximum payload size, set a new limit */ - if ( (maxRateInBytes << 1) < ISAC_inst->ISACenc_obj.maxPayloadBytes) { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = (maxRateInBytes << 1); - } else { - ISAC_inst->ISACenc_obj.payloadLimitBytes60 = ISAC_inst->ISACenc_obj.maxPayloadBytes; - } - } - - return 0; -} - - - -/**************************************************************************** - * 
WebRtcIsacfix_version(...) - * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ - -void WebRtcIsacfix_version(char *version) -{ - strcpy(version, "3.6.0"); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/lattice.c b/modules/audio_coding/codecs/isac/fix/source/lattice.c deleted file mode 100644 index 7bbf4e054a..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lattice.c +++ /dev/null @@ -1,321 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lattice.c - * - * Contains the normalized lattice filter routines (MA and AR) for iSAC codec - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/sanitizer.h" - -#define LATTICE_MUL_32_32_RSFT16(a32a, a32b, b32) \ - ((int32_t)(WEBRTC_SPL_MUL(a32a, b32) + (WEBRTC_SPL_MUL_16_32_RSFT16(a32b, b32)))) -/* This macro is FORBIDDEN to use elsewhere than in a function in this file and - its corresponding neon version. It might give unpredictable results, since a - general int32_t*int32_t multiplication results in a 64 bit value. - The result is then shifted just 16 steps to the right, giving need for 48 - bits, i.e. in the generel case, it will NOT fit in a int32_t. In the - cases used in here, the int32_t will be enough, since (for a good - reason) the involved multiplicands aren't big enough to overflow a - int32_t after shifting right 16 bits. 
I have compared the result of a - multiplication between t32 and tmp32, done in two ways: - 1) Using (int32_t) (((float)(tmp32))*((float)(tmp32b))/65536.0); - 2) Using LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b); - By running 25 files, I haven't found any bigger diff than 64 - this was in the - case when method 1) gave 650235648 and 2) gave 650235712. -*/ - -/* Function prototype: filtering ar_g_Q0[] and ar_f_Q0[] through an AR filter - with coefficients cth_Q15[] and sth_Q15[]. - Implemented for both generic and ARMv7 platforms. - */ -void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, - int16_t* ar_f_Q0, - int16_t* cth_Q15, - int16_t* sth_Q15, - size_t order_coef); - -/* Inner loop used for function WebRtcIsacfix_NormLatticeFilterMa(). It does: - for 0 <= n < HALF_SUBFRAMELEN - 1: - *ptr2 = input2 * (*ptr2) + input0 * (*ptr0)); - *ptr1 = input1 * (*ptr0) + input0 * (*ptr2); - Note, function WebRtcIsacfix_FilterMaLoopNeon and WebRtcIsacfix_FilterMaLoopC - are not bit-exact. The accuracy by the ARM Neon function is same or better. -*/ -void WebRtcIsacfix_FilterMaLoopC(int16_t input0, // Filter coefficient - int16_t input1, // Filter coefficient - int32_t input2, // Inverse coeff. (1/input1) - int32_t* ptr0, // Sample buffer - int32_t* ptr1, // Sample buffer - int32_t* ptr2) { // Sample buffer - int n = 0; - - // Separate the 32-bit variable input2 into two 16-bit integers (high 16 and - // low 16 bits), for using LATTICE_MUL_32_32_RSFT16 in the loop. - int16_t t16a = (int16_t)(input2 >> 16); - int16_t t16b = (int16_t)input2; - if (t16b < 0) t16a++; - - // The loop filtering the samples *ptr0, *ptr1, *ptr2 with filter coefficients - // input0, input1, and input2. 
- for(n = 0; n < HALF_SUBFRAMELEN - 1; n++, ptr0++, ptr1++, ptr2++) { - int32_t tmp32a = 0; - int32_t tmp32b = 0; - - // Calculate *ptr2 = input2 * (*ptr2 + input0 * (*ptr0)); - tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr0); // Q15 * Q15 >> 15 = Q15 - tmp32b = *ptr2 + tmp32a; // Q15 + Q15 = Q15 - *ptr2 = LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b); - - // Calculate *ptr1 = input1 * (*ptr0) + input0 * (*ptr2); - tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input1, *ptr0); // Q15*Q15>>15 = Q15 - tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr2); // Q15*Q15>>15 = Q15 - *ptr1 = tmp32a + tmp32b; // Q15 + Q15 = Q15 - } -} - -/* filter the signal using normalized lattice filter */ -/* MA filter */ -void WebRtcIsacfix_NormLatticeFilterMa(size_t orderCoef, - int32_t *stateGQ15, - int16_t *lat_inQ0, - int16_t *filt_coefQ15, - int32_t *gain_lo_hiQ17, - int16_t lo_hi, - int16_t *lat_outQ9) -{ - int16_t sthQ15[MAX_AR_MODEL_ORDER]; - int16_t cthQ15[MAX_AR_MODEL_ORDER]; - - int u, n; - size_t i, k; - int16_t temp2,temp3; - size_t ord_1 = orderCoef+1; - int32_t inv_cthQ16[MAX_AR_MODEL_ORDER]; - - int32_t gain32, fQtmp; - int16_t gain16; - int16_t gain_sh; - - int32_t tmp32, tmp32b; - int32_t fQ15vec[HALF_SUBFRAMELEN]; - int32_t gQ15[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN]; - int16_t sh; - int16_t t16a; - int16_t t16b; - - for (u=0;u>15 = Q(17+gain_sh) - inv_cthQ16[k] = WebRtcSpl_DivW32W16((int32_t)2147483647, cthQ15[k]); // 1/cth[k] in Q31/Q15 = Q16 - } - gain16 = (int16_t)(gain32 >> 16); // Q(1+gain_sh). 
- - /* normalized lattice filter */ - /*****************************/ - - /* initial conditions */ - for (i=0;i>15 = Q15 - tmp32b= fQtmp + tmp32; //Q15+Q15=Q15 - tmp32 = inv_cthQ16[i-1]; //Q16 - t16a = (int16_t)(tmp32 >> 16); - t16b = (int16_t)(tmp32 - (t16a << 16)); - if (t16b<0) t16a++; - tmp32 = LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b); - fQtmp = tmp32; // Q15 - - // Calculate g[i][0] = cth[i-1]*stateG[i-1] + sth[i-1]* f[i][0]; - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT15(cthQ15[i-1], stateGQ15[i-1]); //Q15*Q15>>15 = Q15 - tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(sthQ15[i-1], fQtmp); //Q15*Q15>>15 = Q15 - tmp32 = tmp32 + tmp32b;//Q15+Q15 = Q15 - gQ15[i][0] = tmp32; // Q15 - } - - /* filtering */ - /* save the states */ - for(k=0;k>= gain_sh; // Q(17+gain_sh) -> Q17 - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(gain16, fQ15vec[n]); //Q(1+gain_sh)*Q15>>16 = Q(gain_sh) - sh = 9-gain_sh; //number of needed shifts to reach Q9 - t16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32, sh); - lat_outQ9[n + temp1] = t16a; - } - - /* save the states */ - for (i=0;i>15 = Q27 - } - - sh = WebRtcSpl_NormW32(tmp32); // tmp32 is the gain - den16 = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32, sh-16); //Q(27+sh-16) = Q(sh+11) (all 16 bits are value bits) - inv_gain32 = WebRtcSpl_DivW32W16((int32_t)2147483647, den16); // 1/gain in Q31/Q(sh+11) = Q(20-sh) - - //initial conditions - inv_gain16 = (int16_t)(inv_gain32 >> 2); // 1/gain in Q(20-sh-2) = Q(18-sh) - - for (i=0;iQ26 - tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(inv_gain16, tmp32); //lat_in[]*inv_gain in (Q(18-sh)*Q26)>>16 = Q(28-sh) - tmp32 = WEBRTC_SPL_SHIFT_W32(tmp32, -(28-sh)); // lat_in[]*inv_gain in Q0 - - ARfQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0 - } - - // Get the state of f & g for the first input, for all orders. 
- for (i = orderCoef; i > 0; i--) - { - tmp32 = (cthQ15[i - 1] * ARfQ0vec[0] - sthQ15[i - 1] * stateGQ0[i - 1] + - 16384) >> 15; - tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0 - - tmp32 = (sthQ15[i - 1] * ARfQ0vec[0] + cthQ15[i - 1] * stateGQ0[i - 1] + - 16384) >> 15; - ARgQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0 - ARfQ0vec[0] = tmpAR; - } - ARgQ0vec[0] = ARfQ0vec[0]; - - // Filter ARgQ0vec[] and ARfQ0vec[] through coefficients cthQ15[] and sthQ15[]. - WebRtcIsacfix_FilterArLoop(ARgQ0vec, ARfQ0vec, cthQ15, sthQ15, orderCoef); - - for(n=0;n 0; k--) - - ldrh r7, [r3, #-2]! @ sth_Q15[k - 1] - ldrh r6, [r2, #-2]! @ cth_Q15[k - 1] - - ldrh r8, [r0, #-2] @ ar_g_Q0[k - 1] - smlabb r11, r7, r5, r12 @ sth_Q15[k - 1] * tmpAR + 16384 - smlabb r10, r6, r5, r12 @ cth_Q15[k - 1] * tmpAR + 16384 - smulbb r7, r7, r8 @ sth_Q15[k - 1] * ar_g_Q0[k - 1] - smlabb r11, r6, r8, r11 @ cth_Q15[k - 1] * ar_g_Q0[k - 1] + - @ (sth_Q15[k - 1] * tmpAR + 16384) - - sub r10, r10, r7 @ cth_Q15[k - 1] * tmpAR + 16384 - - @ (sth_Q15[k - 1] * ar_g_Q0[k - 1]) - ssat r11, #16, r11, asr #15 - ssat r5, #16, r10, asr #15 - strh r11, [r0], #-2 @ Output: ar_g_Q0[k] - - subs r9, #1 - bgt ORDER_COEF_LOOP - - strh r5, [r0] @ Output: ar_g_Q0[0] = tmpAR; - strh r5, [r1], #2 @ Output: ar_f_Q0[n+1] = tmpAR; - - subs r4, #1 - bne HALF_SUBFRAME_LOOP - - pop {r4-r11} - bx lr diff --git a/modules/audio_coding/codecs/isac/fix/source/lattice_c.c b/modules/audio_coding/codecs/isac/fix/source/lattice_c.c deleted file mode 100644 index 43406612e8..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lattice_c.c +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * Contains the core loop function for the lattice filter AR routine - * for iSAC codec. - * - */ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* Filter ar_g_Q0[] and ar_f_Q0[] through an AR filter with coefficients - * cth_Q15[] and sth_Q15[]. - */ -void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, // Input samples - int16_t* ar_f_Q0, // Input samples - int16_t* cth_Q15, // Filter coefficients - int16_t* sth_Q15, // Filter coefficients - size_t order_coef) { // order of the filter - int n = 0; - - for (n = 0; n < HALF_SUBFRAMELEN - 1; n++) { - size_t k = 0; - int16_t tmpAR = 0; - int32_t tmp32 = 0; - int32_t tmp32_2 = 0; - - tmpAR = ar_f_Q0[n + 1]; - for (k = order_coef; k > 0; k--) { - tmp32 = (cth_Q15[k - 1] * tmpAR - sth_Q15[k - 1] * ar_g_Q0[k - 1] + - 16384) >> 15; - tmp32_2 = (sth_Q15[k - 1] * tmpAR + cth_Q15[k - 1] * ar_g_Q0[k - 1] + - 16384) >> 15; - tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); - ar_g_Q0[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32_2); - } - ar_f_Q0[n + 1] = tmpAR; - ar_g_Q0[0] = tmpAR; - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c b/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c deleted file mode 100644 index 3189726629..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c +++ /dev/null @@ -1,329 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// Filter ar_g_Q0[] and ar_f_Q0[] through an AR filter with coefficients -// cth_Q15[] and sth_Q15[]. -void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, // Input samples - int16_t* ar_f_Q0, // Input samples - int16_t* cth_Q15, // Filter coefficients - int16_t* sth_Q15, // Filter coefficients - size_t order_coef) { // order of the filter - int n = 0; - - for (n = 0; n < HALF_SUBFRAMELEN - 1; n++) { - int count = (int)(order_coef - 1); - int offset; -#if !defined(MIPS_DSP_R1_LE) - int16_t* tmp_cth; - int16_t* tmp_sth; - int16_t* tmp_arg; - int32_t max_q16 = 0x7fff; - int32_t min_q16 = 0xffff8000; -#endif - // Declare variables used as temporary registers. - int32_t r0, r1, r2, t0, t1, t2, t_ar; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "bltz %[count], 2f \n\t" - " lh %[t_ar], 0(%[tmp]) \n\t" - // Inner loop - "1: \n\t" - "sll %[offset], %[count], 1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "lhx %[r0], %[offset](%[cth_Q15]) \n\t" - "lhx %[r1], %[offset](%[sth_Q15]) \n\t" - "lhx %[r2], %[offset](%[ar_g_Q0]) \n\t" -#else - "addu %[tmp_cth], %[cth_Q15], %[offset] \n\t" - "addu %[tmp_sth], %[sth_Q15], %[offset] \n\t" - "addu %[tmp_arg], %[ar_g_Q0], %[offset] \n\t" - "lh %[r0], 0(%[tmp_cth]) \n\t" - "lh %[r1], 0(%[tmp_sth]) \n\t" - "lh %[r2], 0(%[tmp_arg]) \n\t" -#endif - "mul %[t0], %[r0], %[t_ar] \n\t" - "mul %[t1], %[r1], %[t_ar] \n\t" - "mul %[t2], %[r1], %[r2] \n\t" - "mul %[r0], %[r0], %[r2] \n\t" - "subu %[t0], %[t0], %[t2] \n\t" - "addu %[t1], %[t1], %[r0] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[t1], %[t1], 15 \n\t" - "shra_r.w %[t0], %[t0], 15 \n\t" -#else - "addiu %[t1], %[t1], 0x4000 \n\t" - "sra %[t1], %[t1], 15 \n\t" - "addiu %[t0], %[t0], 0x4000 \n\t" - "sra %[t0], %[t0], 15 \n\t" -#endif - "addiu %[offset], %[offset], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shll_s.w %[t1], %[t1], 16 \n\t" - "shll_s.w %[t_ar], %[t0], 16 \n\t" -#else 
- "slt %[r0], %[t1], %[max_q16] \n\t" - "slt %[r1], %[t0], %[max_q16] \n\t" - "movz %[t1], %[max_q16], %[r0] \n\t" - "movz %[t0], %[max_q16], %[r1] \n\t" -#endif - "addu %[offset], %[offset], %[ar_g_Q0] \n\t" -#if defined(MIPS_DSP_R1_LE) - "sra %[t1], %[t1], 16 \n\t" - "sra %[t_ar], %[t_ar], 16 \n\t" -#else - "slt %[r0], %[t1], %[min_q16] \n\t" - "slt %[r1], %[t0], %[min_q16] \n\t" - "movn %[t1], %[min_q16], %[r0] \n\t" - "movn %[t0], %[min_q16], %[r1] \n\t" - "addu %[t_ar], $zero, %[t0] \n\t" -#endif - "sh %[t1], 0(%[offset]) \n\t" - "bgtz %[count], 1b \n\t" - " addiu %[count], %[count], -1 \n\t" - "2: \n\t" - "sh %[t_ar], 0(%[tmp]) \n\t" - "sh %[t_ar], 0(%[ar_g_Q0]) \n\t" - ".set pop \n\t" - : [t_ar] "=&r" (t_ar), [count] "+r" (count), [offset] "=&r" (offset), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [t0] "=&r" (t0), -#if !defined(MIPS_DSP_R1_LE) - [tmp_cth] "=&r" (tmp_cth), [tmp_sth] "=&r" (tmp_sth), - [tmp_arg] "=&r" (tmp_arg), -#endif - [t1] "=&r" (t1), [t2] "=&r" (t2) - : [tmp] "r" (&ar_f_Q0[n+1]), [cth_Q15] "r" (cth_Q15), -#if !defined(MIPS_DSP_R1_LE) - [max_q16] "r" (max_q16), [min_q16] "r" (min_q16), -#endif - [sth_Q15] "r" (sth_Q15), [ar_g_Q0] "r" (ar_g_Q0) - : "memory", "hi", "lo" - ); - } -} - -// MIPS optimization of the inner loop used for function -// WebRtcIsacfix_NormLatticeFilterMa(). It does: -// -// for 0 <= n < HALF_SUBFRAMELEN - 1: -// *ptr2 = input2 * (*ptr2) + input0 * (*ptr0)); -// *ptr1 = input1 * (*ptr0) + input0 * (*ptr2); -// -// Note, function WebRtcIsacfix_FilterMaLoopMIPS and WebRtcIsacfix_FilterMaLoopC -// are not bit-exact. The accuracy of the MIPS function is same or better. -void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0, // Filter coefficient - int16_t input1, // Filter coefficient - int32_t input2, // Inverse coeff (1/input1) - int32_t* ptr0, // Sample buffer - int32_t* ptr1, // Sample buffer - int32_t* ptr2) { // Sample buffer -#if defined(MIPS_DSP_R2_LE) - // MIPS DSPR2 version. 
4 available accumulators allows loop unrolling 4 times. - // This variant is not bit-exact with WebRtcIsacfix_FilterMaLoopC, since we - // are exploiting 64-bit accumulators. The accuracy of the MIPS DSPR2 function - // is same or better. - int n = (HALF_SUBFRAMELEN - 1) >> 2; - int m = (HALF_SUBFRAMELEN - 1) & 3; - - int r0, r1, r2, r3; - int t0, t1, t2, t3; - int s0, s1, s2, s3; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "lw %[r0], 0(%[ptr0]) \n\t" - "lw %[r1], 4(%[ptr0]) \n\t" - "lw %[r2], 8(%[ptr0]) \n\t" - "lw %[r3], 12(%[ptr0]) \n\t" - "mult $ac0, %[r0], %[input0] \n\t" - "mult $ac1, %[r1], %[input0] \n\t" - "mult $ac2, %[r2], %[input0] \n\t" - "mult $ac3, %[r3], %[input0] \n\t" - "lw %[t0], 0(%[ptr2]) \n\t" - "extr_rs.w %[s0], $ac0, 15 \n\t" - "extr_rs.w %[s1], $ac1, 15 \n\t" - "extr_rs.w %[s2], $ac2, 15 \n\t" - "extr_rs.w %[s3], $ac3, 15 \n\t" - "lw %[t1], 4(%[ptr2]) \n\t" - "lw %[t2], 8(%[ptr2]) \n\t" - "lw %[t3], 12(%[ptr2]) \n\t" - "addu %[t0], %[t0], %[s0] \n\t" - "addu %[t1], %[t1], %[s1] \n\t" - "addu %[t2], %[t2], %[s2] \n\t" - "addu %[t3], %[t3], %[s3] \n\t" - "mult $ac0, %[t0], %[input2] \n\t" - "mult $ac1, %[t1], %[input2] \n\t" - "mult $ac2, %[t2], %[input2] \n\t" - "mult $ac3, %[t3], %[input2] \n\t" - "addiu %[ptr0], %[ptr0], 16 \n\t" - "extr_rs.w %[t0], $ac0, 16 \n\t" - "extr_rs.w %[t1], $ac1, 16 \n\t" - "extr_rs.w %[t2], $ac2, 16 \n\t" - "extr_rs.w %[t3], $ac3, 16 \n\t" - "addiu %[n], %[n], -1 \n\t" - "mult $ac0, %[r0], %[input1] \n\t" - "mult $ac1, %[r1], %[input1] \n\t" - "mult $ac2, %[r2], %[input1] \n\t" - "mult $ac3, %[r3], %[input1] \n\t" - "sw %[t0], 0(%[ptr2]) \n\t" - "extr_rs.w %[s0], $ac0, 15 \n\t" - "extr_rs.w %[s1], $ac1, 15 \n\t" - "extr_rs.w %[s2], $ac2, 15 \n\t" - "extr_rs.w %[s3], $ac3, 15 \n\t" - "sw %[t1], 4(%[ptr2]) \n\t" - "sw %[t2], 8(%[ptr2]) \n\t" - "sw %[t3], 12(%[ptr2]) \n\t" - "mult $ac0, %[t0], %[input0] \n\t" - "mult $ac1, %[t1], %[input0] \n\t" - "mult $ac2, %[t2], %[input0] 
\n\t" - "mult $ac3, %[t3], %[input0] \n\t" - "addiu %[ptr2], %[ptr2], 16 \n\t" - "extr_rs.w %[t0], $ac0, 15 \n\t" - "extr_rs.w %[t1], $ac1, 15 \n\t" - "extr_rs.w %[t2], $ac2, 15 \n\t" - "extr_rs.w %[t3], $ac3, 15 \n\t" - "addu %[t0], %[t0], %[s0] \n\t" - "addu %[t1], %[t1], %[s1] \n\t" - "addu %[t2], %[t2], %[s2] \n\t" - "addu %[t3], %[t3], %[s3] \n\t" - "sw %[t0], 0(%[ptr1]) \n\t" - "sw %[t1], 4(%[ptr1]) \n\t" - "sw %[t2], 8(%[ptr1]) \n\t" - "sw %[t3], 12(%[ptr1]) \n\t" - "bgtz %[n], 1b \n\t" - " addiu %[ptr1], %[ptr1], 16 \n\t" - "beq %[m], %0, 3f \n\t" - " nop \n\t" - "2: \n\t" - "lw %[r0], 0(%[ptr0]) \n\t" - "lw %[t0], 0(%[ptr2]) \n\t" - "addiu %[ptr0], %[ptr0], 4 \n\t" - "mult $ac0, %[r0], %[input0] \n\t" - "mult $ac1, %[r0], %[input1] \n\t" - "extr_rs.w %[r1], $ac0, 15 \n\t" - "extr_rs.w %[t1], $ac1, 15 \n\t" - "addu %[t0], %[t0], %[r1] \n\t" - "mult $ac0, %[t0], %[input2] \n\t" - "extr_rs.w %[t0], $ac0, 16 \n\t" - "sw %[t0], 0(%[ptr2]) \n\t" - "mult $ac0, %[t0], %[input0] \n\t" - "addiu %[ptr2], %[ptr2], 4 \n\t" - "addiu %[m], %[m], -1 \n\t" - "extr_rs.w %[t0], $ac0, 15 \n\t" - "addu %[t0], %[t0], %[t1] \n\t" - "sw %[t0], 0(%[ptr1]) \n\t" - "bgtz %[m], 2b \n\t" - " addiu %[ptr1], %[ptr1], 4 \n\t" - "3: \n\t" - ".set pop \n\t" - : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), - [r3] "=&r" (r3), [t0] "=&r" (t0), [t1] "=&r" (t1), - [t2] "=&r" (t2), [t3] "=&r" (t3), [s0] "=&r" (s0), - [s1] "=&r" (s1), [s2] "=&r" (s2), [s3] "=&r" (s3), - [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1), [m] "+r" (m), - [ptr2] "+r" (ptr2), [n] "+r" (n) - : [input0] "r" (input0), [input1] "r" (input1), - [input2] "r" (input2) - : "memory", "hi", "lo", "$ac1hi", "$ac1lo", "$ac2hi", - "$ac2lo", "$ac3hi", "$ac3lo" - ); -#else - // Non-DSPR2 version of the function. Avoiding the accumulator usage due to - // large latencies. This variant is bit-exact with C code. 
- int n = HALF_SUBFRAMELEN - 1; - int32_t t16a, t16b; - int32_t r0, r1, r2, r3, r4; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "sra %[t16a], %[input2], 16 \n\t" - "andi %[t16b], %[input2], 0xFFFF \n\t" -#if defined(MIPS32R2_LE) - "seh %[t16b], %[t16b] \n\t" - "seh %[input0], %[input0] \n\t" - "seh %[input1], %[input1] \n\t" -#else - "sll %[t16b], %[t16b], 16 \n\t" - "sra %[t16b], %[t16b], 16 \n\t" - "sll %[input0], %[input0], 16 \n\t" - "sra %[input0], %[input0], 16 \n\t" - "sll %[input1], %[input1], 16 \n\t" - "sra %[input1], %[input1], 16 \n\t" -#endif - "addiu %[r0], %[t16a], 1 \n\t" - "slt %[r1], %[t16b], $zero \n\t" - "movn %[t16a], %[r0], %[r1] \n\t" - "1: \n\t" - "lw %[r0], 0(%[ptr0]) \n\t" - "lw %[r1], 0(%[ptr2]) \n\t" - "addiu %[ptr0], %[ptr0], 4 \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r3], %[r2], %[input0] \n\t" - "mul %[r4], %[r0], %[input0] \n\t" - "mul %[r2], %[r2], %[input1] \n\t" - "mul %[r0], %[r0], %[input1] \n\t" - "addiu %[ptr2], %[ptr2], 4 \n\t" - "sll %[r3], %[r3], 1 \n\t" - "sra %[r4], %[r4], 1 \n\t" - "addiu %[r4], %[r4], 0x2000 \n\t" - "sra %[r4], %[r4], 14 \n\t" - "addu %[r3], %[r3], %[r4] \n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r4], %[r1], 0xFFFF \n\t" - "sra %[r4], %[r4], 1 \n\t" - "mul %[r1], %[r1], %[t16a] \n\t" - "mul %[r3], %[r3], %[t16b] \n\t" - "mul %[r4], %[r4], %[t16b] \n\t" - "sll %[r2], %[r2], 1 \n\t" - "sra %[r0], %[r0], 1 \n\t" - "addiu %[r0], %[r0], 0x2000 \n\t" - "sra %[r0], %[r0], 14 \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "addiu %[n], %[n], -1 \n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "addiu %[r4], %[r4], 0x4000 \n\t" - "sra %[r4], %[r4], 15 \n\t" - "addu %[r1], %[r1], %[r4] \n\t" - "sra %[r2], %[r1], 16 \n\t" - "andi %[r3], %[r1], 0xFFFF \n\t" - "mul %[r3], %[r3], %[input0] \n\t" - "mul %[r2], %[r2], %[input0] \n\t" - "sw %[r1], -4(%[ptr2]) \n\t" - "sra %[r3], %[r3], 1 \n\t" - "addiu %[r3], %[r3], 0x2000 \n\t" - "sra 
%[r3], %[r3], 14 \n\t" - "addu %[r0], %[r0], %[r3] \n\t" - "sll %[r2], %[r2], 1 \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "sw %[r0], 0(%[ptr1]) \n\t" - "bgtz %[n], 1b \n\t" - " addiu %[ptr1], %[ptr1], 4 \n\t" - ".set pop \n\t" - : [t16a] "=&r" (t16a), [t16b] "=&r" (t16b), [r0] "=&r" (r0), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1), - [ptr2] "+r" (ptr2), [n] "+r" (n) - : [input0] "r" (input0), [input1] "r" (input1), - [input2] "r" (input2) - : "hi", "lo", "memory" - ); -#endif -} diff --git a/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c b/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c deleted file mode 100644 index 8ea9b63578..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// Contains a function for the core loop in the normalized lattice MA -// filter routine for iSAC codec, optimized for ARM Neon platform. -// It does: -// for 0 <= n < HALF_SUBFRAMELEN - 1: -// *ptr2 = input2 * ((*ptr2) + input0 * (*ptr0)); -// *ptr1 = input1 * (*ptr0) + input0 * (*ptr2); -// Output is not bit-exact with the reference C code, due to the replacement -// of WEBRTC_SPL_MUL_16_32_RSFT15 and LATTICE_MUL_32_32_RSFT16 with Neon -// instructions. The difference should not be bigger than 1. 
-void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0, // Filter coefficient - int16_t input1, // Filter coefficient - int32_t input2, // Inverse coefficient - int32_t* ptr0, // Sample buffer - int32_t* ptr1, // Sample buffer - int32_t* ptr2) // Sample buffer -{ - int n = 0; - int loop = (HALF_SUBFRAMELEN - 1) >> 3; - int loop_tail = (HALF_SUBFRAMELEN - 1) & 0x7; - - int32x4_t input0_v = vdupq_n_s32((int32_t)input0 << 16); - int32x4_t input1_v = vdupq_n_s32((int32_t)input1 << 16); - int32x4_t input2_v = vdupq_n_s32(input2); - int32x4_t tmp0a, tmp1a, tmp2a, tmp3a; - int32x4_t tmp0b, tmp1b, tmp2b, tmp3b; - int32x4_t ptr0va, ptr1va, ptr2va; - int32x4_t ptr0vb, ptr1vb, ptr2vb; - - int64x2_t tmp2al_low, tmp2al_high, tmp2bl_low, tmp2bl_high; - // Unroll to process 8 samples at once. - for (n = 0; n < loop; n++) { - ptr0va = vld1q_s32(ptr0); - ptr0vb = vld1q_s32(ptr0 + 4); - ptr0 += 8; - - ptr2va = vld1q_s32(ptr2); - ptr2vb = vld1q_s32(ptr2 + 4); - - // Calculate tmp0 = (*ptr0) * input0. - tmp0a = vqrdmulhq_s32(ptr0va, input0_v); - tmp0b = vqrdmulhq_s32(ptr0vb, input0_v); - - // Calculate tmp1 = (*ptr0) * input1. - tmp1a = vqrdmulhq_s32(ptr0va, input1_v); - tmp1b = vqrdmulhq_s32(ptr0vb, input1_v); - - // Calculate tmp2 = tmp0 + *(ptr2). - tmp2a = vaddq_s32(tmp0a, ptr2va); - tmp2b = vaddq_s32(tmp0b, ptr2vb); - - // Calculate *ptr2 = input2 * tmp2. 
- tmp2al_low = vmull_s32(vget_low_s32(tmp2a), vget_low_s32(input2_v)); -#if defined(WEBRTC_ARCH_ARM64) - tmp2al_high = vmull_high_s32(tmp2a, input2_v); -#else - tmp2al_high = vmull_s32(vget_high_s32(tmp2a), vget_high_s32(input2_v)); -#endif - ptr2va = vcombine_s32(vrshrn_n_s64(tmp2al_low, 16), - vrshrn_n_s64(tmp2al_high, 16)); - - tmp2bl_low = vmull_s32(vget_low_s32(tmp2b), vget_low_s32(input2_v)); -#if defined(WEBRTC_ARCH_ARM64) - tmp2bl_high = vmull_high_s32(tmp2b, input2_v); -#else - tmp2bl_high = vmull_s32(vget_high_s32(tmp2b), vget_high_s32(input2_v)); -#endif - ptr2vb = vcombine_s32(vrshrn_n_s64(tmp2bl_low, 16), - vrshrn_n_s64(tmp2bl_high, 16)); - - vst1q_s32(ptr2, ptr2va); - vst1q_s32(ptr2 + 4, ptr2vb); - ptr2 += 8; - - // Calculate tmp3 = ptr2v * input0. - tmp3a = vqrdmulhq_s32(ptr2va, input0_v); - tmp3b = vqrdmulhq_s32(ptr2vb, input0_v); - - // Calculate *ptr1 = tmp1 + tmp3. - ptr1va = vaddq_s32(tmp1a, tmp3a); - ptr1vb = vaddq_s32(tmp1b, tmp3b); - - vst1q_s32(ptr1, ptr1va); - vst1q_s32(ptr1 + 4, ptr1vb); - ptr1 += 8; - } - - // Process four more samples. - if (loop_tail & 0x4) { - ptr0va = vld1q_s32(ptr0); - ptr2va = vld1q_s32(ptr2); - ptr0 += 4; - - // Calculate tmp0 = (*ptr0) * input0. - tmp0a = vqrdmulhq_s32(ptr0va, input0_v); - - // Calculate tmp1 = (*ptr0) * input1. - tmp1a = vqrdmulhq_s32(ptr0va, input1_v); - - // Calculate tmp2 = tmp0 + *(ptr2). - tmp2a = vaddq_s32(tmp0a, ptr2va); - - // Calculate *ptr2 = input2 * tmp2. - tmp2al_low = vmull_s32(vget_low_s32(tmp2a), vget_low_s32(input2_v)); - -#if defined(WEBRTC_ARCH_ARM64) - tmp2al_high = vmull_high_s32(tmp2a, input2_v); -#else - tmp2al_high = vmull_s32(vget_high_s32(tmp2a), vget_high_s32(input2_v)); -#endif - ptr2va = vcombine_s32(vrshrn_n_s64(tmp2al_low, 16), - vrshrn_n_s64(tmp2al_high, 16)); - - vst1q_s32(ptr2, ptr2va); - ptr2 += 4; - - // Calculate tmp3 = *(ptr2) * input0. - tmp3a = vqrdmulhq_s32(ptr2va, input0_v); - - // Calculate *ptr1 = tmp1 + tmp3. 
- ptr1va = vaddq_s32(tmp1a, tmp3a); - - vst1q_s32(ptr1, ptr1va); - ptr1 += 4; - } - - // Process two more samples. - if (loop_tail & 0x2) { - int32x2_t ptr0v_tail, ptr2v_tail, ptr1v_tail; - int32x2_t tmp0_tail, tmp1_tail, tmp2_tail, tmp3_tail; - int64x2_t tmp2l_tail; - ptr0v_tail = vld1_s32(ptr0); - ptr2v_tail = vld1_s32(ptr2); - ptr0 += 2; - - // Calculate tmp0 = (*ptr0) * input0. - tmp0_tail = vqrdmulh_s32(ptr0v_tail, vget_low_s32(input0_v)); - - // Calculate tmp1 = (*ptr0) * input1. - tmp1_tail = vqrdmulh_s32(ptr0v_tail, vget_low_s32(input1_v)); - - // Calculate tmp2 = tmp0 + *(ptr2). - tmp2_tail = vadd_s32(tmp0_tail, ptr2v_tail); - - // Calculate *ptr2 = input2 * tmp2. - tmp2l_tail = vmull_s32(tmp2_tail, vget_low_s32(input2_v)); - ptr2v_tail = vrshrn_n_s64(tmp2l_tail, 16); - - vst1_s32(ptr2, ptr2v_tail); - ptr2 += 2; - - // Calculate tmp3 = *(ptr2) * input0. - tmp3_tail = vqrdmulh_s32(ptr2v_tail, vget_low_s32(input0_v)); - - // Calculate *ptr1 = tmp1 + tmp3. - ptr1v_tail = vadd_s32(tmp1_tail, tmp3_tail); - - vst1_s32(ptr1, ptr1v_tail); - ptr1 += 2; - } - - // Process one more sample. - if (loop_tail & 0x1) { - int16_t t16a = (int16_t)(input2 >> 16); - int16_t t16b = (int16_t)input2; - if (t16b < 0) t16a++; - int32_t tmp32a; - int32_t tmp32b; - - // Calculate *ptr2 = input2 * (*ptr2 + input0 * (*ptr0)). - tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr0); - tmp32b = *ptr2 + tmp32a; - *ptr2 = (int32_t)(WEBRTC_SPL_MUL(t16a, tmp32b) + - (WEBRTC_SPL_MUL_16_32_RSFT16(t16b, tmp32b))); - - // Calculate *ptr1 = input1 * (*ptr0) + input0 * (*ptr2). 
- tmp32a = WEBRTC_SPL_MUL_16_32_RSFT15(input1, *ptr0); - tmp32b = WEBRTC_SPL_MUL_16_32_RSFT15(input0, *ptr2); - *ptr1 = tmp32a + tmp32b; - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c b/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c deleted file mode 100644 index f151cd1c88..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c +++ /dev/null @@ -1,949 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_masking_model.c - * - * LPC analysis and filtering functions - * - */ - -#include "lpc_masking_model.h" - -#include /* For LLONG_MAX and LLONG_MIN. */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* The conversion is implemented by the step-down algorithm */ -void WebRtcSpl_AToK_JSK( - int16_t *a16, /* Q11 */ - int16_t useOrder, - int16_t *k16 /* Q15 */ - ) -{ - int m, k; - int32_t tmp32[MAX_AR_MODEL_ORDER]; - int32_t tmp32b; - int32_t tmp_inv_denum32; - int16_t tmp_inv_denum16; - - k16[useOrder-1] = a16[useOrder] << 4; // Q11<<4 => Q15 - - for (m=useOrder-1; m>0; m--) { - // (1 - k^2) in Q30 - tmp_inv_denum32 = 1073741823 - k16[m] * k16[m]; - tmp_inv_denum16 = (int16_t)(tmp_inv_denum32 >> 15); // (1 - k^2) in Q15. 
- - for (k=1; k<=m; k++) { - tmp32b = (a16[k] << 16) - ((k16[m] * a16[m - k + 1]) << 1); - - tmp32[k] = WebRtcSpl_DivW32W16(tmp32b, tmp_inv_denum16); //Q27/Q15 = Q12 - } - - for (k=1; k> 1); // Q12>>1 => Q11 - } - - tmp32[m] = WEBRTC_SPL_SAT(4092, tmp32[m], -4092); - k16[m - 1] = (int16_t)(tmp32[m] << 3); // Q12<<3 => Q15 - } - - return; -} - - - - - -int16_t WebRtcSpl_LevinsonW32_JSK( - int32_t *R, /* (i) Autocorrelation of length >= order+1 */ - int16_t *A, /* (o) A[0..order] LPC coefficients (Q11) */ - int16_t *K, /* (o) K[0...order-1] Reflection coefficients (Q15) */ - int16_t order /* (i) filter order */ - ) { - int16_t i, j; - int16_t R_hi[LEVINSON_MAX_ORDER+1], R_low[LEVINSON_MAX_ORDER+1]; - /* Aurocorr coefficients in high precision */ - int16_t A_hi[LEVINSON_MAX_ORDER+1], A_low[LEVINSON_MAX_ORDER+1]; - /* LPC coefficients in high precicion */ - int16_t A_upd_hi[LEVINSON_MAX_ORDER+1], A_upd_low[LEVINSON_MAX_ORDER+1]; - /* LPC coefficients for next iteration */ - int16_t K_hi, K_low; /* reflection coefficient in high precision */ - int16_t Alpha_hi, Alpha_low, Alpha_exp; /* Prediction gain Alpha in high precision - and with scale factor */ - int16_t tmp_hi, tmp_low; - int32_t temp1W32, temp2W32, temp3W32; - int16_t norm; - - /* Normalize the autocorrelation R[0]...R[order+1] */ - - norm = WebRtcSpl_NormW32(R[0]); - - for (i=order;i>=0;i--) { - temp1W32 = R[i] << norm; - /* Put R in hi and low format */ - R_hi[i] = (int16_t)(temp1W32 >> 16); - R_low[i] = (int16_t)((temp1W32 - ((int32_t)R_hi[i] << 16)) >> 1); - } - - /* K = A[1] = -R[1] / R[0] */ - - temp2W32 = (R_hi[1] << 16) + (R_low[1] << 1); /* R[1] in Q31 */ - temp3W32 = WEBRTC_SPL_ABS_W32(temp2W32); /* abs R[1] */ - temp1W32 = WebRtcSpl_DivW32HiLow(temp3W32, R_hi[0], R_low[0]); /* abs(R[1])/R[0] in Q31 */ - /* Put back the sign on R[1] */ - if (temp2W32 > 0) { - temp1W32 = -temp1W32; - } - - /* Put K in hi and low format */ - K_hi = (int16_t)(temp1W32 >> 16); - K_low = (int16_t)((temp1W32 - 
((int32_t)K_hi << 16)) >> 1); - - /* Store first reflection coefficient */ - K[0] = K_hi; - - temp1W32 >>= 4; /* A[1] in Q27. */ - - /* Put A[1] in hi and low format */ - A_hi[1] = (int16_t)(temp1W32 >> 16); - A_low[1] = (int16_t)((temp1W32 - ((int32_t)A_hi[1] << 16)) >> 1); - - /* Alpha = R[0] * (1-K^2) */ - - temp1W32 = (((K_hi * K_low) >> 14) + K_hi * K_hi) << 1; /* = k^2 in Q31 */ - - temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* Guard against <0 */ - temp1W32 = (int32_t)0x7fffffffL - temp1W32; /* temp1W32 = (1 - K[0]*K[0]) in Q31 */ - - /* Store temp1W32 = 1 - K[0]*K[0] on hi and low format */ - tmp_hi = (int16_t)(temp1W32 >> 16); - tmp_low = (int16_t)((temp1W32 - ((int32_t)tmp_hi << 16)) >> 1); - - /* Calculate Alpha in Q31 */ - temp1W32 = (R_hi[0] * tmp_hi + ((R_hi[0] * tmp_low) >> 15) + - ((R_low[0] * tmp_hi) >> 15)) << 1; - - /* Normalize Alpha and put it in hi and low format */ - - Alpha_exp = WebRtcSpl_NormW32(temp1W32); - temp1W32 <<= Alpha_exp; - Alpha_hi = (int16_t)(temp1W32 >> 16); - Alpha_low = (int16_t)((temp1W32 - ((int32_t)Alpha_hi<< 16)) >> 1); - - /* Perform the iterative calculations in the - Levinson Durbin algorithm */ - - for (i=2; i<=order; i++) - { - - /* ---- - \ - temp1W32 = R[i] + > R[j]*A[i-j] - / - ---- - j=1..i-1 - */ - - temp1W32 = 0; - - for(j=1; j> 15) + - ((R_low[j] * A_hi[i - j]) >> 15)) << 1); - } - - temp1W32 <<= 4; - temp1W32 += (R_hi[i] << 16) + (R_low[i] << 1); - - /* K = -temp1W32 / Alpha */ - temp2W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* abs(temp1W32) */ - temp3W32 = WebRtcSpl_DivW32HiLow(temp2W32, Alpha_hi, Alpha_low); /* abs(temp1W32)/Alpha */ - - /* Put the sign of temp1W32 back again */ - if (temp1W32 > 0) { - temp3W32 = -temp3W32; - } - - /* Use the Alpha shifts from earlier to denormalize */ - norm = WebRtcSpl_NormW32(temp3W32); - if ((Alpha_exp <= norm)||(temp3W32==0)) { - temp3W32 <<= Alpha_exp; - } else { - if (temp3W32 > 0) - { - temp3W32 = (int32_t)0x7fffffffL; - } else - { - temp3W32 = (int32_t)0x80000000L; - } - 
} - - /* Put K on hi and low format */ - K_hi = (int16_t)(temp3W32 >> 16); - K_low = (int16_t)((temp3W32 - ((int32_t)K_hi << 16)) >> 1); - - /* Store Reflection coefficient in Q15 */ - K[i-1] = K_hi; - - /* Test for unstable filter. If unstable return 0 and let the - user decide what to do in that case - */ - - if ((int32_t)WEBRTC_SPL_ABS_W16(K_hi) > (int32_t)32740) { - return(-i); /* Unstable filter */ - } - - /* - Compute updated LPC coefficient: Anew[i] - Anew[j]= A[j] + K*A[i-j] for j=1..i-1 - Anew[i]= K - */ - - for(j=1; j> 15) + - ((K_low * A_hi[i - j]) >> 15)) << 1; // temp1W32 += K*A[i-j] in Q27. - - /* Put Anew in hi and low format */ - A_upd_hi[j] = (int16_t)(temp1W32 >> 16); - A_upd_low[j] = (int16_t)((temp1W32 - ((int32_t)A_upd_hi[j] << 16)) >> 1); - } - - temp3W32 >>= 4; /* temp3W32 = K in Q27 (Convert from Q31 to Q27) */ - - /* Store Anew in hi and low format */ - A_upd_hi[i] = (int16_t)(temp3W32 >> 16); - A_upd_low[i] = (int16_t)((temp3W32 - ((int32_t)A_upd_hi[i] << 16)) >> 1); - - /* Alpha = Alpha * (1-K^2) */ - - temp1W32 = (((K_hi * K_low) >> 14) + K_hi * K_hi) << 1; /* K*K in Q31 */ - - temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* Guard against <0 */ - temp1W32 = (int32_t)0x7fffffffL - temp1W32; /* 1 - K*K in Q31 */ - - /* Convert 1- K^2 in hi and low format */ - tmp_hi = (int16_t)(temp1W32 >> 16); - tmp_low = (int16_t)((temp1W32 - ((int32_t)tmp_hi << 16)) >> 1); - - /* Calculate Alpha = Alpha * (1-K^2) in Q31 */ - temp1W32 = (Alpha_hi * tmp_hi + ((Alpha_hi * tmp_low) >> 15) + - ((Alpha_low * tmp_hi) >> 15)) << 1; - - /* Normalize Alpha and store it on hi and low format */ - - norm = WebRtcSpl_NormW32(temp1W32); - temp1W32 <<= norm; - - Alpha_hi = (int16_t)(temp1W32 >> 16); - Alpha_low = (int16_t)((temp1W32 - ((int32_t)Alpha_hi << 16)) >> 1); - - /* Update the total nomalization of Alpha */ - Alpha_exp = Alpha_exp + norm; - - /* Update A[] */ - - for(j=1; j<=i; j++) - { - A_hi[j] =A_upd_hi[j]; - A_low[j] =A_upd_low[j]; - } - } - - /* - Set A[0] to 
1.0 and store the A[i] i=1...order in Q12 - (Convert from Q27 and use rounding) - */ - - A[0] = 2048; - - for(i=1; i<=order; i++) { - /* temp1W32 in Q27 */ - temp1W32 = (A_hi[i] << 16) + (A_low[i] << 1); - /* Round and store upper word */ - A[i] = (int16_t)((temp1W32 + 32768) >> 16); - } - return(1); /* Stable filters */ -} - - - - - -/* window */ -/* Matlab generation of floating point code: - * t = (1:256)/257; r = 1-(1-t).^.45; w = sin(r*pi).^3; w = w/sum(w); plot((1:256)/8, w); grid; - * for k=1:16, fprintf(1, '%.8f, ', w(k*16 + (-15:0))); fprintf(1, '\n'); end - * All values are multiplyed with 2^21 in fixed point code. - */ -static const int16_t kWindowAutocorr[WINLEN] = { - 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 5, 6, - 8, 10, 12, 14, 17, 20, 24, 28, 33, 38, 43, 49, - 56, 63, 71, 79, 88, 98, 108, 119, 131, 143, 157, 171, - 186, 202, 219, 237, 256, 275, 296, 318, 341, 365, 390, 416, - 444, 472, 502, 533, 566, 600, 635, 671, 709, 748, 789, 831, - 875, 920, 967, 1015, 1065, 1116, 1170, 1224, 1281, 1339, 1399, 1461, - 1525, 1590, 1657, 1726, 1797, 1870, 1945, 2021, 2100, 2181, 2263, 2348, - 2434, 2523, 2614, 2706, 2801, 2898, 2997, 3099, 3202, 3307, 3415, 3525, - 3637, 3751, 3867, 3986, 4106, 4229, 4354, 4481, 4611, 4742, 4876, 5012, - 5150, 5291, 5433, 5578, 5725, 5874, 6025, 6178, 6333, 6490, 6650, 6811, - 6974, 7140, 7307, 7476, 7647, 7820, 7995, 8171, 8349, 8529, 8711, 8894, - 9079, 9265, 9453, 9642, 9833, 10024, 10217, 10412, 10607, 10803, 11000, 11199, - 11398, 11597, 11797, 11998, 12200, 12401, 12603, 12805, 13008, 13210, 13412, 13614, - 13815, 14016, 14216, 14416, 14615, 14813, 15009, 15205, 15399, 15591, 15782, 15971, - 16157, 16342, 16524, 16704, 16881, 17056, 17227, 17395, 17559, 17720, 17877, 18030, - 18179, 18323, 18462, 18597, 18727, 18851, 18970, 19082, 19189, 19290, 19384, 19471, - 19551, 19623, 19689, 19746, 19795, 19835, 19867, 19890, 19904, 19908, 19902, 19886, - 19860, 19823, 19775, 19715, 19644, 19561, 19465, 19357, 19237, 19102, 18955, 18793, - 
18618, 18428, 18223, 18004, 17769, 17518, 17252, 16970, 16672, 16357, 16025, 15677, - 15311, 14929, 14529, 14111, 13677, 13225, 12755, 12268, 11764, 11243, 10706, 10152, - 9583, 8998, 8399, 7787, 7162, 6527, 5883, 5231, 4576, 3919, 3265, 2620, - 1990, 1386, 825, 333 -}; - - -/* By using a hearing threshold level in dB of -28 dB (higher value gives more noise), - the H_T_H (in float) can be calculated as: - H_T_H = pow(10.0, 0.05 * (-28.0)) = 0.039810717055350 - In Q19, H_T_H becomes round(0.039810717055350*2^19) ~= 20872, i.e. - H_T_H = 20872/524288.0, and H_T_HQ19 = 20872; -*/ - - -/* The bandwidth expansion vectors are created from: - kPolyVecLo=[0.900000,0.810000,0.729000,0.656100,0.590490,0.531441,0.478297,0.430467,0.387420,0.348678,0.313811,0.282430]; - kPolyVecHi=[0.800000,0.640000,0.512000,0.409600,0.327680,0.262144]; - round(kPolyVecLo*32768) - round(kPolyVecHi*32768) -*/ -static const int16_t kPolyVecLo[12] = { - 29491, 26542, 23888, 21499, 19349, 17414, 15673, 14106, 12695, 11425, 10283, 9255 -}; -static const int16_t kPolyVecHi[6] = { - 26214, 20972, 16777, 13422, 10737, 8590 -}; - -static __inline int32_t log2_Q8_LPC( uint32_t x ) { - - int32_t zeros; - int16_t frac; - - zeros=WebRtcSpl_NormU32(x); - frac = (int16_t)(((x << zeros) & 0x7FFFFFFF) >> 23); - - /* log2(x) */ - return ((31 - zeros) << 8) + frac; -} - -static const int16_t kMulPitchGain = -25; /* 200/256 in Q5 */ -static const int16_t kChngFactor = 3523; /* log10(2)*10/4*0.4/1.4=log10(2)/1.4= 0.2150 in Q14 */ -static const int16_t kExp2 = 11819; /* 1/log(2) */ -const int kShiftLowerBand = 11; /* Shift value for lower band in Q domain. */ -const int kShiftHigherBand = 12; /* Shift value for higher band in Q domain. 
*/ - -void WebRtcIsacfix_GetVars(const int16_t *input, const int16_t *pitchGains_Q12, - uint32_t *oldEnergy, int16_t *varscale) -{ - int k; - uint32_t nrgQ[4]; - int16_t nrgQlog[4]; - int16_t tmp16, chng1, chng2, chng3, chng4, tmp, chngQ, oldNrgQlog, pgQ, pg3; - int32_t expPg32; - int16_t expPg, divVal; - int16_t tmp16_1, tmp16_2; - - /* Calculate energies of first and second frame halfs */ - nrgQ[0]=0; - for (k = QLOOKAHEAD/2; k < (FRAMESAMPLES/4 + QLOOKAHEAD) / 2; k++) { - nrgQ[0] += (uint32_t)(input[k] * input[k]); - } - nrgQ[1]=0; - for ( ; k < (FRAMESAMPLES/2 + QLOOKAHEAD) / 2; k++) { - nrgQ[1] += (uint32_t)(input[k] * input[k]); - } - nrgQ[2]=0; - for ( ; k < (FRAMESAMPLES * 3 / 4 + QLOOKAHEAD) / 2; k++) { - nrgQ[2] += (uint32_t)(input[k] * input[k]); - } - nrgQ[3]=0; - for ( ; k < (FRAMESAMPLES + QLOOKAHEAD) / 2; k++) { - nrgQ[3] += (uint32_t)(input[k] * input[k]); - } - - for ( k=0; k<4; k++) { - nrgQlog[k] = (int16_t)log2_Q8_LPC(nrgQ[k]); /* log2(nrgQ) */ - } - oldNrgQlog = (int16_t)log2_Q8_LPC(*oldEnergy); - - /* Calculate average level change */ - chng1 = WEBRTC_SPL_ABS_W16(nrgQlog[3]-nrgQlog[2]); - chng2 = WEBRTC_SPL_ABS_W16(nrgQlog[2]-nrgQlog[1]); - chng3 = WEBRTC_SPL_ABS_W16(nrgQlog[1]-nrgQlog[0]); - chng4 = WEBRTC_SPL_ABS_W16(nrgQlog[0]-oldNrgQlog); - tmp = chng1+chng2+chng3+chng4; - chngQ = (int16_t)(tmp * kChngFactor >> 10); /* Q12 */ - chngQ += 2926; /* + 1.0/1.4 in Q12 */ - - /* Find average pitch gain */ - pgQ = 0; - for (k=0; k<4; k++) - { - pgQ += pitchGains_Q12[k]; - } - - pg3 = (int16_t)(pgQ * pgQ >> 11); // pgQ in Q(12+2)=Q14. Q14*Q14>>11 => Q17 - pg3 = (int16_t)(pgQ * pg3 >> 13); /* Q14*Q17>>13 =>Q18 */ - /* kMulPitchGain = -25 = -200 in Q-3. 
*/ - pg3 = (int16_t)(pg3 * kMulPitchGain >> 5); // Q10 - tmp16=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,pg3,13);/* Q13*Q10>>13 => Q10*/ - if (tmp16<0) { - tmp16_2 = (0x0400 | (tmp16 & 0x03FF)); - tmp16_1 = ((uint16_t)(tmp16 ^ 0xFFFF) >> 10) - 3; /* Gives result in Q14 */ - if (tmp16_1<0) - expPg = -(tmp16_2 << -tmp16_1); - else - expPg = -(tmp16_2 >> tmp16_1); - } else - expPg = (int16_t) -16384; /* 1 in Q14, since 2^0=1 */ - - expPg32 = (int32_t)expPg << 8; /* Q22 */ - divVal = WebRtcSpl_DivW32W16ResW16(expPg32, chngQ); /* Q22/Q12=Q10 */ - - tmp16=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,divVal,13);/* Q13*Q10>>13 => Q10*/ - if (tmp16<0) { - tmp16_2 = (0x0400 | (tmp16 & 0x03FF)); - tmp16_1 = ((uint16_t)(tmp16 ^ 0xFFFF) >> 10) - 3; /* Gives result in Q14 */ - if (tmp16_1<0) - expPg = tmp16_2 << -tmp16_1; - else - expPg = tmp16_2 >> tmp16_1; - } else - expPg = (int16_t) 16384; /* 1 in Q14, since 2^0=1 */ - - *varscale = expPg-1; - *oldEnergy = nrgQ[3]; -} - - - -static __inline int16_t exp2_Q10_T(int16_t x) { // Both in and out in Q10 - - int16_t tmp16_1, tmp16_2; - - tmp16_2=(int16_t)(0x0400|(x&0x03FF)); - tmp16_1 = -(x >> 10); - if(tmp16_1>0) - return tmp16_2 >> tmp16_1; - else - return tmp16_2 << -tmp16_1; - -} - - -// Declare function pointers. -AutocorrFix WebRtcIsacfix_AutocorrFix; -CalculateResidualEnergy WebRtcIsacfix_CalculateResidualEnergy; - -/* This routine calculates the residual energy for LPC. - * Formula as shown in comments inside. 
- */ -int32_t WebRtcIsacfix_CalculateResidualEnergyC(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy) { - int i = 0, j = 0; - int shift_internal = 0, shift_norm = 0; - int32_t tmp32 = 0, word32_high = 0, word32_low = 0, residual_energy = 0; - int64_t sum64 = 0, sum64_tmp = 0; - - for (i = 0; i <= lpc_order; i++) { - for (j = i; j <= lpc_order; j++) { - /* For the case of i == 0: residual_energy += - * a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i]; - * For the case of i != 0: residual_energy += - * a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i] * 2; - */ - - tmp32 = a_polynomial[j] * a_polynomial[j - i]; - /* tmp32 in Q(q_val_polynomial * 2). */ - if (i != 0) { - tmp32 <<= 1; - } - sum64_tmp = (int64_t)tmp32 * (int64_t)corr_coeffs[i]; - sum64_tmp >>= shift_internal; - - /* Test overflow and sum the result. */ - if(((sum64_tmp > 0 && sum64 > 0) && (LLONG_MAX - sum64 < sum64_tmp)) || - ((sum64_tmp < 0 && sum64 < 0) && (LLONG_MIN - sum64 > sum64_tmp))) { - /* Shift right for overflow. */ - shift_internal += 1; - sum64 >>= 1; - sum64 += sum64_tmp >> 1; - } else { - sum64 += sum64_tmp; - } - } - } - - word32_high = (int32_t)(sum64 >> 32); - word32_low = (int32_t)sum64; - - // Calculate the value of shifting (shift_norm) for the 64-bit sum. 
- if(word32_high != 0) { - shift_norm = 32 - WebRtcSpl_NormW32(word32_high); - residual_energy = (int32_t)(sum64 >> shift_norm); - } else { - if((word32_low & 0x80000000) != 0) { - shift_norm = 1; - residual_energy = (uint32_t)word32_low >> 1; - } else { - shift_norm = WebRtcSpl_NormW32(word32_low); - residual_energy = word32_low << shift_norm; - shift_norm = -shift_norm; - } - } - - /* Q(q_val_polynomial * 2) * Q(q_val_corr) >> shift_internal >> shift_norm - * = Q(q_val_corr - shift_internal - shift_norm + q_val_polynomial * 2) - */ - *q_val_residual_energy = q_val_corr - shift_internal - shift_norm - + q_val_polynomial * 2; - - return residual_energy; -} - -void WebRtcIsacfix_GetLpcCoef(int16_t *inLoQ0, - int16_t *inHiQ0, - MaskFiltstr_enc *maskdata, - int16_t snrQ10, - const int16_t *pitchGains_Q12, - int32_t *gain_lo_hiQ17, - int16_t *lo_coeffQ15, - int16_t *hi_coeffQ15) -{ - int k, n, ii; - int pos1, pos2; - int sh_lo, sh_hi, sh, ssh, shMem; - int16_t varscaleQ14; - - int16_t tmpQQlo, tmpQQhi; - int32_t tmp32; - int16_t tmp16,tmp16b; - - int16_t polyHI[ORDERHI+1]; - int16_t rcQ15_lo[ORDERLO], rcQ15_hi[ORDERHI]; - - - int16_t DataLoQ6[WINLEN], DataHiQ6[WINLEN]; - int32_t corrloQQ[ORDERLO+2]; - int32_t corrhiQQ[ORDERHI+1]; - int32_t corrlo2QQ[ORDERLO+1]; - int16_t scale; - int16_t QdomLO, QdomHI, newQdomHI, newQdomLO; - - int32_t res_nrgQQ; - int32_t sqrt_nrg; - - /* less-noise-at-low-frequencies factor */ - int16_t aaQ14; - - /* Multiplication with 1/sqrt(12) ~= 0.28901734104046 can be done by convertion to - Q15, i.e. 
round(0.28901734104046*32768) = 9471, and use 9471/32768.0 ~= 0.289032 - */ - int16_t snrq; - int shft; - - int16_t tmp16a; - int32_t tmp32a, tmp32b, tmp32c; - - int16_t a_LOQ11[ORDERLO+1]; - int16_t k_vecloQ15[ORDERLO]; - int16_t a_HIQ12[ORDERHI+1]; - int16_t k_vechiQ15[ORDERHI]; - - int16_t stab; - - snrq=snrQ10; - - /* SNR= C * 2 ^ (D * snrq) ; C=0.289, D=0.05*log2(10)=0.166 (~=172 in Q10)*/ - tmp16 = (int16_t)(snrq * 172 >> 10); // Q10 - tmp16b = exp2_Q10_T(tmp16); // Q10 - snrq = (int16_t)(tmp16b * 285 >> 10); // Q10 - - /* change quallevel depending on pitch gains and level fluctuations */ - WebRtcIsacfix_GetVars(inLoQ0, pitchGains_Q12, &(maskdata->OldEnergy), &varscaleQ14); - - /* less-noise-at-low-frequencies factor */ - /* Calculation of 0.35 * (0.5 + 0.5 * varscale) in fixpoint: - With 0.35 in Q16 (0.35 ~= 22938/65536.0 = 0.3500061) and varscaleQ14 in Q14, - we get Q16*Q14>>16 = Q14 - */ - aaQ14 = (int16_t)((22938 * (8192 + (varscaleQ14 >> 1)) + 32768) >> 16); - - /* Calculate tmp = (1.0 + aa*aa); in Q12 */ - tmp16 = (int16_t)(aaQ14 * aaQ14 >> 15); // Q14*Q14>>15 = Q13 - tmpQQlo = 4096 + (tmp16 >> 1); // Q12 + Q13>>1 = Q12. - - /* Calculate tmp = (1.0+aa) * (1.0+aa); */ - tmp16 = 8192 + (aaQ14 >> 1); // 1+a in Q13. 
- tmpQQhi = (int16_t)(tmp16 * tmp16 >> 14); // Q13*Q13>>14 = Q12 - - /* replace data in buffer by new look-ahead data */ - for (pos1 = 0; pos1 < QLOOKAHEAD; pos1++) { - maskdata->DataBufferLoQ0[pos1 + WINLEN - QLOOKAHEAD] = inLoQ0[pos1]; - } - - for (k = 0; k < SUBFRAMES; k++) { - - /* Update input buffer and multiply signal with window */ - for (pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) { - maskdata->DataBufferLoQ0[pos1] = maskdata->DataBufferLoQ0[pos1 + UPDATE/2]; - maskdata->DataBufferHiQ0[pos1] = maskdata->DataBufferHiQ0[pos1 + UPDATE/2]; - DataLoQ6[pos1] = (int16_t)(maskdata->DataBufferLoQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - DataHiQ6[pos1] = (int16_t)(maskdata->DataBufferHiQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - } - pos2 = (int16_t)(k * UPDATE / 2); - for (n = 0; n < UPDATE/2; n++, pos1++) { - maskdata->DataBufferLoQ0[pos1] = inLoQ0[QLOOKAHEAD + pos2]; - maskdata->DataBufferHiQ0[pos1] = inHiQ0[pos2++]; - DataLoQ6[pos1] = (int16_t)(maskdata->DataBufferLoQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - DataHiQ6[pos1] = (int16_t)(maskdata->DataBufferHiQ0[pos1] * - kWindowAutocorr[pos1] >> 15); // Q0*Q21>>15 = Q6 - } - - /* Get correlation coefficients */ - /* The highest absolute value measured inside DataLo in the test set - For DataHi, corresponding value was 160. - - This means that it should be possible to represent the input values - to WebRtcSpl_AutoCorrelation() as Q6 values (since 307*2^6 = - 19648). Of course, Q0 will also work, but due to the low energy in - DataLo and DataHi, the outputted autocorrelation will be more accurate - and mimic the floating point code better, by being in an high as possible - Q-domain. 
- */ - - WebRtcIsacfix_AutocorrFix(corrloQQ,DataLoQ6,WINLEN, ORDERLO+1, &scale); - QdomLO = 12-scale; // QdomLO is the Q-domain of corrloQQ - sh_lo = WebRtcSpl_NormW32(corrloQQ[0]); - QdomLO += sh_lo; - for (ii=0; ii> 1) - - (WEBRTC_SPL_MUL_16_32_RSFT16(aaQ14, corrloQQ[1]) >> 2); - - /* Calculate corrlo2[n] = tmpQQlo * corrlo[n] - tmpQQlo * (corrlo[n-1] + corrlo[n+1]);*/ - for (n = 1; n <= ORDERLO; n++) { - - tmp32 = (corrloQQ[n - 1] >> 1) + (corrloQQ[n + 1] >> 1); // Q(QdomLO-1). - corrlo2QQ[n] = (WEBRTC_SPL_MUL_16_32_RSFT16(tmpQQlo, corrloQQ[n]) >> 1) - - (WEBRTC_SPL_MUL_16_32_RSFT16(aaQ14, tmp32) >> 2); - } - QdomLO -= 5; - - /* Calculate corrhi[n] = tmpQQhi * corrhi[n]; */ - for (n = 0; n <= ORDERHI; n++) { - corrhiQQ[n] = WEBRTC_SPL_MUL_16_32_RSFT16(tmpQQhi, corrhiQQ[n]); // Q(12+QdomHI-16) = Q(QdomHI-4) - } - QdomHI -= 4; - - /* add white noise floor */ - /* corrlo2QQ is in Q(QdomLO) and corrhiQQ is in Q(QdomHI) */ - /* Calculate corrlo2[0] += 9.5367431640625e-7; and - corrhi[0] += 9.5367431640625e-7, where the constant is 1/2^20 */ - - tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t) 1, QdomLO-20); - corrlo2QQ[0] += tmp32; - tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t) 1, QdomHI-20); - corrhiQQ[0] += tmp32; - - /* corrlo2QQ is in Q(QdomLO) and corrhiQQ is in Q(QdomHI) before the following - code segment, where we want to make sure we get a 1-bit margin */ - for (n = 0; n <= ORDERLO; n++) { - corrlo2QQ[n] >>= 1; // Make sure we have a 1-bit margin. - } - QdomLO -= 1; // Now, corrlo2QQ is in Q(QdomLO), with a 1-bit margin - - for (n = 0; n <= ORDERHI; n++) { - corrhiQQ[n] >>= 1; // Make sure we have a 1-bit margin. 
- } - QdomHI -= 1; // Now, corrhiQQ is in Q(QdomHI), with a 1-bit margin - - - newQdomLO = QdomLO; - - for (n = 0; n <= ORDERLO; n++) { - int32_t tmp, tmpB, tmpCorr; - int16_t alpha=328; //0.01 in Q15 - int16_t beta=324; //(1-0.01)*0.01=0.0099 in Q15 - int16_t gamma=32440; //(1-0.01)=0.99 in Q15 - - if (maskdata->CorrBufLoQQ[n] != 0) { - shMem=WebRtcSpl_NormW32(maskdata->CorrBufLoQQ[n]); - sh = QdomLO - maskdata->CorrBufLoQdom[n]; - if (sh<=shMem) { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], sh); // Get CorrBufLoQQ to same domain as corrlo2 - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha, tmp); - } else if ((sh-shMem)<7){ - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], shMem); // Shift up CorrBufLoQQ as much as possible - // Shift `alpha` the number of times required to get `tmp` in QdomLO. - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << (sh - shMem), tmp); - } else { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufLoQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible - // Shift `alpha` as much as possible without overflow the number of - // times required to get `tmp` in QdomLO. 
- tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << 6, tmp); - tmpCorr = corrloQQ[n] >> (sh - shMem - 6); - tmp = tmp + tmpCorr; - maskdata->CorrBufLoQQ[n] = tmp; - newQdomLO = QdomLO-(sh-shMem-6); - maskdata->CorrBufLoQdom[n] = newQdomLO; - } - } else - tmp = 0; - - tmp = tmp + corrlo2QQ[n]; - - maskdata->CorrBufLoQQ[n] = tmp; - maskdata->CorrBufLoQdom[n] = QdomLO; - - tmp=WEBRTC_SPL_MUL_16_32_RSFT15(beta, tmp); - tmpB=WEBRTC_SPL_MUL_16_32_RSFT15(gamma, corrlo2QQ[n]); - corrlo2QQ[n] = tmp + tmpB; - } - if( newQdomLO!=QdomLO) { - for (n = 0; n <= ORDERLO; n++) { - if (maskdata->CorrBufLoQdom[n] != newQdomLO) - corrloQQ[n] >>= maskdata->CorrBufLoQdom[n] - newQdomLO; - } - QdomLO = newQdomLO; - } - - - newQdomHI = QdomHI; - - for (n = 0; n <= ORDERHI; n++) { - int32_t tmp, tmpB, tmpCorr; - int16_t alpha=328; //0.01 in Q15 - int16_t beta=324; //(1-0.01)*0.01=0.0099 in Q15 - int16_t gamma=32440; //(1-0.01)=0.99 in Q1 - if (maskdata->CorrBufHiQQ[n] != 0) { - shMem=WebRtcSpl_NormW32(maskdata->CorrBufHiQQ[n]); - sh = QdomHI - maskdata->CorrBufHiQdom[n]; - if (sh<=shMem) { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], sh); // Get CorrBufHiQQ to same domain as corrhi - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha, tmp); - tmpCorr = corrhiQQ[n]; - tmp = tmp + tmpCorr; - maskdata->CorrBufHiQQ[n] = tmp; - maskdata->CorrBufHiQdom[n] = QdomHI; - } else if ((sh-shMem)<7) { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible - // Shift `alpha` the number of times required to get `tmp` in QdomHI. - tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << (sh - shMem), tmp); - tmpCorr = corrhiQQ[n]; - tmp = tmp + tmpCorr; - maskdata->CorrBufHiQQ[n] = tmp; - maskdata->CorrBufHiQdom[n] = QdomHI; - } else { - tmp = WEBRTC_SPL_SHIFT_W32(maskdata->CorrBufHiQQ[n], shMem); // Shift up CorrBufHiQQ as much as possible - // Shift `alpha` as much as possible without overflow the number of - // times required to get `tmp` in QdomHI. 
- tmp = WEBRTC_SPL_MUL_16_32_RSFT15(alpha << 6, tmp); - tmpCorr = corrhiQQ[n] >> (sh - shMem - 6); - tmp = tmp + tmpCorr; - maskdata->CorrBufHiQQ[n] = tmp; - newQdomHI = QdomHI-(sh-shMem-6); - maskdata->CorrBufHiQdom[n] = newQdomHI; - } - } else { - tmp = corrhiQQ[n]; - tmpCorr = tmp; - maskdata->CorrBufHiQQ[n] = tmp; - maskdata->CorrBufHiQdom[n] = QdomHI; - } - - tmp=WEBRTC_SPL_MUL_16_32_RSFT15(beta, tmp); - tmpB=WEBRTC_SPL_MUL_16_32_RSFT15(gamma, tmpCorr); - corrhiQQ[n] = tmp + tmpB; - } - - if( newQdomHI!=QdomHI) { - for (n = 0; n <= ORDERHI; n++) { - if (maskdata->CorrBufHiQdom[n] != newQdomHI) - corrhiQQ[n] >>= maskdata->CorrBufHiQdom[n] - newQdomHI; - } - QdomHI = newQdomHI; - } - - stab=WebRtcSpl_LevinsonW32_JSK(corrlo2QQ, a_LOQ11, k_vecloQ15, ORDERLO); - - if (stab<0) { // If unstable use lower order - a_LOQ11[0]=2048; - for (n = 1; n <= ORDERLO; n++) { - a_LOQ11[n]=0; - } - - stab=WebRtcSpl_LevinsonW32_JSK(corrlo2QQ, a_LOQ11, k_vecloQ15, 8); - } - - - WebRtcSpl_LevinsonDurbin(corrhiQQ, a_HIQ12, k_vechiQ15, ORDERHI); - - /* bandwidth expansion */ - for (n = 1; n <= ORDERLO; n++) { - a_LOQ11[n] = (int16_t)((kPolyVecLo[n - 1] * a_LOQ11[n] + (1 << 14)) >> - 15); - } - - - polyHI[0] = a_HIQ12[0]; - for (n = 1; n <= ORDERHI; n++) { - a_HIQ12[n] = (int16_t)(((int32_t)(kPolyVecHi[n - 1] * a_HIQ12[n]) + - (1 << 14)) >> 15); - polyHI[n] = a_HIQ12[n]; - } - - /* Normalize the corrlo2 vector */ - sh = WebRtcSpl_NormW32(corrlo2QQ[0]); - for (n = 0; n <= ORDERLO; n++) { - corrlo2QQ[n] <<= sh; - } - QdomLO += sh; /* Now, corrlo2QQ is still in Q(QdomLO) */ - - - /* residual energy */ - - sh_lo = 31; - res_nrgQQ = WebRtcIsacfix_CalculateResidualEnergy(ORDERLO, QdomLO, - kShiftLowerBand, a_LOQ11, corrlo2QQ, &sh_lo); - - /* Convert to reflection coefficients */ - WebRtcSpl_AToK_JSK(a_LOQ11, ORDERLO, rcQ15_lo); - - if (sh_lo & 0x0001) { - res_nrgQQ >>= 1; - sh_lo-=1; - } - - - if( res_nrgQQ > 0 ) - { - sqrt_nrg=WebRtcSpl_Sqrt(res_nrgQQ); - - /* add hearing threshold and 
compute the gain */ - /* lo_coeff = varscale * S_N_R / (sqrt_nrg + varscale * H_T_H); */ - - tmp32a = varscaleQ14 >> 1; // H_T_HQ19=65536 (16-17=-1) - ssh = sh_lo >> 1; // sqrt_nrg is in Qssh. - sh = ssh - 14; - tmp32b = WEBRTC_SPL_SHIFT_W32(tmp32a, sh); // Q14->Qssh - tmp32c = sqrt_nrg + tmp32b; // Qssh (denominator) - tmp32a = varscaleQ14 * snrq; // Q24 (numerator) - - sh = WebRtcSpl_NormW32(tmp32c); - shft = 16 - sh; - tmp16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator) - - tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft) - sh = ssh-shft-7; - *gain_lo_hiQ17 = WEBRTC_SPL_SHIFT_W32(tmp32b, sh); // Gains in Q17 - } - else - { - *gain_lo_hiQ17 = 100; // Gains in Q17 - } - gain_lo_hiQ17++; - - /* copy coefficients to output array */ - for (n = 0; n < ORDERLO; n++) { - *lo_coeffQ15 = (int16_t) (rcQ15_lo[n]); - lo_coeffQ15++; - } - /* residual energy */ - sh_hi = 31; - res_nrgQQ = WebRtcIsacfix_CalculateResidualEnergy(ORDERHI, QdomHI, - kShiftHigherBand, a_HIQ12, corrhiQQ, &sh_hi); - - /* Convert to reflection coefficients */ - WebRtcSpl_LpcToReflCoef(polyHI, ORDERHI, rcQ15_hi); - - if (sh_hi & 0x0001) { - res_nrgQQ >>= 1; - sh_hi-=1; - } - - - if( res_nrgQQ > 0 ) - { - sqrt_nrg=WebRtcSpl_Sqrt(res_nrgQQ); - - - /* add hearing threshold and compute the gain */ - /* hi_coeff = varscale * S_N_R / (sqrt_nrg + varscale * H_T_H); */ - - tmp32a = varscaleQ14 >> 1; // H_T_HQ19=65536 (16-17=-1) - - ssh = sh_hi >> 1; // `sqrt_nrg` is in Qssh. 
- sh = ssh - 14; - tmp32b = WEBRTC_SPL_SHIFT_W32(tmp32a, sh); // Q14->Qssh - tmp32c = sqrt_nrg + tmp32b; // Qssh (denominator) - tmp32a = varscaleQ14 * snrq; // Q24 (numerator) - - sh = WebRtcSpl_NormW32(tmp32c); - shft = 16 - sh; - tmp16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator) - - tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft) - sh = ssh-shft-7; - *gain_lo_hiQ17 = WEBRTC_SPL_SHIFT_W32(tmp32b, sh); // Gains in Q17 - } - else - { - *gain_lo_hiQ17 = 100; // Gains in Q17 - } - gain_lo_hiQ17++; - - - /* copy coefficients to output array */ - for (n = 0; n < ORDERHI; n++) { - *hi_coeffQ15 = rcQ15_hi[n]; - hi_coeffQ15++; - } - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h b/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h deleted file mode 100644 index 40a99e8a77..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * lpc_masking_model.h - * - * LPC functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_ - -#ifdef __cplusplus -extern "C" { -#endif - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -void WebRtcIsacfix_GetVars(const int16_t* input, - const int16_t* pitchGains_Q12, - uint32_t* oldEnergy, - int16_t* varscale); - -void WebRtcIsacfix_GetLpcCoef(int16_t* inLoQ0, - int16_t* inHiQ0, - MaskFiltstr_enc* maskdata, - int16_t snrQ10, - const int16_t* pitchGains_Q12, - int32_t* gain_lo_hiQ17, - int16_t* lo_coeffQ15, - int16_t* hi_coeffQ15); - -typedef int32_t (*CalculateResidualEnergy)(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy); -extern CalculateResidualEnergy WebRtcIsacfix_CalculateResidualEnergy; - -int32_t WebRtcIsacfix_CalculateResidualEnergyC(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy); - -#if defined(MIPS_DSP_R2_LE) -int32_t WebRtcIsacfix_CalculateResidualEnergyMIPS(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy); -#endif - -#ifdef __cplusplus -} /* extern "C" */ -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_MASKING_MODEL_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_mips.c b/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_mips.c deleted file mode 100644 index 727008da32..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_mips.c +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" - -// MIPS DSPR2 optimization for function WebRtcIsacfix_CalculateResidualEnergy -// Bit-exact with WebRtcIsacfix_CalculateResidualEnergyC from file -// lpc_masking_model.c -int32_t WebRtcIsacfix_CalculateResidualEnergyMIPS(int lpc_order, - int32_t q_val_corr, - int q_val_polynomial, - int16_t* a_polynomial, - int32_t* corr_coeffs, - int* q_val_residual_energy) { - - int i = 0, j = 0; - int shift_internal = 0, shift_norm = 0; - int32_t tmp32 = 0, word32_high = 0, word32_low = 0, residual_energy = 0; - int32_t tmp_corr_c = corr_coeffs[0]; - int16_t* tmp_a_poly = &a_polynomial[0]; - int32_t sum64_hi = 0; - int32_t sum64_lo = 0; - - for (j = 0; j <= lpc_order; j++) { - // For the case of i == 0: - // residual_energy += - // a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i]; - - int32_t tmp2, tmp3; - int16_t sign_1; - int16_t sign_2; - int16_t sign_3; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lh %[tmp2], 0(%[tmp_a_poly]) \n\t" - "mul %[tmp32], %[tmp2], %[tmp2] \n\t" - "addiu %[tmp_a_poly], %[tmp_a_poly], 2 \n\t" - "sra %[sign_2], %[sum64_hi], 31 \n\t" - "mult $ac0, %[tmp32], %[tmp_corr_c] \n\t" - "shilov $ac0, %[shift_internal] \n\t" - "mfhi %[tmp2], $ac0 \n\t" - "mflo %[tmp3], $ac0 \n\t" - "sra %[sign_1], %[tmp2], 31 \n\t" - "xor %[sign_3], %[sign_1], %[sign_2] \n\t" - ".set pop \n\t" - : [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), [tmp32] "=&r" (tmp32), - [tmp_a_poly] "+r" (tmp_a_poly), [sign_1] "=&r" (sign_1), - [sign_3] "=&r" (sign_3), [sign_2] "=&r" (sign_2), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : 
[tmp_corr_c] "r" (tmp_corr_c), [shift_internal] "r" (shift_internal) - : "hi", "lo", "memory" - ); - - if (sign_3 != 0) { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : [tmp2] "r" (tmp2), [tmp3] "r" (tmp3) - : "hi", "lo", "memory" - ); - } else { - if (((!(sign_1 || sign_2)) && (0x7FFFFFFF - sum64_hi < tmp2)) || - ((sign_1 && sign_2) && (sum64_hi + tmp2 > 0))) { - // Shift right for overflow. - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[shift_internal], %[shift_internal], 1 \n\t" - "prepend %[sum64_lo], %[sum64_hi], 1 \n\t" - "sra %[sum64_hi], %[sum64_hi], 1 \n\t" - "prepend %[tmp3], %[tmp2], 1 \n\t" - "sra %[tmp2], %[tmp2], 1 \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), - [shift_internal] "+r" (shift_internal), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : - : "hi", "lo", "memory" - ); - } else { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : [tmp2] "r" (tmp2), [tmp3] "r" (tmp3) - : "hi", "lo", "memory" - ); - } - } - } - - for (i = 1; i <= lpc_order; i++) { - tmp_corr_c = corr_coeffs[i]; - int16_t* tmp_a_poly_j = &a_polynomial[i]; - int16_t* tmp_a_poly_j_i = &a_polynomial[0]; - for (j = i; j <= lpc_order; j++) { - // For the case of i = 1 .. 
lpc_order: - // residual_energy += - // a_polynomial[j] * corr_coeffs[i] * a_polynomial[j - i] * 2; - - int32_t tmp2, tmp3; - int16_t sign_1; - int16_t sign_2; - int16_t sign_3; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lh %[tmp3], 0(%[tmp_a_poly_j]) \n\t" - "lh %[tmp2], 0(%[tmp_a_poly_j_i]) \n\t" - "addiu %[tmp_a_poly_j], %[tmp_a_poly_j], 2 \n\t" - "addiu %[tmp_a_poly_j_i], %[tmp_a_poly_j_i], 2 \n\t" - "mul %[tmp32], %[tmp3], %[tmp2] \n\t" - "sll %[tmp32], %[tmp32], 1 \n\t" - "mult $ac0, %[tmp32], %[tmp_corr_c] \n\t" - "shilov $ac0, %[shift_internal] \n\t" - "mfhi %[tmp2], $ac0 \n\t" - "mflo %[tmp3], $ac0 \n\t" - "sra %[sign_1], %[tmp2], 31 \n\t" - "sra %[sign_2], %[sum64_hi], 31 \n\t" - "xor %[sign_3], %[sign_1], %[sign_2] \n\t" - ".set pop \n\t" - : [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), [tmp32] "=&r" (tmp32), - [tmp_a_poly_j] "+r" (tmp_a_poly_j), [sign_1] "=&r" (sign_1), - [tmp_a_poly_j_i] "+r" (tmp_a_poly_j_i), [sign_2] "=&r" (sign_2), - [sign_3] "=&r" (sign_3), [sum64_hi] "+r" (sum64_hi), - [sum64_lo] "+r" (sum64_lo) - : [tmp_corr_c] "r" (tmp_corr_c), [shift_internal] "r" (shift_internal) - : "hi", "lo", "memory" - ); - if (sign_3 != 0) { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), [sum64_hi] "+r" (sum64_hi), - [sum64_lo] "+r" (sum64_lo) - : - :"memory" - ); - } else { - // Test overflow and sum the result. - if (((!(sign_1 || sign_2)) && (0x7FFFFFFF - sum64_hi < tmp2)) || - ((sign_1 && sign_2) && (sum64_hi + tmp2 > 0))) { - // Shift right for overflow. 
- __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[shift_internal], %[shift_internal], 1 \n\t" - "prepend %[sum64_lo], %[sum64_hi], 1 \n\t" - "sra %[sum64_hi], %[sum64_hi], 1 \n\t" - "prepend %[tmp3], %[tmp2], 1 \n\t" - "sra %[tmp2], %[tmp2], 1 \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), - [shift_internal] "+r" (shift_internal), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : - : "hi", "lo", "memory" - ); - } else { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addsc %[sum64_lo], %[sum64_lo], %[tmp3] \n\t" - "addwc %[sum64_hi], %[sum64_hi], %[tmp2] \n\t" - ".set pop \n\t" - : [tmp2] "+r" (tmp2), [tmp3] "+r" (tmp3), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : - : "hi", "lo", "memory" - ); - } - } - } - } - word32_high = sum64_hi; - word32_low = sum64_lo; - - // Calculate the value of shifting (shift_norm) for the 64-bit sum. 
- if (word32_high != 0) { - shift_norm = 32 - WebRtcSpl_NormW32(word32_high); - int tmp1; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "srl %[residual_energy], %[sum64_lo], %[shift_norm] \n\t" - "li %[tmp1], 32 \n\t" - "subu %[tmp1], %[tmp1], %[shift_norm] \n\t" - "sll %[tmp1], %[sum64_hi], %[tmp1] \n\t" - "or %[residual_energy], %[residual_energy], %[tmp1] \n\t" - ".set pop \n\t" - : [residual_energy] "=&r" (residual_energy), [tmp1]"=&r"(tmp1), - [sum64_hi] "+r" (sum64_hi), [sum64_lo] "+r" (sum64_lo) - : [shift_norm] "r" (shift_norm) - : "memory" - ); - } else { - if ((word32_low & 0x80000000) != 0) { - shift_norm = 1; - residual_energy = (uint32_t)word32_low >> 1; - } else { - shift_norm = WebRtcSpl_NormW32(word32_low); - residual_energy = word32_low << shift_norm; - shift_norm = -shift_norm; - } - } - - // Q(q_val_polynomial * 2) * Q(q_val_corr) >> shift_internal >> shift_norm - // = Q(q_val_corr - shift_internal - shift_norm + q_val_polynomial * 2) - *q_val_residual_energy = - q_val_corr - shift_internal - shift_norm + q_val_polynomial * 2; - - return residual_energy; -} diff --git a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc b/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc deleted file mode 100644 index 82793f1344..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model_unittest.cc +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h" - -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -class LpcMaskingModelTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. - void CalculateResidualEnergyTester( - CalculateResidualEnergy CalculateResidualEnergyFunction) { - const int kIntOrder = 10; - const int32_t kInt32QDomain = 5; - const int kIntShift = 11; - int16_t a[kIntOrder + 1] = {32760, 122, 7, 0, -32760, -3958, - -48, 18745, 498, 9, 23456}; - int32_t corr[kIntOrder + 1] = {11443647, -27495, 0, 98745, -11443600, 1, - 1, 498, 9, 888, 23456}; - int q_shift_residual = 0; - int32_t residual_energy = 0; - - // Test the code path where (residual_energy >= 0x10000). - residual_energy = CalculateResidualEnergyFunction( - kIntOrder, kInt32QDomain, kIntShift, a, corr, &q_shift_residual); - EXPECT_EQ(1789023310, residual_energy); - EXPECT_EQ(2, q_shift_residual); - - // Test the code path where (residual_energy < 0x10000) - // and ((energy & 0x8000) != 0). - for (int i = 0; i < kIntOrder + 1; i++) { - a[i] = 24575 >> i; - corr[i] = i; - } - residual_energy = CalculateResidualEnergyFunction( - kIntOrder, kInt32QDomain, kIntShift, a, corr, &q_shift_residual); - EXPECT_EQ(1595279092, residual_energy); - EXPECT_EQ(26, q_shift_residual); - - // Test the code path where (residual_energy <= 0x7fff). 
- for (int i = 0; i < kIntOrder + 1; i++) { - a[i] = 2457 >> i; - } - residual_energy = CalculateResidualEnergyFunction( - kIntOrder, kInt32QDomain, kIntShift, a, corr, &q_shift_residual); - EXPECT_EQ(2029266944, residual_energy); - EXPECT_EQ(33, q_shift_residual); - } -}; - -TEST_F(LpcMaskingModelTest, CalculateResidualEnergyTest) { - CalculateResidualEnergyTester(WebRtcIsacfix_CalculateResidualEnergyC); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c b/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c deleted file mode 100644 index d495d29235..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c +++ /dev/null @@ -1,1281 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * lpc_tables.c - * - * Coding tables for the KLT coefficients - * - */ - - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/lpc_tables.h" - -/* indices of KLT coefficients used */ -const uint16_t WebRtcIsacfix_kSelIndGain[12] = { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11}; - -const uint16_t WebRtcIsacfix_kSelIndShape[108] = { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, - 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, - 100, 101, 102, 103, 104, 105, 106, 107 -}; - -/* cdf array for model indicator */ -const uint16_t WebRtcIsacfix_kModelCdf[4] = { - 0, 15434, 37548, 65535 -}; - -/* pointer to cdf array for model indicator */ -const uint16_t *WebRtcIsacfix_kModelCdfPtr[1] = { - WebRtcIsacfix_kModelCdf -}; - -/* initial cdf index for decoder of model indicator */ -const uint16_t WebRtcIsacfix_kModelInitIndex[1] = { - 1 -}; - -/* offset to go from rounded value to quantization index */ -const int16_t WebRtcIsacfix_kQuantMinGain[12] ={ - 3, 6, 4, 6, 6, 9, 5, 16, 11, 34, 32, 47 -}; - -const int16_t WebRtcIsacfix_kQuantMinShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 2, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 2, 2, 3, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, - 2, 4, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 3, 4, - 4, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 1, 2, 3, 2, 3, 4, 4, 5, 7, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 6, 7, 11, 9, 13, 12, 26 -}; - -/* maximum quantization index */ -const uint16_t WebRtcIsacfix_kMaxIndGain[12] = { - 6, 12, 8, 14, 10, 19, 12, 31, 22, 56, 52, 138 -}; - -const uint16_t WebRtcIsacfix_kMaxIndShape[108] = { - 0, 
0, 0, 0, 0, 0, 0, 0, 0, 1, - 2, 2, 2, 2, 4, 4, 5, 6, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 2, 2, - 2, 2, 3, 4, 5, 7, 0, 0, 0, 0, - 2, 0, 2, 2, 2, 2, 3, 2, 2, 4, - 4, 6, 6, 9, 0, 0, 0, 0, 2, 2, - 2, 2, 2, 2, 3, 2, 4, 4, 7, 7, - 9, 13, 0, 0, 2, 2, 2, 2, 2, 2, - 3, 4, 5, 4, 6, 8, 8, 10, 16, 25, - 0, 2, 2, 4, 5, 4, 4, 4, 7, 8, - 9, 10, 13, 19, 17, 23, 25, 49 -}; - -/* index offset */ -const uint16_t WebRtcIsacfix_kOffsetGain[3][12] = { - { 0, 7, 20, 29, 44, 55, 75, 88, 120, 143, 200, 253}, - { 0, 7, 19, 27, 42, 53, 73, 86, 117, 140, 197, 249}, - { 0, 7, 20, 28, 44, 55, 75, 89, 121, 145, 202, 257} -}; - -const uint16_t WebRtcIsacfix_kOffsetShape[3][108] = { - { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 11, 14, 17, 20, 23, 28, 33, 39, 46, 47, - 48, 49, 50, 52, 53, 54, 55, 56, 58, 61, - 64, 67, 70, 74, 79, 85, 93, 94, 95, 96, - 97, 100, 101, 104, 107, 110, 113, 117, 120, 123, - 128, 133, 140, 147, 157, 158, 159, 160, 161, 164, - 167, 170, 173, 176, 179, 183, 186, 191, 196, 204, - 212, 222, 236, 237, 238, 241, 244, 247, 250, 253, - 256, 260, 265, 271, 276, 283, 292, 301, 312, 329, - 355, 356, 359, 362, 367, 373, 378, 383, 388, 396, - 405, 415, 426, 440, 460, 478, 502, 528 - }, - { - 0, 1, 2, 3, 4, 6, 7, 8, 9, 11, - 13, 16, 19, 22, 26, 29, 34, 39, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 55, 57, 60, - 63, 66, 70, 73, 78, 84, 91, 92, 93, 94, - 95, 96, 97, 99, 102, 105, 108, 111, 114, 118, - 123, 128, 134, 141, 151, 152, 153, 154, 156, 159, - 162, 165, 168, 171, 174, 177, 181, 186, 194, 200, - 208, 218, 233, 234, 235, 236, 239, 242, 245, 248, - 251, 254, 258, 263, 270, 277, 288, 297, 308, 324, - 349, 351, 354, 357, 361, 366, 372, 378, 383, 390, - 398, 407, 420, 431, 450, 472, 496, 524 - }, - { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 11, - 14, 17, 20, 23, 26, 29, 34, 40, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 58, 61, 64, - 67, 70, 73, 77, 82, 88, 96, 97, 98, 99, - 101, 102, 104, 107, 110, 113, 116, 119, 122, 125, - 129, 134, 141, 150, 160, 161, 162, 163, 166, 168, - 171, 174, 177, 180, 183, 186, 190, 195, 
201, 208, - 216, 226, 243, 244, 245, 248, 251, 254, 257, 260, - 263, 268, 273, 278, 284, 291, 299, 310, 323, 340, - 366, 368, 371, 374, 379, 383, 389, 394, 399, 406, - 414, 422, 433, 445, 461, 480, 505, 533 - } -}; - -/* initial cdf index for KLT coefficients */ -const uint16_t WebRtcIsacfix_kInitIndexGain[3][12] = { - { 3, 6, 4, 7, 5, 10, 6, 16, 11, 28, 26, 69}, - { 3, 6, 4, 7, 5, 10, 6, 15, 11, 28, 26, 69}, - { 3, 6, 4, 8, 5, 10, 7, 16, 12, 28, 27, 70} -}; - -const uint16_t WebRtcIsacfix_kInitIndexShape[3][108] = { - { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 3, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 2, 2, 3, 4, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 2, 1, 1, 2, - 2, 3, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 4, 4, - 5, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 2, 2, 3, 2, 3, 4, 4, 5, 8, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 5, 7, 10, 9, 12, 13, 25 - }, - { - 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, - 1, 1, 1, 2, 1, 2, 2, 3, 0, 0, - 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, - 1, 2, 1, 2, 3, 3, 0, 0, 0, 0, - 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, - 2, 3, 3, 5, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 1, 1, 2, 2, 4, 3, 4, - 5, 7, 0, 0, 0, 1, 1, 1, 1, 1, - 1, 2, 2, 3, 3, 5, 4, 5, 8, 12, - 1, 1, 1, 2, 2, 3, 3, 2, 3, 4, - 4, 6, 5, 9, 11, 12, 14, 25 - }, - { - 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 1, 2, 3, 3, 0, 0, - 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, - 1, 1, 2, 2, 3, 4, 0, 0, 0, 1, - 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, - 2, 3, 4, 5, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 1, 1, 2, 2, 3, 3, 4, - 5, 8, 0, 0, 1, 1, 1, 1, 1, 1, - 2, 2, 2, 3, 3, 4, 5, 6, 8, 13, - 1, 1, 1, 2, 2, 3, 2, 2, 3, 4, - 4, 5, 6, 8, 9, 12, 14, 25 - } -}; - -/* offsets for quantizer representation levels*/ -const uint16_t WebRtcIsacfix_kOfLevelsGain[3] = { - 0, 392, 779 -}; - -const uint16_t WebRtcIsacfix_kOfLevelsShape[3] = { - 0, 578, 1152 -}; - -/* quantizer representation levels */ - - - -const int32_t WebRtcIsacfix_kLevelsGainQ17[1176] = { - -364547,-231664,-102123,-573,104807,238257,368823,-758583,-640135,-510291 - 
,-377662,-252785,-113177,2627,112906,248601,389461,522691,644517,763974 - ,-538963,-368179,-245823,-106095,-890,104299,241111,350730,493190,-800763 - ,-646230,-510239,-382115,-248623,-111829,-2983,113852,251105,388114,519757 - ,644048,774712,896334,1057931,-770009,-635310,-503690,-375087,-248106,-108525 - ,-105,108259,243472,377948,519271,-1160885,-1032044,-914636,-777593,-647891 - ,-518408,-388028,-254321,-115293,-598,117849,251296,385367,515510,652727 - ,777432,920363,1038625,1153637,1316836,-632287,-505430,-379430,-248458,-118013 - ,-888,118762,250266,381650,513327,652169,766471,932113,-2107480,-1971030 - ,-1827020,-1698263,-1558670,-1436314,-1305377,-1172252,-1047355,-914202,-779651,-651001 - ,-520999,-390394,-255761,-123490,-1893,126839,256703,385493,518607,651760 - ,782750,908693,1044131,1163865,1311066,1424177,1582628,1709823,1831740,1955391 - ,-1423044,-1288917,-1181281,-1043222,-911770,-780354,-646799,-522664,-386721,-258266 - ,-128060,-1101,128233,259996,390336,519590,649290,778701,908010,1040796 - ,1161235,1306889,1441882,-4446002,-4301031,-4194304,-4080591,-3947740,-3808975,-3686530 - ,-3567839,-3383251,-3287089,-3136577,-3017405,-2869860,-2751321,-2619984,-2482932,-2354790 - ,-2223147,-2090669,-1964135,-1831208,-1706697,-1570817,-1446008,-1305386,-1175773,-1046066 - ,-915356,-785120,-653614,-524331,-393767,-260442,-130187,-799,128841,261466 - ,393616,520542,652117,784613,914159,1045399,1181072,1308971,1442502,1570346 - ,1693912,1843986,1966014,2090474,2224869,2364593,2475934,2628403,2752512,2856640 - ,-4192441,-4063232,-3917821,-3799195,-3666233,-3519199,-3411021,-3269192,-3135684,-3008826 - ,-2880875,-2747342,-2620981,-2494872,-2354979,-2229718,-2098939,-1964971,-1835399,-1703452 - ,-1572806,-1440482,-1311794,-1179338,-1046521,-919823,-785914,-655335,-523416,-395507 - ,-264833,-132184,-2546,131698,256217,391372,522688,651248,789964,909618 - ,1035305,1179145,1313824,1436934,1552353,1693722,1815508,1972826,2096328,2228224 - 
,2359296,2490368,2598848,-6160384,-6029312,-5881382,-5767168,-5636096,-5505024,-5373952 - ,-5228418,-5110384,-4954923,-4880576,-4710990,-4587364,-4471340,-4333905,-4211513,-4051293 - ,-3907927,-3800105,-3675961,-3538640,-3413663,-3271148,-3152105,-3019103,-2869647,-2744015 - ,-2620639,-2479385,-2364211,-2227611,-2095427,-1974497,-1834168,-1703561,-1568961,-1439826 - ,-1309192,-1174050,-1050191,-917836,-786015,-656943,-518934,-394831,-257708,-128041 - ,1610,128991,264442,393977,521383,653849,788164,918641,1049122,1181971 - ,1308934,1439505,1571808,1706305,1836318,1966235,2097269,2228990,2357005,2490292 - ,2617400,2749680,2881234,3014880,3145637,3276467,3409099,3536637,3671493,3802918 - ,3929740,4065036,4194143,4325999,4456126,4586857,4717194,4843923,4978676,5110913 - ,5245281,5371394,5499780,5633779,5762611,5897682,6028688,6167546,6296465,6421682 - ,6548882,6682074,6809432,6941956,7078143,7204509,7334296,7475137,7609896,7732044 - ,7861604,8002039,8131670,8259222,8390299,8522399,8650037,8782348,8908402,9037815 - ,9164594,9300338,9434679,9574500,9699702,9833934,9948152,10083972,10244937,10332822 - ,10485760,10600122,10760754,10892964,11010048,11111004,11272192,11403264,11525091,11624984 - ,11796480,11915146,-393216,-262144,-101702,-740,100568,262144,393216,-786432 - ,-655360,-524288,-383907,-243301,-94956,-156,95547,269629,416691,524288 - ,655360,-393216,-262144,-88448,-37,87318,262144,393216,524288,-917504 - ,-786432,-655360,-495894,-373308,-267503,-93211,4119,91308,250895,393216 - ,526138,655360,786432,917504,-786432,-655360,-524288,-393216,-262144,-83497 - ,222,86893,240922,393216,524288,-1048576,-917504,-790472,-655360,-508639 - ,-383609,-262016,-95550,-3775,96692,256797,364847,534906,655360,786432 - ,889679,1048576,1179648,1310720,1441792,-655360,-524288,-377684,-248408,-93690 - ,1261,95441,227519,393216,524288,655360,786432,917504,-2097152,-1966080 - ,-1809470,-1703936,-1572864,-1441792,-1314289,-1195149,-1056205,-917504,-809951,-657769 - 
,-521072,-383788,-248747,-106350,-2944,105550,243408,388548,521064,628732 - ,786432,885456,1064548,1179648,1310720,1441792,1572864,1703936,1835008,-1441792 - ,-1310720,-1179648,-1037570,-888492,-767774,-646634,-519935,-373458,-248029,-111915 - ,760,111232,247735,379432,507672,672699,786432,917504,1048576,1179648 - ,1310720,1441792,-4456448,-4325376,-4194304,-4063232,-3932160,-3801088,-3670016,-3538944 - ,-3407872,-3276800,-3145728,-3014656,-2883584,-2752512,-2647002,-2490368,-2359296,-2228224 - ,-2097152,-1951753,-1835008,-1703936,-1594177,-1462001,-1289150,-1160774,-1025917,-924928 - ,-782509,-641294,-516191,-386630,-251910,-118886,5210,121226,253949,386008 - ,517973,649374,780064,917783,1052462,1183856,1290593,1419389,1556641,1699884 - ,1835008,1988314,2090470,2228224,2359296,2490368,2621440,2752512,2883584,-3801088 - ,-3643514,-3539937,-3409931,-3263294,-3145658,-3012952,-2879230,-2752359,-2622556,-2483471 - ,-2357556,-2226500,-2093112,-1965892,-1833664,-1701035,-1567767,-1440320,-1310556,-1178339 - ,-1049625,-916812,-786477,-655277,-525050,-393773,-264828,-130696,-480,132126 - ,260116,394197,527846,652294,785563,917183,1049511,1175958,1308161,1438759 - ,1572253,1698835,1828535,1967072,2089391,2212798,2348901,2461547,2621440,2752512 - ,2883584,-7309870,-7203780,-7062699,-6939106,-6790819,-6672036,-6553600,-6422317,-6288422 - ,-6164694,-6026456,-5901410,-5754168,-5621459,-5502710,-5369686,-5240454,-5120712,-4976140 - ,-4847970,-4723070,-4589083,-4450923,-4324680,-4189892,-4065551,-3931803,-3800209,-3668539 - ,-3539395,-3404801,-3277470,-3141389,-3016710,-2885724,-2752612,-2618541,-2486762,-2354153 - ,-2225059,-2094984,-1968194,-1830895,-1699508,-1575743,-1444516,-1308683,-1179714,-1053088 - ,-917981,-783707,-653900,-524980,-395409,-260309,-131948,-3452,132113,263241 - ,392185,522597,654134,788288,919810,1045795,1179210,1314201,1444235,1574447 - ,1705193,1834009,1967332,2098102,2229019,2359147,2489859,2619878,2754966,2879671 - 
,3014438,3146143,3276733,3405958,3542196,3667493,3798815,3932961,4062458,4187125 - ,4322346,4454875,4587752,4716809,4848274,4975027,5111957,5242215,5373085,5501158 - ,5640140,5762918,5895358,6024008,6157906,6290628,6422713,6546339,6675888,6815606 - ,6955288,7077501,7211630,7337893,7473635,7607175,7728310,7866475,7999658,8127888 - ,8241758,8386483,8522550,8641582,8771915,8922139,9038632,9179385,9313426,9437184 - ,9568256,9699328,9830400,9952933,10120004,10223616,10354688,10474645,10616832,-393216 - ,-262144,-85425,-121,82533,262144,393216,-786432,-655360,-524288,-379928 - ,-222821,-95200,287,95541,227093,393216,493567,655360,786432,-393216 - ,-262144,-86805,510,86722,262144,393216,524288,-1048576,-917504,-786432 - ,-624456,-529951,-395071,-241627,-101168,81,99975,241605,393216,524288 - ,655360,786432,917504,-786432,-655360,-524288,-393216,-230359,-95619,-137 - ,94425,226222,393216,524288,-1179648,-1048576,-917504,-773841,-655360,-492258 - ,-379715,-244707,-103621,-434,104523,242680,381575,523659,650565,786432 - ,917504,1048576,1179648,1310720,-786432,-629344,-524288,-376757,-242858,-101932 - ,-2715,107155,239212,366480,514943,655360,786432,917504,-2228224,-2097152 - ,-1966080,-1835008,-1703936,-1572864,-1441792,-1284584,-1179648,-1048819,-934658,-777181 - ,-626371,-515660,-377493,-248975,-113036,436,113584,248354,379718,512475 - ,653932,796494,917504,1048576,1179648,1310720,1441792,1572864,1703936,1835008 - ,-1572864,-1441792,-1297608,-1161159,-1032316,-917092,-779770,-647384,-515529,-384269 - ,-250003,-119252,1053,118111,249512,380545,512039,648101,770656,907003 - ,1021725,1178082,1310720,1441792,-4587520,-4456448,-4325376,-4194304,-4063232,-3932160 - ,-3801088,-3670016,-3538944,-3407872,-3276800,-3145728,-2999335,-2883584,-2752512,-2621440 - ,-2490368,-2359296,-2228224,-2112691,-1966080,-1848781,-1709830,-1566109,-1438427,-1303530 - ,-1176124,-1040936,-913876,-784585,-652025,-518361,-385267,-256342,-127297,-2733 - 
,125422,257792,389363,519911,651106,783805,909407,1044143,1174156,1309267 - ,1436173,1553771,1708958,1814083,1967036,2095386,2255169,2359296,2478303,2621440 - ,2752512,-4456448,-4325376,-4194304,-4063232,-3932160,-3797524,-3670016,-3560250,-3413217 - ,-3257719,-3166416,-2986626,-2878000,-2781144,-2625383,-2495465,-2346792,-2230930,-2077063 - ,-1949225,-1819274,-1697261,-1568664,-1443074,-1304302,-1175289,-1043794,-913423,-785561 - ,-652104,-522835,-392667,-260517,-130088,-2,129509,260990,391931,522470 - ,655770,784902,917093,1046445,1176951,1303121,1441362,1565401,1702022,1822856 - ,1952852,2090384,2214607,2338436,2457483,2621440,-8781824,-8650752,-8519680,-8388608 - ,-8260828,-8126464,-8003337,-7859030,-7750057,-7602176,-7471104,-7340032,-7193045,-7090588 - ,-6946816,-6843344,-6676635,-6557575,-6447804,-6277614,-6159736,-6035729,-5884723,-5739567 - ,-5634818,-5489867,-5372864,-5243300,-5098939,-4988639,-4856258,-4728494,-4591717,-4447428 - ,-4322409,-4192918,-4062638,-3934141,-3797545,-3673373,-3531587,-3407391,-3277404,-3147797 - ,-3013578,-2886548,-2749811,-2616428,-2490949,-2361301,-2228482,-2096883,-1964343,-1831754 - ,-1702201,-1572495,-1442012,-1309242,-1182451,-1048996,-916905,-786510,-657079,-524730 - ,-393672,-261313,-128743,166,130678,261334,393287,524155,655570,786839 - ,917353,1052167,1179013,1309360,1442634,1571153,1703961,1832027,1965014,2097912 - ,2224861,2355341,2490455,2623051,2753484,2877015,3015783,3144157,3273705,3405255 - ,3542006,3669580,3802417,3935413,4065088,4190896,4333521,4456355,4579781,4713832 - ,4845707,4978625,5113278,5243817,5382318,5500592,5638135,5761179,5900822,6029270 - ,6186398,6297816,6436435,6559163,6666389,6806548,6950461,7086078,7195777,7350973 - ,7480132,7614852,7743514,7847288,8014762,8126464,8257536,8388608,8519680,8650752 - ,8781824,8912896,9043968,9175040,9306112,9437184 -}; - - - -const int16_t WebRtcIsacfix_kLevelsShapeQ10[1735] = { - 0, 0, -1, 0, 0, 1, 0, 1, 0, -821 - , 1, -763, -1, 656, -620, 0, 633, -636, 4, 615 - 
, -630, 1, 649, -1773, -670, 5, 678, 1810, -1876, -676 - , 0, 691, 1843, -1806, -743, -1, 749, 1795, 2920, -2872 - , -1761, -772, -3, 790, 1763, 2942, 0, 0, 0, 0 - , -792, 2, 0, 0, 1, 0, -854, 0, -702, -1 - , 662, -624, -5, 638, -611, -6, 638, -647, 0, 651 - , -685, -4, 679, 2123, -1814, -693, 0, 664, 1791, -1735 - , -737, 0, 771, 1854, 2873, -2867, -1842, -793, -1, 821 - , 1826, 2805, 3922, 0, 0, 0, -1, -779, 1, 786 - , 1, -708, 0, 789, -799, 1, 797, -663, 2, 646 - , -600, 3, 609, -600, 1, 658, 1807, -627, -3, 612 - , -625, 3, 632, -1732, -674, 1, 672, 2048, -1768, -715 - , 0, 724, 1784, -3881, -3072, -1774, -719, -1, 730, 1811 - , -2963, -1829, -806, -1, 816, 1795, 3050, -5389, -3784, -2942 - , -1893, -865, -12, 867, 1885, 2945, 3928, -2, 1, 4 - , 0, -694, 2, 665, -598, 5, 587, -599, -1, 661 - , -656, -7, 611, -607, 5, 603, -618, -4, 620, -1794 - , -645, -2, 654, -655, -1, 658, -1801, -700, 5, 707 - , 1927, -1752, -745, -8, 752, 1843, -2838, -1781, -801, 11 - , 796, 1811, 2942, 3866, -3849, -3026, -1848, -819, 2, 827 - , 1825, 2963, -3873, -2904, -1869, -910, -6, 903, 1902, 2885 - , 3978, 5286, -7168, -6081, -4989, -3968, -2963, -1970, -943, -2 - , 953, 1951, 2968, 3974, 5009, 6032, -2, 3, -1024, 2 - , 1024, -637, 1, 669, -613, -7, 630, -603, 4, 612 - , -612, 0, 590, -645, -11, 627, -657, -2, 671, 1849 - , -1853, -694, 2, 702, 1838, -3304, -1780, -736, -8, 732 - , 1772, -1709, -755, -6, 760, 1780, -2994, -1780, -800, 8 - , 819, 1830, 2816, -4096, -2822, -1881, -851, -4, 855, 1872 - , 2840, 3899, -3908, -2904, -1878, -887, 6, 897, 1872, 2942 - , 4008, -4992, -3881, -2933, -1915, -928, 1, 937, 1919, 2900 - , 4009, 4881, -6848, -6157, -5065, -3981, -2983, -1972, -978, -1 - , 968, 1979, 2988, 4008, 5007, 6108, 7003, 8051, 9027,-13272 - ,-12012,-11228,-10213, -9261, -8084, -7133, -6075, -5052, -4050, -3036 - , -2014, -996, -4, 1007, 2031, 3038, 4049, 5074, 6134, 7069 - , 8094, 9069, 10212, 11049, 12104, 51, -1024, -13, 1024, -609 - , -107, 613, -2048, -687, -95, 667, 
2048, -3072, -1724, -785 - , -34, 732, 1819, -2048, -703, -26, 681, 2048, -2048, -686 - , -9, 665, 2048, -2048, -702, 37, 748, 1723, -4096, -2786 - , -1844, -837, 37, 811, 1742, 3072, -4096, -2783, -1848, -881 - , 39, 898, 1843, 2792, 3764, -5120, -4096, -2923, -1833, -852 - , -14, 862, 1824, 2834, 4096, -6144, -5120, -3914, -2842, -1870 - , -886, -27, 888, 1929, 2931, 4051, -7168, -6144, -5120, -3866 - , -2933, -1915, -927, 64, 933, 1902, 2929, 3912, 5063, 6144 - ,-11264,-10240, -9216, -8192, -7086, -6144, -5039, -3972, -2943, -1929 - , -941, 3, 938, 1942, 2959, 3933, 4905, 6088, 6983, 8192 - , -9216, -8192, -7202, -6088, -4983, -4019, -2955, -1975, -966, 17 - , 997, 1981, 2967, 3990, 4948, 6022, 6967, 8192,-13312,-12288 - ,-11264,-10240, -9216, -8049, -6997, -6040, -5026, -4043, -3029, -2034 - , -1015, -23, 984, 1997, 3010, 4038, 5002, 6015, 6946, 8061 - , 9216, 10240,-12381,-11264,-10240, -9060, -8058, -7153, -6085, -5075 - , -4051, -3042, -2037, -1017, -5, 1007, 2028, 3035, 4050, 5088 - , 6111, 7160, 8156, 9215, 10095, 11229, 12202, 13016,-26624,-25600 - ,-24582,-23671,-22674,-21400,-20355,-19508,-18315,-17269,-16361,-15299 - ,-14363,-13294,-12262,-11237,-10203, -9227, -8165, -7156, -6116, -5122 - , -4076, -3056, -2043, -1020, -8, 1027, 2047, 3065, 4110, 5130 - , 6125, 7168, 8195, 9206, 10230, 11227, 12256, 13304, 14281, 15316 - , 16374, 17382, 18428, 19388, 20361, 21468, 22448, 23781, 0, 0 - , -1, 0, -2, 1024, 0, 0, 0, -1, 1024, -1024 - , 1, -1024, 4, 1024, -1024, 2, 1024, -1024, 2, 1024 - , -2048, -1024, -4, 1024, -1024, 2, 1024, -2048, -1024, -3 - , 1024, 2048, -2048, -1024, 4, 1024, 2048, -3072, -2048, -1024 - , -1, 662, 2048, 0, 1, 0, 0, 1, -2, -2 - , 0, 2, 1024, -1, 1024, -1024, 4, 1024, -1024, 1 - , 1024, -1024, 1, 1024, -2048, -781, -4, 844, -807, -5 - , 866, -2048, -726, -13, 777, 2048, -2048, -643, -4, 617 - , 2048, 3072, -3072, -2048, -629, 1, 630, 2048, 3072, 0 - , -1, 1, -2, 2, 1, -1024, 5, -1024, 6, 1024 - , -1024, 4, 1024, -1024, 1, 1024, -1024, 
-9, 1024, -673 - , -7, 655, -2048, -665, -15, 716, -2048, -647, 4, 640 - , 2048, -2048, -615, -1, 635, 2048, -2048, -613, 10, 637 - , 2048, 3072, -3072, -2048, -647, -3, 641, 2048, 3072, -5120 - , -4096, -3072, -2048, -681, 6, 685, 2048, 3072, 4096, 1 - , 1, 0, -1, 1024, -1024, -3, 1024, -1024, 6, 1024 - , -1024, -1, 769, -733, 0, 1024, -876, -2, 653, -1024 - , -4, 786, -596, -13, 595, -634, -2, 638, 2048, -2048 - , -620, -5, 620, 2048, -4096, -3072, -2048, -639, 11, 655 - , 2048, 3072, -3072, -2048, -659, 5, 663, 2048, -3072, -1823 - , -687, 22, 695, 2048, 3072, 4096, -4096, -3072, -1848, -715 - , -3, 727, 1816, 3072, 4096, 5120, -8192, -7168, -6144, -5120 - , -4096, -2884, -1771, -756, -14, 775, 1844, 3072, 4096, 5120 - , 6144, -1, 1, 0, -1024, 2, 815, -768, 2, 708 - , -1024, -3, 693, -661, -7, 607, -643, -5, 609, -624 - , 3, 631, -682, -3, 691, 2048, -2048, -640, 5, 650 - , 2048, -3072, -2048, -701, 9, 704, 2048, 3072, -3072, -2048 - , -670, 10, 674, 2048, 3072, -5120, -4096, -3072, -1749, -738 - , 0, 733, 1811, 3072, 4096, 5120, -4096, -3072, -1873, -753 - , 0, 756, 1874, 3072, 4096, -5120, -4096, -2900, -1838, -793 - , -6, 793, 1868, 2837, 4096, 5120, -7168, -6144, -5120, -4096 - , -2832, -1891, -828, 1, 828, 1901, 2823, 3912, 5120, 6144 - , 7168, 8192,-13312,-12288,-11264,-10240, -9216, -8192, -7168, -6144 - , -5120, -3976, -3004, -1911, -869, 7, 869, 1932, 3024, 3992 - , 5009, 6144, 7168, 8192, 9216, 10240, 11264, -4, 1024, -629 - , -22, 609, -623, 9, 640, -2048, -768, 1, 682, -2048 - , -741, 49, 722, 2048, -3072, -1706, -808, -20, 768, 1750 - , -1684, -727, -29, 788, 1840, 3033, -1758, -784, 0, 801 - , 1702, -3072, -1813, -814, 38, 820, 1884, 2927, -4096, -3241 - , -1839, -922, 25, 882, 1886, 2812, -4096, -2982, -1923, -894 - , 84, 912, 1869, 2778, 4096, -4928, -3965, -2902, -1920, -883 - , 3, 917, 1953, 2921, 3957, 4922, 6144, 7168, -5120, -3916 - , -2897, -1949, -930, 31, 959, 1934, 2901, 3851, 5120, -9216 - , -8192, -7046, -6029, -5030, -4034, -2980, 
-1969, -1013, -76, 963 - , 1963, 2901, 3929, 4893, 6270, 7168, 8192, 9216,-12288,-11264 - ,-10240, -9216, -8192, -6846, -6123, -5108, -4008, -3000, -1963, -954 - , -6, 958, 1992, 3009, 4020, 5085, 6097, 7168, 8192, 9216 - ,-11264,-10139, -9194, -8127, -7156, -6102, -5053, -4049, -3036, -2025 - , -1009, -34, 974, 1984, 3034, 4028, 5138, 6000, 7057, 8166 - , 9070, 10033, 11360, 12288,-13312,-12288,-10932,-10190, -9120, -8123 - , -7128, -6103, -5074, -4081, -3053, -2029, -989, -4, 1010, 2028 - , 3051, 4073, 5071, 6099, 7132, 8147, 9295, 10159, 11023, 12263 - , 13312, 14336,-25600,-24576,-23552,-22529,-21504,-20480,-19456,-18637 - ,-17425,-16165,-15316,-14327,-13606,-12135,-11182,-10107, -9153, -8144 - , -7146, -6160, -5129, -4095, -3064, -2038, -1025, 1, 1031, 2072 - , 3074, 4088, 5123, 6149, 7157, 8173, 9198, 10244, 11250, 12268 - , 13263, 14289, 15351, 16370, 17402, 18413, 19474, 20337, 21386, 22521 - , 23367, 24350, 0, 0, 0, 0, 0, 0, 0, 0 - , -1024, 0, 1024, -1024, 0, 1024, -1024, 0, 1024, -1024 - , 0, 1024, -1024, 0, 1024, -773, 0, 1024, -674, 0 - , 645, -2048, -745, 0, 628, 2048, -2048, -712, 0, 681 - , 2048, 3072, -3072, -2048, -673, 0, 682, 1964, 3257, 0 - , 0, 0, 0, 0, 0, 0, 0, -1024, 0, 1024 - , -1024, 0, 1024, -1024, 0, 1024, -705, 0, 623, -771 - , 0, 1024, -786, 0, 688, -631, 0, 652, 2048, -2048 - , -627, -1, 666, 2048, -3072, -1756, -694, 0, 674, 2048 - , -3098, -1879, -720, 5, 694, 1886, 2958, 4096, 0, 0 - , 0, 0, 1024, 0, 0, 1024, -769, 0, 1024, -1024 - , 0, 1024, -1024, 0, 1024, -817, 0, 734, -786, 0 - , 651, -638, 0, 637, -623, 0, 671, -652, 0, 619 - , 2048, -2048, -670, -1, 663, 2048, -1908, -680, 1, 686 - , 2048, 3072, 4096, -4096, -3072, -1833, -711, 0, 727, 1747 - , 3072, 4096, -4096, -2971, -1826, -762, 2, 766, 1832, 2852 - , 3928, 5079, 0, 0, 0, -1024, 0, 1024, -1024, 0 - , -656, 0, 1024, -599, 0, 620, -1024, 0, 1024, -603 - , 0, 622, -643, 0, 660, -599, 0, 611, -641, -1 - , 651, 2048, -2048, -648, -2, 647, 1798, -3072, -2048, -672 - , 2, 670, 
2048, -3072, -1780, -694, -1, 706, 1751, 3072 - , -3072, -1862, -757, 7, 739, 1798, 3072, 4096, -5120, -4096 - , -3253, -1811, -787, 3, 782, 1887, 3123, 4096, -7252, -6144 - , -5354, -4060, -2864, -1863, -820, -11, 847, 1903, 2970, 3851 - , 4921, 5957, 7168, 8192, 9306, 0, 0, -1024, 0, 1024 - , -726, 0, 706, -692, 0, 593, -598, 0, 616, -624 - , 0, 616, -605, 0, 613, -2048, -652, 1, 635, 2048 - , -2048, -647, -1, 660, 2048, -1811, -668, -2, 685, 2048 - , -1796, -731, -2, 730, 1702, 3072, -3072, -1766, -747, -4 - , 756, 1770, 3072, -4096, -3024, -1762, -783, 4, 771, 1781 - , 3072, -5120, -4057, -2807, -1832, -822, 0, 816, 1804, 2851 - , 3949, 5120, -6144, -4899, -3927, -2920, -1893, -874, -2, 868 - , 1881, 2905, 3960, 4912, 6144, -9216, -8192, -7168, -6225, -4963 - , -3943, -2956, -1890, -902, 0, 897, 1914, 2916, 3984, 4990 - , 6050, 7168,-11264,-10217, -9114, -8132, -7035, -5988, -4984, -4000 - , -2980, -1962, -927, 7, 931, 1956, 2981, 4031, 4972, 6213 - , 7227, 8192, 9216, 10240, 11170, 12288, 13312, 14336, 0, 1024 - , -557, 1, 571, -606, -4, 612, -1676, -707, 10, 673 - , 2048, -2048, -727, 5, 686, -3072, -1772, -755, 12, 716 - , 1877, -1856, -786, 2, 786, 1712, -1685, -818, -16, 863 - , 1729, -3072, -1762, -857, 3, 866, 1838, 2841, -3862, -2816 - , -1864, -925, -2, 923, 1897, 2779, -2782, -1838, -920, -28 - , 931, 1951, 2835, 3804, -4815, -4001, -2940, -1934, -959, -22 - , 975, 1957, 2904, 3971, 4835, -5148, -3892, -2944, -1953, -986 - , -11, 989, 1968, 2939, 3949, 4947, 5902, -9216, -8192, -6915 - , -6004, -4965, -4013, -3009, -1977, -987, -1, 982, 1972, 3000 - , 3960, 4939, 5814, -8976, -7888, -7084, -5955, -5043, -4009, -2991 - , -2002, -1000, -8, 993, 2011, 3023, 4026, 5028, 6023, 7052 - , 8014, 9216,-11240,-10036, -9125, -8118, -7105, -6062, -5048, -4047 - , -3044, -2025, -1009, -1, 1011, 2023, 3042, 4074, 5085, 6108 - , 7119, 8142, 9152, 10114, 11141, 12250, 13307,-15360,-14099,-13284 - ,-12291,-11223,-10221, -9152, -8147, -7128, -6104, -5077, -4072, -3062 - 
, -2033, -1020, 7, 1018, 2038, 3059, 4081, 5084, 6109, 7102 - , 8128, 9134, 10125, 11239, 12080,-23552,-22528,-21504,-20480,-19456 - ,-18159,-17240,-16291,-15364,-14285,-13305,-12271,-11233,-10217, -9198 - , -8175, -7157, -6134, -5122, -4089, -3071, -2047, -1018, 3, 1026 - , 2041, 3077, 4090, 5108, 6131, 7150, 8172, 9175, 10196, 11272 - , 12303, 13273, 14328, 15332, 16334, 17381, 18409, 19423, 20423, 21451 - , 22679, 23391, 24568, 25600, 26589 -}; - -/* cdf tables for quantizer indices */ -const uint16_t WebRtcIsacfix_kCdfGain[1212] = { - 0, 13, 301, 3730, 61784, 65167, 65489, 65535, 0, 17, - 142, 314, 929, 2466, 7678, 56450, 63463, 64740, 65204, 65426, - 65527, 65535, 0, 8, 100, 724, 6301, 60105, 65125, 65510, - 65531, 65535, 0, 13, 117, 368, 1068, 3010, 11928, 53603, - 61177, 63404, 64505, 65108, 65422, 65502, 65531, 65535, 0, 4, - 17, 96, 410, 1859, 12125, 54361, 64103, 65305, 65497, 65535, - 0, 4, 88, 230, 469, 950, 1746, 3228, 6092, 16592, - 44756, 56848, 61256, 63308, 64325, 64920, 65309, 65460, 65502, 65522, - 65535, 0, 88, 352, 1675, 6339, 20749, 46686, 59284, 63525, - 64949, 65359, 65502, 65527, 65535, 0, 13, 38, 63, 117, - 234, 381, 641, 929, 1407, 2043, 2809, 4032, 5753, 8792, - 14407, 24308, 38941, 48947, 55403, 59293, 61411, 62688, 63630, 64329, - 64840, 65188, 65376, 65472, 65506, 65527, 65531, 65535, 0, 8, - 29, 75, 222, 615, 1327, 2801, 5623, 9931, 16094, 24966, - 34419, 43458, 50676, 56186, 60055, 62500, 63936, 64765, 65225, 65435, - 65514, 65535, 0, 8, 13, 15, 17, 21, 33, 59, - 71, 92, 151, 243, 360, 456, 674, 934, 1223, 1583, - 1989, 2504, 3031, 3617, 4354, 5154, 6163, 7411, 8780, 10747, - 12874, 15591, 18974, 23027, 27436, 32020, 36948, 41830, 46205, 49797, - 53042, 56094, 58418, 60360, 61763, 62818, 63559, 64103, 64509, 64798, - 65045, 65162, 65288, 65363, 65447, 65506, 65522, 65531, 65533, 65535, - 0, 4, 6, 25, 38, 71, 138, 264, 519, 808, - 1227, 1825, 2516, 3408, 4279, 5560, 7092, 9197, 11420, 14108, - 16947, 20300, 23926, 27459, 31164, 
34827, 38575, 42178, 45540, 48747, - 51444, 54090, 56426, 58460, 60080, 61595, 62734, 63668, 64275, 64673, - 64936, 65112, 65217, 65334, 65426, 65464, 65477, 65489, 65518, 65527, - 65529, 65531, 65533, 65535, 0, 2, 4, 8, 10, 12, - 14, 16, 21, 33, 50, 71, 84, 92, 105, 138, - 180, 255, 318, 377, 435, 473, 511, 590, 682, 758, - 913, 1097, 1256, 1449, 1671, 1884, 2169, 2445, 2772, 3157, - 3563, 3944, 4375, 4848, 5334, 5820, 6448, 7101, 7716, 8378, - 9102, 9956, 10752, 11648, 12707, 13670, 14758, 15910, 17187, 18472, - 19627, 20649, 21951, 23169, 24283, 25552, 26862, 28227, 29391, 30764, - 31882, 33213, 34432, 35600, 36910, 38116, 39464, 40729, 41872, 43144, - 44371, 45514, 46762, 47813, 48968, 50069, 51032, 51974, 52908, 53737, - 54603, 55445, 56282, 56990, 57572, 58191, 58840, 59410, 59887, 60264, - 60607, 60946, 61269, 61516, 61771, 61960, 62198, 62408, 62558, 62776, - 62985, 63207, 63408, 63546, 63739, 63906, 64070, 64237, 64371, 64551, - 64677, 64836, 64999, 65095, 65213, 65284, 65338, 65380, 65426, 65447, - 65472, 65485, 65487, 65489, 65502, 65510, 65512, 65514, 65516, 65518, - 65522, 65531, 65533, 65535, 0, 2, 4, 6, 65528, 65531, - 65533, 65535, 0, 2, 4, 6, 8, 10, 222, 65321, - 65513, 65528, 65531, 65533, 65535, 0, 2, 4, 50, 65476, - 65529, 65531, 65533, 65535, 0, 2, 4, 6, 8, 12, - 38, 544, 64936, 65509, 65523, 65525, 65529, 65531, 65533, 65535, - 0, 2, 4, 6, 8, 10, 1055, 64508, 65528, 65531, - 65533, 65535, 0, 2, 4, 6, 8, 10, 12, 123, - 3956, 62999, 65372, 65495, 65515, 65521, 65523, 65525, 65527, 65529, - 65531, 65533, 65535, 0, 2, 4, 12, 53, 4707, 59445, - 65467, 65525, 65527, 65529, 65531, 65533, 65535, 0, 2, 4, - 6, 8, 10, 12, 14, 16, 38, 40, 50, 67, - 96, 234, 929, 14345, 55750, 64866, 65389, 65462, 65514, 65517, - 65519, 65521, 65523, 65525, 65527, 65529, 65531, 65533, 65535, 0, - 2, 4, 6, 8, 10, 15, 35, 91, 377, 1946, - 13618, 52565, 63714, 65184, 65465, 65520, 65523, 65525, 65527, 65529, - 65531, 65533, 65535, 0, 2, 4, 6, 8, 10, 12, - 14, 16, 18, 20, 22, 
24, 26, 28, 30, 32, - 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, - 54, 82, 149, 362, 751, 1701, 4239, 12893, 38627, 55072, - 60875, 63071, 64158, 64702, 65096, 65283, 65412, 65473, 65494, 65505, - 65508, 65517, 65519, 65521, 65523, 65525, 65527, 65529, 65531, 65533, - 65535, 0, 2, 15, 23, 53, 143, 260, 418, 698, - 988, 1353, 1812, 2411, 3144, 4015, 5143, 6401, 7611, 8999, - 10653, 12512, 14636, 16865, 19404, 22154, 24798, 27521, 30326, 33102, - 35790, 38603, 41415, 43968, 46771, 49435, 52152, 54715, 57143, 59481, - 61178, 62507, 63603, 64489, 64997, 65257, 65427, 65473, 65503, 65520, - 65529, 65531, 65533, 65535, 0, 3, 6, 9, 26, 32, - 44, 46, 64, 94, 111, 164, 205, 254, 327, 409, - 506, 608, 733, 885, 1093, 1292, 1482, 1742, 1993, 2329, - 2615, 3029, 3374, 3798, 4257, 4870, 5405, 5992, 6618, 7225, - 7816, 8418, 9051, 9761, 10532, 11380, 12113, 13010, 13788, 14594, - 15455, 16361, 17182, 18088, 18997, 20046, 20951, 21968, 22947, 24124, - 25296, 26547, 27712, 28775, 29807, 30835, 31709, 32469, 33201, 34014, - 34876, 35773, 36696, 37620, 38558, 39547, 40406, 41277, 42367, 43290, - 44445, 45443, 46510, 47684, 48973, 50157, 51187, 52242, 53209, 54083, - 55006, 55871, 56618, 57293, 57965, 58556, 59222, 59722, 60180, 60554, - 60902, 61250, 61554, 61837, 62100, 62372, 62631, 62856, 63078, 63324, - 63557, 63768, 63961, 64089, 64235, 64352, 64501, 64633, 64770, 64887, - 65001, 65059, 65121, 65188, 65246, 65302, 65346, 65390, 65428, 65463, - 65477, 65506, 65515, 65517, 65519, 65521, 65523, 65525, 65527, 65529, - 65531, 65533, 65535, 0, 2, 4, 109, 65332, 65531, 65533, - 65535, 0, 2, 4, 6, 8, 25, 1817, 63874, 65511, - 65527, 65529, 65531, 65533, 65535, 0, 2, 4, 907, 65014, - 65529, 65531, 65533, 65535, 0, 2, 4, 6, 8, 10, - 12, 132, 2743, 62708, 65430, 65525, 65527, 65529, 65531, 65533, - 65535, 0, 2, 4, 6, 8, 35, 3743, 61666, 65485, - 65531, 65533, 65535, 0, 2, 4, 6, 8, 10, 23, - 109, 683, 6905, 58417, 64911, 65398, 65497, 65518, 65525, 65527, - 65529, 65531, 65533, 65535, 0, 2, 
4, 6, 53, 510, - 10209, 55212, 64573, 65441, 65522, 65529, 65531, 65533, 65535, 0, - 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, - 22, 32, 90, 266, 1037, 3349, 14468, 50488, 62394, 64685, - 65341, 65480, 65514, 65519, 65521, 65523, 65525, 65527, 65529, 65531, - 65533, 65535, 0, 2, 4, 6, 9, 16, 37, 106, - 296, 748, 1868, 5733, 18897, 45553, 60165, 63949, 64926, 65314, - 65441, 65508, 65524, 65529, 65531, 65533, 65535, 0, 2, 4, - 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, - 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, - 46, 48, 50, 83, 175, 344, 667, 1293, 2337, 4357, - 8033, 14988, 28600, 43244, 52011, 57042, 59980, 61779, 63065, 63869, - 64390, 64753, 64988, 65164, 65326, 65422, 65462, 65492, 65506, 65522, - 65524, 65526, 65531, 65533, 65535, 0, 2, 4, 6, 8, - 10, 12, 14, 16, 25, 39, 48, 55, 62, 65, - 85, 106, 139, 169, 194, 252, 323, 485, 688, 1074, - 1600, 2544, 3863, 5733, 8303, 11397, 15529, 20273, 25734, 31455, - 36853, 41891, 46410, 50306, 53702, 56503, 58673, 60479, 61880, 62989, - 63748, 64404, 64852, 65124, 65309, 65424, 65480, 65524, 65528, 65533, - 65535, 0, 2, 4, 6, 8, 10, 12, 14, 21, - 23, 25, 27, 29, 31, 39, 41, 43, 48, 60, - 72, 79, 106, 136, 166, 187, 224, 252, 323, 381, - 427, 478, 568, 660, 783, 912, 1046, 1175, 1365, 1567, - 1768, 2024, 2347, 2659, 3049, 3529, 4033, 4623, 5281, 5925, - 6726, 7526, 8417, 9468, 10783, 12141, 13571, 15222, 16916, 18659, - 20350, 22020, 23725, 25497, 27201, 29026, 30867, 32632, 34323, 36062, - 37829, 39466, 41144, 42654, 43981, 45343, 46579, 47759, 49013, 50171, - 51249, 52283, 53245, 54148, 54938, 55669, 56421, 57109, 57791, 58464, - 59092, 59674, 60105, 60653, 61083, 61407, 61757, 62095, 62388, 62649, - 62873, 63157, 63358, 63540, 63725, 63884, 64046, 64155, 64278, 64426, - 64548, 64654, 64806, 64906, 64994, 65077, 65137, 65215, 65277, 65324, - 65354, 65409, 65437, 65455, 65462, 65490, 65495, 65499, 65508, 65511, - 65513, 65515, 65517, 65519, 65521, 65523, 65525, 65527, 65529, 65531, - 65533, 65535 -}; - -const uint16_t 
WebRtcIsacfix_kCdfShape[2059] = { - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, - 65535, 0, 8, 65514, 65535, 0, 29, 65481, 65535, 0, - 121, 65439, 65535, 0, 239, 65284, 65535, 0, 8, 779, - 64999, 65527, 65535, 0, 8, 888, 64693, 65522, 65535, 0, - 29, 2604, 62843, 65497, 65531, 65535, 0, 25, 176, 4576, - 61164, 65275, 65527, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 4, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 4, 65535, 0, 33, 65502, 65535, - 0, 54, 65481, 65535, 0, 251, 65309, 65535, 0, 611, - 65074, 65535, 0, 1273, 64292, 65527, 65535, 0, 4, 1809, - 63940, 65518, 65535, 0, 88, 4392, 60603, 65426, 65531, 65535, - 0, 25, 419, 7046, 57756, 64961, 65514, 65531, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, 65531, - 65535, 0, 65535, 0, 8, 65531, 65535, 0, 4, 65527, - 65535, 0, 17, 65510, 65535, 0, 42, 65481, 65535, 0, - 197, 65342, 65531, 65535, 0, 385, 65154, 65535, 0, 1005, - 64522, 65535, 0, 8, 1985, 63469, 65533, 65535, 0, 38, - 3119, 61884, 65514, 65535, 0, 4, 6, 67, 4961, 60804, - 65472, 65535, 0, 17, 565, 9182, 56538, 65087, 65514, 65535, - 0, 8, 63, 327, 2118, 14490, 52774, 63839, 65376, 65522, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 17, 65522, 65535, 0, 59, 65489, 65535, 0, 50, 65522, - 65535, 0, 54, 65489, 65535, 0, 310, 65179, 65535, 0, - 615, 64836, 65535, 0, 4, 1503, 63965, 65535, 0, 2780, - 63383, 65535, 0, 21, 3919, 61051, 65527, 65535, 0, 84, - 6674, 59929, 65435, 65535, 0, 4, 255, 7976, 55784, 65150, - 65518, 65531, 65535, 0, 4, 8, 582, 10726, 53465, 64949, - 65518, 65535, 0, 29, 339, 3006, 17555, 49517, 62956, 65200, - 65497, 65531, 65535, 0, 2, 33, 138, 565, 2324, 7670, - 22089, 45966, 58949, 63479, 64966, 65380, 65518, 65535, 0, 65535, - 0, 65535, 0, 2, 65533, 65535, 0, 46, 65514, 65535, - 0, 414, 65091, 65535, 0, 540, 64911, 65535, 0, 419, - 65162, 65535, 0, 976, 64790, 65535, 0, 2977, 62495, 65531, - 65535, 0, 4, 3852, 61034, 65527, 65535, 0, 4, 29, - 
6021, 60243, 65468, 65535, 0, 84, 6711, 58066, 65418, 65535, - 0, 13, 281, 9550, 54917, 65125, 65506, 65535, 0, 2, - 63, 984, 12108, 52644, 64342, 65435, 65527, 65535, 0, 29, - 251, 2014, 14871, 47553, 62881, 65229, 65518, 65535, 0, 13, - 142, 749, 4220, 18497, 45200, 60913, 64823, 65426, 65527, 65535, - 0, 13, 71, 264, 1176, 3789, 10500, 24480, 43488, 56324, - 62315, 64493, 65242, 65464, 65514, 65522, 65531, 65535, 0, 4, - 13, 38, 109, 205, 448, 850, 1708, 3429, 6276, 11371, - 19221, 29734, 40955, 49391, 55411, 59460, 62102, 63793, 64656, 65150, - 65401, 65485, 65522, 65531, 65535, 0, 65535, 0, 2, 65533, - 65535, 0, 1160, 65476, 65535, 0, 2, 6640, 64763, 65533, - 65535, 0, 2, 38, 9923, 61009, 65527, 65535, 0, 2, - 4949, 63092, 65533, 65535, 0, 2, 3090, 63398, 65533, 65535, - 0, 2, 2520, 58744, 65510, 65535, 0, 2, 13, 544, - 8784, 51403, 65148, 65533, 65535, 0, 2, 25, 1017, 10412, - 43550, 63651, 65489, 65527, 65535, 0, 2, 4, 29, 783, - 13377, 52462, 64524, 65495, 65533, 65535, 0, 2, 4, 6, - 100, 1817, 18451, 52590, 63559, 65376, 65531, 65535, 0, 2, - 4, 6, 46, 385, 2562, 11225, 37416, 60488, 65026, 65487, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 12, - 42, 222, 971, 5221, 19811, 45048, 60312, 64486, 65294, 65474, - 65525, 65529, 65533, 65535, 0, 2, 4, 8, 71, 167, - 666, 2533, 7875, 19622, 38082, 54359, 62108, 64633, 65290, 65495, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 13, - 109, 586, 1930, 4949, 11600, 22641, 36125, 48312, 56899, 61495, - 63927, 64932, 65389, 65489, 65518, 65531, 65533, 65535, 0, 4, - 6, 8, 67, 209, 712, 1838, 4195, 8432, 14432, 22834, - 31723, 40523, 48139, 53929, 57865, 60657, 62403, 63584, 64363, 64907, - 65167, 65372, 65472, 65514, 65535, 0, 2, 4, 13, 25, - 42, 46, 50, 75, 113, 147, 281, 448, 657, 909, - 1185, 1591, 1976, 2600, 3676, 5317, 7398, 9914, 12941, 16169, - 19477, 22885, 26464, 29851, 33360, 37228, 41139, 44802, 48654, 52058, - 55181, 57676, 59581, 61022, 62190, 63107, 63676, 64199, 64547, 64924, - 65158, 65313, 65430, 65481, 
65518, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65533, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65533, 65535, 0, 2, 65535, 0, - 2, 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65533, - 65535, 0, 2, 4, 65533, 65535, 0, 2, 65533, 65535, - 0, 2, 4, 65531, 65533, 65535, 0, 2, 4, 65531, - 65533, 65535, 0, 2, 4, 6, 65524, 65533, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65533, 65535, 0, 65533, - 65535, 0, 2, 65533, 65535, 0, 2, 65533, 65535, 0, - 2, 65533, 65535, 0, 2, 4, 65532, 65535, 0, 6, - 65523, 65535, 0, 2, 15, 65530, 65533, 65535, 0, 2, - 35, 65493, 65531, 65533, 65535, 0, 2, 4, 158, 65382, - 65531, 65533, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 2, 65535, 0, 2, - 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65533, 65535, - 0, 2, 65533, 65535, 0, 9, 65512, 65535, 0, 2, - 12, 65529, 65535, 0, 2, 73, 65434, 65533, 65535, 0, - 2, 240, 65343, 65533, 65535, 0, 2, 476, 65017, 65531, - 65533, 65535, 0, 2, 4, 1046, 64686, 65531, 65533, 65535, - 0, 2, 4, 6, 8, 1870, 63898, 65529, 65531, 65533, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65533, 65535, - 0, 2, 65533, 65535, 0, 2, 65533, 65535, 0, 2, - 65532, 65535, 0, 6, 65533, 65535, 0, 6, 65523, 65535, - 0, 2, 65532, 65535, 0, 137, 65439, 65535, 0, 576, - 64899, 65533, 65535, 0, 2, 289, 65299, 65533, 65535, 0, - 2, 4, 6, 880, 64134, 65531, 65533, 65535, 0, 2, - 4, 1853, 63347, 65533, 65535, 0, 2, 6, 2516, 61762, - 65529, 65531, 65533, 65535, 0, 2, 4, 9, 3980, 61380, - 65503, 65529, 65531, 65533, 65535, 0, 2, 4, 6, 8, - 10, 12, 61, 6393, 59859, 65466, 65527, 65529, 65531, 65533, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 2, 65532, - 65535, 0, 3, 65529, 65535, 0, 2, 65529, 65535, 0, - 61, 65453, 65535, 0, 234, 65313, 65535, 0, 503, 65138, - 65535, 0, 155, 65402, 65533, 65535, 0, 2, 1058, 64554, - 65533, 65535, 0, 2, 4, 3138, 62109, 65531, 65533, 65535, - 0, 2, 4, 2031, 63339, 65531, 65533, 65535, 0, 2, - 4, 6, 9, 4155, 60778, 65523, 65529, 
65531, 65533, 65535, - 0, 2, 4, 41, 6189, 59269, 65490, 65531, 65533, 65535, - 0, 2, 4, 6, 210, 8789, 57043, 65400, 65528, 65531, - 65533, 65535, 0, 2, 4, 6, 8, 26, 453, 10086, - 55499, 64948, 65483, 65524, 65527, 65529, 65531, 65533, 65535, 0, - 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, - 114, 1014, 11202, 52670, 64226, 65356, 65503, 65514, 65523, 65525, - 65527, 65529, 65531, 65533, 65535, 0, 65533, 65535, 0, 15, - 65301, 65535, 0, 152, 64807, 65535, 0, 2, 3328, 63308, - 65535, 0, 2, 4050, 59730, 65533, 65535, 0, 2, 164, - 10564, 61894, 65529, 65535, 0, 15, 6712, 59831, 65076, 65532, - 65535, 0, 32, 7712, 57449, 65459, 65535, 0, 2, 210, - 7849, 53110, 65021, 65523, 65535, 0, 2, 12, 1081, 13883, - 48262, 62870, 65477, 65535, 0, 2, 88, 847, 6145, 37852, - 62012, 65454, 65533, 65535, 0, 9, 47, 207, 1823, 14522, - 45521, 61069, 64891, 65481, 65528, 65531, 65533, 65535, 0, 2, - 9, 488, 2881, 12758, 38703, 58412, 64420, 65410, 65533, 65535, - 0, 2, 4, 6, 61, 333, 1891, 6486, 19720, 43188, - 57547, 62472, 64796, 65421, 65497, 65523, 65529, 65531, 65533, 65535, - 0, 2, 4, 6, 8, 10, 12, 29, 117, 447, - 1528, 6138, 21242, 43133, 56495, 62432, 64746, 65362, 65500, 65529, - 65531, 65533, 65535, 0, 2, 18, 105, 301, 760, 1490, - 3472, 7568, 15002, 26424, 40330, 53029, 60048, 62964, 64274, 64890, - 65337, 65445, 65489, 65513, 65527, 65530, 65533, 65535, 0, 2, - 4, 6, 41, 102, 409, 853, 2031, 4316, 7302, 11328, - 16869, 24825, 34926, 43481, 50877, 56126, 59874, 62103, 63281, 63857, - 64166, 64675, 65382, 65522, 65531, 65533, 65535, 0, 2, 4, - 6, 8, 10, 12, 14, 16, 18, 29, 38, 53, - 58, 96, 181, 503, 1183, 2849, 5590, 8600, 11379, 13942, - 16478, 19453, 22638, 26039, 29411, 32921, 37596, 41433, 44998, 48560, - 51979, 55106, 57666, 59892, 61485, 62616, 63484, 64018, 64375, 64685, - 64924, 65076, 65278, 65395, 65471, 65509, 65529, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 2, 65533, 65535, 0, 2, - 65533, 65535, 0, 2, 65533, 65535, 0, 2, 
65533, 65535, - 0, 2, 65533, 65535, 0, 2, 65533, 65535, 0, 7, - 65519, 65535, 0, 2, 14, 65491, 65533, 65535, 0, 2, - 81, 65427, 65531, 65533, 65535, 0, 2, 4, 312, 65293, - 65528, 65533, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 2, 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65533, - 65535, 0, 5, 65523, 65535, 0, 2, 65533, 65535, 0, - 7, 65526, 65535, 0, 46, 65464, 65533, 65535, 0, 2, - 120, 65309, 65533, 65535, 0, 2, 5, 362, 65097, 65533, - 65535, 0, 2, 18, 1164, 64785, 65528, 65531, 65533, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65533, 65535, 0, - 65535, 0, 65533, 65535, 0, 2, 65533, 65535, 0, 2, - 65533, 65535, 0, 2, 65533, 65535, 0, 2, 65530, 65535, - 0, 2, 65523, 65535, 0, 69, 65477, 65535, 0, 141, - 65459, 65535, 0, 194, 65325, 65533, 65535, 0, 2, 543, - 64912, 65533, 65535, 0, 5, 1270, 64301, 65529, 65531, 65533, - 65535, 0, 2, 4, 12, 2055, 63538, 65508, 65531, 65533, - 65535, 0, 2, 7, 102, 3775, 61970, 65429, 65526, 65528, - 65533, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 2, - 65533, 65535, 0, 2, 65535, 0, 9, 65533, 65535, 0, - 25, 65512, 65535, 0, 2, 65533, 65535, 0, 44, 65480, - 65535, 0, 48, 65475, 65535, 0, 162, 65373, 65535, 0, - 637, 64806, 65533, 65535, 0, 2, 935, 64445, 65533, 65535, - 0, 2, 4, 1662, 64083, 65533, 65535, 0, 2, 12, - 3036, 62469, 65521, 65533, 65535, 0, 2, 120, 5405, 60468, - 65469, 65531, 65533, 65535, 0, 2, 4, 18, 254, 6663, - 58999, 65272, 65528, 65533, 65535, 0, 2, 4, 9, 12, - 67, 591, 8981, 56781, 64564, 65365, 65508, 65524, 65526, 65529, - 65531, 65533, 65535, 0, 65535, 0, 65535, 0, 2, 65533, - 65535, 0, 9, 65526, 65535, 0, 14, 65503, 65535, 0, - 127, 65390, 65535, 0, 517, 64990, 65535, 0, 178, 65330, - 65535, 0, 2, 1055, 64533, 65533, 65535, 0, 2, 1558, - 63942, 65533, 65535, 0, 2, 2205, 63173, 65533, 65535, 0, - 25, 4493, 60862, 65505, 65533, 65535, 0, 2, 48, 5890, - 59442, 65482, 65533, 65535, 0, 2, 4, 127, 7532, 58191, - 65394, 65533, 65535, 0, 2, 5, 32, 550, 10388, 54924, - 
65046, 65510, 65531, 65533, 65535, 0, 2, 4, 30, 150, - 1685, 14340, 51375, 63619, 65288, 65503, 65528, 65533, 65535, 0, - 2, 4, 6, 8, 28, 97, 473, 2692, 15407, 50020, - 62880, 65064, 65445, 65508, 65531, 65533, 65535, 0, 2, 4, - 12, 32, 79, 150, 372, 907, 2184, 5868, 18207, 45431, - 59856, 64031, 65096, 65401, 65481, 65507, 65521, 65523, 65525, 65527, - 65529, 65531, 65533, 65535, 0, 65533, 65535, 0, 182, 65491, - 65535, 0, 877, 64286, 65535, 0, 9, 2708, 63612, 65533, - 65535, 0, 2, 6038, 59532, 65535, 0, 2, 92, 5500, - 60539, 65533, 65535, 0, 268, 8908, 56512, 65385, 65535, 0, - 129, 13110, 52742, 65036, 65535, 0, 2, 806, 14003, 51929, - 64732, 65523, 65535, 0, 7, 92, 2667, 18159, 47678, 62610, - 65355, 65535, 0, 32, 1836, 19676, 48237, 61677, 64960, 65526, - 65535, 0, 21, 159, 967, 5668, 22782, 44709, 58317, 64020, - 65406, 65528, 65535, 0, 7, 162, 1838, 8328, 23929, 43014, - 56394, 63374, 65216, 65484, 65521, 65535, 0, 2, 4, 6, - 28, 268, 1120, 3613, 10688, 24185, 40989, 54917, 61684, 64510, - 65403, 65530, 65535, 0, 2, 16, 44, 139, 492, 1739, - 5313, 13558, 26766, 41566, 52446, 58937, 62815, 64480, 65201, 65454, - 65524, 65533, 65535, 0, 7, 25, 76, 263, 612, 1466, - 3325, 6832, 12366, 20152, 29466, 39255, 47360, 53506, 57740, 60726, - 62845, 64131, 64882, 65260, 65459, 65521, 65528, 65530, 65535, 0, - 2, 4, 14, 48, 136, 312, 653, 1240, 2369, 4327, - 7028, 10759, 15449, 21235, 28027, 35386, 42938, 49562, 54990, 59119, - 62086, 63916, 64863, 65249, 65445, 65493, 65523, 65535, 0, 2, - 4, 6, 8, 10, 12, 21, 83, 208, 409, 723, - 1152, 1868, 2951, 4463, 6460, 8979, 11831, 15195, 18863, 22657, - 26762, 30881, 34963, 39098, 43054, 47069, 50620, 53871, 56821, 59386, - 61340, 62670, 63512, 64023, 64429, 64750, 64944, 65126, 65279, 65366, - 65413, 65445, 65473, 65505, 65510, 65521, 65528, 65530, 65535 -}; - -/* pointers to cdf tables for quantizer indices */ -const uint16_t *WebRtcIsacfix_kCdfGainPtr[3][12] = { - { WebRtcIsacfix_kCdfGain +0 +0, WebRtcIsacfix_kCdfGain +0 
+8, WebRtcIsacfix_kCdfGain +0 +22, - WebRtcIsacfix_kCdfGain +0 +32, WebRtcIsacfix_kCdfGain +0 +48, WebRtcIsacfix_kCdfGain +0 +60, - WebRtcIsacfix_kCdfGain +0 +81, WebRtcIsacfix_kCdfGain +0 +95, WebRtcIsacfix_kCdfGain +0 +128, - WebRtcIsacfix_kCdfGain +0 +152, WebRtcIsacfix_kCdfGain +0 +210, WebRtcIsacfix_kCdfGain +0 +264 - }, - { WebRtcIsacfix_kCdfGain +404 +0, WebRtcIsacfix_kCdfGain +404 +8, WebRtcIsacfix_kCdfGain +404 +21, - WebRtcIsacfix_kCdfGain +404 +30, WebRtcIsacfix_kCdfGain +404 +46, WebRtcIsacfix_kCdfGain +404 +58, - WebRtcIsacfix_kCdfGain +404 +79, WebRtcIsacfix_kCdfGain +404 +93, WebRtcIsacfix_kCdfGain +404 +125, - WebRtcIsacfix_kCdfGain +404 +149, WebRtcIsacfix_kCdfGain +404 +207, WebRtcIsacfix_kCdfGain +404 +260 - }, - { WebRtcIsacfix_kCdfGain +803 +0, WebRtcIsacfix_kCdfGain +803 +8, WebRtcIsacfix_kCdfGain +803 +22, - WebRtcIsacfix_kCdfGain +803 +31, WebRtcIsacfix_kCdfGain +803 +48, WebRtcIsacfix_kCdfGain +803 +60, - WebRtcIsacfix_kCdfGain +803 +81, WebRtcIsacfix_kCdfGain +803 +96, WebRtcIsacfix_kCdfGain +803 +129, - WebRtcIsacfix_kCdfGain +803 +154, WebRtcIsacfix_kCdfGain +803 +212, WebRtcIsacfix_kCdfGain +803 +268 - } -}; - -const uint16_t *WebRtcIsacfix_kCdfShapePtr[3][108] = { - { WebRtcIsacfix_kCdfShape +0 +0, WebRtcIsacfix_kCdfShape +0 +2, WebRtcIsacfix_kCdfShape +0 +4, - WebRtcIsacfix_kCdfShape +0 +6, WebRtcIsacfix_kCdfShape +0 +8, WebRtcIsacfix_kCdfShape +0 +10, - WebRtcIsacfix_kCdfShape +0 +12, WebRtcIsacfix_kCdfShape +0 +14, WebRtcIsacfix_kCdfShape +0 +16, - WebRtcIsacfix_kCdfShape +0 +18, WebRtcIsacfix_kCdfShape +0 +21, WebRtcIsacfix_kCdfShape +0 +25, - WebRtcIsacfix_kCdfShape +0 +29, WebRtcIsacfix_kCdfShape +0 +33, WebRtcIsacfix_kCdfShape +0 +37, - WebRtcIsacfix_kCdfShape +0 +43, WebRtcIsacfix_kCdfShape +0 +49, WebRtcIsacfix_kCdfShape +0 +56, - WebRtcIsacfix_kCdfShape +0 +64, WebRtcIsacfix_kCdfShape +0 +66, WebRtcIsacfix_kCdfShape +0 +68, - WebRtcIsacfix_kCdfShape +0 +70, WebRtcIsacfix_kCdfShape +0 +72, WebRtcIsacfix_kCdfShape +0 +75, - 
WebRtcIsacfix_kCdfShape +0 +77, WebRtcIsacfix_kCdfShape +0 +79, WebRtcIsacfix_kCdfShape +0 +81, - WebRtcIsacfix_kCdfShape +0 +83, WebRtcIsacfix_kCdfShape +0 +86, WebRtcIsacfix_kCdfShape +0 +90, - WebRtcIsacfix_kCdfShape +0 +94, WebRtcIsacfix_kCdfShape +0 +98, WebRtcIsacfix_kCdfShape +0 +102, - WebRtcIsacfix_kCdfShape +0 +107, WebRtcIsacfix_kCdfShape +0 +113, WebRtcIsacfix_kCdfShape +0 +120, - WebRtcIsacfix_kCdfShape +0 +129, WebRtcIsacfix_kCdfShape +0 +131, WebRtcIsacfix_kCdfShape +0 +133, - WebRtcIsacfix_kCdfShape +0 +135, WebRtcIsacfix_kCdfShape +0 +137, WebRtcIsacfix_kCdfShape +0 +141, - WebRtcIsacfix_kCdfShape +0 +143, WebRtcIsacfix_kCdfShape +0 +147, WebRtcIsacfix_kCdfShape +0 +151, - WebRtcIsacfix_kCdfShape +0 +155, WebRtcIsacfix_kCdfShape +0 +159, WebRtcIsacfix_kCdfShape +0 +164, - WebRtcIsacfix_kCdfShape +0 +168, WebRtcIsacfix_kCdfShape +0 +172, WebRtcIsacfix_kCdfShape +0 +178, - WebRtcIsacfix_kCdfShape +0 +184, WebRtcIsacfix_kCdfShape +0 +192, WebRtcIsacfix_kCdfShape +0 +200, - WebRtcIsacfix_kCdfShape +0 +211, WebRtcIsacfix_kCdfShape +0 +213, WebRtcIsacfix_kCdfShape +0 +215, - WebRtcIsacfix_kCdfShape +0 +217, WebRtcIsacfix_kCdfShape +0 +219, WebRtcIsacfix_kCdfShape +0 +223, - WebRtcIsacfix_kCdfShape +0 +227, WebRtcIsacfix_kCdfShape +0 +231, WebRtcIsacfix_kCdfShape +0 +235, - WebRtcIsacfix_kCdfShape +0 +239, WebRtcIsacfix_kCdfShape +0 +243, WebRtcIsacfix_kCdfShape +0 +248, - WebRtcIsacfix_kCdfShape +0 +252, WebRtcIsacfix_kCdfShape +0 +258, WebRtcIsacfix_kCdfShape +0 +264, - WebRtcIsacfix_kCdfShape +0 +273, WebRtcIsacfix_kCdfShape +0 +282, WebRtcIsacfix_kCdfShape +0 +293, - WebRtcIsacfix_kCdfShape +0 +308, WebRtcIsacfix_kCdfShape +0 +310, WebRtcIsacfix_kCdfShape +0 +312, - WebRtcIsacfix_kCdfShape +0 +316, WebRtcIsacfix_kCdfShape +0 +320, WebRtcIsacfix_kCdfShape +0 +324, - WebRtcIsacfix_kCdfShape +0 +328, WebRtcIsacfix_kCdfShape +0 +332, WebRtcIsacfix_kCdfShape +0 +336, - WebRtcIsacfix_kCdfShape +0 +341, WebRtcIsacfix_kCdfShape +0 +347, 
WebRtcIsacfix_kCdfShape +0 +354, - WebRtcIsacfix_kCdfShape +0 +360, WebRtcIsacfix_kCdfShape +0 +368, WebRtcIsacfix_kCdfShape +0 +378, - WebRtcIsacfix_kCdfShape +0 +388, WebRtcIsacfix_kCdfShape +0 +400, WebRtcIsacfix_kCdfShape +0 +418, - WebRtcIsacfix_kCdfShape +0 +445, WebRtcIsacfix_kCdfShape +0 +447, WebRtcIsacfix_kCdfShape +0 +451, - WebRtcIsacfix_kCdfShape +0 +455, WebRtcIsacfix_kCdfShape +0 +461, WebRtcIsacfix_kCdfShape +0 +468, - WebRtcIsacfix_kCdfShape +0 +474, WebRtcIsacfix_kCdfShape +0 +480, WebRtcIsacfix_kCdfShape +0 +486, - WebRtcIsacfix_kCdfShape +0 +495, WebRtcIsacfix_kCdfShape +0 +505, WebRtcIsacfix_kCdfShape +0 +516, - WebRtcIsacfix_kCdfShape +0 +528, WebRtcIsacfix_kCdfShape +0 +543, WebRtcIsacfix_kCdfShape +0 +564, - WebRtcIsacfix_kCdfShape +0 +583, WebRtcIsacfix_kCdfShape +0 +608, WebRtcIsacfix_kCdfShape +0 +635 - }, - { WebRtcIsacfix_kCdfShape +686 +0, WebRtcIsacfix_kCdfShape +686 +2, WebRtcIsacfix_kCdfShape +686 +4, - WebRtcIsacfix_kCdfShape +686 +6, WebRtcIsacfix_kCdfShape +686 +8, WebRtcIsacfix_kCdfShape +686 +11, - WebRtcIsacfix_kCdfShape +686 +13, WebRtcIsacfix_kCdfShape +686 +15, WebRtcIsacfix_kCdfShape +686 +17, - WebRtcIsacfix_kCdfShape +686 +20, WebRtcIsacfix_kCdfShape +686 +23, WebRtcIsacfix_kCdfShape +686 +27, - WebRtcIsacfix_kCdfShape +686 +31, WebRtcIsacfix_kCdfShape +686 +35, WebRtcIsacfix_kCdfShape +686 +40, - WebRtcIsacfix_kCdfShape +686 +44, WebRtcIsacfix_kCdfShape +686 +50, WebRtcIsacfix_kCdfShape +686 +56, - WebRtcIsacfix_kCdfShape +686 +63, WebRtcIsacfix_kCdfShape +686 +65, WebRtcIsacfix_kCdfShape +686 +67, - WebRtcIsacfix_kCdfShape +686 +69, WebRtcIsacfix_kCdfShape +686 +71, WebRtcIsacfix_kCdfShape +686 +73, - WebRtcIsacfix_kCdfShape +686 +75, WebRtcIsacfix_kCdfShape +686 +77, WebRtcIsacfix_kCdfShape +686 +79, - WebRtcIsacfix_kCdfShape +686 +82, WebRtcIsacfix_kCdfShape +686 +85, WebRtcIsacfix_kCdfShape +686 +89, - WebRtcIsacfix_kCdfShape +686 +93, WebRtcIsacfix_kCdfShape +686 +97, WebRtcIsacfix_kCdfShape +686 +102, - 
WebRtcIsacfix_kCdfShape +686 +106, WebRtcIsacfix_kCdfShape +686 +112, WebRtcIsacfix_kCdfShape +686 +119, - WebRtcIsacfix_kCdfShape +686 +127, WebRtcIsacfix_kCdfShape +686 +129, WebRtcIsacfix_kCdfShape +686 +131, - WebRtcIsacfix_kCdfShape +686 +133, WebRtcIsacfix_kCdfShape +686 +135, WebRtcIsacfix_kCdfShape +686 +137, - WebRtcIsacfix_kCdfShape +686 +139, WebRtcIsacfix_kCdfShape +686 +142, WebRtcIsacfix_kCdfShape +686 +146, - WebRtcIsacfix_kCdfShape +686 +150, WebRtcIsacfix_kCdfShape +686 +154, WebRtcIsacfix_kCdfShape +686 +158, - WebRtcIsacfix_kCdfShape +686 +162, WebRtcIsacfix_kCdfShape +686 +167, WebRtcIsacfix_kCdfShape +686 +173, - WebRtcIsacfix_kCdfShape +686 +179, WebRtcIsacfix_kCdfShape +686 +186, WebRtcIsacfix_kCdfShape +686 +194, - WebRtcIsacfix_kCdfShape +686 +205, WebRtcIsacfix_kCdfShape +686 +207, WebRtcIsacfix_kCdfShape +686 +209, - WebRtcIsacfix_kCdfShape +686 +211, WebRtcIsacfix_kCdfShape +686 +214, WebRtcIsacfix_kCdfShape +686 +218, - WebRtcIsacfix_kCdfShape +686 +222, WebRtcIsacfix_kCdfShape +686 +226, WebRtcIsacfix_kCdfShape +686 +230, - WebRtcIsacfix_kCdfShape +686 +234, WebRtcIsacfix_kCdfShape +686 +238, WebRtcIsacfix_kCdfShape +686 +242, - WebRtcIsacfix_kCdfShape +686 +247, WebRtcIsacfix_kCdfShape +686 +253, WebRtcIsacfix_kCdfShape +686 +262, - WebRtcIsacfix_kCdfShape +686 +269, WebRtcIsacfix_kCdfShape +686 +278, WebRtcIsacfix_kCdfShape +686 +289, - WebRtcIsacfix_kCdfShape +686 +305, WebRtcIsacfix_kCdfShape +686 +307, WebRtcIsacfix_kCdfShape +686 +309, - WebRtcIsacfix_kCdfShape +686 +311, WebRtcIsacfix_kCdfShape +686 +315, WebRtcIsacfix_kCdfShape +686 +319, - WebRtcIsacfix_kCdfShape +686 +323, WebRtcIsacfix_kCdfShape +686 +327, WebRtcIsacfix_kCdfShape +686 +331, - WebRtcIsacfix_kCdfShape +686 +335, WebRtcIsacfix_kCdfShape +686 +340, WebRtcIsacfix_kCdfShape +686 +346, - WebRtcIsacfix_kCdfShape +686 +354, WebRtcIsacfix_kCdfShape +686 +362, WebRtcIsacfix_kCdfShape +686 +374, - WebRtcIsacfix_kCdfShape +686 +384, WebRtcIsacfix_kCdfShape +686 +396, 
WebRtcIsacfix_kCdfShape +686 +413, - WebRtcIsacfix_kCdfShape +686 +439, WebRtcIsacfix_kCdfShape +686 +442, WebRtcIsacfix_kCdfShape +686 +446, - WebRtcIsacfix_kCdfShape +686 +450, WebRtcIsacfix_kCdfShape +686 +455, WebRtcIsacfix_kCdfShape +686 +461, - WebRtcIsacfix_kCdfShape +686 +468, WebRtcIsacfix_kCdfShape +686 +475, WebRtcIsacfix_kCdfShape +686 +481, - WebRtcIsacfix_kCdfShape +686 +489, WebRtcIsacfix_kCdfShape +686 +498, WebRtcIsacfix_kCdfShape +686 +508, - WebRtcIsacfix_kCdfShape +686 +522, WebRtcIsacfix_kCdfShape +686 +534, WebRtcIsacfix_kCdfShape +686 +554, - WebRtcIsacfix_kCdfShape +686 +577, WebRtcIsacfix_kCdfShape +686 +602, WebRtcIsacfix_kCdfShape +686 +631 - }, - { WebRtcIsacfix_kCdfShape +1368 +0, WebRtcIsacfix_kCdfShape +1368 +2, WebRtcIsacfix_kCdfShape +1368 +4, - WebRtcIsacfix_kCdfShape +1368 +6, WebRtcIsacfix_kCdfShape +1368 +8, WebRtcIsacfix_kCdfShape +1368 +10, - WebRtcIsacfix_kCdfShape +1368 +12, WebRtcIsacfix_kCdfShape +1368 +14, WebRtcIsacfix_kCdfShape +1368 +16, - WebRtcIsacfix_kCdfShape +1368 +20, WebRtcIsacfix_kCdfShape +1368 +24, WebRtcIsacfix_kCdfShape +1368 +28, - WebRtcIsacfix_kCdfShape +1368 +32, WebRtcIsacfix_kCdfShape +1368 +36, WebRtcIsacfix_kCdfShape +1368 +40, - WebRtcIsacfix_kCdfShape +1368 +44, WebRtcIsacfix_kCdfShape +1368 +50, WebRtcIsacfix_kCdfShape +1368 +57, - WebRtcIsacfix_kCdfShape +1368 +65, WebRtcIsacfix_kCdfShape +1368 +67, WebRtcIsacfix_kCdfShape +1368 +69, - WebRtcIsacfix_kCdfShape +1368 +71, WebRtcIsacfix_kCdfShape +1368 +73, WebRtcIsacfix_kCdfShape +1368 +75, - WebRtcIsacfix_kCdfShape +1368 +77, WebRtcIsacfix_kCdfShape +1368 +79, WebRtcIsacfix_kCdfShape +1368 +81, - WebRtcIsacfix_kCdfShape +1368 +85, WebRtcIsacfix_kCdfShape +1368 +89, WebRtcIsacfix_kCdfShape +1368 +93, - WebRtcIsacfix_kCdfShape +1368 +97, WebRtcIsacfix_kCdfShape +1368 +101, WebRtcIsacfix_kCdfShape +1368 +105, - WebRtcIsacfix_kCdfShape +1368 +110, WebRtcIsacfix_kCdfShape +1368 +116, WebRtcIsacfix_kCdfShape +1368 +123, - WebRtcIsacfix_kCdfShape +1368 
+132, WebRtcIsacfix_kCdfShape +1368 +134, WebRtcIsacfix_kCdfShape +1368 +136, - WebRtcIsacfix_kCdfShape +1368 +138, WebRtcIsacfix_kCdfShape +1368 +141, WebRtcIsacfix_kCdfShape +1368 +143, - WebRtcIsacfix_kCdfShape +1368 +146, WebRtcIsacfix_kCdfShape +1368 +150, WebRtcIsacfix_kCdfShape +1368 +154, - WebRtcIsacfix_kCdfShape +1368 +158, WebRtcIsacfix_kCdfShape +1368 +162, WebRtcIsacfix_kCdfShape +1368 +166, - WebRtcIsacfix_kCdfShape +1368 +170, WebRtcIsacfix_kCdfShape +1368 +174, WebRtcIsacfix_kCdfShape +1368 +179, - WebRtcIsacfix_kCdfShape +1368 +185, WebRtcIsacfix_kCdfShape +1368 +193, WebRtcIsacfix_kCdfShape +1368 +203, - WebRtcIsacfix_kCdfShape +1368 +214, WebRtcIsacfix_kCdfShape +1368 +216, WebRtcIsacfix_kCdfShape +1368 +218, - WebRtcIsacfix_kCdfShape +1368 +220, WebRtcIsacfix_kCdfShape +1368 +224, WebRtcIsacfix_kCdfShape +1368 +227, - WebRtcIsacfix_kCdfShape +1368 +231, WebRtcIsacfix_kCdfShape +1368 +235, WebRtcIsacfix_kCdfShape +1368 +239, - WebRtcIsacfix_kCdfShape +1368 +243, WebRtcIsacfix_kCdfShape +1368 +247, WebRtcIsacfix_kCdfShape +1368 +251, - WebRtcIsacfix_kCdfShape +1368 +256, WebRtcIsacfix_kCdfShape +1368 +262, WebRtcIsacfix_kCdfShape +1368 +269, - WebRtcIsacfix_kCdfShape +1368 +277, WebRtcIsacfix_kCdfShape +1368 +286, WebRtcIsacfix_kCdfShape +1368 +297, - WebRtcIsacfix_kCdfShape +1368 +315, WebRtcIsacfix_kCdfShape +1368 +317, WebRtcIsacfix_kCdfShape +1368 +319, - WebRtcIsacfix_kCdfShape +1368 +323, WebRtcIsacfix_kCdfShape +1368 +327, WebRtcIsacfix_kCdfShape +1368 +331, - WebRtcIsacfix_kCdfShape +1368 +335, WebRtcIsacfix_kCdfShape +1368 +339, WebRtcIsacfix_kCdfShape +1368 +343, - WebRtcIsacfix_kCdfShape +1368 +349, WebRtcIsacfix_kCdfShape +1368 +355, WebRtcIsacfix_kCdfShape +1368 +361, - WebRtcIsacfix_kCdfShape +1368 +368, WebRtcIsacfix_kCdfShape +1368 +376, WebRtcIsacfix_kCdfShape +1368 +385, - WebRtcIsacfix_kCdfShape +1368 +397, WebRtcIsacfix_kCdfShape +1368 +411, WebRtcIsacfix_kCdfShape +1368 +429, - WebRtcIsacfix_kCdfShape +1368 +456, 
WebRtcIsacfix_kCdfShape +1368 +459, WebRtcIsacfix_kCdfShape +1368 +463, - WebRtcIsacfix_kCdfShape +1368 +467, WebRtcIsacfix_kCdfShape +1368 +473, WebRtcIsacfix_kCdfShape +1368 +478, - WebRtcIsacfix_kCdfShape +1368 +485, WebRtcIsacfix_kCdfShape +1368 +491, WebRtcIsacfix_kCdfShape +1368 +497, - WebRtcIsacfix_kCdfShape +1368 +505, WebRtcIsacfix_kCdfShape +1368 +514, WebRtcIsacfix_kCdfShape +1368 +523, - WebRtcIsacfix_kCdfShape +1368 +535, WebRtcIsacfix_kCdfShape +1368 +548, WebRtcIsacfix_kCdfShape +1368 +565, - WebRtcIsacfix_kCdfShape +1368 +585, WebRtcIsacfix_kCdfShape +1368 +611, WebRtcIsacfix_kCdfShape +1368 +640 - } -}; - -/* code length for all coefficients using different models */ - -const int16_t WebRtcIsacfix_kCodeLenGainQ11[392] = { - 25189, 16036, 8717, 358, 8757, 15706, 21456, 24397, 18502, 17559 - , 13794, 11088, 7480, 873, 6603, 11636, 14627, 16805, 19132, 26624 - , 26624, 19408, 13751, 7280, 583, 7591, 15178, 23773, 28672, 25189 - , 19045, 16442, 13412, 10397, 5893, 1338, 6376, 9992, 12074, 13853 - , 15781, 19821, 22819, 28672, 28672, 25189, 19858, 15781, 11262, 5477 - , 1298, 5632, 11814, 17234, 22020, 28672, 19677, 18125, 16587, 14521 - , 13032, 11196, 9249, 5411, 2495, 4994, 7975, 10234, 12308, 13892 - , 15148, 17944, 21725, 23917, 25189, 19539, 16293, 11531, 7808, 4475 - , 2739, 4872, 8089, 11314, 14992, 18105, 23257, 26624, 25189, 23257 - , 23257, 20982, 18697, 18023, 16338, 16036, 14539, 13695, 13146, 11763 - , 10754, 9074, 7260, 5584, 4430, 5553, 6848, 8344, 10141, 11636 - , 12535, 13416, 14342, 15477, 17296, 19282, 22349, 23773, 28672, 28672 - , 26624, 23773, 21456, 18023, 15118, 13362, 11212, 9293, 8043, 6985 - , 5908, 5721, 5853, 6518, 7316, 8360, 9716, 11289, 12912, 14652 - , 16969, 19858, 23773, 26624, 28013, 30720, 30720, 28672, 25426, 23141 - , 25426, 23773, 20720, 19408, 18697, 19282, 16859, 16338, 16026, 15377 - , 15021, 14319, 14251, 13937, 13260, 13017, 12332, 11703, 11430, 10359 - , 10128, 9405, 8757, 8223, 7974, 7859, 7646, 7673, 
7997, 8580 - , 8880, 9061, 9866, 10397, 11358, 12200, 13244, 14157, 15021, 16026 - , 16490, 18697, 18479, 20011, 19677, 20720, 24576, 26276, 30720, 30720 - , 28672, 30720, 24068, 25189, 22437, 20345, 18479, 16396, 16026, 14928 - , 13877, 13450, 12696, 12766, 11626, 11098, 10159, 9998, 9437, 9275 - , 8783, 8552, 8629, 8488, 8522, 8454, 8571, 8775, 8915, 9427 - , 9483, 9851, 10260, 10933, 11131, 11974, 12560, 13833, 15080, 16304 - , 17491, 19017, 18697, 19408, 22020, 25189, 25426, 22819, 26276, 30720 - , 30720, 30720, 30720, 30720, 30720, 28672, 30720, 30720, 30720, 30720 - , 28013, 25426, 24397, 23773, 25189, 26624, 25189, 22437, 21725, 20011 - , 20527, 20720, 20771, 22020, 22020, 19858, 19408, 19972, 17866, 17360 - , 17791, 17219, 16805, 16927, 16067, 16162, 15661, 15178, 15021, 15209 - , 14845, 14570, 14490, 14490, 13733, 13617, 13794, 13577, 13312, 12824 - , 13032, 12683, 12189, 12469, 12109, 11940, 11636, 11617, 11932, 12294 - , 11578, 11775, 12039, 11654, 11560, 11439, 11909, 11421, 12029, 11513 - , 11773, 11899, 11560, 11805, 11476, 11664, 11963, 11647, 11754, 11963 - , 11703, 12211, 11932, 12074, 12469, 12535, 12560, 12912, 12783, 12866 - , 12884, 13378, 13957, 13775, 13635, 14019, 14545, 15240, 15520, 15554 - , 15697, 16490, 16396, 17281, 16599, 16969, 17963, 16859, 16983, 16805 - , 17099, 18210, 17219, 17646, 17700, 17646, 18297, 17425, 18479, 17791 - , 17718, 19282, 18672, 20173, 20982, 21725, 21456, 23773, 23257, 25189 - , 30720, 30720, 25189, 26624, 30720, 30720, 30720, 30720, 28672, 26276 - , 30720, 30720 -}; - -const int16_t WebRtcIsacfix_kCodeLenShapeQ11[578] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 28672 - , 0, 26624, 1, 23773, 22819, 4, 20982, 18598, 10, 19282 - , 16587, 22, 16442, 26624, 13126, 60, 14245, 26624, 26624, 12736 - , 79, 12912, 25189, 22819, 9563, 249, 9474, 22349, 28672, 23257 - , 17944, 7980, 434, 8181, 16431, 26624, 0, 0, 0, 0 - , 28672, 0, 0, 0, 0, 0, 28672, 0, 22437, 3 - , 22437, 20982, 5, 20982, 16442, 22, 16752, 13814, 49, 14646 - , 
11645, 116, 11734, 26624, 28672, 10613, 158, 11010, 24397, 19539 - , 8046, 453, 7709, 19017, 28672, 23257, 15110, 6770, 758, 6523 - , 14108, 24397, 28672, 0, 0, 0, 0, 28672, 0, 28672 - , 0, 26624, 1, 28672, 28672, 1, 26624, 24397, 2, 23257 - , 21725, 4, 20982, 17158, 18, 17281, 28672, 15178, 35, 15209 - , 12343, 92, 12320, 26624, 10344, 189, 10217, 30720, 22020, 9033 - , 322, 8549, 23773, 28672, 30720, 20622, 7666, 473, 7806, 20527 - , 24397, 14135, 5995, 960, 6018, 14872, 23773, 26624, 20928, 16293 - , 10636, 4926, 1588, 5256, 11088, 18043, 25189, 0, 0, 0 - , 0, 24397, 1, 25189, 20720, 5, 21456, 21209, 3, 25189 - , 20982, 5, 21456, 15818, 30, 15410, 13794, 60, 13416, 28672 - , 11162, 142, 11025, 9337, 231, 10094, 23773, 8338, 405, 7930 - , 26624, 19677, 6787, 613, 7318, 19161, 28672, 16442, 6319, 932 - , 5748, 15312, 25189, 28672, 28672, 28672, 13998, 5513, 1263, 5146 - , 14024, 24397, 22819, 15818, 9460, 4447, 2122, 4681, 9970, 15945 - , 22349, 28672, 30720, 22622, 19017, 14872, 10689, 7405, 4473, 2983 - , 4783, 7894, 11186, 14964, 18210, 24397, 0, 0, 30720, 0 - , 30720, 21456, 3, 23773, 14964, 39, 14757, 14179, 53, 13751 - , 14928, 36, 15272, 12430, 79, 13228, 9135, 285, 9077, 28672 - , 28672, 8377, 403, 7919, 26624, 28672, 23257, 7068, 560, 7473 - , 20345, 19677, 6770, 720, 6464, 18697, 25189, 16249, 5779, 1087 - , 5494, 15209, 22819, 30720, 20622, 12601, 5240, 1419, 5091, 12095 - , 19408, 26624, 22819, 16805, 10683, 4812, 2056, 4293, 9836, 16026 - , 24397, 25189, 18409, 13833, 8681, 4503, 2653, 4220, 8329, 13853 - , 19132, 26624, 25189, 20771, 17219, 12630, 9520, 6733, 4565, 3657 - , 4817, 7069, 10058, 13212, 16805, 21209, 26624, 26276, 28672, 28672 - , 26276, 23257, 20173, 19282, 16538, 15051, 12811, 10754, 9267, 7547 - , 6270, 5407, 5214, 6057, 7054, 8226, 9488, 10806, 12793, 14442 - , 16442, 19677, 22099, 26276, 28672, 0, 30720, 0, 30720, 11920 - , 56, 20720, 30720, 6766, 355, 13130, 30720, 30720, 22180, 5589 - , 736, 7902, 26624, 30720, 7634, 354, 9721, 
30720, 30720, 9027 - , 246, 10117, 30720, 30720, 9630, 453, 6709, 23257, 30720, 25683 - , 14228, 6127, 1271, 4615, 15178, 30720, 30720, 23504, 12382, 5739 - , 2015, 3492, 10560, 22020, 26624, 30720, 30720, 23257, 13192, 4873 - , 1527, 5001, 12445, 22020, 30720, 30720, 30720, 30720, 19344, 10761 - , 4051, 1927, 5281, 10594, 17866, 28672, 30720, 30720, 30720, 21869 - , 15554, 10060, 5979, 2710, 3085, 7889, 14646, 21725, 28672, 30720 - , 30720, 30720, 30720, 30720, 30720, 30720, 22719, 17425, 13212, 8083 - , 4439, 2820, 4305, 8136, 12988, 17425, 21151, 28672, 28672, 30720 - , 30720, 30720, 28672, 20527, 19282, 14412, 10513, 7407, 5079, 3744 - , 4115, 6308, 9621, 13599, 17040, 22349, 28672, 30720, 30720, 30720 - , 30720, 30720, 30720, 29522, 19282, 14545, 11485, 9093, 6760, 5262 - , 4672, 4970, 6005, 7852, 9732, 12343, 14672, 19161, 22819, 25189 - , 30720, 30720, 28672, 30720, 30720, 20720, 18125, 14388, 12007, 9825 - , 8092, 7064, 6069, 5903, 5932, 6359, 7169, 8310, 9324, 10711 - , 11867, 13096, 14157, 16338, 17040, 19161, 21725, 23773, 30720, 30720 - , 26276, 25426, 24397, 28672, 28672, 23257, 22020, 22349, 18297, 17646 - , 16983, 16431, 16162, 15021, 15178, 13751, 12142, 10895, 10193, 9632 - , 9086, 8896, 8823, 8735, 8591, 8754, 8649, 8361, 8329, 8522 - , 8373, 8739, 8993, 9657, 10454, 11279, 11899, 12614, 14024, 14273 - , 15477, 15240, 16649, 17866, 18697, 21151, 22099, 0 - // The final 0 was added due to http://bugs.webrtc.org/10584. 
-}; - -/* left KLT transforms */ -const int16_t WebRtcIsacfix_kT1GainQ15[3][4] = { - { -26130, 19773, 19773, 26130 }, - { -26664, 19046, 19046, 26664 }, - { -23538, 22797, 22797, 23538 } -}; - - - -const int16_t WebRtcIsacfix_kT1ShapeQ15[3][324] = { - { 52,16,168,7,439,-138,-89,306,671,882, - 157,1301,291,1598,-3571,-1943,-1119,32404,96,-12, - 379,-64,-307,345,-836,539,1045,2541,-2865,-992, - 1683,-4717,5808,7427,30599,2319,183,-73,451,481, - 933,-198,781,-397,1244,-777,3690,-2414,149,-1356, - -2593,-31140,8289,-1737,-202,-14,-214,360,501,450, - -245,-7,797,3638,-2804,3042,-337,22137,-22103,2264, - 6838,-3381,305,172,263,-195,-355,351,179,513, - 2234,3343,5509,7531,19075,-17740,-16836,2244,-629,-1505, - -153,108,124,-324,2694,-124,1492,-850,5347,4285, - 7439,-10229,-22822,-12467,-12891,3645,822,-232,131,13, - 374,565,536,4681,1294,-1935,1926,-5734,-10643,26462, - -12480,-5589,-1038,-2468,964,-704,-247,-106,186,-558, - -4050,3760,2972,2141,-7393,6294,26740,11991,-3251,5461, - 5341,1574,2208,-51,-552,-297,-753,-154,2068,-5371, - 3578,4106,28043,-10533,8041,2353,2389,4609,3410,1906, - 351,-249,18,-15,1117,539,2870,9084,17585,-24528, - -366,-6490,2009,-3170,2942,1116,-232,1672,1065,606, - -399,-388,-518,38,3728,28948,-11936,4543,4104,-4441, - 1545,-4044,1485,622,-68,186,-473,135,-280,125, - -546,-1813,6989,6606,23711,19376,-2636,2870,-4553,-1687, - 878,-375,205,-208,-409,-108,-200,-45,-1670,-337, - 8213,-5524,-2334,5240,-12939,-26205,5937,-1582,-592,-959, - -5374,2449,3400,559,349,-492,668,12379,-27684,3419, - 5117,4415,-297,-8270,-1252,-3490,-1272,-1199,-3159,191, - 630,488,-797,-3071,12912,-27783,-10249,1047,647,619, - 111,-3722,-915,-1055,-502,5,-1384,-306,221,68, - 5219,13173,-26474,-11663,-5626,927,806,-1127,236,-589, - -522,-230,-312,-315,-428,-573,426,192,-11830,-26883, - -14121,-2785,-1429,-109,410,-832,-302,539,-459,104, - 1,-530,-202,-289,153,116,30082,-12944,-671,20, - 649,98,103,215,234,0,280,-51,-169,298, - 31,230,-73,-51 - }, - { 
-154,-7,-192,61,-739,-389,-947,-162,-60,94, - 511,-716,1520,-1428,4168,-2214,1816,32270,-123,-77, - -199,-99,-42,-588,203,-240,-930,-35,1580,234, - 3206,-5507,-1495,-10946,30000,-2667,-136,-176,-240,-175, - -204,-661,-1796,-1039,-1271,498,3143,734,2663,2699, - -8127,29333,10495,2356,-72,113,-91,118,-2840,-723, - -1733,-1158,-389,-2116,-3054,-3,-5179,8071,29546,6308, - 5657,-3178,-186,-294,-473,-635,1213,-983,-1437,-1715, - -1094,1280,-92,-9573,948,29576,-7060,-5921,2954,1349, - -337,-108,-1099,962,418,-413,-1149,-334,1241,3975, - -6825,26725,-14377,7051,-4772,-1707,2335,2008,-150,570, - 1371,42,-1649,-619,2039,3369,-1225,1583,-2755,-15207, - -27504,-4855,-4304,1495,2733,1324,15,-448,403,353, - 3016,-1242,2338,2673,2064,-7496,-30447,-3686,5833,-1301, - -2455,2122,1519,608,43,-653,773,-3072,912,-1537, - 4505,10284,30237,1549,3200,-691,205,1702,658,1014, - 1499,148,79,-322,-1162,-4639,-813,7536,3204,29109, - -10747,-26,1611,2286,2114,2561,1022,372,348,207, - 1062,-1088,-443,-9849,2381,5671,29097,-7612,-2927,3853, - 194,1155,275,1438,1438,1312,581,888,-784,906, - 112,-11103,25104,14438,-9311,-3068,1210,368,370,-940, - -2434,-1148,1925,392,657,258,-526,1475,-2281,-4265, - -1880,1534,2185,-1472,959,-30934,6306,3114,-4109,1768, - -2612,-703,45,644,2185,2033,5670,7211,19114,-22427, - 6432,5150,-4090,-2694,3860,1245,-596,293,1829,369, - -319,229,-3256,2170,-6374,-26216,-4570,-16053,-5766,-262, - -2006,2873,-1477,147,378,-1544,-344,-544,-985,-481, - 4210,4542,30757,-7291,-4863,1529,-2079,-628,-603,-783, - -408,1646,697,808,-620,-292,181,158,-13313,-29173, - 5984,-1262,859,-1776,-558,-24,-883,-1421,739,210, - -531,-285,131,-160,-246,-56,29345,-13706,-2859,-2966, - -300,-970,-2382,-268,-103,-636,-12,-62,-691,-253, - -147,-127,27,66 - }, - { 55,-212,-198,489,-274,81,682,399,328,-934, - -389,-37,1357,-3632,5276,6581,-9493,-29921,29,-45, - 2,190,172,-15,311,-130,-1085,-25,324,-684, - 3223,-6580,4485,-5280,-29521,9933,82,-320,-530,229, - 
-705,-533,-414,848,-1842,-4473,1390,-857,6717,-6692, - 4648,29397,576,8339,-68,-85,238,-330,264,-1012, - -381,-203,-3384,-3329,3906,6810,3790,-6250,28312,-8078, - 8089,1565,160,-569,-612,-613,-1063,-1928,-1125,3421, - -7481,-7484,4942,-6984,4330,-25591,-10574,-6982,5682,-1781, - -308,89,178,-1715,-420,-3530,-5776,1219,-8617,-7137, - 7015,4981,24875,12657,-5408,-3356,-785,-1972,326,-858, - -506,-3382,-986,-6258,-2259,4015,-8374,-10482,3127,23826, - -14126,-514,-5417,2178,-2912,-17,-587,80,67,-5881, - -1702,-5351,-4481,398,-10156,-225,20727,-15460,-11603,7752, - 3660,1714,-2001,-359,499,-527,-1225,-7820,-1297,-6326, - -8526,7900,-18328,13311,-17488,-2926,-196,-17,2281,873, - 480,-160,-624,471,780,-8729,1707,-14262,-20647,1721, - 18590,-2206,-1214,-1066,312,-2602,783,-412,-113,49, - -119,1305,-2371,-15132,-1833,-18252,20295,-8316,2227,341, - -2074,-702,3082,-262,-465,-198,430,30,-70,-788, - 2342,-25132,-4863,19783,-484,2137,2811,-1906,799,1586, - 962,-734,-191,-30,-129,-93,-1126,1729,5860,-2030, - 8953,603,-3338,-10869,-1144,22070,12130,10513,3191,-6881, - -3514,2090,711,-666,1843,-5997,-5681,2921,-17641,-2801, - 4969,18590,7169,12214,8587,4405,3008,-1074,-371,-77, - 253,331,-5611,5014,13152,-1985,18483,-1696,8043,20463, - 2381,-393,1688,-1205,618,1220,457,248,-83,176, - 7920,-13676,-22139,-3038,17402,2036,844,3258,994,719, - 2087,-44,426,494,12,-91,46,5,-14204,22912, - -18156,-361,442,2298,-829,2229,386,1433,1335,1323, - 55,-592,-139,49,-12,-57,27783,17134,350,-282, - 552,158,142,2488,465,329,1087,118,143,10, - 56,65,-15,-31 - } -}; - -/* right KLT transforms */ -const int16_t WebRtcIsacfix_kT2GainQ15[3][36] = { - { 4775, -14892, 20313, -17104, 10533, -3613, -6782, 16044, -8889, - -11019, 21330, -10720, 13193, -15678, -11101, 14461, 12250, -13096, - -16951, 2167, 16066, 15569, -702, -16754, -19195, -12823, -4321, - 5128, 13348, 17825, 13232, 13404, 13494, 13490, 13383, 13261 - }, - { -3725, 11408, -18493, 20031, -13097, 3865, 9344, -19294, 10740, - 8856, -18432, 
8982, 13975, -14444, -11930, 11774, 14285, -13594, - -16323, -4, 16340, 15609, 359, -17220, -18401, -13471, -4643, - 5225, 13375, 18053, 13124, 13463, 13621, 13583, 13393, 13072 - }, - { -3513, 11402, -17883, 19504, -14399, 4885, 8702, -19513, 12046, - 8533, -18110, 8447, 12778, -14838, -12444, 13177, 14107, -12759, - -17268, 914, 15822, 15661, 838, -16686, -18907, -12936, -4820, - 4175, 12398, 18830, 12913, 13215, 13433, 13572, 13601, 13518 - } -}; - -const int16_t WebRtcIsacfix_kT2ShapeQ15[3][36] = { - { 4400, -11512, 17205, -19470, 14770, -5345, 9784, -19222, 11228, - 6842, -18371, 9909, 14191, -13496, -11563, 14015, 11827, -14839, - -15439, 948, 17802, 14827, -2053, -17132, 18723, 14516, 4135, - -6822, -13869, -16016, 12975, 13341, 13563, 13603, 13478, 13296 - }, - { 5420, -14215, 19060, -18073, 11709, -3911, 9645, -18335, 7717, - 10842, -19283, 9777, 14898, -12555, -13661, 11668, 13520, -13733, - -15936, -1358, 15671, 16728, 328, -17100, 17527, 13973, 5587, - -5194, -14165, -17677, 12970, 13446, 13693, 13660, 13462, 13015 - }, - { 4386, -12426, 18019, -18895, 13894, -5034, 9713, -19270, 10283, - 8692, -18439, 9317, 13992, -13454, -13241, 12850, 13366, -13336, - -16334, -498, 15976, 16213, -114, -16987, 18191, 13659, 4958, - -5116, -13444, -18021, 12911, 13424, 13718, 13674, 13464, 13054 - } -}; - -/* means of log gains and LAR coefficients*/ -const int16_t WebRtcIsacfix_kMeansGainQ8[3][12] = { - { -1758, -1370, -1758, -1373, -1757, -1375, - -1758, -1374, -1758, -1373, -1755, -1370 - }, - { -1569, -1224, -1569, -1225, -1569, -1227, - -1569, -1226, -1567, -1225, -1565, -1224 - }, - { -1452, -957, -1447, -951, -1438, -944, - -1431, -938, -1419, -931, -1406, -926 - } -}; - - -const int32_t WebRtcIsacfix_kMeansShapeQ17[3][108] = { - { -119581, 34418, -44193, 11112, -4428, 18906, 9222, 8068, 1953, 5425, - 1871, 1689, 109933, 33751, 10471, -2566, 1090, 2320, -119219, 33728, - -43759, 11450, -4870, 19117, 9174, 8037, 1972, 5331, 1872, 1843, - 109899, 34301, 10629, 
-2316, 1272, 2562, -118608, 32318, -44012, 11591, - -4914, 18932, 9456, 8088, 1900, 5419, 1723, 1853, 109963, 35059, - 10745, -2335, 1161, 2520, -119174, 32107, -44462, 11635, -4694, 18611, - 9757, 8108, 1969, 5486, 1673, 1777, 109636, 34907, 10643, -2406, - 1034, 2420, -118597, 32320, -44590, 10854, -4569, 18821, 9701, 7866, - 2003, 5577, 1732, 1626, 109913, 34448, 10714, -2752, 990, 2228, - -118138, 32996, -44352, 10334, -3772, 18488, 9464, 7865, 2208, 5540, - 1745, 1664, 109880, 33381, 10640, -2779, 980, 2054 - }, - { -146328, 46370, 1047, 26431, 10035, 13933, 6415, 14359, -2368, 6661, - 2269, 1764, 96623, 7802, 4163, 10742, 1643, 2954, -146871, 46561, 1127, - 26225, 10113, 14096, 6771, 14323, -2037, 6788, 2297, 1761, 96324, 8382, - 4309, 10450, 1695, 3016, -146502, 46475, 1580, 26118, 10487, 14179, 6622, - 14439, -2034, 6757, 2342, 1761, 95869, 8966, 4347, 10358, 1999, 2855, - -146958, 47717, 826, 25952, 10263, 14061, 5266, 13681, -2417, 6582, 2047, - 1608, 96257, 9107, 4452, 10301, 1792, 2676, -146992, 47123, 446, 25822, - 10405, 14292, 5140, 13804, -2403, 6496, 1834, 1735, 97489, 9253, 4414, - 10684, 1549, 2721, -145811, 46182, 901, 26482, 10241, 14524, 6075, 14514, - -2147, 6691, 2196, 1899, 97011, 8178, 4102, 10758, 1638, 2869 - }, - { -166617, 46969, -43908, 17726, 6330, 25615, 6913, 5450, -2301, 1984, - 507, 2883, 149998, 28709, 19333, 16703, 11093, 8965, -168254, 46604, - -44315, 17862, 6474, 25746, 7018, 5373, -2343, 1930, 513, 2819, 150391, - 28627, 19194, 16678, 10998, 8929, -169093, 46084, -44767, 17427, 6401, - 25674, 7147, 5472, -2336, 1820, 491, 2802, 149860, 28430, 19064, 16524, - 10898, 8875, -170205, 46189, -44877, 17403, 6190, 25209, 7035, 5673, -2173, - 1894, 574, 2756, 148830, 28230, 18819, 16418, 10789, 8811, -171263, 45045, - -44834, 16858, 6103, 24726, 7014, 5713, -2103, 1877, 518, 2729, 147073, - 27744, 18629, 16277, 10690, 8703, -171720, 44153, -45062, 15951, 5872, - 24429, 7044, 5585, -2082, 1807, 519, 2769, 144791, 27402, 18490, 
16126, - 10548, 8635 - } -}; diff --git a/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h b/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h deleted file mode 100644 index 50e1b12459..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_tables.h - * - * header file for coding tables for the LPC coefficients - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* indices of KLT coefficients used */ -extern const uint16_t WebRtcIsacfix_kSelIndGain[12]; - -extern const uint16_t WebRtcIsacfix_kSelIndShape[108]; - -/* cdf array for model indicator */ -extern const uint16_t WebRtcIsacfix_kModelCdf[KLT_NUM_MODELS + 1]; - -/* pointer to cdf array for model indicator */ -extern const uint16_t* WebRtcIsacfix_kModelCdfPtr[1]; - -/* initial cdf index for decoder of model indicator */ -extern const uint16_t WebRtcIsacfix_kModelInitIndex[1]; - -/* offset to go from rounded value to quantization index */ -extern const int16_t WebRtcIsacfix_kQuantMinGain[12]; - -extern const int16_t WebRtcIsacfix_kQuantMinShape[108]; - -/* maximum quantization index */ -extern const uint16_t WebRtcIsacfix_kMaxIndGain[12]; - -extern const uint16_t WebRtcIsacfix_kMaxIndShape[108]; - -/* index offset */ -extern const uint16_t WebRtcIsacfix_kOffsetGain[KLT_NUM_MODELS][12]; - -extern const uint16_t 
WebRtcIsacfix_kOffsetShape[KLT_NUM_MODELS][108]; - -/* initial cdf index for KLT coefficients */ -extern const uint16_t WebRtcIsacfix_kInitIndexGain[KLT_NUM_MODELS][12]; - -extern const uint16_t WebRtcIsacfix_kInitIndexShape[KLT_NUM_MODELS][108]; - -/* offsets for quantizer representation levels */ -extern const uint16_t WebRtcIsacfix_kOfLevelsGain[3]; - -extern const uint16_t WebRtcIsacfix_kOfLevelsShape[3]; - -/* quantizer representation levels */ -extern const int32_t WebRtcIsacfix_kLevelsGainQ17[1176]; - -extern const int16_t WebRtcIsacfix_kLevelsShapeQ10[1735]; - -/* cdf tables for quantizer indices */ -extern const uint16_t WebRtcIsacfix_kCdfGain[1212]; - -extern const uint16_t WebRtcIsacfix_kCdfShape[2059]; - -/* pointers to cdf tables for quantizer indices */ -extern const uint16_t* WebRtcIsacfix_kCdfGainPtr[KLT_NUM_MODELS][12]; - -extern const uint16_t* WebRtcIsacfix_kCdfShapePtr[KLT_NUM_MODELS][108]; - -/* code length for all coefficients using different models */ -extern const int16_t WebRtcIsacfix_kCodeLenGainQ11[392]; - -extern const int16_t WebRtcIsacfix_kCodeLenShapeQ11[578]; - -/* left KLT transforms */ -extern const int16_t WebRtcIsacfix_kT1GainQ15[KLT_NUM_MODELS][4]; - -extern const int16_t WebRtcIsacfix_kT1ShapeQ15[KLT_NUM_MODELS][324]; - -/* right KLT transforms */ -extern const int16_t WebRtcIsacfix_kT2GainQ15[KLT_NUM_MODELS][36]; - -extern const int16_t WebRtcIsacfix_kT2ShapeQ15[KLT_NUM_MODELS][36]; - -/* means of log gains and LAR coefficients */ -extern const int16_t WebRtcIsacfix_kMeansGainQ8[KLT_NUM_MODELS][12]; - -extern const int32_t WebRtcIsacfix_kMeansShapeQ17[3][108]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c b/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c deleted file mode 100644 index 78cb93f7ae..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c +++ /dev/null @@ -1,435 +0,0 @@ -/* - 
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/compile_assert_c.h" - -/* log2[0.2, 0.5, 0.98] in Q8 */ -static const int16_t kLogLagWinQ8[3] = { - -594, -256, -7 -}; - -/* [1 -0.75 0.25] in Q12 */ -static const int16_t kACoefQ12[3] = { - 4096, -3072, 1024 -}; - -int32_t WebRtcIsacfix_Log2Q8(uint32_t x) { - int32_t zeros; - int16_t frac; - - zeros=WebRtcSpl_NormU32(x); - frac = (int16_t)(((x << zeros) & 0x7FFFFFFF) >> 23); - /* log2(magn(i)) */ - - return ((31 - zeros) << 8) + frac; -} - -static __inline int16_t Exp2Q10(int16_t x) { // Both in and out in Q10 - - int16_t tmp16_1, tmp16_2; - - tmp16_2=(int16_t)(0x0400|(x&0x03FF)); - tmp16_1 = -(x >> 10); - if(tmp16_1>0) - return tmp16_2 >> tmp16_1; - else - return tmp16_2 << -tmp16_1; - -} - - - -/* 1D parabolic interpolation . 
All input and output values are in Q8 */ -static __inline void Intrp1DQ8(int32_t *x, int32_t *fx, int32_t *y, int32_t *fy) { - - int16_t sign1=1, sign2=1; - int32_t r32, q32, t32, nom32, den32; - int16_t t16, tmp16, tmp16_1; - - if ((fx[0]>0) && (fx[2]>0)) { - r32=fx[1]-fx[2]; - q32=fx[0]-fx[1]; - nom32=q32+r32; - den32 = (q32 - r32) * 2; - if (nom32<0) - sign1=-1; - if (den32<0) - sign2=-1; - - /* t = (q32+r32)/(2*(q32-r32)) = (fx[0]-fx[1] + fx[1]-fx[2])/(2 * fx[0]-fx[1] - (fx[1]-fx[2]))*/ - /* (Signs are removed because WebRtcSpl_DivResultInQ31 can't handle negative numbers) */ - /* t in Q31, without signs */ - t32 = WebRtcSpl_DivResultInQ31(nom32 * sign1, den32 * sign2); - - t16 = (int16_t)(t32 >> 23); /* Q8 */ - t16=t16*sign1*sign2; /* t in Q8 with signs */ - - *y = x[0]+t16; /* Q8 */ - // *y = x[1]+t16; /* Q8 */ - - /* The following code calculates fy in three steps */ - /* fy = 0.5 * t * (t-1) * fx[0] + (1-t*t) * fx[1] + 0.5 * t * (t+1) * fx[2]; */ - - /* Part I: 0.5 * t * (t-1) * fx[0] */ - tmp16_1 = (int16_t)(t16 * t16); /* Q8*Q8=Q16 */ - tmp16_1 >>= 2; /* Q16>>2 = Q14 */ - t16 <<= 6; /* Q8<<6 = Q14 */ - tmp16 = tmp16_1-t16; - *fy = WEBRTC_SPL_MUL_16_32_RSFT15(tmp16, fx[0]); /* (Q14 * Q8 >>15)/2 = Q8 */ - - /* Part II: (1-t*t) * fx[1] */ - tmp16 = 16384-tmp16_1; /* 1 in Q14 - Q14 */ - *fy += WEBRTC_SPL_MUL_16_32_RSFT14(tmp16, fx[1]);/* Q14 * Q8 >> 14 = Q8 */ - - /* Part III: 0.5 * t * (t+1) * fx[2] */ - tmp16 = tmp16_1+t16; - *fy += WEBRTC_SPL_MUL_16_32_RSFT15(tmp16, fx[2]);/* (Q14 * Q8 >>15)/2 = Q8 */ - } else { - *y = x[0]; - *fy= fx[1]; - } -} - - -static void FindFour32(int32_t *in, int16_t length, int16_t *bestind) -{ - int32_t best[4]= {-100, -100, -100, -100}; - int16_t k; - - for (k=0; k best[3]) { - if (in[k] > best[2]) { - if (in[k] > best[1]) { - if (in[k] > best[0]) { // The Best - best[3] = best[2]; - bestind[3] = bestind[2]; - best[2] = best[1]; - bestind[2] = bestind[1]; - best[1] = best[0]; - bestind[1] = bestind[0]; - best[0] = in[k]; - 
bestind[0] = k; - } else { // 2nd best - best[3] = best[2]; - bestind[3] = bestind[2]; - best[2] = best[1]; - bestind[2] = bestind[1]; - best[1] = in[k]; - bestind[1] = k; - } - } else { // 3rd best - best[3] = best[2]; - bestind[3] = bestind[2]; - best[2] = in[k]; - bestind[2] = k; - } - } else { // 4th best - best[3] = in[k]; - bestind[3] = k; - } - } - } -} - - - - - -extern void WebRtcIsacfix_PCorr2Q32(const int16_t *in, int32_t *logcorQ8); - - - -void WebRtcIsacfix_InitialPitch(const int16_t *in, /* Q0 */ - PitchAnalysisStruct *State, - int16_t *lagsQ7 /* Q7 */ - ) -{ - int16_t buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2]; - int32_t *crrvecQ8_1,*crrvecQ8_2; - int32_t cv1q[PITCH_LAG_SPAN2+2],cv2q[PITCH_LAG_SPAN2+2], peakvq[PITCH_LAG_SPAN2+2]; - int k; - int16_t peaks_indq; - int16_t peakiq[PITCH_LAG_SPAN2]; - int32_t corr; - int32_t corr32, corr_max32, corr_max_o32; - int16_t npkq; - int16_t best4q[4]={0,0,0,0}; - int32_t xq[3],yq[1],fyq[1]; - int32_t *fxq; - int32_t best_lag1q, best_lag2q; - int32_t tmp32a,tmp32b,lag32,ratq; - int16_t start; - int16_t oldgQ12, tmp16a, tmp16b, gain_bias16,tmp16c, tmp16d, bias16; - int32_t tmp32c,tmp32d, tmp32e; - int16_t old_lagQ; - int32_t old_lagQ8; - int32_t lagsQ8[4]; - - old_lagQ = State->PFstr_wght.oldlagQ7; // Q7 - old_lagQ8 = old_lagQ << 1; // Q8 - - oldgQ12= State->PFstr_wght.oldgainQ12; - - crrvecQ8_1=&cv1q[1]; - crrvecQ8_2=&cv2q[1]; - - - /* copy old values from state buffer */ - memcpy(buf_dec16, State->dec_buffer16, sizeof(State->dec_buffer16)); - - /* decimation; put result after the old values */ - WebRtcIsacfix_DecimateAllpass32(in, State->decimator_state32, PITCH_FRAME_LEN, - &buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2]); - - /* low-pass filtering */ - start= PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; - WebRtcSpl_FilterARFastQ12(&buf_dec16[start],&buf_dec16[start],(int16_t*)kACoefQ12,3, PITCH_FRAME_LEN/2); - - /* copy end part back 
into state buffer */ - for (k = 0; k < (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2); k++) - State->dec_buffer16[k] = buf_dec16[k+PITCH_FRAME_LEN/2]; - - - /* compute correlation for first and second half of the frame */ - WebRtcIsacfix_PCorr2Q32(buf_dec16, crrvecQ8_1); - WebRtcIsacfix_PCorr2Q32(buf_dec16 + PITCH_CORR_STEP2, crrvecQ8_2); - - - /* bias towards pitch lag of previous frame */ - tmp32a = WebRtcIsacfix_Log2Q8((uint32_t) old_lagQ8) - 2304; - // log2(0.5*oldlag) in Q8 - tmp32b = oldgQ12 * oldgQ12 >> 10; // Q12 & * 4.0; - gain_bias16 = (int16_t) tmp32b; //Q12 - if (gain_bias16 > 3276) gain_bias16 = 3276; // 0.8 in Q12 - - - for (k = 0; k < PITCH_LAG_SPAN2; k++) - { - if (crrvecQ8_1[k]>0) { - tmp32b = WebRtcIsacfix_Log2Q8((uint32_t) (k + (PITCH_MIN_LAG/2-2))); - tmp16a = (int16_t) (tmp32b - tmp32a); // Q8 & fabs(ratio)<4 - tmp32c = tmp16a * tmp16a >> 6; // Q10 - tmp16b = (int16_t) tmp32c; // Q10 & <8 - tmp32d = tmp16b * 177 >> 8; // mult with ln2 in Q8 - tmp16c = (int16_t) tmp32d; // Q10 & <4 - tmp16d = Exp2Q10((int16_t) -tmp16c); //Q10 - tmp32c = gain_bias16 * tmp16d >> 13; // Q10 & * 0.5 - bias16 = (int16_t) (1024 + tmp32c); // Q10 - tmp32b = WebRtcIsacfix_Log2Q8((uint32_t)bias16) - 2560; - // Q10 in -> Q8 out with 10*2^8 offset - crrvecQ8_1[k] += tmp32b ; // -10*2^8 offset - } - } - - /* taper correlation functions */ - for (k = 0; k < 3; k++) { - crrvecQ8_1[k] += kLogLagWinQ8[k]; - crrvecQ8_2[k] += kLogLagWinQ8[k]; - - crrvecQ8_1[PITCH_LAG_SPAN2-1-k] += kLogLagWinQ8[k]; - crrvecQ8_2[PITCH_LAG_SPAN2-1-k] += kLogLagWinQ8[k]; - } - - - /* Make zeropadded corr vectors */ - cv1q[0]=0; - cv2q[0]=0; - cv1q[PITCH_LAG_SPAN2+1]=0; - cv2q[PITCH_LAG_SPAN2+1]=0; - corr_max32 = 0; - - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - - - corr32=crrvecQ8_1[k-1]; - if (corr32 > corr_max32) - corr_max32 = corr32; - - corr32=crrvecQ8_2[k-1]; - corr32 += -4; // Compensate for later (log2(0.99)) - - if (corr32 > corr_max32) - corr_max32 = corr32; - - } 
- - /* threshold value to qualify as a peak */ - // corr_max32 += -726; // log(0.14)/log(2.0) in Q8 - corr_max32 += -1000; // log(0.14)/log(2.0) in Q8 - corr_max_o32 = corr_max32; - - - /* find peaks in corr1 */ - peaks_indq = 0; - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - corr32=cv1q[k]; - if (corr32>corr_max32) { // Disregard small peaks - if ((corr32>=cv1q[k-1]) && (corr32>cv1q[k+1])) { // Peak? - peakvq[peaks_indq] = corr32; - peakiq[peaks_indq++] = k; - } - } - } - - - /* find highest interpolated peak */ - corr_max32=0; - best_lag1q =0; - if (peaks_indq > 0) { - FindFour32(peakvq, (int16_t) peaks_indq, best4q); - npkq = WEBRTC_SPL_MIN(peaks_indq, 4); - - for (k=0;k> 8; - tmp32c= tmp32b + 256; - *fyq += tmp32c; - if (*fyq > corr_max32) { - corr_max32 = *fyq; - best_lag1q = *yq; - } - } - tmp32b = (best_lag1q - OFFSET_Q8) * 2; - lagsQ8[0] = tmp32b + PITCH_MIN_LAG_Q8; - lagsQ8[1] = lagsQ8[0]; - } else { - lagsQ8[0] = old_lagQ8; - lagsQ8[1] = lagsQ8[0]; - } - - /* Bias towards constant pitch */ - tmp32a = lagsQ8[0] - PITCH_MIN_LAG_Q8; - ratq = (tmp32a >> 1) + OFFSET_Q8; - - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - tmp32a = k << 7; // 0.5*k Q8 - tmp32b = tmp32a * 2 - ratq; // Q8 - tmp32c = (int16_t)tmp32b * (int16_t)tmp32b >> 8; // Q8 - - tmp32b = tmp32c + (ratq >> 1); - // (k-r)^2 + 0.5 * r Q8 - tmp32c = WebRtcIsacfix_Log2Q8((uint32_t)tmp32a) - 2048; - // offset 8*2^8 , log2(0.5*k) Q8 - tmp32d = WebRtcIsacfix_Log2Q8((uint32_t)tmp32b) - 2048; - // offset 8*2^8 , log2(0.5*k) Q8 - tmp32e = tmp32c - tmp32d; - - cv2q[k] += tmp32e >> 1; - - } - - /* find peaks in corr2 */ - corr_max32 = corr_max_o32; - peaks_indq = 0; - - for (k = 1; k <= PITCH_LAG_SPAN2; k++) - { - corr=cv2q[k]; - if (corr>corr_max32) { // Disregard small peaks - if ((corr>=cv2q[k-1]) && (corr>cv2q[k+1])) { // Peak? 
- peakvq[peaks_indq] = corr; - peakiq[peaks_indq++] = k; - } - } - } - - - - /* find highest interpolated peak */ - corr_max32 = 0; - best_lag2q =0; - if (peaks_indq > 0) { - - FindFour32(peakvq, (int16_t) peaks_indq, best4q); - npkq = WEBRTC_SPL_MIN(peaks_indq, 4); - for (k=0;k> 8; - tmp32c= tmp32b + 256; - *fyq += tmp32c; - if (*fyq > corr_max32) { - corr_max32 = *fyq; - best_lag2q = *yq; - } - } - - tmp32b = (best_lag2q - OFFSET_Q8) * 2; - lagsQ8[2] = tmp32b + PITCH_MIN_LAG_Q8; - lagsQ8[3] = lagsQ8[2]; - } else { - lagsQ8[2] = lagsQ8[0]; - lagsQ8[3] = lagsQ8[0]; - } - - lagsQ7[0] = (int16_t)(lagsQ8[0] >> 1); - lagsQ7[1] = (int16_t)(lagsQ8[1] >> 1); - lagsQ7[2] = (int16_t)(lagsQ8[2] >> 1); - lagsQ7[3] = (int16_t)(lagsQ8[3] >> 1); -} - - - -void WebRtcIsacfix_PitchAnalysis(const int16_t *inn, /* PITCH_FRAME_LEN samples */ - int16_t *outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */ - PitchAnalysisStruct *State, - int16_t *PitchLags_Q7, - int16_t *PitchGains_Q12) -{ - int16_t inbufQ0[PITCH_FRAME_LEN + QLOOKAHEAD]; - int16_t k; - - /* inital pitch estimate */ - WebRtcIsacfix_InitialPitch(inn, State, PitchLags_Q7); - - - /* Calculate gain */ - WebRtcIsacfix_PitchFilterGains(inn, &(State->PFstr_wght), PitchLags_Q7, PitchGains_Q12); - - /* concatenate previous input's end and current input */ - for (k = 0; k < QLOOKAHEAD; k++) { - inbufQ0[k] = State->inbuf[k]; - } - for (k = 0; k < PITCH_FRAME_LEN; k++) { - inbufQ0[k+QLOOKAHEAD] = (int16_t) inn[k]; - } - - /* lookahead pitch filtering for masking analysis */ - WebRtcIsacfix_PitchFilter(inbufQ0, outQ0, &(State->PFstr), PitchLags_Q7,PitchGains_Q12, 2); - - - /* store last part of input */ - for (k = 0; k < QLOOKAHEAD; k++) { - State->inbuf[k] = inbufQ0[k + PITCH_FRAME_LEN]; - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h b/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h deleted file mode 100644 index 4303c82711..0000000000 --- 
a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_estimator.h - * - * Pitch functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ - -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" - -void WebRtcIsacfix_PitchAnalysis( - const int16_t* in, /* PITCH_FRAME_LEN samples */ - int16_t* outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */ - PitchAnalysisStruct* State, - int16_t* lagsQ7, - int16_t* PitchGains_Q12); - -void WebRtcIsacfix_InitialPitch(const int16_t* in, - PitchAnalysisStruct* State, - int16_t* qlags); - -void WebRtcIsacfix_PitchFilter(int16_t* indatFix, - int16_t* outdatQQ, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12, - int16_t type); - -void WebRtcIsacfix_PitchFilterCore(int loopNumber, - int16_t gain, - size_t index, - int16_t sign, - int16_t* inputState, - int16_t* outputBuff2, - const int16_t* coefficient, - int16_t* inputBuf, - int16_t* outputBuf, - int* index2); - -void WebRtcIsacfix_PitchFilterGains(const int16_t* indatQ0, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12); - -void WebRtcIsacfix_DecimateAllpass32( - const int16_t* in, - int32_t* state_in, /* array of size: 2*ALLPASSSECTIONS+1 */ - int16_t N, /* number of input samples */ - int16_t* out); /* array of size N/2 */ - -int32_t WebRtcIsacfix_Log2Q8(uint32_t x); - -void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8); - -#endif /* 
MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c b/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c deleted file mode 100644 index c4af9ab32a..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - -#ifdef WEBRTC_HAS_NEON -#include -#endif - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/compile_assert_c.h" - -extern int32_t WebRtcIsacfix_Log2Q8(uint32_t x); - -void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8) { - int16_t scaling,n,k; - int32_t csum32, lys, lcs; - int64_t ysum64; - const int32_t oneQ8 = 1 << 8; // 1.00 in Q8 - const int16_t* x; - const int16_t* inptr; - - x = in + PITCH_MAX_LAG / 2 + 2; - scaling = WebRtcSpl_GetScalingSquare((int16_t*)in, - PITCH_CORR_LEN2, - PITCH_CORR_LEN2); - ysum64 = 1; - csum32 = 0; - x = in + PITCH_MAX_LAG / 2 + 2; - for (n = 0; n < PITCH_CORR_LEN2; n++) { - ysum64 += in[n] * in[n] >> scaling; // Q0 - csum32 += x[n] * in[n] >> scaling; // Q0 - } - logcorQ8 += PITCH_LAG_SPAN2 - 1; - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum64) >> 1; // Q8, sqrt(ysum) - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 in Q8 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 
0; - } - - - for (k = 1; k < PITCH_LAG_SPAN2; k++) { - inptr = &in[k]; - ysum64 -= in[k - 1] * in[k - 1] >> scaling; - ysum64 += (int32_t)(in[PITCH_CORR_LEN2 + k - 1]) - * in[PITCH_CORR_LEN2 + k - 1] >> scaling; - -#ifdef WEBRTC_HAS_NEON - { - int32_t vbuff[4]; - int32x4_t int_32x4_sum = vmovq_n_s32(0); - // Can't shift a Neon register to right with a non-constant shift value. - int32x4_t int_32x4_scale = vdupq_n_s32(-scaling); - // Assert a codition used in loop unrolling at compile-time. - RTC_COMPILE_ASSERT(PITCH_CORR_LEN2 %4 == 0); - - for (n = 0; n < PITCH_CORR_LEN2; n += 4) { - int16x4_t int_16x4_x = vld1_s16(&x[n]); - int16x4_t int_16x4_in = vld1_s16(&inptr[n]); - int32x4_t int_32x4 = vmull_s16(int_16x4_x, int_16x4_in); - int_32x4 = vshlq_s32(int_32x4, int_32x4_scale); - int_32x4_sum = vaddq_s32(int_32x4_sum, int_32x4); - } - - // Use vector store to avoid long stall from data trasferring - // from vector to general register. - vst1q_s32(vbuff, int_32x4_sum); - csum32 = vbuff[0] + vbuff[1]; - csum32 += vbuff[2]; - csum32 += vbuff[3]; - } -#else - int64_t csum64_tmp = 0; - if(scaling == 0) { - for (n = 0; n < PITCH_CORR_LEN2; n++) { - csum64_tmp += (int32_t)(x[n]) * inptr[n]; - } - } else { - for (n = 0; n < PITCH_CORR_LEN2; n++) { - csum64_tmp += ((int32_t)(x[n]) * inptr[n]) >> scaling; - } - } - csum32 = csum64_tmp; -#endif - - logcorQ8--; - - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum64) >> 1; // Q8, sqrt(ysum) - - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 0; - } - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_mips.c b/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_mips.c deleted file mode 100644 index 4ead84c492..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_mips.c +++ 
/dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/compile_assert_c.h" - -extern int32_t WebRtcIsacfix_Log2Q8(uint32_t x); - -void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8) { - int16_t scaling,n,k; - int32_t ysum32,csum32, lys, lcs; - const int32_t oneQ8 = 1 << 8; // 1.00 in Q8 - const int16_t* x; - const int16_t* inptr; - - x = in + PITCH_MAX_LAG / 2 + 2; - scaling = WebRtcSpl_GetScalingSquare((int16_t*)in, - PITCH_CORR_LEN2, - PITCH_CORR_LEN2); - ysum32 = 1; - csum32 = 0; - x = in + PITCH_MAX_LAG / 2 + 2; - { - const int16_t* tmp_x = x; - const int16_t* tmp_in = in; - int32_t tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8; - n = PITCH_CORR_LEN2; - RTC_COMPILE_ASSERT(PITCH_CORR_LEN2 % 4 == 0); - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "1: \n\t" - "lh %[tmp1], 0(%[tmp_in]) \n\t" - "lh %[tmp2], 2(%[tmp_in]) \n\t" - "lh %[tmp3], 4(%[tmp_in]) \n\t" - "lh %[tmp4], 6(%[tmp_in]) \n\t" - "lh %[tmp5], 0(%[tmp_x]) \n\t" - "lh %[tmp6], 2(%[tmp_x]) \n\t" - "lh %[tmp7], 4(%[tmp_x]) \n\t" - "lh %[tmp8], 6(%[tmp_x]) \n\t" - "mul %[tmp5], %[tmp1], %[tmp5] \n\t" - "mul %[tmp1], %[tmp1], %[tmp1] \n\t" - "mul %[tmp6], %[tmp2], %[tmp6] \n\t" - "mul %[tmp2], %[tmp2], %[tmp2] \n\t" - "mul %[tmp7], %[tmp3], %[tmp7] \n\t" - "mul %[tmp3], %[tmp3], %[tmp3] \n\t" - "mul %[tmp8], %[tmp4], %[tmp8] \n\t" - "mul %[tmp4], %[tmp4], %[tmp4] \n\t" - "addiu %[n], %[n], -4 \n\t" - "srav %[tmp5], 
%[tmp5], %[scaling] \n\t" - "srav %[tmp1], %[tmp1], %[scaling] \n\t" - "srav %[tmp6], %[tmp6], %[scaling] \n\t" - "srav %[tmp2], %[tmp2], %[scaling] \n\t" - "srav %[tmp7], %[tmp7], %[scaling] \n\t" - "srav %[tmp3], %[tmp3], %[scaling] \n\t" - "srav %[tmp8], %[tmp8], %[scaling] \n\t" - "srav %[tmp4], %[tmp4], %[scaling] \n\t" - "addu %[ysum32], %[ysum32], %[tmp1] \n\t" - "addu %[csum32], %[csum32], %[tmp5] \n\t" - "addu %[ysum32], %[ysum32], %[tmp2] \n\t" - "addu %[csum32], %[csum32], %[tmp6] \n\t" - "addu %[ysum32], %[ysum32], %[tmp3] \n\t" - "addu %[csum32], %[csum32], %[tmp7] \n\t" - "addu %[ysum32], %[ysum32], %[tmp4] \n\t" - "addu %[csum32], %[csum32], %[tmp8] \n\t" - "addiu %[tmp_in], %[tmp_in], 8 \n\t" - "bgtz %[n], 1b \n\t" - " addiu %[tmp_x], %[tmp_x], 8 \n\t" - ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - [tmp4] "=&r" (tmp4), [tmp5] "=&r" (tmp5), [tmp6] "=&r" (tmp6), - [tmp7] "=&r" (tmp7), [tmp8] "=&r" (tmp8), [tmp_in] "+r" (tmp_in), - [ysum32] "+r" (ysum32), [tmp_x] "+r" (tmp_x), [csum32] "+r" (csum32), - [n] "+r" (n) - : [scaling] "r" (scaling) - : "memory", "hi", "lo" - ); - } - logcorQ8 += PITCH_LAG_SPAN2 - 1; - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum32) >> 1; // Q8, sqrt(ysum) - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 in Q8 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 0; - } - - for (k = 1; k < PITCH_LAG_SPAN2; k++) { - inptr = &in[k]; - const int16_t* tmp_in1 = &in[k - 1]; - const int16_t* tmp_in2 = &in[PITCH_CORR_LEN2 + k - 1]; - const int16_t* tmp_x = x; - int32_t tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8; - n = PITCH_CORR_LEN2; - csum32 = 0; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lh %[tmp1], 0(%[tmp_in1]) \n\t" - "lh %[tmp2], 0(%[tmp_in2]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp1] \n\t" - "mul %[tmp2], 
%[tmp2], %[tmp2] \n\t" - "srav %[tmp1], %[tmp1], %[scaling] \n\t" - "srav %[tmp2], %[tmp2], %[scaling] \n\t" - "subu %[ysum32], %[ysum32], %[tmp1] \n\t" - "bnez %[scaling], 2f \n\t" - " addu %[ysum32], %[ysum32], %[tmp2] \n\t" - "1: \n\t" - "lh %[tmp1], 0(%[inptr]) \n\t" - "lh %[tmp2], 0(%[tmp_x]) \n\t" - "lh %[tmp3], 2(%[inptr]) \n\t" - "lh %[tmp4], 2(%[tmp_x]) \n\t" - "lh %[tmp5], 4(%[inptr]) \n\t" - "lh %[tmp6], 4(%[tmp_x]) \n\t" - "lh %[tmp7], 6(%[inptr]) \n\t" - "lh %[tmp8], 6(%[tmp_x]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp2] \n\t" - "mul %[tmp2], %[tmp3], %[tmp4] \n\t" - "mul %[tmp3], %[tmp5], %[tmp6] \n\t" - "mul %[tmp4], %[tmp7], %[tmp8] \n\t" - "addiu %[n], %[n], -4 \n\t" - "addiu %[inptr], %[inptr], 8 \n\t" - "addiu %[tmp_x], %[tmp_x], 8 \n\t" - "addu %[csum32], %[csum32], %[tmp1] \n\t" - "addu %[csum32], %[csum32], %[tmp2] \n\t" - "addu %[csum32], %[csum32], %[tmp3] \n\t" - "bgtz %[n], 1b \n\t" - " addu %[csum32], %[csum32], %[tmp4] \n\t" - "b 3f \n\t" - " nop \n\t" - "2: \n\t" - "lh %[tmp1], 0(%[inptr]) \n\t" - "lh %[tmp2], 0(%[tmp_x]) \n\t" - "lh %[tmp3], 2(%[inptr]) \n\t" - "lh %[tmp4], 2(%[tmp_x]) \n\t" - "lh %[tmp5], 4(%[inptr]) \n\t" - "lh %[tmp6], 4(%[tmp_x]) \n\t" - "lh %[tmp7], 6(%[inptr]) \n\t" - "lh %[tmp8], 6(%[tmp_x]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp2] \n\t" - "mul %[tmp2], %[tmp3], %[tmp4] \n\t" - "mul %[tmp3], %[tmp5], %[tmp6] \n\t" - "mul %[tmp4], %[tmp7], %[tmp8] \n\t" - "addiu %[n], %[n], -4 \n\t" - "addiu %[inptr], %[inptr], 8 \n\t" - "addiu %[tmp_x], %[tmp_x], 8 \n\t" - "srav %[tmp1], %[tmp1], %[scaling] \n\t" - "srav %[tmp2], %[tmp2], %[scaling] \n\t" - "srav %[tmp3], %[tmp3], %[scaling] \n\t" - "srav %[tmp4], %[tmp4], %[scaling] \n\t" - "addu %[csum32], %[csum32], %[tmp1] \n\t" - "addu %[csum32], %[csum32], %[tmp2] \n\t" - "addu %[csum32], %[csum32], %[tmp3] \n\t" - "bgtz %[n], 2b \n\t" - " addu %[csum32], %[csum32], %[tmp4] \n\t" - "3: \n\t" - ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - 
[tmp4] "=&r" (tmp4), [tmp5] "=&r" (tmp5), [tmp6] "=&r" (tmp6), - [tmp7] "=&r" (tmp7), [tmp8] "=&r" (tmp8), [inptr] "+r" (inptr), - [csum32] "+r" (csum32), [tmp_x] "+r" (tmp_x), [ysum32] "+r" (ysum32), - [n] "+r" (n) - : [tmp_in1] "r" (tmp_in1), [tmp_in2] "r" (tmp_in2), - [scaling] "r" (scaling) - : "memory", "hi", "lo" - ); - - logcorQ8--; - lys = WebRtcIsacfix_Log2Q8((uint32_t)ysum32) >> 1; // Q8, sqrt(ysum) - if (csum32 > 0) { - lcs = WebRtcIsacfix_Log2Q8((uint32_t)csum32); // 2log(csum) in Q8 - if (lcs > (lys + oneQ8)) { // csum/sqrt(ysum) > 2 - *logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum)) - } else { - *logcorQ8 = oneQ8; // 1.00 - } - } else { - *logcorQ8 = 0; - } - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c b/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c deleted file mode 100644 index 735533020e..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/structs.h" -#include "rtc_base/compile_assert_c.h" - -// Number of segments in a pitch subframe. -static const int kSegments = 5; - -// A division factor of 1/5 in Q15. -static const int16_t kDivFactor = 6553; - -// Interpolation coefficients; generated by design_pitch_filter.m. -// Coefficients are stored in Q14. 
-static const int16_t kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = { - {-367, 1090, -2706, 9945, 10596, -3318, 1626, -781, 287}, - {-325, 953, -2292, 7301, 12963, -3320, 1570, -743, 271}, - {-240, 693, -1622, 4634, 14809, -2782, 1262, -587, 212}, - {-125, 358, -817, 2144, 15982, -1668, 721, -329, 118}, - { 0, 0, -1, 1, 16380, 1, -1, 0, 0}, - { 118, -329, 721, -1668, 15982, 2144, -817, 358, -125}, - { 212, -587, 1262, -2782, 14809, 4634, -1622, 693, -240}, - { 271, -743, 1570, -3320, 12963, 7301, -2292, 953, -325} -}; - -static __inline size_t CalcLrIntQ(int16_t fixVal, - int16_t qDomain) { - int32_t roundVal = 1 << (qDomain - 1); - - return (fixVal + roundVal) >> qDomain; -} - -void WebRtcIsacfix_PitchFilter(int16_t* indatQQ, // Q10 if type is 1 or 4, - // Q0 if type is 2. - int16_t* outdatQQ, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12, - int16_t type) { - int k, ind, cnt; - int16_t sign = 1; - int16_t inystateQQ[PITCH_DAMPORDER]; - int16_t ubufQQ[PITCH_INTBUFFSIZE + QLOOKAHEAD]; - const int16_t Gain = 21299; // 1.3 in Q14 - int16_t oldLagQ7; - int16_t oldGainQ12, lagdeltaQ7, curLagQ7, gaindeltaQ12, curGainQ12; - size_t frcQQ = 0; - int32_t indW32 = 0; - const int16_t* fracoeffQQ = NULL; - - // Assumptions in ARM assembly for WebRtcIsacfix_PitchFilterCoreARM(). - RTC_COMPILE_ASSERT(PITCH_FRACORDER == 9); - RTC_COMPILE_ASSERT(PITCH_DAMPORDER == 5); - - // Set up buffer and states. - memcpy(ubufQQ, pfp->ubufQQ, sizeof(pfp->ubufQQ)); - memcpy(inystateQQ, pfp->ystateQQ, sizeof(inystateQQ)); - - // Get old lag and gain value from memory. - oldLagQ7 = pfp->oldlagQ7; - oldGainQ12 = pfp->oldgainQ12; - - if (type == 4) { - sign = -1; - - // Make output more periodic. - for (k = 0; k < PITCH_SUBFRAMES; k++) { - gainsQ12[k] = (int16_t)(gainsQ12[k] * Gain >> 14); - } - } - - // No interpolation if pitch lag step is big. 
- if (((lagsQ7[0] * 3 >> 1) < oldLagQ7) || (lagsQ7[0] > (oldLagQ7 * 3 >> 1))) { - oldLagQ7 = lagsQ7[0]; - oldGainQ12 = gainsQ12[0]; - } - - ind = 0; - - for (k = 0; k < PITCH_SUBFRAMES; k++) { - // Calculate interpolation steps. - lagdeltaQ7 = lagsQ7[k] - oldLagQ7; - lagdeltaQ7 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - lagdeltaQ7, kDivFactor, 15); - curLagQ7 = oldLagQ7; - gaindeltaQ12 = gainsQ12[k] - oldGainQ12; - gaindeltaQ12 = (int16_t)(gaindeltaQ12 * kDivFactor >> 15); - - curGainQ12 = oldGainQ12; - oldLagQ7 = lagsQ7[k]; - oldGainQ12 = gainsQ12[k]; - - // Each frame has 4 60-sample pitch subframes, and each subframe has 5 - // 12-sample segments. Each segment need to be processed with - // newly-updated parameters, so we break the pitch filtering into - // two for-loops (5 x 12) below. It's also why kDivFactor = 0.2 (in Q15). - for (cnt = 0; cnt < kSegments; cnt++) { - // Update parameters for each segment. - curGainQ12 += gaindeltaQ12; - curLagQ7 += lagdeltaQ7; - indW32 = CalcLrIntQ(curLagQ7, 7); - if (indW32 < PITCH_FRACORDER - 2) { - // WebRtcIsacfix_PitchFilterCore requires indW32 >= PITCH_FRACORDER - - // 2; otherwise, it will read from entries of ubufQQ that haven't been - // written yet. (This problem has only been seen in fuzzer tests, not - // in real life.) See Chromium bug 581901. - indW32 = PITCH_FRACORDER - 2; - } - frcQQ = ((indW32 << 7) + 64 - curLagQ7) >> 4; - - if (frcQQ >= PITCH_FRACS) { - frcQQ = 0; - } - fracoeffQQ = kIntrpCoef[frcQQ]; - - // Pitch filtering. - WebRtcIsacfix_PitchFilterCore(PITCH_SUBFRAME_LEN / kSegments, curGainQ12, - indW32, sign, inystateQQ, ubufQQ, fracoeffQQ, indatQQ, outdatQQ, &ind); - } - } - - // Export buffer and states. - memcpy(pfp->ubufQQ, ubufQQ + PITCH_FRAME_LEN, sizeof(pfp->ubufQQ)); - memcpy(pfp->ystateQQ, inystateQQ, sizeof(pfp->ystateQQ)); - - pfp->oldlagQ7 = oldLagQ7; - pfp->oldgainQ12 = oldGainQ12; - - if (type == 2) { - // Filter look-ahead segment. 
- WebRtcIsacfix_PitchFilterCore(QLOOKAHEAD, curGainQ12, indW32, 1, inystateQQ, - ubufQQ, fracoeffQQ, indatQQ, outdatQQ, &ind); - } -} - - -void WebRtcIsacfix_PitchFilterGains(const int16_t* indatQ0, - PitchFiltstr* pfp, - int16_t* lagsQ7, - int16_t* gainsQ12) { - int k, n, m; - size_t ind, pos, pos3QQ; - - int16_t ubufQQ[PITCH_INTBUFFSIZE]; - int16_t oldLagQ7, lagdeltaQ7, curLagQ7; - const int16_t* fracoeffQQ = NULL; - int16_t scale; - int16_t cnt = 0, tmpW16; - size_t frcQQ, indW16 = 0; - int32_t tmpW32, tmp2W32, csum1QQ, esumxQQ; - - // Set up buffer and states. - memcpy(ubufQQ, pfp->ubufQQ, sizeof(pfp->ubufQQ)); - oldLagQ7 = pfp->oldlagQ7; - - // No interpolation if pitch lag step is big. - if (((lagsQ7[0] * 3 >> 1) < oldLagQ7) || (lagsQ7[0] > (oldLagQ7 * 3 >> 1))) { - oldLagQ7 = lagsQ7[0]; - } - - ind = 0; - pos = ind + PITCH_BUFFSIZE; - scale = 0; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - - // Calculate interpolation steps. - lagdeltaQ7 = lagsQ7[k] - oldLagQ7; - lagdeltaQ7 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - lagdeltaQ7, kDivFactor, 15); - curLagQ7 = oldLagQ7; - oldLagQ7 = lagsQ7[k]; - - csum1QQ = 1; - esumxQQ = 1; - - // Same as function WebRtcIsacfix_PitchFilter(), we break the pitch - // filtering into two for-loops (5 x 12) below. - for (cnt = 0; cnt < kSegments; cnt++) { - // Update parameters for each segment. - curLagQ7 += lagdeltaQ7; - indW16 = CalcLrIntQ(curLagQ7, 7); - frcQQ = ((indW16 << 7) + 64 - curLagQ7) >> 4; - - if (frcQQ >= PITCH_FRACS) { - frcQQ = 0; - } - fracoeffQQ = kIntrpCoef[frcQQ]; - - pos3QQ = pos - (indW16 + 4); - - for (n = 0; n < PITCH_SUBFRAME_LEN / kSegments; n++) { - // Filter to get fractional pitch. - - tmpW32 = 0; - for (m = 0; m < PITCH_FRACORDER; m++) { - tmpW32 += ubufQQ[pos3QQ + m] * fracoeffQQ[m]; - } - - // Subtract from input and update buffer. 
- ubufQQ[pos] = indatQ0[ind]; - - tmp2W32 = WEBRTC_SPL_MUL_16_32_RSFT14(indatQ0[ind], tmpW32); - tmpW32 += 8192; - tmpW16 = tmpW32 >> 14; - tmpW32 = tmpW16 * tmpW16; - - if ((tmp2W32 > 1073700000) || (csum1QQ > 1073700000) || - (tmpW32 > 1073700000) || (esumxQQ > 1073700000)) { // 2^30 - scale++; - csum1QQ >>= 1; - esumxQQ >>= 1; - } - csum1QQ += tmp2W32 >> scale; - esumxQQ += tmpW32 >> scale; - - ind++; - pos++; - pos3QQ++; - } - } - - if (csum1QQ < esumxQQ) { - tmp2W32 = WebRtcSpl_DivResultInQ31(csum1QQ, esumxQQ); - - // Gain should be half the correlation. - tmpW32 = tmp2W32 >> 20; - } else { - tmpW32 = 4096; - } - gainsQ12[k] = (int16_t)WEBRTC_SPL_SAT(PITCH_MAX_GAIN_Q12, tmpW32, 0); - } - - // Export buffer and states. - memcpy(pfp->ubufQQ, ubufQQ + PITCH_FRAME_LEN, sizeof(pfp->ubufQQ)); - pfp->oldlagQ7 = lagsQ7[PITCH_SUBFRAMES - 1]; - pfp->oldgainQ12 = gainsQ12[PITCH_SUBFRAMES - 1]; - -} diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S b/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S deleted file mode 100644 index 065946856f..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S +++ /dev/null @@ -1,143 +0,0 @@ -@ -@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. -@ -@ Use of this source code is governed by a BSD-style license -@ that can be found in the LICENSE file in the root of the source -@ tree. An additional intellectual property rights grant can be found -@ in the file PATENTS. All contributing project authors may -@ be found in the AUTHORS file in the root of the source tree. -@ - -@ Contains the core loop routine for the pitch filter function in iSAC, -@ optimized for ARMv7 platforms. -@ -@ Output is bit-exact with the reference C code in pitch_filter.c. 
- -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "rtc_base/system/asm_defines.h" - -GLOBAL_FUNCTION WebRtcIsacfix_PitchFilterCore -.align 2 - -@ void WebRtcIsacfix_PitchFilterCore(int loopNumber, -@ int16_t gain, -@ size_t index, -@ int16_t sign, -@ int16_t* inputState, -@ int16_t* outputBuf2, -@ const int16_t* coefficient, -@ int16_t* inputBuf, -@ int16_t* outputBuf, -@ int* index2) { -DEFINE_FUNCTION WebRtcIsacfix_PitchFilterCore - push {r4-r11} - sub sp, #8 - - str r0, [sp] @ loopNumber - str r3, [sp, #4] @ sign - ldr r3, [sp, #44] @ outputBuf2 - ldr r6, [sp, #60] @ index2 - ldr r7, [r6] @ *index2 - ldr r8, [sp, #52] @ inputBuf - ldr r12, [sp, #56] @ outputBuf - - add r4, r7, r0 - str r4, [r6] @ Store return value to index2. - - mov r10, r7, asl #1 - add r12, r10 @ &outputBuf[*index2] - add r8, r10 @ &inputBuf[*index2] - - add r4, r7, #PITCH_BUFFSIZE @ *index2 + PITCH_BUFFSIZE - add r6, r3, r4, lsl #1 @ &outputBuf2[*index2 + PITCH_BUFFSIZE] - sub r4, r2 @ r2: index - sub r4, #2 @ *index2 + PITCH_BUFFSIZE - index - 2 - add r3, r4, lsl #1 @ &ubufQQpos2[*index2] - ldr r9, [sp, #48] @ coefficient - -LOOP: -@ Usage of registers in the loop: -@ r0: loop counter -@ r1: gain -@ r2: tmpW32 -@ r3: &ubufQQpos2[] -@ r6: &outputBuf2[] -@ r8: &inputBuf[] -@ r9: &coefficient[] -@ r12: &outputBuf[] -@ r4, r5, r7, r10, r11: scratch - - @ Filter to get fractional pitch. - @ The pitch filter loop here is unrolled with 9 multipications. - pld [r3] - ldr r10, [r3], #4 @ ubufQQpos2[*index2 + 0, *index2 + 1] - ldr r4, [r9], #4 @ coefficient[0, 1] - ldr r11, [r3], #4 - ldr r5, [r9], #4 - smuad r2, r10, r4 - smlad r2, r11, r5, r2 - - ldr r10, [r3], #4 - ldr r4, [r9], #4 - ldr r11, [r3], #4 - ldr r5, [r9], #4 - smlad r2, r10, r4, r2 - ldrh r10, [r3], #-14 @ r3 back to &ubufQQpos2[*index2]. - ldrh r4, [r9], #-16 @ r9 back to &coefficient[0]. - smlad r2, r11, r5, r2 - smlabb r2, r10, r4, r2 - - @ Saturate to avoid overflow in tmpW16. 
- asr r2, #1 - add r4, r2, #0x1000 - ssat r7, #16, r4, asr #13 - - @ Shift low pass filter state, and excute the low pass filter. - @ The memmove() and the low pass filter loop are unrolled and mixed. - smulbb r5, r1, r7 - add r7, r5, #0x800 - asr r7, #12 @ Get the value for inputState[0]. - ldr r11, [sp, #40] @ inputState - pld [r11] - adr r10, kDampFilter - ldrsh r4, [r10], #2 @ kDampFilter[0] - mul r2, r7, r4 - ldr r4, [r11] @ inputState[0, 1], before shift. - strh r7, [r11] @ inputState[0], after shift. - ldr r5, [r11, #4] @ inputState[2, 3], before shift. - ldr r7, [r10], #4 @ kDampFilter[1, 2] - ldr r10, [r10] @ kDampFilter[3, 4] - str r4, [r11, #2] @ inputState[1, 2], after shift. - str r5, [r11, #6] @ inputState[3, 4], after shift. - smlad r2, r4, r7, r2 - smlad r2, r5, r10, r2 - - @ Saturate to avoid overflow. - @ First shift the sample to the range of [0xC0000000, 0x3FFFFFFF], - @ to avoid overflow in the next saturation step. - asr r2, #1 - add r10, r2, #0x2000 - ssat r10, #16, r10, asr #14 - - @ Subtract from input and update buffer. - ldr r11, [sp, #4] @ sign - ldrsh r4, [r8] - ldrsh r7, [r8], #2 @ inputBuf[*index2] - smulbb r5, r11, r10 - subs r0, #1 - sub r4, r5 - ssat r2, #16, r4 - strh r2, [r12], #2 @ outputBuf[*index2] - - add r2, r7 - ssat r2, #16, r2 - strh r2, [r6], #2 @ outputBuff2[*index2 + PITCH_BUFFSIZE] - bgt LOOP - - add sp, #8 - pop {r4-r11} - bx lr - -.align 2 -kDampFilter: - .short -2294, 8192, 20972, 8192, -2294 diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c b/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c deleted file mode 100644 index f23d19de9c..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - -/* Filter coefficicients in Q15. */ -static const int16_t kDampFilter[PITCH_DAMPORDER] = { - -2294, 8192, 20972, 8192, -2294 -}; - -void WebRtcIsacfix_PitchFilterCore(int loopNumber, - int16_t gain, - size_t index, - int16_t sign, - int16_t* inputState, - int16_t* outputBuf2, - const int16_t* coefficient, - int16_t* inputBuf, - int16_t* outputBuf, - int* index2) { - int i = 0, j = 0; /* Loop counters. */ - int16_t* ubufQQpos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)]; - int16_t tmpW16 = 0; - - for (i = 0; i < loopNumber; i++) { - int32_t tmpW32 = 0; - - /* Filter to get fractional pitch. */ - for (j = 0; j < PITCH_FRACORDER; j++) { - tmpW32 += ubufQQpos2[*index2 + j] * coefficient[j]; - } - - /* Saturate to avoid overflow in tmpW16. */ - tmpW32 = WEBRTC_SPL_SAT(536862719, tmpW32, -536879104); - tmpW32 += 8192; - tmpW16 = (int16_t)(tmpW32 >> 14); - - /* Shift low pass filter state. */ - memmove(&inputState[1], &inputState[0], - (PITCH_DAMPORDER - 1) * sizeof(int16_t)); - inputState[0] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( - gain, tmpW16, 12); - - /* Low pass filter. */ - tmpW32 = 0; - /* TODO(kma): Define a static inline function WebRtcSpl_DotProduct() - in spl_inl.h to replace this and other similar loops. */ - for (j = 0; j < PITCH_DAMPORDER; j++) { - tmpW32 += inputState[j] * kDampFilter[j]; - } - - /* Saturate to avoid overflow in tmpW16. */ - tmpW32 = WEBRTC_SPL_SAT(1073725439, tmpW32, -1073758208); - tmpW32 += 16384; - tmpW16 = (int16_t)(tmpW32 >> 15); - - /* Subtract from input and update buffer. 
*/ - tmpW32 = inputBuf[*index2] - sign * tmpW16; - outputBuf[*index2] = WebRtcSpl_SatW32ToW16(tmpW32); - tmpW32 = inputBuf[*index2] + outputBuf[*index2]; - outputBuf2[*index2 + PITCH_BUFFSIZE] = WebRtcSpl_SatW32ToW16(tmpW32); - - (*index2)++; - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c b/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c deleted file mode 100644 index 785fd9464f..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h" - -void WebRtcIsacfix_PitchFilterCore(int loopNumber, - int16_t gain, - size_t index, - int16_t sign, - int16_t* inputState, - int16_t* outputBuf2, - const int16_t* coefficient, - int16_t* inputBuf, - int16_t* outputBuf, - int* index2) { - int ind2t = *index2; - int i = 0; - int16_t* out2_pos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)] + ind2t; - int32_t w1, w2, w3, w4, w5, gain32, sign32; - int32_t coef1, coef2, coef3, coef4, coef5 = 0; - // Define damp factors as int32_t (pair of int16_t) - int32_t kDampF0 = 0x0000F70A; - int32_t kDampF1 = 0x51EC2000; - int32_t kDampF2 = 0xF70A2000; - int16_t* input1 = inputBuf + ind2t; - int16_t* output1 = outputBuf + ind2t; - int16_t* output2 = outputBuf2 + ind2t + PITCH_BUFFSIZE; - - // Load coefficients outside the loop and sign-extend gain and sign - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "lwl %[coef1], 3(%[coefficient]) \n\t" - "lwl %[coef2], 7(%[coefficient]) \n\t" - "lwl 
%[coef3], 11(%[coefficient]) \n\t" - "lwl %[coef4], 15(%[coefficient]) \n\t" - "lwr %[coef1], 0(%[coefficient]) \n\t" - "lwr %[coef2], 4(%[coefficient]) \n\t" - "lwr %[coef3], 8(%[coefficient]) \n\t" - "lwr %[coef4], 12(%[coefficient]) \n\t" - "lhu %[coef5], 16(%[coefficient]) \n\t" - "seh %[gain32], %[gain] \n\t" - "seh %[sign32], %[sign] \n\t" - ".set pop \n\t" - : [coef1] "=&r" (coef1), [coef2] "=&r" (coef2), [coef3] "=&r" (coef3), - [coef4] "=&r" (coef4), [coef5] "=&r" (coef5), [gain32] "=&r" (gain32), - [sign32] "=&r" (sign32) - : [coefficient] "r" (coefficient), [gain] "r" (gain), - [sign] "r" (sign) - : "memory" - ); - - for (i = 0; i < loopNumber; i++) { - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - // Filter to get fractional pitch - "li %[w1], 8192 \n\t" - "mtlo %[w1] \n\t" - "mthi $0 \n\t" - "lwl %[w1], 3(%[out2_pos2]) \n\t" - "lwl %[w2], 7(%[out2_pos2]) \n\t" - "lwl %[w3], 11(%[out2_pos2]) \n\t" - "lwl %[w4], 15(%[out2_pos2]) \n\t" - "lwr %[w1], 0(%[out2_pos2]) \n\t" - "lwr %[w2], 4(%[out2_pos2]) \n\t" - "lwr %[w3], 8(%[out2_pos2]) \n\t" - "lwr %[w4], 12(%[out2_pos2]) \n\t" - "lhu %[w5], 16(%[out2_pos2]) \n\t" - "dpa.w.ph $ac0, %[w1], %[coef1] \n\t" - "dpa.w.ph $ac0, %[w2], %[coef2] \n\t" - "dpa.w.ph $ac0, %[w3], %[coef3] \n\t" - "dpa.w.ph $ac0, %[w4], %[coef4] \n\t" - "dpa.w.ph $ac0, %[w5], %[coef5] \n\t" - "addiu %[out2_pos2], %[out2_pos2], 2 \n\t" - "mthi $0, $ac1 \n\t" - "lwl %[w2], 3(%[inputState]) \n\t" - "lwl %[w3], 7(%[inputState]) \n\t" - // Fractional pitch shift & saturation - "extr_s.h %[w1], $ac0, 14 \n\t" - "li %[w4], 16384 \n\t" - "lwr %[w2], 0(%[inputState]) \n\t" - "lwr %[w3], 4(%[inputState]) \n\t" - "mtlo %[w4], $ac1 \n\t" - // Shift low pass filter state - "swl %[w2], 5(%[inputState]) \n\t" - "swl %[w3], 9(%[inputState]) \n\t" - "mul %[w1], %[gain32], %[w1] \n\t" - "swr %[w2], 2(%[inputState]) \n\t" - "swr %[w3], 6(%[inputState]) \n\t" - // Low pass filter accumulation - "dpa.w.ph $ac1, %[kDampF1], %[w2] \n\t" - 
"dpa.w.ph $ac1, %[kDampF2], %[w3] \n\t" - "lh %[w4], 0(%[input1]) \n\t" - "addiu %[input1], %[input1], 2 \n\t" - "shra_r.w %[w1], %[w1], 12 \n\t" - "sh %[w1], 0(%[inputState]) \n\t" - "dpa.w.ph $ac1, %[kDampF0], %[w1] \n\t" - // Low pass filter shift & saturation - "extr_s.h %[w2], $ac1, 15 \n\t" - "mul %[w2], %[w2], %[sign32] \n\t" - // Buffer update - "subu %[w2], %[w4], %[w2] \n\t" - "shll_s.w %[w2], %[w2], 16 \n\t" - "sra %[w2], %[w2], 16 \n\t" - "sh %[w2], 0(%[output1]) \n\t" - "addu %[w2], %[w2], %[w4] \n\t" - "shll_s.w %[w2], %[w2], 16 \n\t" - "addiu %[output1], %[output1], 2 \n\t" - "sra %[w2], %[w2], 16 \n\t" - "sh %[w2], 0(%[output2]) \n\t" - "addiu %[output2], %[output2], 2 \n\t" - ".set pop \n\t" - : [w1] "=&r" (w1), [w2] "=&r" (w2), [w3] "=&r" (w3), [w4] "=&r" (w4), - [w5] "=&r" (w5), [input1] "+r" (input1), [out2_pos2] "+r" (out2_pos2), - [output1] "+r" (output1), [output2] "+r" (output2) - : [coefficient] "r" (coefficient), [inputState] "r" (inputState), - [gain32] "r" (gain32), [sign32] "r" (sign32), [kDampF0] "r" (kDampF0), - [kDampF1] "r" (kDampF1), [kDampF2] "r" (kDampF2), - [coef1] "r" (coef1), [coef2] "r" (coef2), [coef3] "r" (coef3), - [coef4] "r" (coef4), [coef5] "r" (coef5) - : "hi", "lo", "$ac1hi", "$ac1lo", "memory" - ); - } - (*index2) += loopNumber; -} diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c b/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c deleted file mode 100644 index bfbab1950d..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_gain_tables.c - * - * This file contains tables for the pitch filter side-info in the entropy coder. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h" - - -/********************* Pitch Filter Gain Coefficient Tables ************************/ - -/* cdf for quantized pitch filter gains */ -const uint16_t WebRtcIsacfix_kPitchGainCdf[255] = { - 0, 2, 4, 6, 64, 901, 903, 905, 16954, 16956, - 16961, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17411, - 17514, 17516, 17583, 18790, 18796, 18802, 20760, 20777, 20782, 21722, - 21724, 21728, 21738, 21740, 21742, 21744, 21746, 21748, 22224, 22227, - 22230, 23214, 23229, 23239, 25086, 25108, 25120, 26088, 26094, 26098, - 26175, 26177, 26179, 26181, 26183, 26185, 26484, 26507, 26522, 27705, - 27731, 27750, 29767, 29799, 29817, 30866, 30883, 30885, 31025, 31029, - 31031, 31033, 31035, 31037, 31114, 31126, 31134, 32687, 32722, 32767, - 35718, 35742, 35757, 36943, 36952, 36954, 37115, 37128, 37130, 37132, - 37134, 37136, 37143, 37145, 37152, 38843, 38863, 38897, 47458, 47467, - 47474, 49040, 49061, 49063, 49145, 49157, 49159, 49161, 49163, 49165, - 49167, 49169, 49171, 49757, 49770, 49782, 61333, 61344, 61346, 62860, - 62883, 62885, 62887, 62889, 62891, 62893, 62895, 62897, 62899, 62901, - 62903, 62905, 62907, 62909, 65496, 65498, 65500, 65521, 65523, 65525, - 65527, 65529, 65531, 65533, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 
65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535 -}; - -/* index limits and ranges */ -const int16_t WebRtcIsacfix_kLowerlimiGain[3] = { - -7, -2, -1 -}; - -const int16_t WebRtcIsacfix_kUpperlimitGain[3] = { - 0, 3, 1 -}; - -const uint16_t WebRtcIsacfix_kMultsGain[2] = { - 18, 3 -}; - -/* size of cdf table */ -const uint16_t WebRtcIsacfix_kCdfTableSizeGain[1] = { - 256 -}; - -/* mean values of pitch filter gains in FIXED point Q12 */ -const int16_t WebRtcIsacfix_kPitchGain1[144] = { - 843, 1092, 1336, 1222, 1405, 1656, 1500, 1815, 1843, 1838, 1839, - 1843, 1843, 1843, 1843, 1843, 1843, 1843, 814, 846, 1092, 1013, - 1174, 1383, 1391, 1511, 1584, 1734, 1753, 1843, 1843, 1843, 1843, - 1843, 1843, 1843, 524, 689, 777, 845, 947, 1069, 1090, 1263, - 1380, 1447, 1559, 1676, 1645, 1749, 1843, 1843, 1843, 1843, 81, - 477, 563, 611, 706, 806, 849, 1012, 1192, 1128, 1330, 1489, - 1425, 1576, 1826, 1741, 1843, 1843, 0, 290, 305, 356, 488, - 575, 602, 741, 890, 835, 1079, 1196, 1182, 1376, 1519, 1506, - 1680, 1843, 0, 47, 97, 69, 289, 381, 385, 474, 617, - 664, 803, 1079, 935, 1160, 1269, 1265, 1506, 1741, 0, 0, - 0, 0, 112, 120, 190, 283, 442, 343, 526, 809, 684, - 935, 1134, 1020, 1265, 1506, 0, 0, 0, 0, 0, 0, - 0, 111, 256, 87, 373, 597, 430, 684, 935, 770, 1020, - 1265 -}; - -const int16_t WebRtcIsacfix_kPitchGain2[144] = { - 1760, 1525, 1285, 1747, 1671, 1393, 1843, 1826, 1555, 1843, 1784, - 1606, 1843, 1843, 1711, 1843, 1843, 1814, 1389, 1275, 1040, 1564, - 1414, 1252, 1610, 1495, 1343, 1753, 1592, 1405, 1804, 1720, 1475, - 1843, 1814, 1581, 1208, 1061, 856, 1349, 1148, 994, 1390, 1253, - 1111, 1495, 1343, 1178, 1770, 1465, 1234, 1814, 1581, 1342, 1040, - 793, 713, 1053, 895, 737, 1128, 1003, 861, 1277, 1094, 
981, - 1475, 1192, 1019, 1581, 1342, 1098, 855, 570, 483, 833, 648, - 540, 948, 744, 572, 1009, 844, 636, 1234, 934, 685, 1342, - 1217, 984, 537, 318, 124, 603, 423, 350, 687, 479, 322, - 791, 581, 430, 987, 671, 488, 1098, 849, 597, 283, 27, - 0, 397, 222, 38, 513, 271, 124, 624, 325, 157, 737, - 484, 233, 849, 597, 343, 27, 0, 0, 141, 0, 0, - 256, 69, 0, 370, 87, 0, 484, 229, 0, 597, 343, - 87 -}; - -const int16_t WebRtcIsacfix_kPitchGain3[144] = { - 1843, 1843, 1711, 1843, 1818, 1606, 1843, 1827, 1511, 1814, 1639, - 1393, 1760, 1525, 1285, 1656, 1419, 1176, 1835, 1718, 1475, 1841, - 1650, 1387, 1648, 1498, 1287, 1600, 1411, 1176, 1522, 1299, 1040, - 1419, 1176, 928, 1773, 1461, 1128, 1532, 1355, 1202, 1429, 1260, - 1115, 1398, 1151, 1025, 1172, 1080, 790, 1176, 928, 677, 1475, - 1147, 1019, 1276, 1096, 922, 1214, 1010, 901, 1057, 893, 800, - 1040, 796, 734, 928, 677, 424, 1137, 897, 753, 1120, 830, - 710, 875, 751, 601, 795, 642, 583, 790, 544, 475, 677, - 474, 140, 987, 750, 482, 697, 573, 450, 691, 487, 303, - 661, 394, 332, 537, 303, 220, 424, 168, 0, 737, 484, - 229, 624, 348, 153, 441, 261, 136, 397, 166, 51, 283, - 27, 0, 168, 0, 0, 484, 229, 0, 370, 57, 0, - 256, 43, 0, 141, 0, 0, 27, 0, 0, 0, 0, - 0 -}; - - -const int16_t WebRtcIsacfix_kPitchGain4[144] = { - 1843, 1843, 1843, 1843, 1841, 1843, 1500, 1821, 1843, 1222, 1434, - 1656, 843, 1092, 1336, 504, 757, 1007, 1843, 1843, 1843, 1838, - 1791, 1843, 1265, 1505, 1599, 965, 1219, 1425, 730, 821, 1092, - 249, 504, 757, 1783, 1819, 1843, 1351, 1567, 1727, 1096, 1268, - 1409, 805, 961, 1131, 444, 670, 843, 0, 249, 504, 1425, - 1655, 1743, 1096, 1324, 1448, 822, 1019, 1199, 490, 704, 867, - 81, 450, 555, 0, 0, 249, 1247, 1428, 1530, 881, 1073, - 1283, 610, 759, 939, 278, 464, 645, 0, 200, 270, 0, - 0, 0, 935, 1163, 1410, 528, 790, 1068, 377, 499, 717, - 173, 240, 274, 0, 43, 62, 0, 0, 0, 684, 935, - 1182, 343, 551, 735, 161, 262, 423, 0, 55, 27, 0, - 0, 0, 0, 0, 0, 430, 684, 935, 87, 377, 597, - 0, 46, 256, 
0, 0, 0, 0, 0, 0, 0, 0, - 0 -}; - - - -/* transform matrix in Q12*/ -const int16_t WebRtcIsacfix_kTransform[4][4] = { - { -2048, -2048, -2048, -2048 }, - { 2748, 916, -916, -2748 }, - { 2048, -2048, -2048, 2048 }, - { 916, -2748, 2748, -916 } -}; diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h b/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h deleted file mode 100644 index 59e1738bce..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_gain_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ - -#include - -/********************* Pitch Filter Gain Coefficient Tables - * ************************/ -/* cdf for quantized pitch filter gains */ -extern const uint16_t WebRtcIsacfix_kPitchGainCdf[255]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerlimiGain[3]; -extern const int16_t WebRtcIsacfix_kUpperlimitGain[3]; -extern const uint16_t WebRtcIsacfix_kMultsGain[2]; - -/* mean values of pitch filter gains in Q12*/ -extern const int16_t WebRtcIsacfix_kPitchGain1[144]; -extern const int16_t WebRtcIsacfix_kPitchGain2[144]; -extern const int16_t WebRtcIsacfix_kPitchGain3[144]; -extern const int16_t WebRtcIsacfix_kPitchGain4[144]; - -/* size of cdf table */ -extern const uint16_t WebRtcIsacfix_kCdfTableSizeGain[1]; - -/* transform matrix */ -extern const int16_t WebRtcIsacfix_kTransform[4][4]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c b/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c deleted file mode 100644 index 894716e739..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_lag_tables.c - * - * This file contains tables for the pitch filter side-info in the entropy coder. 
- * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h" - - -/********************* Pitch Filter Gain Coefficient Tables ************************/ - -/* tables for use with small pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsacfix_kPitchLagCdf1Lo[127] = { - 0, 134, 336, 549, 778, 998, 1264, 1512, 1777, 2070, - 2423, 2794, 3051, 3361, 3708, 3979, 4315, 4610, 4933, 5269, - 5575, 5896, 6155, 6480, 6816, 7129, 7477, 7764, 8061, 8358, - 8718, 9020, 9390, 9783, 10177, 10543, 10885, 11342, 11795, 12213, - 12680, 13096, 13524, 13919, 14436, 14903, 15349, 15795, 16267, 16734, - 17266, 17697, 18130, 18632, 19080, 19447, 19884, 20315, 20735, 21288, - 21764, 22264, 22723, 23193, 23680, 24111, 24557, 25022, 25537, 26082, - 26543, 27090, 27620, 28139, 28652, 29149, 29634, 30175, 30692, 31273, - 31866, 32506, 33059, 33650, 34296, 34955, 35629, 36295, 36967, 37726, - 38559, 39458, 40364, 41293, 42256, 43215, 44231, 45253, 46274, 47359, - 48482, 49678, 50810, 51853, 53016, 54148, 55235, 56263, 57282, 58363, - 59288, 60179, 61076, 61806, 62474, 63129, 63656, 64160, 64533, 64856, - 65152, 65535, 65535, 65535, 65535, 65535, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf2Lo[20] = { - 0, 429, 3558, 5861, 8558, 11639, 15210, 19502, 24773, 31983, - 42602, 48567, 52601, 55676, 58160, 60172, 61889, 63235, 65383, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf3Lo[2] = { - 0, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf4Lo[10] = { - 0, 2966, 6368, 11182, 19431, 37793, 48532, 55353, 60626, 65535 -}; - -const uint16_t *WebRtcIsacfix_kPitchLagPtrLo[4] = { - WebRtcIsacfix_kPitchLagCdf1Lo, - WebRtcIsacfix_kPitchLagCdf2Lo, - WebRtcIsacfix_kPitchLagCdf3Lo, - WebRtcIsacfix_kPitchLagCdf4Lo -}; - -/* size of first cdf table */ -const uint16_t WebRtcIsacfix_kPitchLagSizeLo[1] = { - 128 -}; - -/* index limits and ranges */ -const int16_t 
WebRtcIsacfix_kLowerLimitLo[4] = { - -140, -9, 0, -4 -}; - -const int16_t WebRtcIsacfix_kUpperLimitLo[4] = { - -20, 9, 0, 4 -}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsacfix_kInitIndLo[3] = { - 10, 1, 5 -}; - -/* mean values of pitch filter lags in Q10 */ - -const int16_t WebRtcIsacfix_kMeanLag2Lo[19] = { - -17627, -16207, -14409, -12319, -10253, -8200, -6054, -3986, -1948, -19, - 1937, 3974, 6064, 8155, 10229, 12270, 14296, 16127, 17520 -}; - -const int16_t WebRtcIsacfix_kMeanLag4Lo[9] = { - -7949, -6063, -4036, -1941, 38, 1977, 4060, 6059 -}; - - - -/* tables for use with medium pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsacfix_kPitchLagCdf1Mid[255] = { - 0, 28, 61, 88, 121, 149, 233, 331, 475, 559, - 624, 661, 689, 712, 745, 791, 815, 843, 866, 922, - 959, 1024, 1061, 1117, 1178, 1238, 1280, 1350, 1453, 1513, - 1564, 1625, 1671, 1741, 1788, 1904, 2072, 2421, 2626, 2770, - 2840, 2900, 2942, 3012, 3068, 3115, 3147, 3194, 3254, 3319, - 3366, 3520, 3678, 3780, 3850, 3911, 3957, 4032, 4106, 4185, - 4292, 4474, 4683, 4842, 5019, 5191, 5321, 5428, 5540, 5675, - 5763, 5847, 5959, 6127, 6304, 6564, 6839, 7090, 7263, 7421, - 7556, 7728, 7872, 7984, 8142, 8361, 8580, 8743, 8938, 9227, - 9409, 9539, 9674, 9795, 9930, 10060, 10177, 10382, 10614, 10861, - 11038, 11271, 11415, 11629, 11792, 12044, 12193, 12416, 12574, 12821, - 13007, 13235, 13445, 13654, 13901, 14134, 14488, 15000, 15703, 16285, - 16504, 16797, 17086, 17328, 17579, 17807, 17998, 18268, 18538, 18836, - 19087, 19274, 19474, 19716, 19935, 20270, 20833, 21303, 21532, 21741, - 21978, 22207, 22523, 22770, 23054, 23613, 23943, 24204, 24399, 24651, - 24832, 25074, 25270, 25549, 25759, 26015, 26150, 26424, 26713, 27048, - 27342, 27504, 27681, 27854, 28021, 28207, 28412, 28664, 28859, 29064, - 29278, 29548, 29748, 30107, 30377, 30656, 30856, 31164, 31452, 31755, - 32011, 32328, 32626, 32919, 33319, 33789, 34329, 34925, 35396, 35973, - 36443, 36964, 
37551, 38156, 38724, 39357, 40023, 40908, 41587, 42602, - 43924, 45037, 45810, 46597, 47421, 48291, 49092, 50051, 51448, 52719, - 53440, 54241, 54944, 55977, 56676, 57299, 57872, 58389, 59059, 59688, - 60237, 60782, 61094, 61573, 61890, 62290, 62658, 63030, 63217, 63454, - 63622, 63882, 64003, 64273, 64427, 64529, 64581, 64697, 64758, 64902, - 65414, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf2Mid[36] = { - 0, 71, 335, 581, 836, 1039, 1323, 1795, 2258, 2608, - 3005, 3591, 4243, 5344, 7163, 10583, 16848, 28078, 49448, 57007, - 60357, 61850, 62837, 63437, 63872, 64188, 64377, 64614, 64774, 64949, - 65039, 65115, 65223, 65360, 65474, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf3Mid[2] = { - 0, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf4Mid[20] = { - 0, 28, 246, 459, 667, 1045, 1523, 2337, 4337, 11347, - 44231, 56709, 60781, 62243, 63161, 63969, 64608, 65062, 65502, 65535 -}; - -const uint16_t *WebRtcIsacfix_kPitchLagPtrMid[4] = { - WebRtcIsacfix_kPitchLagCdf1Mid, - WebRtcIsacfix_kPitchLagCdf2Mid, - WebRtcIsacfix_kPitchLagCdf3Mid, - WebRtcIsacfix_kPitchLagCdf4Mid -}; - -/* size of first cdf table */ -const uint16_t WebRtcIsacfix_kPitchLagSizeMid[1] = { - 256 -}; - -/* index limits and ranges */ -const int16_t WebRtcIsacfix_kLowerLimitMid[4] = { - -280, -17, 0, -9 -}; - -const int16_t WebRtcIsacfix_kUpperLimitMid[4] = { - -40, 17, 0, 9 -}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsacfix_kInitIndMid[3] = { - 18, 1, 10 -}; - -/* mean values of pitch filter lags in Q10 */ - -const int16_t WebRtcIsacfix_kMeanLag2Mid[35] = { - -17297, -16250, -15416, -14343, -13341, -12363, -11270, - -10355, -9122, -8217, -7172, -6083, -5102, -4004, -3060, - -1982, -952, -18, 935, 1976, 3040, 4032, - 5082, 6065, 7257, 8202, 9264, 10225, 11242, - 12234, 13337, 14336, 15374, 16187, 17347 -}; - - -const int16_t WebRtcIsacfix_kMeanLag4Mid[19] = 
{ - -8811, -8081, -7203, -6003, -5057, -4025, -2983, -1964, - -891, 29, 921, 1920, 2988, 4064, 5187, 6079, 7173, 8074, 8849 -}; - - -/* tables for use with large pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsacfix_kPitchLagCdf1Hi[511] = { - 0, 7, 18, 33, 69, 105, 156, 228, 315, 612, - 680, 691, 709, 724, 735, 738, 742, 746, 749, 753, - 756, 760, 764, 774, 782, 785, 789, 796, 800, 803, - 807, 814, 818, 822, 829, 832, 847, 854, 858, 869, - 876, 883, 898, 908, 934, 977, 1010, 1050, 1060, 1064, - 1075, 1078, 1086, 1089, 1093, 1104, 1111, 1122, 1133, 1136, - 1151, 1162, 1183, 1209, 1252, 1281, 1339, 1364, 1386, 1401, - 1411, 1415, 1426, 1430, 1433, 1440, 1448, 1455, 1462, 1477, - 1487, 1495, 1502, 1506, 1509, 1516, 1524, 1531, 1535, 1542, - 1553, 1556, 1578, 1589, 1611, 1625, 1639, 1643, 1654, 1665, - 1672, 1687, 1694, 1705, 1708, 1719, 1730, 1744, 1752, 1759, - 1791, 1795, 1820, 1867, 1886, 1915, 1936, 1943, 1965, 1987, - 2041, 2099, 2161, 2175, 2200, 2211, 2226, 2233, 2244, 2251, - 2266, 2280, 2287, 2298, 2309, 2316, 2331, 2342, 2356, 2378, - 2403, 2418, 2447, 2497, 2544, 2602, 2863, 2895, 2903, 2935, - 2950, 2971, 3004, 3011, 3018, 3029, 3040, 3062, 3087, 3127, - 3152, 3170, 3199, 3243, 3293, 3322, 3340, 3377, 3402, 3427, - 3474, 3518, 3543, 3579, 3601, 3637, 3659, 3706, 3731, 3760, - 3818, 3847, 3869, 3901, 3920, 3952, 4068, 4169, 4220, 4271, - 4524, 4571, 4604, 4632, 4672, 4730, 4777, 4806, 4857, 4904, - 4951, 5002, 5031, 5060, 5107, 5150, 5212, 5266, 5331, 5382, - 5432, 5490, 5544, 5610, 5700, 5762, 5812, 5874, 5972, 6022, - 6091, 6163, 6232, 6305, 6402, 6540, 6685, 6880, 7090, 7271, - 7379, 7452, 7542, 7625, 7687, 7770, 7843, 7911, 7966, 8024, - 8096, 8190, 8252, 8320, 8411, 8501, 8585, 8639, 8751, 8842, - 8918, 8986, 9066, 9127, 9203, 9269, 9345, 9406, 9464, 9536, - 9612, 9667, 9735, 9844, 9931, 10036, 10119, 10199, 10260, 10358, - 10441, 10514, 10666, 10734, 10872, 10951, 11053, 11125, 11223, 11324, - 11516, 11664, 11737, 
11816, 11892, 12008, 12120, 12200, 12280, 12392, - 12490, 12576, 12685, 12812, 12917, 13003, 13108, 13210, 13300, 13384, - 13470, 13579, 13673, 13771, 13879, 13999, 14136, 14201, 14368, 14614, - 14759, 14867, 14958, 15030, 15121, 15189, 15280, 15385, 15461, 15555, - 15653, 15768, 15884, 15971, 16069, 16145, 16210, 16279, 16380, 16463, - 16539, 16615, 16688, 16818, 16919, 17017, 18041, 18338, 18523, 18649, - 18790, 18917, 19047, 19167, 19315, 19460, 19601, 19731, 19858, 20068, - 20173, 20318, 20466, 20625, 20741, 20911, 21045, 21201, 21396, 21588, - 21816, 22022, 22305, 22547, 22786, 23072, 23322, 23600, 23879, 24168, - 24433, 24769, 25120, 25511, 25895, 26289, 26792, 27219, 27683, 28077, - 28566, 29094, 29546, 29977, 30491, 30991, 31573, 32105, 32594, 33173, - 33788, 34497, 35181, 35833, 36488, 37255, 37921, 38645, 39275, 39894, - 40505, 41167, 41790, 42431, 43096, 43723, 44385, 45134, 45858, 46607, - 47349, 48091, 48768, 49405, 49955, 50555, 51167, 51985, 52611, 53078, - 53494, 53965, 54435, 54996, 55601, 56125, 56563, 56838, 57244, 57566, - 57967, 58297, 58771, 59093, 59419, 59647, 59886, 60143, 60461, 60693, - 60917, 61170, 61416, 61634, 61891, 62122, 62310, 62455, 62632, 62839, - 63103, 63436, 63639, 63805, 63906, 64015, 64192, 64355, 64475, 64558, - 64663, 64742, 64811, 64865, 64916, 64956, 64981, 65025, 65068, 65115, - 65195, 65314, 65419, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf2Hi[68] = { - 0, 7, 11, 22, 37, 52, 56, 59, 81, 85, - 89, 96, 115, 130, 137, 152, 170, 181, 193, 200, - 207, 233, 237, 259, 289, 318, 363, 433, 592, 992, - 1607, 3062, 6149, 12206, 25522, 48368, 58223, 61918, 63640, 64584, - 64943, 65098, 65206, 65268, 65294, 65335, 65350, 65372, 65387, 65402, - 65413, 65420, 
65428, 65435, 65439, 65450, 65454, 65468, 65472, 65476, - 65483, 65491, 65498, 65505, 65516, 65520, 65528, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf3Hi[2] = { - 0, 65535 -}; - -const uint16_t WebRtcIsacfix_kPitchLagCdf4Hi[35] = { - 0, 7, 19, 30, 41, 48, 63, 74, 82, 96, - 122, 152, 215, 330, 701, 2611, 10931, 48106, 61177, 64341, - 65112, 65238, 65309, 65338, 65364, 65379, 65401, 65427, 65453, - 65465, 65476, 65490, 65509, 65528, 65535 -}; - -const uint16_t *WebRtcIsacfix_kPitchLagPtrHi[4] = { - WebRtcIsacfix_kPitchLagCdf1Hi, - WebRtcIsacfix_kPitchLagCdf2Hi, - WebRtcIsacfix_kPitchLagCdf3Hi, - WebRtcIsacfix_kPitchLagCdf4Hi -}; - -/* size of first cdf table */ -const uint16_t WebRtcIsacfix_kPitchLagSizeHi[1] = { - 512 -}; - -/* index limits and ranges */ -const int16_t WebRtcIsacfix_kLowerLimitHi[4] = { - -552, -34, 0, -16 -}; - -const int16_t WebRtcIsacfix_kUpperLimitHi[4] = { - -80, 32, 0, 17 -}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsacfix_kInitIndHi[3] = { - 34, 1, 18 -}; - -/* mean values of pitch filter lags */ - -const int16_t WebRtcIsacfix_kMeanLag2Hi[67] = { - -17482, -16896, -16220, -15929, -15329, -14848, -14336, -13807, -13312, -12800, -12218, -11720, - -11307, -10649, -10396, -9742, -9148, -8668, -8297, -7718, -7155, -6656, -6231, -5600, -5129, - -4610, -4110, -3521, -3040, -2525, -2016, -1506, -995, -477, -5, 469, 991, 1510, 2025, 2526, 3079, - 3555, 4124, 4601, 5131, 5613, 6194, 6671, 7140, 7645, 8207, 8601, 9132, 9728, 10359, 10752, 11302, - 11776, 12288, 12687, 13204, 13759, 14295, 14810, 15360, 15764, 16350 -}; - - -const int16_t WebRtcIsacfix_kMeanLag4Hi[34] = { - -8175, -7659, -7205, -6684, -6215, -5651, -5180, -4566, -4087, -3536, -3096, - -2532, -1990, -1482, -959, -440, 11, 451, 954, 1492, 2020, 2562, 3059, - 3577, 4113, 4618, 5134, 5724, 6060, 6758, 7015, 7716, 8066, 8741 -}; diff --git a/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h 
b/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h deleted file mode 100644 index 228da26731..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_lag_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ - -#include - -/********************* Pitch Filter Lag Coefficient Tables - * ************************/ - -/* tables for use with small pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Lo[127]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Lo[20]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Lo[2]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Lo[10]; - -extern const uint16_t* WebRtcIsacfix_kPitchLagPtrLo[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsacfix_kPitchLagSizeLo[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerLimitLo[4]; -extern const int16_t WebRtcIsacfix_kUpperLimitLo[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsacfix_kInitIndLo[3]; - -/* mean values of pitch filter lags */ -extern const int16_t WebRtcIsacfix_kMeanLag2Lo[19]; -extern const int16_t WebRtcIsacfix_kMeanLag4Lo[9]; - -/* tables for use with medium pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t 
WebRtcIsacfix_kPitchLagCdf1Mid[255]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Mid[36]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Mid[2]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Mid[20]; - -extern const uint16_t* WebRtcIsacfix_kPitchLagPtrMid[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsacfix_kPitchLagSizeMid[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerLimitMid[4]; -extern const int16_t WebRtcIsacfix_kUpperLimitMid[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsacfix_kInitIndMid[3]; - -/* mean values of pitch filter lags */ -extern const int16_t WebRtcIsacfix_kMeanLag2Mid[35]; -extern const int16_t WebRtcIsacfix_kMeanLag4Mid[19]; - -/* tables for use with large pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Hi[511]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Hi[68]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Hi[2]; -extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Hi[35]; - -extern const uint16_t* WebRtcIsacfix_kPitchLagPtrHi[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsacfix_kPitchLagSizeHi[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsacfix_kLowerLimitHi[4]; -extern const int16_t WebRtcIsacfix_kUpperLimitHi[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsacfix_kInitIndHi[3]; - -/* mean values of pitch filter lags */ -extern const int16_t WebRtcIsacfix_kMeanLag2Hi[67]; -extern const int16_t WebRtcIsacfix_kMeanLag4Hi[34]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/settings.h b/modules/audio_coding/codecs/isac/fix/source/settings.h deleted file mode 100644 index 03a2d05457..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/settings.h +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC 
project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * settings.h - * - * Declaration of #defines used in the iSAC codec - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_ - -/* sampling frequency (Hz) */ -#define FS 16000 -/* 1.5 times Sampling frequency */ -#define FS_1_HALF (uint32_t)24000 -/* Three times Sampling frequency */ -#define FS3 (uint32_t)48000 -/* Eight times Sampling frequency */ -#define FS8 (uint32_t)128000 - -/* number of samples per frame (either 480 (30ms) or 960 (60ms)) */ -#define INITIAL_FRAMESAMPLES 960 - -/* miliseconds */ -#define FRAMESIZE 30 -/* number of samples per frame processed in the encoder (30ms) */ -#define FRAMESAMPLES 480 /* ((FRAMESIZE*FS)/1000) */ -#define FRAMESAMPLES_HALF 240 -/* max number of samples per frame (= 60 ms frame) */ -#define MAX_FRAMESAMPLES 960 -/* number of samples per 10ms frame */ -#define FRAMESAMPLES_10ms 160 /* ((10*FS)/1000) */ -/* Number of samples per 1 ms */ -#define SAMPLES_PER_MSEC 16 -/* number of subframes */ -#define SUBFRAMES 6 -/* length of a subframe */ -#define UPDATE 80 -/* length of half a subframe (low/high band) */ -#define HALF_SUBFRAMELEN 40 /* (UPDATE/2) */ -/* samples of look ahead (in a half-band, so actually half the samples of look - * ahead @ FS) */ -#define QLOOKAHEAD 24 /* 3 ms */ - -/* order of AR model in spectral entropy coder */ -#define AR_ORDER 6 -#define MAX_ORDER 13 -#define LEVINSON_MAX_ORDER 12 - -/* window length (masking analysis) */ -#define WINLEN 256 -/* order of low-band pole filter used to approximate masking curve */ -#define ORDERLO 12 -/* 
order of hi-band pole filter used to approximate masking curve */ -#define ORDERHI 6 - -#define KLT_NUM_AVG_GAIN 0 -#define KLT_NUM_AVG_SHAPE 0 -#define KLT_NUM_MODELS 3 -#define LPC_SHAPE_ORDER 18 /* (ORDERLO + ORDERHI) */ - -#define KLT_ORDER_GAIN 12 /* (2 * SUBFRAMES) */ -#define KLT_ORDER_SHAPE 108 /* (LPC_SHAPE_ORDER * SUBFRAMES) */ - -/* order for post_filter_bank */ -#define POSTQORDER 3 -/* order for pre-filterbank */ -#define QORDER 3 -/* for decimator */ -#define ALLPASSSECTIONS 2 -/* The number of composite all-pass filter factors */ -#define NUMBEROFCOMPOSITEAPSECTIONS 4 - -/* The number of all-pass filter factors in an upper or lower channel*/ -#define NUMBEROFCHANNELAPSECTIONS 2 - -#define DPMIN_Q10 -10240 /* -10.00 in Q10 */ -#define DPMAX_Q10 10240 /* 10.00 in Q10 */ -#define MINBITS_Q10 10240 /* 10.0 in Q10 */ - -/* array size for byte stream in number of Word16. */ -#define STREAM_MAXW16 \ - 300 /* The old maximum size still needed for the decoding */ -#define STREAM_MAXW16_30MS \ - 100 /* 100 Word16 = 200 bytes = 53.4 kbit/s @ 30 ms.framelength */ -#define STREAM_MAXW16_60MS \ - 200 /* 200 Word16 = 400 bytes = 53.4 kbit/s @ 60 ms.framelength */ -/* This is used only at the decoder bit-stream struct. - * - The encoder and decoder bitstream containers are of different size because - * old iSAC limited the encoded bitstream to 600 bytes. But newer versions - * restrict to shorter bitstream. - * - We add 10 bytes of guards to the internal bitstream container. The reason - * is that entropy decoder might read few bytes (3 according to our - * observations) more than the actual size of the bitstream. To avoid reading - * outside memory, in rare occasion of full-size bitstream we add 10 bytes - * of guard. 
*/ -#define INTERNAL_STREAM_SIZE_W16 (STREAM_MAXW16 + 5) - -/* storage size for bit counts */ -//#define BIT_COUNTER_SIZE 30 -/* maximum order of any AR model or filter */ -#define MAX_AR_MODEL_ORDER 12 - -/* Maximum number of iterations allowed to limit payload size */ -#define MAX_PAYLOAD_LIMIT_ITERATION 1 - -/* Bandwidth estimator */ - -#define MIN_ISAC_BW 10000 /* Minimum bandwidth in bits per sec */ -#define MAX_ISAC_BW 32000 /* Maxmum bandwidth in bits per sec */ -#define MIN_ISAC_MD 5 /* Minimum Max Delay in ?? */ -#define MAX_ISAC_MD 25 /* Maxmum Max Delay in ?? */ -#define DELAY_CORRECTION_MAX 717 -#define DELAY_CORRECTION_MED 819 -#define Thld_30_60 18000 -#define Thld_60_30 27000 - -/* assumed header size; we don't know the exact number (header compression may - * be used) */ -#define HEADER_SIZE 35 /* bytes */ -#define INIT_FRAME_LEN 60 -#define INIT_BN_EST 20000 -#define INIT_BN_EST_Q7 2560000 /* 20 kbps in Q7 */ -#define INIT_REC_BN_EST_Q5 789312 /* INIT_BN_EST + INIT_HDR_RATE in Q5 */ - -/* 8738 in Q18 is ~ 1/30 */ -/* #define INIT_HDR_RATE (((HEADER_SIZE * 8 * 1000) * 8738) >> NUM_BITS_TO_SHIFT - * (INIT_FRAME_LEN)) */ -#define INIT_HDR_RATE 4666 -/* number of packets in a row for a high rate burst */ -#define BURST_LEN 3 -/* ms, max time between two full bursts */ -#define BURST_INTERVAL 800 -/* number of packets in a row for initial high rate burst */ -#define INIT_BURST_LEN 5 -/* bits/s, rate for the first BURST_LEN packets */ -#define INIT_RATE 10240000 /* INIT_BN_EST in Q9 */ - -/* For pitch analysis */ -#define PITCH_FRAME_LEN 240 /* (FRAMESAMPLES/2) 30 ms */ -#define PITCH_MAX_LAG 140 /* 57 Hz */ -#define PITCH_MIN_LAG 20 /* 400 Hz */ -#define PITCH_MIN_LAG_Q8 5120 /* 256 * PITCH_MIN_LAG */ -#define OFFSET_Q8 768 /* 256 * 3 */ - -#define PITCH_MAX_GAIN_Q12 1843 /* 0.45 */ -#define PITCH_LAG_SPAN2 65 /* (PITCH_MAX_LAG/2-PITCH_MIN_LAG/2+5) */ -#define PITCH_CORR_LEN2 60 /* 15 ms */ -#define PITCH_CORR_STEP2 60 /* (PITCH_FRAME_LEN/4) */ -#define 
PITCH_SUBFRAMES 4 -#define PITCH_SUBFRAME_LEN 60 /* (PITCH_FRAME_LEN/PITCH_SUBFRAMES) */ - -/* For pitch filter */ -#define PITCH_BUFFSIZE \ - 190 /* (PITCH_MAX_LAG + 50) Extra 50 for fraction and LP filters */ -#define PITCH_INTBUFFSIZE 430 /* (PITCH_FRAME_LEN+PITCH_BUFFSIZE) */ -#define PITCH_FRACS 8 -#define PITCH_FRACORDER 9 -#define PITCH_DAMPORDER 5 - -/* Order of high pass filter */ -#define HPORDER 2 - -/* PLC */ -#define DECAY_RATE \ - 10 /* Q15, 20% of decay every lost frame apllied linearly sample by sample*/ -#define PLC_WAS_USED 1 -#define PLC_NOT_USED 3 -#define RECOVERY_OVERLAP 80 -#define RESAMP_RES 256 -#define RESAMP_RES_BIT 8 - -/* Define Error codes */ -/* 6000 General */ -#define ISAC_MEMORY_ALLOCATION_FAILED 6010 -#define ISAC_MODE_MISMATCH 6020 -#define ISAC_DISALLOWED_BOTTLENECK 6030 -#define ISAC_DISALLOWED_FRAME_LENGTH 6040 -/* 6200 Bandwidth estimator */ -#define ISAC_RANGE_ERROR_BW_ESTIMATOR 6240 -/* 6400 Encoder */ -#define ISAC_ENCODER_NOT_INITIATED 6410 -#define ISAC_DISALLOWED_CODING_MODE 6420 -#define ISAC_DISALLOWED_FRAME_MODE_ENCODER 6430 -#define ISAC_DISALLOWED_BITSTREAM_LENGTH 6440 -#define ISAC_PAYLOAD_LARGER_THAN_LIMIT 6450 -/* 6600 Decoder */ -#define ISAC_DECODER_NOT_INITIATED 6610 -#define ISAC_EMPTY_PACKET 6620 -#define ISAC_PACKET_TOO_SHORT 6625 -#define ISAC_DISALLOWED_FRAME_MODE_DECODER 6630 -#define ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH 6640 -#define ISAC_RANGE_ERROR_DECODE_BANDWIDTH 6650 -#define ISAC_RANGE_ERROR_DECODE_PITCH_GAIN 6660 -#define ISAC_RANGE_ERROR_DECODE_PITCH_LAG 6670 -#define ISAC_RANGE_ERROR_DECODE_LPC 6680 -#define ISAC_RANGE_ERROR_DECODE_SPECTRUM 6690 -#define ISAC_LENGTH_MISMATCH 6730 -/* 6800 Call setup formats */ -#define ISAC_INCOMPATIBLE_FORMATS 6810 - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SETTINGS_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c b/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c deleted file mode 
100644 index 4ef9a338cc..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * spectrum_ar_model_tables.c - * - * This file contains tables with AR coefficients, Gain coefficients - * and cosine tables. - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/********************* AR Coefficient Tables ************************/ - -/* cdf for quantized reflection coefficient 1 */ -const uint16_t WebRtcIsacfix_kRc1Cdf[12] = { - 0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 2 */ -const uint16_t WebRtcIsacfix_kRc2Cdf[12] = { - 0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 3 */ -const uint16_t WebRtcIsacfix_kRc3Cdf[12] = { - 0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 4 */ -const uint16_t WebRtcIsacfix_kRc4Cdf[12] = { - 0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 5 */ -const uint16_t WebRtcIsacfix_kRc5Cdf[12] = { - 0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531, - 65533, 65535 -}; - -/* cdf for quantized reflection coefficient 6 */ -const uint16_t WebRtcIsacfix_kRc6Cdf[12] = { - 0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531, - 65533, 65535 -}; - -/* representation levels for quantized 
reflection coefficient 1 */ -const int16_t WebRtcIsacfix_kRc1Levels[11] = { - -32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 2 */ -const int16_t WebRtcIsacfix_kRc2Levels[11] = { - -32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 3 */ -const int16_t WebRtcIsacfix_kRc3Levels[11] = { - -32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 4 */ -const int16_t WebRtcIsacfix_kRc4Levels[11] = { - -32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 5 */ -const int16_t WebRtcIsacfix_kRc5Levels[11] = { - -32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 6 */ -const int16_t WebRtcIsacfix_kRc6Levels[11] = { - -32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104 -}; - -/* quantization boundary levels for reflection coefficients */ -const int16_t WebRtcIsacfix_kRcBound[12] = { - -32768, -31441, -27566, -21458, -13612, -4663, - 4663, 13612, 21458, 27566, 31441, 32767 -}; - -/* initial index for AR reflection coefficient quantizer and cdf table search */ -const uint16_t WebRtcIsacfix_kRcInitInd[6] = { - 5, 5, 5, 5, 5, 5 -}; - -/* pointers to AR cdf tables */ -const uint16_t *WebRtcIsacfix_kRcCdfPtr[AR_ORDER] = { - WebRtcIsacfix_kRc1Cdf, - WebRtcIsacfix_kRc2Cdf, - WebRtcIsacfix_kRc3Cdf, - WebRtcIsacfix_kRc4Cdf, - WebRtcIsacfix_kRc5Cdf, - WebRtcIsacfix_kRc6Cdf -}; - -/* pointers to AR representation levels tables */ -const int16_t *WebRtcIsacfix_kRcLevPtr[AR_ORDER] = { - WebRtcIsacfix_kRc1Levels, - WebRtcIsacfix_kRc2Levels, - WebRtcIsacfix_kRc3Levels, - WebRtcIsacfix_kRc4Levels, - 
WebRtcIsacfix_kRc5Levels, - WebRtcIsacfix_kRc6Levels -}; - - -/******************** GAIN Coefficient Tables ***********************/ - -/* cdf for Gain coefficient */ -const uint16_t WebRtcIsacfix_kGainCdf[19] = { - 0, 2, 4, 6, 8, 10, 12, 14, 16, 1172, - 11119, 29411, 51699, 64445, 65527, 65529, 65531, 65533, 65535 -}; - -/* representation levels for quantized squared Gain coefficient */ -const int32_t WebRtcIsacfix_kGain2Lev[18] = { - 128, 128, 128, 128, 128, 215, 364, 709, 1268, - 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000 -}; - -/* quantization boundary levels for squared Gain coefficient */ -const int32_t WebRtcIsacfix_kGain2Bound[19] = { - 0, 21, 35, 59, 99, 166, 280, 475, 815, 1414, - 2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF -}; - -/* pointers to Gain cdf table */ -const uint16_t *WebRtcIsacfix_kGainPtr[1] = { - WebRtcIsacfix_kGainCdf -}; - -/* gain initial index for gain quantizer and cdf table search */ -const uint16_t WebRtcIsacfix_kGainInitInd[1] = { - 11 -}; - - -/************************* Cosine Tables ****************************/ - -/* cosine table */ -const int16_t WebRtcIsacfix_kCos[6][60] = { - { 512, 512, 511, 510, 508, 507, 505, 502, 499, 496, - 493, 489, 485, 480, 476, 470, 465, 459, 453, 447, - 440, 433, 426, 418, 410, 402, 394, 385, 376, 367, - 357, 348, 338, 327, 317, 306, 295, 284, 273, 262, - 250, 238, 226, 214, 202, 190, 177, 165, 152, 139, - 126, 113, 100, 87, 73, 60, 47, 33, 20, 7 }, - { 512, 510, 508, 503, 498, 491, 483, 473, 462, 450, - 437, 422, 406, 389, 371, 352, 333, 312, 290, 268, - 244, 220, 196, 171, 145, 120, 93, 67, 40, 13, - -13, -40, -67, -93, -120, -145, -171, -196, -220, -244, - -268, -290, -312, -333, -352, -371, -389, -406, -422, -437, - -450, -462, -473, -483, -491, -498, -503, -508, -510, -512 }, - { 512, 508, 502, 493, 480, 465, 447, 426, 402, 376, - 348, 317, 284, 250, 214, 177, 139, 100, 60, 20, - -20, -60, -100, -139, -177, -214, -250, -284, -317, -348, - -376, -402, 
-426, -447, -465, -480, -493, -502, -508, -512, - -512, -508, -502, -493, -480, -465, -447, -426, -402, -376, - -348, -317, -284, -250, -214, -177, -139, -100, -60, -20 }, - { 511, 506, 495, 478, 456, 429, 398, 362, 322, 279, - 232, 183, 133, 80, 27, -27, -80, -133, -183, -232, - -279, -322, -362, -398, -429, -456, -478, -495, -506, -511, - -511, -506, -495, -478, -456, -429, -398, -362, -322, -279, - -232, -183, -133, -80, -27, 27, 80, 133, 183, 232, - 279, 322, 362, 398, 429, 456, 478, 495, 506, 511 }, - { 511, 502, 485, 459, 426, 385, 338, 284, 226, 165, - 100, 33, -33, -100, -165, -226, -284, -338, -385, -426, - -459, -485, -502, -511, -511, -502, -485, -459, -426, -385, - -338, -284, -226, -165, -100, -33, 33, 100, 165, 226, - 284, 338, 385, 426, 459, 485, 502, 511, 511, 502, - 485, 459, 426, 385, 338, 284, 226, 165, 100, 33 }, - { 510, 498, 473, 437, 389, 333, 268, 196, 120, 40, - -40, -120, -196, -268, -333, -389, -437, -473, -498, -510, - -510, -498, -473, -437, -389, -333, -268, -196, -120, -40, - 40, 120, 196, 268, 333, 389, 437, 473, 498, 510, - 510, 498, 473, 437, 389, 333, 268, 196, 120, 40, - -40, -120, -196, -268, -333, -389, -437, -473, -498, -510 } -}; diff --git a/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h b/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h deleted file mode 100644 index 2282a369cb..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * spectrum_ar_model_tables.h - * - * This file contains definitions of tables with AR coefficients, - * Gain coefficients and cosine tables. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/********************* AR Coefficient Tables ************************/ -/* cdf for quantized reflection coefficient 1 */ -extern const uint16_t WebRtcIsacfix_kRc1Cdf[12]; - -/* cdf for quantized reflection coefficient 2 */ -extern const uint16_t WebRtcIsacfix_kRc2Cdf[12]; - -/* cdf for quantized reflection coefficient 3 */ -extern const uint16_t WebRtcIsacfix_kRc3Cdf[12]; - -/* cdf for quantized reflection coefficient 4 */ -extern const uint16_t WebRtcIsacfix_kRc4Cdf[12]; - -/* cdf for quantized reflection coefficient 5 */ -extern const uint16_t WebRtcIsacfix_kRc5Cdf[12]; - -/* cdf for quantized reflection coefficient 6 */ -extern const uint16_t WebRtcIsacfix_kRc6Cdf[12]; - -/* representation levels for quantized reflection coefficient 1 */ -extern const int16_t WebRtcIsacfix_kRc1Levels[11]; - -/* representation levels for quantized reflection coefficient 2 */ -extern const int16_t WebRtcIsacfix_kRc2Levels[11]; - -/* representation levels for quantized reflection coefficient 3 */ -extern const int16_t WebRtcIsacfix_kRc3Levels[11]; - -/* representation levels for quantized reflection coefficient 4 */ -extern const int16_t WebRtcIsacfix_kRc4Levels[11]; - -/* representation levels for quantized reflection coefficient 5 */ -extern const int16_t WebRtcIsacfix_kRc5Levels[11]; - -/* representation levels for quantized reflection coefficient 6 */ -extern const int16_t WebRtcIsacfix_kRc6Levels[11]; - -/* quantization boundary levels for reflection coefficients */ -extern const int16_t WebRtcIsacfix_kRcBound[12]; - -/* initial indices for AR reflection coefficient 
quantizer and cdf table search - */ -extern const uint16_t WebRtcIsacfix_kRcInitInd[AR_ORDER]; - -/* pointers to AR cdf tables */ -extern const uint16_t* WebRtcIsacfix_kRcCdfPtr[AR_ORDER]; - -/* pointers to AR representation levels tables */ -extern const int16_t* WebRtcIsacfix_kRcLevPtr[AR_ORDER]; - -/******************** GAIN Coefficient Tables ***********************/ -/* cdf for Gain coefficient */ -extern const uint16_t WebRtcIsacfix_kGainCdf[19]; - -/* representation levels for quantized Gain coefficient */ -extern const int32_t WebRtcIsacfix_kGain2Lev[18]; - -/* squared quantization boundary levels for Gain coefficient */ -extern const int32_t WebRtcIsacfix_kGain2Bound[19]; - -/* pointer to Gain cdf table */ -extern const uint16_t* WebRtcIsacfix_kGainPtr[1]; - -/* Gain initial index for gain quantizer and cdf table search */ -extern const uint16_t WebRtcIsacfix_kGainInitInd[1]; - -/************************* Cosine Tables ****************************/ -/* Cosine table */ -extern const int16_t WebRtcIsacfix_kCos[6][60]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ \ - */ diff --git a/modules/audio_coding/codecs/isac/fix/source/structs.h b/modules/audio_coding/codecs/isac/fix/source/structs.h deleted file mode 100644 index 3044d5176b..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/structs.h +++ /dev/null @@ -1,345 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * structs.h - * - * This header file contains all the structs used in the ISAC codec - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_ - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/bandwidth_info.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* Bitstream struct for decoder */ -typedef struct Bitstreamstruct_dec { - uint16_t stream[INTERNAL_STREAM_SIZE_W16]; /* Array bytestream to decode */ - uint32_t W_upper; /* Upper boundary of interval W */ - uint32_t streamval; - uint16_t stream_index; /* Index to the current position in bytestream */ - int16_t full; /* 0 - first byte in memory filled, second empty*/ - /* 1 - both bytes are empty (we just filled the previous memory */ - - size_t stream_size; /* The size of stream in bytes. */ -} Bitstr_dec; - -/* Bitstream struct for encoder */ -typedef struct Bitstreamstruct_enc { - uint16_t - stream[STREAM_MAXW16_60MS]; /* Vector for adding encoded bytestream */ - uint32_t W_upper; /* Upper boundary of interval W */ - uint32_t streamval; - uint16_t stream_index; /* Index to the current position in bytestream */ - int16_t full; /* 0 - first byte in memory filled, second empty*/ - /* 1 - both bytes are empty (we just filled the previous memory */ - -} Bitstr_enc; - -typedef struct { - int16_t DataBufferLoQ0[WINLEN]; - int16_t DataBufferHiQ0[WINLEN]; - - int32_t CorrBufLoQQ[ORDERLO + 1]; - int32_t CorrBufHiQQ[ORDERHI + 1]; - - int16_t CorrBufLoQdom[ORDERLO + 1]; - int16_t CorrBufHiQdom[ORDERHI + 1]; - - int32_t PreStateLoGQ15[ORDERLO + 1]; - int32_t PreStateHiGQ15[ORDERHI + 1]; - - uint32_t OldEnergy; - -} MaskFiltstr_enc; - -typedef struct { - int16_t PostStateLoGQ0[ORDERLO + 1]; - int16_t PostStateHiGQ0[ORDERHI + 1]; - - uint32_t OldEnergy; - -} MaskFiltstr_dec; - -typedef struct { - // state vectors for each of the two 
analysis filters - - int32_t INSTAT1_fix[2 * (QORDER - 1)]; - int32_t INSTAT2_fix[2 * (QORDER - 1)]; - int16_t INLABUF1_fix[QLOOKAHEAD]; - int16_t INLABUF2_fix[QLOOKAHEAD]; - - /* High pass filter */ - int32_t HPstates_fix[HPORDER]; - -} PreFiltBankstr; - -typedef struct { - // state vectors for each of the two analysis filters - int32_t STATE_0_LOWER_fix[2 * POSTQORDER]; - int32_t STATE_0_UPPER_fix[2 * POSTQORDER]; - - /* High pass filter */ - - int32_t HPstates1_fix[HPORDER]; - int32_t HPstates2_fix[HPORDER]; - -} PostFiltBankstr; - -typedef struct { - /* data buffer for pitch filter */ - int16_t ubufQQ[PITCH_BUFFSIZE]; - - /* low pass state vector */ - int16_t ystateQQ[PITCH_DAMPORDER]; - - /* old lag and gain */ - int16_t oldlagQ7; - int16_t oldgainQ12; - -} PitchFiltstr; - -typedef struct { - // for inital estimator - int16_t dec_buffer16[PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + PITCH_MAX_LAG / 2 - - PITCH_FRAME_LEN / 2 + 2]; - int32_t decimator_state32[2 * ALLPASSSECTIONS + 1]; - int16_t inbuf[QLOOKAHEAD]; - - PitchFiltstr PFstr_wght; - PitchFiltstr PFstr; - -} PitchAnalysisStruct; - -typedef struct { - /* Parameters used in PLC to avoid re-computation */ - - /* --- residual signals --- */ - int16_t prevPitchInvIn[FRAMESAMPLES / 2]; - int16_t prevPitchInvOut[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90 - int32_t prevHP[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90 - - int16_t decayCoeffPriodic; /* how much to supress a sample */ - int16_t decayCoeffNoise; - int16_t used; /* if PLC is used */ - - int16_t* lastPitchLP; // [FRAMESAMPLES/2]; saved 240; - - /* --- LPC side info --- */ - int16_t lofilt_coefQ15[ORDERLO]; - int16_t hifilt_coefQ15[ORDERHI]; - int32_t gain_lo_hiQ17[2]; - - /* --- LTP side info --- */ - int16_t AvgPitchGain_Q12; - int16_t lastPitchGain_Q12; - int16_t lastPitchLag_Q7; - - /* --- Add-overlap in recovery packet --- */ - int16_t overlapLP[RECOVERY_OVERLAP]; // [FRAMESAMPLES/2]; saved 160 - - int16_t pitchCycles; - int16_t A; - int16_t 
B; - size_t pitchIndex; - size_t stretchLag; - int16_t* prevPitchLP; // [ FRAMESAMPLES/2 ]; saved 240 - int16_t seed; - - int16_t std; -} PLCstr; - -/* Have instance of struct together with other iSAC structs */ -typedef struct { - int16_t prevFrameSizeMs; /* Previous frame size (in ms) */ - uint16_t prevRtpNumber; /* Previous RTP timestamp from received packet */ - /* (in samples relative beginning) */ - uint32_t prevSendTime; /* Send time for previous packet, from RTP header */ - uint32_t prevArrivalTime; /* Arrival time for previous packet (in ms using - timeGetTime()) */ - uint16_t prevRtpRate; /* rate of previous packet, derived from RTP timestamps - (in bits/s) */ - uint32_t lastUpdate; /* Time since the last update of the Bottle Neck estimate - (in samples) */ - uint32_t lastReduction; /* Time sinse the last reduction (in samples) */ - int32_t countUpdates; /* How many times the estimate was update in the - beginning */ - - /* The estimated bottle neck rate from there to here (in bits/s) */ - uint32_t recBw; - uint32_t recBwInv; - uint32_t recBwAvg; - uint32_t recBwAvgQ; - - uint32_t minBwInv; - uint32_t maxBwInv; - - /* The estimated mean absolute jitter value, as seen on this side (in ms) */ - int32_t recJitter; - int32_t recJitterShortTerm; - int32_t recJitterShortTermAbs; - int32_t recMaxDelay; - int32_t recMaxDelayAvgQ; - - int16_t recHeaderRate; /* (assumed) bitrate for headers (bps) */ - - uint32_t sendBwAvg; /* The estimated bottle neck rate from here to there (in - bits/s) */ - int32_t sendMaxDelayAvg; /* The estimated mean absolute jitter value, as seen - on the other siee (in ms) */ - - int16_t countRecPkts; /* number of packets received since last update */ - int16_t highSpeedRec; /* flag for marking that a high speed network has been - detected downstream */ - - /* number of consecutive pkts sent during which the bwe estimate has - remained at a value greater than the downstream threshold for determining - highspeed network */ - int16_t 
countHighSpeedRec; - - /* flag indicating bwe should not adjust down immediately for very late pckts - */ - int16_t inWaitPeriod; - - /* variable holding the time of the start of a window of time when - bwe should not adjust down immediately for very late pckts */ - uint32_t startWaitPeriod; - - /* number of consecutive pkts sent during which the bwe estimate has - remained at a value greater than the upstream threshold for determining - highspeed network */ - int16_t countHighSpeedSent; - - /* flag indicated the desired number of packets over threshold rate have been - sent and bwe will assume the connection is over broadband network */ - int16_t highSpeedSend; - - IsacBandwidthInfo external_bw_info; -} BwEstimatorstr; - -typedef struct { - /* boolean, flags if previous packet exceeded B.N. */ - int16_t PrevExceed; - /* ms */ - int16_t ExceedAgo; - /* packets left to send in current burst */ - int16_t BurstCounter; - /* packets */ - int16_t InitCounter; - /* ms remaining in buffer when next packet will be sent */ - int16_t StillBuffered; - -} RateModel; - -/* The following strutc is used to store data from encoding, to make it - fast and easy to construct a new bitstream with a different Bandwidth - estimate. All values (except framelength and minBytes) is double size to - handle 60 ms of data. 
-*/ -typedef struct { - /* Used to keep track of if it is first or second part of 60 msec packet */ - int startIdx; - - /* Frame length in samples */ - int16_t framelength; - - /* Pitch Gain */ - int16_t pitchGain_index[2]; - - /* Pitch Lag */ - int32_t meanGain[2]; - int16_t pitchIndex[PITCH_SUBFRAMES * 2]; - - /* LPC */ - int32_t LPCcoeffs_g[12 * 2]; /* KLT_ORDER_GAIN = 12 */ - int16_t LPCindex_s[108 * 2]; /* KLT_ORDER_SHAPE = 108 */ - int16_t LPCindex_g[12 * 2]; /* KLT_ORDER_GAIN = 12 */ - - /* Encode Spec */ - int16_t fre[FRAMESAMPLES]; - int16_t fim[FRAMESAMPLES]; - int16_t AvgPitchGain[2]; - - /* Used in adaptive mode only */ - int minBytes; - -} IsacSaveEncoderData; - -typedef struct { - Bitstr_enc bitstr_obj; - MaskFiltstr_enc maskfiltstr_obj; - PreFiltBankstr prefiltbankstr_obj; - PitchFiltstr pitchfiltstr_obj; - PitchAnalysisStruct pitchanalysisstr_obj; - RateModel rate_data_obj; - - int16_t buffer_index; - int16_t current_framesamples; - - int16_t data_buffer_fix[FRAMESAMPLES]; // the size was MAX_FRAMESAMPLES - - int16_t frame_nb; - int16_t BottleNeck; - int16_t MaxDelay; - int16_t new_framelength; - int16_t s2nr; - uint16_t MaxBits; - - int16_t bitstr_seed; - - IsacSaveEncoderData* SaveEnc_ptr; - int16_t payloadLimitBytes30; /* Maximum allowed number of bits for a 30 msec - packet */ - int16_t payloadLimitBytes60; /* Maximum allowed number of bits for a 30 msec - packet */ - int16_t maxPayloadBytes; /* Maximum allowed number of bits for both 30 and 60 - msec packet */ - int16_t maxRateInBytes; /* Maximum allowed rate in bytes per 30 msec packet */ - int16_t enforceFrameSize; /* If set iSAC will never change packet size */ - -} IsacFixEncoderInstance; - -typedef struct { - Bitstr_dec bitstr_obj; - MaskFiltstr_dec maskfiltstr_obj; - PostFiltBankstr postfiltbankstr_obj; - PitchFiltstr pitchfiltstr_obj; - PLCstr plcstr_obj; /* TS; for packet loss concealment */ -} IsacFixDecoderInstance; - -typedef struct { - IsacFixEncoderInstance ISACenc_obj; - 
IsacFixDecoderInstance ISACdec_obj; - BwEstimatorstr bwestimator_obj; - int16_t CodingMode; /* 0 = adaptive; 1 = instantaneous */ - int16_t errorcode; - int16_t initflag; /* 0 = nothing initiated; 1 = encoder or decoder */ - /* not initiated; 2 = all initiated */ -} ISACFIX_SubStruct; - -typedef struct { - int32_t lpcGains[12]; /* 6 lower-band & 6 upper-band we may need to double it - for 60*/ - /* */ - uint32_t W_upper; /* Upper boundary of interval W */ - uint32_t streamval; - uint16_t stream_index; /* Index to the current position in bytestream */ - int16_t full; /* 0 - first byte in memory filled, second empty*/ - /* 1 - both bytes are empty (we just filled the previous memory */ - uint16_t beforeLastWord; - uint16_t lastWord; -} transcode_obj; - -// Bitstr_enc myBitStr; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_STRUCTS_H_ */ diff --git a/modules/audio_coding/codecs/isac/fix/source/transform.c b/modules/audio_coding/codecs/isac/fix/source/transform.c deleted file mode 100644 index 80b244b5f1..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/transform.c +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * WebRtcIsacfix_kTransform.c - * - * Transform functions - * - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/third_party/fft/fft.h" - -/* Tables are defined in transform_tables.c file or ARM assembly files. 
*/ -/* Cosine table 1 in Q14 */ -extern const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2]; -/* Sine table 1 in Q14 */ -extern const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2]; -/* Sine table 2 in Q14 */ -extern const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4]; - -void WebRtcIsacfix_Time2SpecC(int16_t *inre1Q9, - int16_t *inre2Q9, - int16_t *outreQ7, - int16_t *outimQ7) -{ - - int k; - int32_t tmpreQ16[FRAMESAMPLES/2], tmpimQ16[FRAMESAMPLES/2]; - int16_t tmp1rQ14, tmp1iQ14; - int32_t xrQ16, xiQ16, yrQ16, yiQ16; - int32_t v1Q16, v2Q16; - int16_t factQ19, sh; - - /* Multiply with complex exponentials and combine into one complex vector */ - factQ19 = 16921; // 0.5/sqrt(240) in Q19 is round(.5/sqrt(240)*(2^19)) = 16921 - for (k = 0; k < FRAMESAMPLES/2; k++) { - tmp1rQ14 = WebRtcIsacfix_kCosTab1[k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab1[k]; - xrQ16 = (tmp1rQ14 * inre1Q9[k] + tmp1iQ14 * inre2Q9[k]) >> 7; - xiQ16 = (tmp1rQ14 * inre2Q9[k] - tmp1iQ14 * inre1Q9[k]) >> 7; - // Q-domains below: (Q16*Q19>>16)>>3 = Q16 - tmpreQ16[k] = (WEBRTC_SPL_MUL_16_32_RSFT16(factQ19, xrQ16) + 4) >> 3; - tmpimQ16[k] = (WEBRTC_SPL_MUL_16_32_RSFT16(factQ19, xiQ16) + 4) >> 3; - } - - - xrQ16 = WebRtcSpl_MaxAbsValueW32(tmpreQ16, FRAMESAMPLES/2); - yrQ16 = WebRtcSpl_MaxAbsValueW32(tmpimQ16, FRAMESAMPLES/2); - if (yrQ16>xrQ16) { - xrQ16 = yrQ16; - } - - sh = WebRtcSpl_NormW32(xrQ16); - sh = sh-24; //if sh becomes >=0, then we should shift sh steps to the left, and the domain will become Q(16+sh) - //if sh becomes <0, then we should shift -sh steps to the right, and the domain will become Q(16+sh) - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k> -sh); // Q(16+sh) - inre2Q9[k] = (int16_t)((tmpimQ16[k] + round) >> -sh); // Q(16+sh) - } - } - - /* Get DFT */ - WebRtcIsacfix_FftRadix16Fastest(inre1Q9, inre2Q9, -1); // real call - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k> sh; // Q(16+sh) -> Q16 - tmpimQ16[k] = inre2Q9[k] >> sh; // Q(16+sh) -> Q16 - } - } else { - for (k=0; k Q16 
- tmpimQ16[k] = inre2Q9[k] << -sh; // Q(16+sh) -> Q16 - } - } - - - /* Use symmetry to separate into two complex vectors and center frames in time around zero */ - for (k = 0; k < FRAMESAMPLES/4; k++) { - xrQ16 = tmpreQ16[k] + tmpreQ16[FRAMESAMPLES/2 - 1 - k]; - yiQ16 = -tmpreQ16[k] + tmpreQ16[FRAMESAMPLES/2 - 1 - k]; - xiQ16 = tmpimQ16[k] - tmpimQ16[FRAMESAMPLES/2 - 1 - k]; - yrQ16 = tmpimQ16[k] + tmpimQ16[FRAMESAMPLES/2 - 1 - k]; - tmp1rQ14 = -WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 - 1 - k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab2[k]; - v1Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xiQ16); - v2Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xiQ16); - outreQ7[k] = (int16_t)(v1Q16 >> 9); - outimQ7[k] = (int16_t)(v2Q16 >> 9); - v1Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yiQ16); - v2Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yiQ16); - // CalcLrIntQ(v1Q16, 9); - outreQ7[FRAMESAMPLES / 2 - 1 - k] = (int16_t)(v1Q16 >> 9); - // CalcLrIntQ(v2Q16, 9); - outimQ7[FRAMESAMPLES / 2 - 1 - k] = (int16_t)(v2Q16 >> 9); - - } -} - - -void WebRtcIsacfix_Spec2TimeC(int16_t *inreQ7, int16_t *inimQ7, int32_t *outre1Q16, int32_t *outre2Q16) -{ - - int k; - int16_t tmp1rQ14, tmp1iQ14; - int32_t xrQ16, xiQ16, yrQ16, yiQ16; - int32_t tmpInRe, tmpInIm, tmpInRe2, tmpInIm2; - int16_t factQ11; - int16_t sh; - - for (k = 0; k < FRAMESAMPLES/4; k++) { - /* Move zero in time to beginning of frames */ - tmp1rQ14 = -WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 - 1 - k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab2[k]; - - tmpInRe = inreQ7[k] * (1 << 9); // Q7 -> Q16 - tmpInIm = inimQ7[k] * (1 << 9); // Q7 -> Q16 - tmpInRe2 = inreQ7[FRAMESAMPLES / 2 - 1 - k] * (1 << 9); // Q7 -> Q16 - tmpInIm2 = inimQ7[FRAMESAMPLES / 2 - 1 - k] * (1 << 9); // Q7 -> Q16 - - xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInRe) + 
WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInIm); - xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInIm) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInRe); - yrQ16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInIm2) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInRe2); - yiQ16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInRe2) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInIm2); - - /* Combine into one vector, z = x + j * y */ - outre1Q16[k] = xrQ16 - yiQ16; - outre1Q16[FRAMESAMPLES/2 - 1 - k] = xrQ16 + yiQ16; - outre2Q16[k] = xiQ16 + yrQ16; - outre2Q16[FRAMESAMPLES/2 - 1 - k] = -xiQ16 + yrQ16; - } - - /* Get IDFT */ - tmpInRe = WebRtcSpl_MaxAbsValueW32(outre1Q16, 240); - tmpInIm = WebRtcSpl_MaxAbsValueW32(outre2Q16, 240); - if (tmpInIm>tmpInRe) { - tmpInRe = tmpInIm; - } - - sh = WebRtcSpl_NormW32(tmpInRe); - sh = sh-24; //if sh becomes >=0, then we should shift sh steps to the left, and the domain will become Q(16+sh) - //if sh becomes <0, then we should shift -sh steps to the right, and the domain will become Q(16+sh) - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k<240; k++) { - inreQ7[k] = (int16_t)(outre1Q16[k] << sh); // Q(16+sh) - inimQ7[k] = (int16_t)(outre2Q16[k] << sh); // Q(16+sh) - } - } else { - int32_t round = 1 << (-sh - 1); - for (k=0; k<240; k++) { - inreQ7[k] = (int16_t)((outre1Q16[k] + round) >> -sh); // Q(16+sh) - inimQ7[k] = (int16_t)((outre2Q16[k] + round) >> -sh); // Q(16+sh) - } - } - - WebRtcIsacfix_FftRadix16Fastest(inreQ7, inimQ7, 1); // real call - - //"Fastest" vectors - if (sh>=0) { - for (k=0; k<240; k++) { - outre1Q16[k] = inreQ7[k] >> sh; // Q(16+sh) -> Q16 - outre2Q16[k] = inimQ7[k] >> sh; // Q(16+sh) -> Q16 - } - } else { - for (k=0; k<240; k++) { - outre1Q16[k] = inreQ7[k] * (1 << -sh); // Q(16+sh) -> Q16 - outre2Q16[k] = inimQ7[k] * (1 << -sh); // Q(16+sh) -> Q16 - } - } - - /* Divide through by the normalizing constant: */ - /* scale all values with 1/240, i.e. 
with 273 in Q16 */ - /* 273/65536 ~= 0.0041656 */ - /* 1/240 ~= 0.0041666 */ - for (k=0; k<240; k++) { - outre1Q16[k] = WEBRTC_SPL_MUL_16_32_RSFT16(273, outre1Q16[k]); - outre2Q16[k] = WEBRTC_SPL_MUL_16_32_RSFT16(273, outre2Q16[k]); - } - - /* Demodulate and separate */ - factQ11 = 31727; // sqrt(240) in Q11 is round(15.49193338482967*2048) = 31727 - for (k = 0; k < FRAMESAMPLES/2; k++) { - tmp1rQ14 = WebRtcIsacfix_kCosTab1[k]; - tmp1iQ14 = WebRtcIsacfix_kSinTab1[k]; - xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, outre1Q16[k]) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, outre2Q16[k]); - xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, outre2Q16[k]) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, outre1Q16[k]); - xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT11(factQ11, xrQ16); - xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT11(factQ11, xiQ16); - outre2Q16[k] = xiQ16; - outre1Q16[k] = xrQ16; - } -} diff --git a/modules/audio_coding/codecs/isac/fix/source/transform_mips.c b/modules/audio_coding/codecs/isac/fix/source/transform_mips.c deleted file mode 100644 index a87b3b54f2..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/transform_mips.c +++ /dev/null @@ -1,1294 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// The tables are defined in transform_tables.c file. 
-extern const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2]; -extern const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2]; -extern const int16_t WebRtcIsacfix_kCosTab2[FRAMESAMPLES/4]; -extern const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4]; - -// MIPS DSPr2 version of the WebRtcIsacfix_Time2Spec function -// is not bit-exact with the C version. -// The accuracy of the MIPS DSPr2 version is same or better. -void WebRtcIsacfix_Time2SpecMIPS(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outreQ7, - int16_t* outimQ7) { - int k = FRAMESAMPLES / 2; - int32_t tmpreQ16[FRAMESAMPLES / 2], tmpimQ16[FRAMESAMPLES / 2]; - int32_t r0, r1, r2, r3, r4, r5, r6, r7, r8, r9; - int32_t inre1, inre2, tmpre, tmpim, factor, max, max1; - int16_t* cosptr; - int16_t* sinptr; - - cosptr = (int16_t*)WebRtcIsacfix_kCosTab1; - sinptr = (int16_t*)WebRtcIsacfix_kSinTab1; - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre1], %[inre1Q9], 0 \n\t" - "addiu %[inre2], %[inre2Q9], 0 \n\t" - "addiu %[tmpre], %[tmpreQ16], 0 \n\t" - "addiu %[tmpim], %[tmpimQ16], 0 \n\t" - "addiu %[factor], $zero, 16921 \n\t" - "mul %[max], $zero, $zero \n\t" - // Multiply with complex exponentials and combine into one complex vector. - // Also, calculate the maximal absolute value in the same loop. 
- "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "lwl %[r0], 0(%[inre1]) \n\t" - "lwl %[r2], 0(%[cosptr]) \n\t" - "lwl %[r3], 0(%[sinptr]) \n\t" - "lwl %[r1], 0(%[inre2]) \n\t" - "lwr %[r0], 0(%[inre1]) \n\t" - "lwr %[r2], 0(%[cosptr]) \n\t" - "lwr %[r3], 0(%[sinptr]) \n\t" - "lwr %[r1], 0(%[inre2]) \n\t" - "muleq_s.w.phr %[r4], %[r2], %[r0] \n\t" - "muleq_s.w.phr %[r5], %[r3], %[r0] \n\t" - "muleq_s.w.phr %[r6], %[r3], %[r1] \n\t" - "muleq_s.w.phr %[r7], %[r2], %[r1] \n\t" - "muleq_s.w.phl %[r8], %[r2], %[r0] \n\t" - "muleq_s.w.phl %[r0], %[r3], %[r0] \n\t" - "muleq_s.w.phl %[r3], %[r3], %[r1] \n\t" - "muleq_s.w.phl %[r1], %[r2], %[r1] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addu %[r4], %[r4], %[r6] \n\t" - "subu %[r5], %[r7], %[r5] \n\t" - "sra %[r4], %[r4], 8 \n\t" - "sra %[r5], %[r5], 8 \n\t" - "mult $ac0, %[factor], %[r4] \n\t" - "mult $ac1, %[factor], %[r5] \n\t" - "addu %[r3], %[r8], %[r3] \n\t" - "subu %[r0], %[r1], %[r0] \n\t" - "sra %[r3], %[r3], 8 \n\t" - "sra %[r0], %[r0], 8 \n\t" - "mult $ac2, %[factor], %[r3] \n\t" - "mult $ac3, %[factor], %[r0] \n\t" - "extr_r.w %[r4], $ac0, 16 \n\t" - "extr_r.w %[r5], $ac1, 16 \n\t" - "addiu %[inre1], %[inre1], 4 \n\t" - "addiu %[inre2], %[inre2], 4 \n\t" - "extr_r.w %[r6], $ac2, 16 \n\t" - "extr_r.w %[r7], $ac3, 16 \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "shra_r.w %[r4], %[r4], 3 \n\t" - "shra_r.w %[r5], %[r5], 3 \n\t" - "sw %[r4], 0(%[tmpre]) \n\t" - "absq_s.w %[r4], %[r4] \n\t" - "sw %[r5], 0(%[tmpim]) \n\t" - "absq_s.w %[r5], %[r5] \n\t" - "shra_r.w %[r6], %[r6], 3 \n\t" - "shra_r.w %[r7], %[r7], 3 \n\t" - "sw %[r6], 4(%[tmpre]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "sw %[r7], 4(%[tmpim]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r0], %[r4], %[r5] \n\t" - "movn %[r4], %[r5], %[r0] \n\t" - "slt %[r1], %[r6], %[r7] \n\t" - "movn %[r6], %[r7], %[r1] \n\t" - "slt %[r0], %[max], %[r4] \n\t" - "movn %[max], %[r4], %[r0] \n\t" - "slt %[r1], %[max], %[r6] \n\t" - "movn %[max], 
%[r6], %[r1] \n\t" - "addiu %[tmpre], %[tmpre], 8 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[tmpim], %[tmpim], 8 \n\t" -#else // #if defined(MIPS_DSP_R2_LE) - "lh %[r0], 0(%[inre1]) \n\t" - "lh %[r1], 0(%[inre2]) \n\t" - "lh %[r2], 0(%[cosptr]) \n\t" - "lh %[r3], 0(%[sinptr]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "mul %[r4], %[r0], %[r2] \n\t" - "mul %[r5], %[r1], %[r3] \n\t" - "mul %[r0], %[r0], %[r3] \n\t" - "mul %[r2], %[r1], %[r2] \n\t" - "addiu %[inre1], %[inre1], 2 \n\t" - "addiu %[inre2], %[inre2], 2 \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "addu %[r1], %[r4], %[r5] \n\t" - "sra %[r1], %[r1], 7 \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 1 \n\t" - "mul %[r1], %[factor], %[r1] \n\t" - "mul %[r3], %[factor], %[r3] \n\t" - "subu %[r0], %[r2], %[r0] \n\t" - "sra %[r0], %[r0], 7 \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 1 \n\t" - "mul %[r0], %[factor], %[r0] \n\t" - "mul %[r2], %[factor], %[r2] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r1], %[r1], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r1], %[r1], 0x4000 \n\t" - "sra %[r1], %[r1], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r1], %[r3], %[r1] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r1], %[r1], 3 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r1], %[r1], 4 \n\t" - "sra %[r1], %[r1], 3 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sw %[r1], 0(%[tmpre]) \n\t" - "addiu %[tmpre], %[tmpre], 4 \n\t" -#if defined(MIPS_DSP_R1_LE) - "absq_s.w %[r1], %[r1] \n\t" - "shra_r.w %[r0], %[r0], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "negu %[r4], %[r1] \n\t" - "slt %[r3], %[r1], $zero \n\t" - "movn %[r1], %[r4], %[r3] \n\t" - "addiu %[r0], %[r0], 0x4000 \n\t" - "sra %[r0], %[r0], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r2] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 3 \n\t" - 
"sw %[r0], 0(%[tmpim]) \n\t" - "absq_s.w %[r0], %[r0] \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 4 \n\t" - "sra %[r0], %[r0], 3 \n\t" - "sw %[r0], 0(%[tmpim]) \n\t" - "negu %[r2], %[r0] \n\t" - "slt %[r3], %[r0], $zero \n\t" - "movn %[r0], %[r2], %[r3] \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "slt %[r2], %[max], %[r1] \n\t" - "movn %[max], %[r1], %[r2] \n\t" - "slt %[r2], %[max], %[r0] \n\t" - "movn %[max], %[r0], %[r2] \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[tmpim], %[tmpim], 4 \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - // Calculate WebRtcSpl_NormW32(max). - // If max gets value >=0, we should shift max steps to the left, and the - // domain will be Q(16+shift). If max gets value <0, we should shift -max - // steps to the right, and the domain will be Q(16+max) - "clz %[max], %[max] \n\t" - "addiu %[max], %[max], -25 \n\t" - ".set pop \n\t" - : [k] "+r" (k), [inre1] "=&r" (inre1), [inre2] "=&r" (inre2), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), - [r3] "=&r" (r3), [r4] "=&r" (r4), [tmpre] "=&r" (tmpre), - [tmpim] "=&r" (tmpim), [max] "=&r" (max), [factor] "=&r" (factor), -#if defined(MIPS_DSP_R2_LE) - [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8), -#endif // #if defined(MIPS_DSP_R2_LE) - [r5] "=&r" (r5) - : [inre1Q9] "r" (inre1Q9), [inre2Q9] "r" (inre2Q9), - [tmpreQ16] "r" (tmpreQ16), [tmpimQ16] "r" (tmpimQ16), - [cosptr] "r" (cosptr), [sinptr] "r" (sinptr) - : "hi", "lo", "memory" -#if defined(MIPS_DSP_R2_LE) - , "$ac1hi", "$ac1lo", "$ac2hi", "$ac2lo", "$ac3hi", "$ac3lo" -#endif // #if defined(MIPS_DSP_R2_LE) - ); - - // "Fastest" vectors - k = FRAMESAMPLES / 4; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[tmpre], %[tmpreQ16], 0 \n\t" - "addiu %[tmpim], %[tmpimQ16], 0 \n\t" - "addiu %[inre1], %[inre1Q9], 0 \n\t" - "addiu %[inre2], %[inre2Q9], 0 \n\t" - "blez %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" - "1: \n\t" - "lw %[r0], 0(%[tmpre]) \n\t" - "lw %[r1], 0(%[tmpim]) \n\t" 
- "lw %[r2], 4(%[tmpre]) \n\t" - "lw %[r3], 4(%[tmpim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "sllv %[r0], %[r0], %[max] \n\t" - "sllv %[r1], %[r1], %[max] \n\t" - "sllv %[r2], %[r2], %[max] \n\t" - "sllv %[r3], %[r3], %[max] \n\t" - "addiu %[tmpre], %[tmpre], 8 \n\t" - "addiu %[tmpim], %[tmpim], 8 \n\t" - "sh %[r0], 0(%[inre1]) \n\t" - "sh %[r1], 0(%[inre2]) \n\t" - "sh %[r2], 2(%[inre1]) \n\t" - "sh %[r3], 2(%[inre2]) \n\t" - "addiu %[inre1], %[inre1], 4 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[inre2], %[inre2], 4 \n\t" - "b 4f \n\t" - " nop \n\t" - "2: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addiu %[r4], %[max1], -1 \n\t" - "addiu %[r5], $zero, 1 \n\t" - "sllv %[r4], %[r5], %[r4] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "3: \n\t" - "lw %[r0], 0(%[tmpre]) \n\t" - "lw %[r1], 0(%[tmpim]) \n\t" - "lw %[r2], 4(%[tmpre]) \n\t" - "lw %[r3], 4(%[tmpim]) \n\t" - "addiu %[k], %[k], -1 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shrav_r.w %[r0], %[r0], %[max1] \n\t" - "shrav_r.w %[r1], %[r1], %[max1] \n\t" - "shrav_r.w %[r2], %[r2], %[max1] \n\t" - "shrav_r.w %[r3], %[r3], %[max1] \n\t" -#else // #if !defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r4] \n\t" - "addu %[r1], %[r1], %[r4] \n\t" - "addu %[r2], %[r2], %[r4] \n\t" - "addu %[r3], %[r3], %[r4] \n\t" - "srav %[r0], %[r0], %[max1] \n\t" - "srav %[r1], %[r1], %[max1] \n\t" - "srav %[r2], %[r2], %[max1] \n\t" - "srav %[r3], %[r3], %[max1] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "addiu %[tmpre], %[tmpre], 8 \n\t" - "addiu %[tmpim], %[tmpim], 8 \n\t" - "sh %[r0], 0(%[inre1]) \n\t" - "sh %[r1], 0(%[inre2]) \n\t" - "sh %[r2], 2(%[inre1]) \n\t" - "sh %[r3], 2(%[inre2]) \n\t" - "addiu %[inre1], %[inre1], 4 \n\t" - "bgtz %[k], 3b \n\t" - " addiu %[inre2], %[inre2], 4 \n\t" - "4: \n\t" - ".set pop \n\t" - : [tmpre] "=&r" (tmpre), [tmpim] "=&r" (tmpim), [inre1] "=&r" (inre1), - [inre2] "=&r" (inre2), [k] "+r" (k), [max1] "=&r" (max1), -#if !defined(MIPS_DSP_R1_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), -#endif // #if 
!defined(MIPS_DSP_R1_LE) - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3) - : [tmpreQ16] "r" (tmpreQ16), [tmpimQ16] "r" (tmpimQ16), - [inre1Q9] "r" (inre1Q9), [inre2Q9] "r" (inre2Q9), [max] "r" (max) - : "memory" - ); - - // Get DFT - WebRtcIsacfix_FftRadix16Fastest(inre1Q9, inre2Q9, -1); // real call - - // "Fastest" vectors and - // Use symmetry to separate into two complex vectors - // and center frames in time around zero - // merged into one loop - cosptr = (int16_t*)WebRtcIsacfix_kCosTab2; - sinptr = (int16_t*)WebRtcIsacfix_kSinTab2; - k = FRAMESAMPLES / 4; - factor = FRAMESAMPLES - 2; // offset for FRAMESAMPLES / 2 - 1 array member - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre1], %[inre1Q9], 0 \n\t" - "addiu %[inre2], %[inre2Q9], 0 \n\t" - "addiu %[tmpre], %[outreQ7], 0 \n\t" - "addiu %[tmpim], %[outimQ7], 0 \n\t" - "bltz %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" - "1: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addu %[r4], %[inre1], %[offset] \n\t" - "addu %[r5], %[inre2], %[offset] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "lh %[r0], 0(%[inre1]) \n\t" - "lh %[r1], 0(%[inre2]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "lhx %[r2], %[offset](%[inre1]) \n\t" - "lhx %[r3], %[offset](%[inre2]) \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "lh %[r2], 0(%[r4]) \n\t" - "lh %[r3], 0(%[r5]) \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "srav %[r0], %[r0], %[max] \n\t" - "srav %[r1], %[r1], %[max] \n\t" - "srav %[r2], %[r2], %[max] \n\t" - "srav %[r3], %[r3], %[max] \n\t" - "addu %[r4], %[r0], %[r2] \n\t" - "subu %[r0], %[r2], %[r0] \n\t" - "subu %[r2], %[r1], %[r3] \n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "lh %[r3], 0(%[cosptr]) \n\t" - "lh %[r5], 0(%[sinptr]) \n\t" - "andi %[r6], %[r4], 0xFFFF \n\t" - "sra %[r4], %[r4], 16 \n\t" - "mul %[r7], %[r3], %[r6] \n\t" - "mul %[r8], %[r3], %[r4] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r4], %[r5], %[r4] \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu 
%[inre1], %[inre1], 2 \n\t" - "addiu %[inre2], %[inre2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r4], %[r4], 2 \n\t" - "addu %[r4], %[r4], %[r6] \n\t" - "andi %[r6], %[r2], 0xFFFF \n\t" - "sra %[r2], %[r2], 16 \n\t" - "mul %[r7], %[r5], %[r6] \n\t" - "mul %[r9], %[r5], %[r2] \n\t" - "mul %[r6], %[r3], %[r6] \n\t" - "mul %[r2], %[r3], %[r2] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r7], %[r9] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r2], %[r2], 2 \n\t" - "addu %[r2], %[r6], %[r2] \n\t" - "subu %[r8], %[r8], %[r9] \n\t" - "sra %[r8], %[r8], 9 \n\t" - "addu %[r2], %[r4], %[r2] \n\t" - "sra %[r2], %[r2], 9 \n\t" - "sh %[r8], 0(%[tmpre]) \n\t" - "sh %[r2], 0(%[tmpim]) \n\t" - - "andi %[r4], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 16 \n\t" - "andi %[r6], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r7], %[r5], %[r4] \n\t" - "mul %[r9], %[r5], %[r1] \n\t" - "mul %[r4], %[r3], %[r4] \n\t" - "mul %[r1], %[r3], %[r1] \n\t" - "mul %[r8], %[r3], %[r0] \n\t" - "mul %[r3], %[r3], %[r6] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r0], %[r5], %[r0] 
\n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r9], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r4], %[r4], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r4], %[r4], 0x2000 \n\t" - "sra %[r4], %[r4], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r1], %[r1], 2 \n\t" - "addu %[r1], %[r1], %[r4] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r3], %[r3], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r3], %[r3], 0x2000 \n\t" - "sra %[r3], %[r3], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r3] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r0], %[r0], 2 \n\t" - "addu %[r0], %[r0], %[r6] \n\t" - "addu %[r3], %[tmpre], %[offset] \n\t" - "addu %[r2], %[tmpim], %[offset] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "negu %[r9], %[r9] \n\t" - "sra %[r9], %[r9], 9 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "addiu %[offset], %[offset], -4 \n\t" - "sh %[r9], 0(%[r3]) \n\t" - "sh %[r0], 0(%[r2]) \n\t" - "addiu %[tmpre], %[tmpre], 2 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[tmpim], %[tmpim], 2 \n\t" - "b 3f \n\t" - " nop \n\t" - "2: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addu %[r4], %[inre1], %[offset] \n\t" - "addu %[r5], %[inre2], %[offset] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "lh %[r0], 0(%[inre1]) \n\t" - "lh %[r1], 0(%[inre2]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "lhx %[r2], %[offset](%[inre1]) \n\t" - "lhx %[r3], %[offset](%[inre2]) \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "lh %[r2], 0(%[r4]) \n\t" - "lh %[r3], 0(%[r5]) \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sllv %[r0], 
%[r0], %[max1] \n\t" - "sllv %[r1], %[r1], %[max1] \n\t" - "sllv %[r2], %[r2], %[max1] \n\t" - "sllv %[r3], %[r3], %[max1] \n\t" - "addu %[r4], %[r0], %[r2] \n\t" - "subu %[r0], %[r2], %[r0] \n\t" - "subu %[r2], %[r1], %[r3] \n\t" - "addu %[r1], %[r1], %[r3] \n\t" - "lh %[r3], 0(%[cosptr]) \n\t" - "lh %[r5], 0(%[sinptr]) \n\t" - "andi %[r6], %[r4], 0xFFFF \n\t" - "sra %[r4], %[r4], 16 \n\t" - "mul %[r7], %[r3], %[r6] \n\t" - "mul %[r8], %[r3], %[r4] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r4], %[r5], %[r4] \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu %[inre1], %[inre1], 2 \n\t" - "addiu %[inre2], %[inre2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r4], %[r4], 2 \n\t" - "addu %[r4], %[r4], %[r6] \n\t" - "andi %[r6], %[r2], 0xFFFF \n\t" - "sra %[r2], %[r2], 16 \n\t" - "mul %[r7], %[r5], %[r6] \n\t" - "mul %[r9], %[r5], %[r2] \n\t" - "mul %[r6], %[r3], %[r6] \n\t" - "mul %[r2], %[r3], %[r2] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r7], %[r9] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r2], %[r2], 2 \n\t" - "addu %[r2], %[r6], %[r2] \n\t" - "subu %[r8], 
%[r8], %[r9] \n\t" - "sra %[r8], %[r8], 9 \n\t" - "addu %[r2], %[r4], %[r2] \n\t" - "sra %[r2], %[r2], 9 \n\t" - "sh %[r8], 0(%[tmpre]) \n\t" - "sh %[r2], 0(%[tmpim]) \n\t" - "andi %[r4], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 16 \n\t" - "andi %[r6], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 16 \n\t" - "mul %[r7], %[r5], %[r4] \n\t" - "mul %[r9], %[r5], %[r1] \n\t" - "mul %[r4], %[r3], %[r4] \n\t" - "mul %[r1], %[r3], %[r1] \n\t" - "mul %[r8], %[r3], %[r0] \n\t" - "mul %[r3], %[r3], %[r6] \n\t" - "mul %[r6], %[r5], %[r6] \n\t" - "mul %[r0], %[r5], %[r0] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r7], %[r7], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r7], %[r7], 0x2000 \n\t" - "sra %[r7], %[r7], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r9], %[r9], 2 \n\t" - "addu %[r9], %[r9], %[r7] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r4], %[r4], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r4], %[r4], 0x2000 \n\t" - "sra %[r4], %[r4], 14 \n\t" -#endif - "sll %[r1], %[r1], 2 \n\t" - "addu %[r1], %[r1], %[r4] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r3], %[r3], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r3], %[r3], 0x2000 \n\t" - "sra %[r3], %[r3], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r8], %[r8], 2 \n\t" - "addu %[r8], %[r8], %[r3] \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r6], %[r6], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r6], %[r6], 0x2000 \n\t" - "sra %[r6], %[r6], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "sll %[r0], %[r0], 2 \n\t" - "addu %[r0], %[r0], %[r6] \n\t" - "addu %[r3], %[tmpre], %[offset] \n\t" - "addu %[r2], %[tmpim], %[offset] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "negu %[r9], %[r9] \n\t" - "sra %[r9], %[r9], 9 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "sra %[r0], %[r0], 9 \n\t" - "addiu %[offset], %[offset], -4 \n\t" - "sh %[r9], 0(%[r3]) \n\t" - "sh %[r0], 0(%[r2]) \n\t" - "addiu %[tmpre], %[tmpre], 2 \n\t" - "bgtz %[k], 
2b \n\t" - " addiu %[tmpim], %[tmpim], 2 \n\t" - "3: \n\t" - ".set pop \n\t" - : [inre1] "=&r" (inre1), [inre2] "=&r" (inre2), [tmpre] "=&r" (tmpre), - [tmpim] "=&r" (tmpim), [offset] "+r" (factor), [k] "+r" (k), - [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), - [r8] "=&r" (r8), [r9] "=&r" (r9), [max1] "=&r" (max1) - : [inre1Q9] "r" (inre1Q9), [inre2Q9] "r" (inre2Q9), - [outreQ7] "r" (outreQ7), [outimQ7] "r" (outimQ7), - [max] "r" (max), [cosptr] "r" (cosptr), [sinptr] "r" (sinptr) - : "hi", "lo", "memory" - ); -} - -void WebRtcIsacfix_Spec2TimeMIPS(int16_t *inreQ7, - int16_t *inimQ7, - int32_t *outre1Q16, - int32_t *outre2Q16) { - int k = FRAMESAMPLES / 4; - int16_t* inre; - int16_t* inim; - int32_t* outre1; - int32_t* outre2; - int16_t* cosptr = (int16_t*)WebRtcIsacfix_kCosTab2; - int16_t* sinptr = (int16_t*)WebRtcIsacfix_kSinTab2; - int32_t r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, max, max1; -#if defined(MIPS_DSP_R1_LE) - int32_t offset = FRAMESAMPLES - 4; -#else // #if defined(MIPS_DSP_R1_LE) - int32_t offset = FRAMESAMPLES - 2; -#endif // #if defined(MIPS_DSP_R1_LE) - - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre], %[inreQ7], 0 \n\t" - "addiu %[inim] , %[inimQ7], 0 \n\t" - "addiu %[outre1], %[outre1Q16], 0 \n\t" - "addiu %[outre2], %[outre2Q16], 0 \n\t" - "mul %[max], $zero, $zero \n\t" - "1: \n\t" -#if defined(MIPS_DSP_R1_LE) - // Process two samples in one iteration avoiding left shift before - // multiplication. MaxAbsValueW32 function inlined into the loop. 
- "addu %[r8], %[inre], %[offset] \n\t" - "addu %[r9], %[inim], %[offset] \n\t" - "lwl %[r4], 0(%[r8]) \n\t" - "lwl %[r5], 0(%[r9]) \n\t" - "lwl %[r0], 0(%[inre]) \n\t" - "lwl %[r1], 0(%[inim]) \n\t" - "lwl %[r2], 0(%[cosptr]) \n\t" - "lwl %[r3], 0(%[sinptr]) \n\t" - "lwr %[r4], 0(%[r8]) \n\t" - "lwr %[r5], 0(%[r9]) \n\t" - "lwr %[r0], 0(%[inre]) \n\t" - "lwr %[r1], 0(%[inim]) \n\t" - "lwr %[r2], 0(%[cosptr]) \n\t" - "lwr %[r3], 0(%[sinptr]) \n\t" - "packrl.ph %[r4], %[r4], %[r4] \n\t" - "packrl.ph %[r5], %[r5], %[r5] \n\t" - "muleq_s.w.phr %[r6], %[r0], %[r2] \n\t" - "muleq_s.w.phr %[r7], %[r1], %[r3] \n\t" - "muleq_s.w.phr %[r8], %[r4], %[r2] \n\t" - "muleq_s.w.phr %[r9], %[r5], %[r3] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "addiu %[inim], %[inim], 4 \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "addu %[r6], %[r6], %[r7] \n\t" - "subu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - "addu %[r6], %[r6], %[r9] \n\t" - "sw %[r7], 0(%[outre1]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre1], %[r7] \n\t" - "sw %[r6], 4(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "muleq_s.w.phl %[r6], %[r0], %[r2] \n\t" - "muleq_s.w.phl %[r7], %[r1], %[r3] \n\t" - "muleq_s.w.phl %[r8], %[r4], %[r2] \n\t" - "muleq_s.w.phl %[r9], %[r5], %[r3] \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "addu %[r6], %[r6], %[r7] \n\t" - "subu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - "addu %[r6], %[r6], %[r9] \n\t" - "sw %[r7], 4(%[outre1]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt 
%[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre1], %[r7] \n\t" - "sw %[r6], 0(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "muleq_s.w.phr %[r6], %[r1], %[r2] \n\t" - "muleq_s.w.phr %[r7], %[r0], %[r3] \n\t" - "muleq_s.w.phr %[r8], %[r5], %[r2] \n\t" - "muleq_s.w.phr %[r9], %[r4], %[r3] \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - "addu %[r6], %[r9], %[r6] \n\t" - "negu %[r6], %[r6] \n\t" - "sw %[r7], 0(%[outre2]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre2], %[r7] \n\t" - "sw %[r6], 4(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "muleq_s.w.phl %[r6], %[r1], %[r2] \n\t" - "muleq_s.w.phl %[r7], %[r0], %[r3] \n\t" - "muleq_s.w.phl %[r8], %[r5], %[r2] \n\t" - "muleq_s.w.phl %[r9], %[r4], %[r3] \n\t" - "addiu %[offset], %[offset], -8 \n\t" - "shra_r.w %[r6], %[r6], 6 \n\t" - "shra_r.w %[r7], %[r7], 6 \n\t" - "shra_r.w %[r8], %[r8], 6 \n\t" - "shra_r.w %[r9], %[r9], 6 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r9], %[r9], %[r8] \n\t" - "subu %[r7], %[r6], %[r9] \n\t" - "addu %[r6], %[r9], %[r6] \n\t" - "negu %[r6], %[r6] \n\t" - "sw %[r7], 4(%[outre2]) \n\t" - "absq_s.w %[r7], %[r7] \n\t" - "slt %[r8], %[max], %[r7] \n\t" - "movn %[max], %[r7], %[r8] \n\t" - "sll %[r7], %[offset], 1 \n\t" - "addu %[r7], %[outre2], %[r7] \n\t" - "sw %[r6], 0(%[r7]) \n\t" - "absq_s.w %[r6], %[r6] \n\t" - "slt %[r8], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r8] \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 
8 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "lh %[r0], 0(%[inre]) \n\t" - "lh %[r1], 0(%[inim]) \n\t" - "lh %[r4], 0(%[cosptr]) \n\t" - "lh %[r5], 0(%[sinptr]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "mul %[r2], %[r0], %[r4] \n\t" - "mul %[r0], %[r0], %[r5] \n\t" - "mul %[r3], %[r1], %[r5] \n\t" - "mul %[r1], %[r1], %[r4] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "addu %[r8], %[inre], %[offset] \n\t" - "addu %[r9], %[inim], %[offset] \n\t" - "addiu %[r2], %[r2], 16 \n\t" - "sra %[r2], %[r2], 5 \n\t" - "addiu %[r0], %[r0], 16 \n\t" - "sra %[r0], %[r0], 5 \n\t" - "addiu %[r3], %[r3], 16 \n\t" - "sra %[r3], %[r3], 5 \n\t" - "lh %[r6], 0(%[r8]) \n\t" - "lh %[r7], 0(%[r9]) \n\t" - "addiu %[r1], %[r1], 16 \n\t" - "sra %[r1], %[r1], 5 \n\t" - "mul %[r8], %[r7], %[r4] \n\t" - "mul %[r7], %[r7], %[r5] \n\t" - "mul %[r9], %[r6], %[r4] \n\t" - "mul %[r6], %[r6], %[r5] \n\t" - "addu %[r2], %[r2], %[r3] \n\t" - "subu %[r1], %[r1], %[r0] \n\t" - "sll %[r0], %[offset], 1 \n\t" - "addu %[r4], %[outre1], %[r0] \n\t" - "addu %[r5], %[outre2], %[r0] \n\t" - "addiu %[r8], %[r8], 16 \n\t" - "sra %[r8], %[r8], 5 \n\t" - "addiu %[r7], %[r7], 16 \n\t" - "sra %[r7], %[r7], 5 \n\t" - "addiu %[r6], %[r6], 16 \n\t" - "sra %[r6], %[r6], 5 \n\t" - "addiu %[r9], %[r9], 16 \n\t" - "sra %[r9], %[r9], 5 \n\t" - "addu %[r8], %[r8], %[r6] \n\t" - "negu %[r8], %[r8] \n\t" - "subu %[r7], %[r7], %[r9] \n\t" - "subu %[r6], %[r2], %[r7] \n\t" - "addu %[r0], %[r2], %[r7] \n\t" - "addu %[r3], %[r1], %[r8] \n\t" - "subu %[r1], %[r8], %[r1] \n\t" - "sw %[r6], 0(%[outre1]) \n\t" - "sw %[r0], 0(%[r4]) \n\t" - "sw %[r3], 0(%[outre2]) \n\t" - "sw %[r1], 0(%[r5]) \n\t" - "addiu %[outre1], %[outre1], 4 \n\t" - "addiu %[offset], %[offset], -4 \n\t" - "addiu %[inre], %[inre], 2 \n\t" - "addiu %[inim], %[inim], 2 \n\t" - // Inlined WebRtcSpl_MaxAbsValueW32 - "negu %[r5], %[r6] \n\t" - "slt %[r2], %[r6], $zero \n\t" - "movn %[r6], %[r5], %[r2] \n\t" - "negu %[r5], %[r0] 
\n\t" - "slt %[r2], %[r0], $zero \n\t" - "movn %[r0], %[r5], %[r2] \n\t" - "negu %[r5], %[r3] \n\t" - "slt %[r2], %[r3], $zero \n\t" - "movn %[r3], %[r5], %[r2] \n\t" - "negu %[r5], %[r1] \n\t" - "slt %[r2], %[r1], $zero \n\t" - "movn %[r1], %[r5], %[r2] \n\t" - "slt %[r2], %[r6], %[r0] \n\t" - "slt %[r5], %[r3], %[r1] \n\t" - "movn %[r6], %[r0], %[r2] \n\t" - "movn %[r3], %[r1], %[r5] \n\t" - "slt %[r2], %[r6], %[r3] \n\t" - "movn %[r6], %[r3], %[r2] \n\t" - "slt %[r2], %[max], %[r6] \n\t" - "movn %[max], %[r6], %[r2] \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 4 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "clz %[max], %[max] \n\t" - "addiu %[max], %[max], -25 \n\t" - ".set pop \n\t" - : [inre] "=&r" (inre), [inim] "=&r" (inim), - [outre1] "=&r" (outre1), [outre2] "=&r" (outre2), - [offset] "+r" (offset), [k] "+r" (k), [r0] "=&r" (r0), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), - [r7] "=&r" (r7), [r8] "=&r" (r8), [r9] "=&r" (r9), - [max] "=&r" (max) - : [inreQ7] "r" (inreQ7), [inimQ7] "r" (inimQ7), - [cosptr] "r" (cosptr), [sinptr] "r" (sinptr), - [outre1Q16] "r" (outre1Q16), [outre2Q16] "r" (outre2Q16) - : "hi", "lo", "memory" - ); - - // "Fastest" vectors - k = FRAMESAMPLES / 4; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre], %[inreQ7], 0 \n\t" - "addiu %[inim], %[inimQ7], 0 \n\t" - "addiu %[outre1], %[outre1Q16], 0 \n\t" - "addiu %[outre2], %[outre2Q16], 0 \n\t" - "bltz %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" - "1: \n\t" - "lw %[r0], 0(%[outre1]) \n\t" - "lw %[r1], 0(%[outre2]) \n\t" - "lw %[r2], 4(%[outre1]) \n\t" - "lw %[r3], 4(%[outre2]) \n\t" - "sllv %[r0], %[r0], %[max] \n\t" - "sllv %[r1], %[r1], %[max] \n\t" - "sllv %[r2], %[r2], %[max] \n\t" - "sllv %[r3], %[r3], %[max] \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "addiu %[outre2], %[outre2], 8 \n\t" - "sh %[r0], 0(%[inre]) \n\t" - "sh %[r1], 
0(%[inim]) \n\t" - "sh %[r2], 2(%[inre]) \n\t" - "sh %[r3], 2(%[inim]) \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[inim], %[inim], 4 \n\t" - "b 4f \n\t" - " nop \n\t" - "2: \n\t" -#if !defined(MIPS_DSP_R1_LE) - "addiu %[r4], $zero, 1 \n\t" - "addiu %[r5], %[max1], -1 \n\t" - "sllv %[r4], %[r4], %[r5] \n\t" -#endif // #if !defined(MIPS_DSP_R1_LE) - "3: \n\t" - "lw %[r0], 0(%[outre1]) \n\t" - "lw %[r1], 0(%[outre2]) \n\t" - "lw %[r2], 4(%[outre1]) \n\t" - "lw %[r3], 4(%[outre2]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "shrav_r.w %[r0], %[r0], %[max1] \n\t" - "shrav_r.w %[r1], %[r1], %[max1] \n\t" - "shrav_r.w %[r2], %[r2], %[max1] \n\t" - "shrav_r.w %[r3], %[r3], %[max1] \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r4] \n\t" - "addu %[r1], %[r1], %[r4] \n\t" - "addu %[r2], %[r2], %[r4] \n\t" - "addu %[r3], %[r3], %[r4] \n\t" - "srav %[r0], %[r0], %[max1] \n\t" - "srav %[r1], %[r1], %[max1] \n\t" - "srav %[r2], %[r2], %[max1] \n\t" - "srav %[r3], %[r3], %[max1] \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addiu %[outre1], %[outre1], 8 \n\t" - "addiu %[outre2], %[outre2], 8 \n\t" - "sh %[r0], 0(%[inre]) \n\t" - "sh %[r1], 0(%[inim]) \n\t" - "sh %[r2], 2(%[inre]) \n\t" - "sh %[r3], 2(%[inim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "bgtz %[k], 3b \n\t" - " addiu %[inim], %[inim], 4 \n\t" - "4: \n\t" - ".set pop \n\t" - : [k] "+r" (k), [max1] "=&r" (max1), [r0] "=&r" (r0), - [inre] "=&r" (inre), [inim] "=&r" (inim), - [outre1] "=&r" (outre1), [outre2] "=&r" (outre2), -#if !defined(MIPS_DSP_R1_LE) - [r4] "=&r" (r4), [r5] "=&r" (r5), -#endif // #if !defined(MIPS_DSP_R1_LE) - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3) - : [max] "r" (max), [inreQ7] "r" (inreQ7), - [inimQ7] "r" (inimQ7), [outre1Q16] "r" (outre1Q16), - [outre2Q16] "r" (outre2Q16) - : "memory" - ); - - WebRtcIsacfix_FftRadix16Fastest(inreQ7, inimQ7, 1); // real call - - // All the remaining processing is done inside a 
single loop to avoid - // unnecessary memory accesses. MIPS DSPr2 version processes two samples - // at a time. - cosptr = (int16_t*)WebRtcIsacfix_kCosTab1; - sinptr = (int16_t*)WebRtcIsacfix_kSinTab1; - k = FRAMESAMPLES / 2; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[inre], %[inreQ7], 0 \n\t" - "addiu %[inim], %[inimQ7], 0 \n\t" - "addiu %[outre1], %[outre1Q16], 0 \n\t" - "addiu %[outre2], %[outre2Q16], 0 \n\t" - "addiu %[r4], $zero, 273 \n\t" - "addiu %[r5], $zero, 31727 \n\t" -#if defined(MIPS_DSP_R2_LE) - "addiu %[max], %[max], 16 \n\t" - "replv.ph %[r4], %[r4] \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "bltz %[max], 2f \n\t" - " subu %[max1], $zero, %[max] \n\t" -#if defined(MIPS_DSP_R2_LE) - "addiu %[max], %[max], 1 \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "1: \n\t" -#if defined(MIPS_DSP_R2_LE) - "lwl %[r0], 0(%[inre]) \n\t" - "lwl %[r1], 0(%[inim]) \n\t" - "lh %[r2], 0(%[cosptr]) \n\t" - "lwr %[r0], 0(%[inre]) \n\t" - "lwr %[r1], 0(%[inim]) \n\t" - "lh %[r3], 0(%[sinptr]) \n\t" - "muleq_s.w.phr %[r6], %[r0], %[r4] \n\t" - "muleq_s.w.phr %[r7], %[r1], %[r4] \n\t" - "muleq_s.w.phl %[r0], %[r0], %[r4] \n\t" - "muleq_s.w.phl %[r1], %[r1], %[r4] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "addiu %[inim], %[inim], 4 \n\t" - "shrav_r.w %[r6], %[r6], %[max] \n\t" - "shrav_r.w %[r7], %[r7], %[max] \n\t" - "mult $ac0, %[r2], %[r6] \n\t" - "mult $ac1, %[r3], %[r7] \n\t" - "mult $ac2, %[r2], %[r7] \n\t" - "mult $ac3, %[r3], %[r6] \n\t" - "lh %[r2], 2(%[cosptr]) \n\t" - "lh %[r3], 2(%[sinptr]) \n\t" - "extr_r.w %[r6], $ac0, 14 \n\t" - "extr_r.w %[r7], $ac1, 14 \n\t" - "extr_r.w %[r8], $ac2, 14 \n\t" - "extr_r.w %[r9], $ac3, 14 \n\t" - "shrav_r.w %[r0], %[r0], %[max] \n\t" - "shrav_r.w %[r1], %[r1], %[max] \n\t" - "mult $ac0, %[r2], %[r0] \n\t" - "mult $ac1, %[r3], %[r1] \n\t" - "mult $ac2, %[r2], %[r1] \n\t" - "mult $ac3, %[r3], %[r0] \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "extr_r.w %[r0], 
$ac0, 14 \n\t" - "extr_r.w %[r1], $ac1, 14 \n\t" - "extr_r.w %[r2], $ac2, 14 \n\t" - "extr_r.w %[r3], $ac3, 14 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r8], %[r8], %[r9] \n\t" - "mult $ac0, %[r5], %[r6] \n\t" - "mult $ac1, %[r5], %[r8] \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "addu %[r2], %[r2], %[r3] \n\t" - "extr_r.w %[r1], $ac0, 11 \n\t" - "extr_r.w %[r3], $ac1, 11 \n\t" - "mult $ac2, %[r5], %[r0] \n\t" - "mult $ac3, %[r5], %[r2] \n\t" - "sw %[r1], 0(%[outre1]) \n\t" - "sw %[r3], 0(%[outre2]) \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "extr_r.w %[r0], $ac2, 11 \n\t" - "extr_r.w %[r2], $ac3, 11 \n\t" - "sw %[r0], -4(%[outre1]) \n\t" - "sw %[r2], 4(%[outre2]) \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 8 \n\t" - "b 3f \n\t" -#else // #if defined(MIPS_DSP_R2_LE) - "lh %[r0], 0(%[inre]) \n\t" - "lh %[r1], 0(%[inim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "srav %[r0], %[r0], %[max] \n\t" - "srav %[r1], %[r1], %[max] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 1 \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 1 \n\t" - "mul %[r2], %[r2], %[r4] \n\t" - "mul %[r0], %[r0], %[r4] \n\t" - "mul %[r3], %[r3], %[r4] \n\t" - "mul %[r1], %[r1], %[r4] \n\t" - "addiu %[inre], %[inre], 2 \n\t" - "addiu %[inim], %[inim], 2 \n\t" - "lh %[r6], 0(%[cosptr]) \n\t" - "lh %[r7], 0(%[sinptr]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 15 \n\t" - "shra_r.w %[r1], %[r1], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x4000 \n\t" - "addiu %[r1], %[r1], 0x4000 \n\t" - "sra %[r0], %[r0], 15 \n\t" - "sra %[r1], %[r1], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r2], %[r0] \n\t" - "addu %[r1], %[r3], %[r1] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r9], %[r2], %[r6] \n\t" - "mul %[r2], %[r2], %[r7] \n\t" - "mul %[r8], %[r0], %[r6] \n\t" - 
"mul %[r0], %[r0], %[r7] \n\t" - "sra %[r3], %[r3], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sll %[r9], %[r9], 2 \n\t" - "sll %[r2], %[r2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r0], %[r0], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r0], %[r0], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r0], %[r0], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r9], %[r9], %[r8] \n\t" - "addu %[r2], %[r2], %[r0] \n\t" - "mul %[r0], %[r3], %[r6] \n\t" - "mul %[r3], %[r3], %[r7] \n\t" - "mul %[r8], %[r1], %[r6] \n\t" - "mul %[r1], %[r1], %[r8] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "sll %[r0], %[r0], 2 \n\t" - "sll %[r3], %[r3], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r1], %[r1], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r1], %[r1], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r1], %[r1], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r8] \n\t" - "addu %[r3], %[r3], %[r1] \n\t" - "subu %[r9], %[r9], %[r3] \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "sra %[r1], %[r9], 16 \n\t" - "andi %[r9], %[r9], 0xFFFF \n\t" - "mul %[r1], %[r1], %[r5] \n\t" - "mul %[r9], %[r9], %[r5] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r2], %[r2], %[r5] \n\t" - "mul %[r0], %[r0], %[r5] \n\t" - "sll %[r1], %[r1], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r9], %[r9], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r9], %[r9], 0x400 \n\t" - "sra %[r9], %[r9], 11 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r1], %[r1], %[r9] \n\t" - "sll %[r2], %[r2], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x400 \n\t" - "sra %[r0], %[r0], 11 \n\t" -#endif // #if 
defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r2] \n\t" - "sw %[r1], 0(%[outre1]) \n\t" - "addiu %[outre1], %[outre1], 4 \n\t" - "sw %[r0], 0(%[outre2]) \n\t" - "bgtz %[k], 1b \n\t" - " addiu %[outre2], %[outre2], 4 \n\t" - "b 3f \n\t" - " nop \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "2: \n\t" -#if defined(MIPS_DSP_R2_LE) - "addiu %[max1], %[max1], -1 \n\t" - "21: \n\t" - "lwl %[r0], 0(%[inre]) \n\t" - "lwl %[r1], 0(%[inim]) \n\t" - "lh %[r2], 0(%[cosptr]) \n\t" - "lwr %[r0], 0(%[inre]) \n\t" - "lwr %[r1], 0(%[inim]) \n\t" - "lh %[r3], 0(%[sinptr]) \n\t" - "muleq_s.w.phr %[r6], %[r0], %[r4] \n\t" - "muleq_s.w.phr %[r7], %[r1], %[r4] \n\t" - "muleq_s.w.phl %[r0], %[r0], %[r4] \n\t" - "muleq_s.w.phl %[r1], %[r1], %[r4] \n\t" - "addiu %[k], %[k], -2 \n\t" - "addiu %[inre], %[inre], 4 \n\t" - "addiu %[inim], %[inim], 4 \n\t" - "sllv %[r6], %[r6], %[max1] \n\t" - "sllv %[r7], %[r7], %[max1] \n\t" - "mult $ac0, %[r2], %[r6] \n\t" - "mult $ac1, %[r3], %[r7] \n\t" - "mult $ac2, %[r2], %[r7] \n\t" - "mult $ac3, %[r3], %[r6] \n\t" - "lh %[r2], 2(%[cosptr]) \n\t" - "lh %[r3], 2(%[sinptr]) \n\t" - "extr_r.w %[r6], $ac0, 14 \n\t" - "extr_r.w %[r7], $ac1, 14 \n\t" - "extr_r.w %[r8], $ac2, 14 \n\t" - "extr_r.w %[r9], $ac3, 14 \n\t" - "sllv %[r0], %[r0], %[max1] \n\t" - "sllv %[r1], %[r1], %[max1] \n\t" - "mult $ac0, %[r2], %[r0] \n\t" - "mult $ac1, %[r3], %[r1] \n\t" - "mult $ac2, %[r2], %[r1] \n\t" - "mult $ac3, %[r3], %[r0] \n\t" - "addiu %[cosptr], %[cosptr], 4 \n\t" - "extr_r.w %[r0], $ac0, 14 \n\t" - "extr_r.w %[r1], $ac1, 14 \n\t" - "extr_r.w %[r2], $ac2, 14 \n\t" - "extr_r.w %[r3], $ac3, 14 \n\t" - "subu %[r6], %[r6], %[r7] \n\t" - "addu %[r8], %[r8], %[r9] \n\t" - "mult $ac0, %[r5], %[r6] \n\t" - "mult $ac1, %[r5], %[r8] \n\t" - "addiu %[sinptr], %[sinptr], 4 \n\t" - "subu %[r0], %[r0], %[r1] \n\t" - "addu %[r2], %[r2], %[r3] \n\t" - "extr_r.w %[r1], $ac0, 11 \n\t" - "extr_r.w %[r3], $ac1, 11 \n\t" - "mult $ac2, %[r5], %[r0] \n\t" - "mult $ac3, %[r5], %[r2] \n\t" 
- "sw %[r1], 0(%[outre1]) \n\t" - "sw %[r3], 0(%[outre2]) \n\t" - "addiu %[outre1], %[outre1], 8 \n\t" - "extr_r.w %[r0], $ac2, 11 \n\t" - "extr_r.w %[r2], $ac3, 11 \n\t" - "sw %[r0], -4(%[outre1]) \n\t" - "sw %[r2], 4(%[outre2]) \n\t" - "bgtz %[k], 21b \n\t" - " addiu %[outre2], %[outre2], 8 \n\t" - "b 3f \n\t" - " nop \n\t" -#else // #if defined(MIPS_DSP_R2_LE) - "lh %[r0], 0(%[inre]) \n\t" - "lh %[r1], 0(%[inim]) \n\t" - "addiu %[k], %[k], -1 \n\t" - "sllv %[r0], %[r0], %[max1] \n\t" - "sllv %[r1], %[r1], %[max1] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "sra %[r0], %[r0], 1 \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sra %[r1], %[r1], 1 \n\t" - "mul %[r2], %[r2], %[r4] \n\t" - "mul %[r0], %[r0], %[r4] \n\t" - "mul %[r3], %[r3], %[r4] \n\t" - "mul %[r1], %[r1], %[r4] \n\t" - "addiu %[inre], %[inre], 2 \n\t" - "addiu %[inim], %[inim], 2 \n\t" - "lh %[r6], 0(%[cosptr]) \n\t" - "lh %[r7], 0(%[sinptr]) \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 15 \n\t" - "shra_r.w %[r1], %[r1], 15 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x4000 \n\t" - "addiu %[r1], %[r1], 0x4000 \n\t" - "sra %[r0], %[r0], 15 \n\t" - "sra %[r1], %[r1], 15 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r2], %[r0] \n\t" - "addu %[r1], %[r3], %[r1] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r9], %[r2], %[r6] \n\t" - "mul %[r2], %[r2], %[r7] \n\t" - "mul %[r8], %[r0], %[r6] \n\t" - "mul %[r0], %[r0], %[r7] \n\t" - "sra %[r3], %[r1], 16 \n\t" - "andi %[r1], %[r1], 0xFFFF \n\t" - "sll %[r9], %[r9], 2 \n\t" - "sll %[r2], %[r2], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r0], %[r0], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r0], %[r0], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r0], %[r0], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu 
%[r9], %[r9], %[r8] \n\t" - "addu %[r2], %[r2], %[r0] \n\t" - "mul %[r0], %[r3], %[r6] \n\t" - "mul %[r3], %[r3], %[r7] \n\t" - "mul %[r8], %[r1], %[r6] \n\t" - "mul %[r1], %[r1], %[r7] \n\t" - "addiu %[cosptr], %[cosptr], 2 \n\t" - "addiu %[sinptr], %[sinptr], 2 \n\t" - "sll %[r0], %[r0], 2 \n\t" - "sll %[r3], %[r3], 2 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r8], %[r8], 14 \n\t" - "shra_r.w %[r1], %[r1], 14 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r8], %[r8], 0x2000 \n\t" - "addiu %[r1], %[r1], 0x2000 \n\t" - "sra %[r8], %[r8], 14 \n\t" - "sra %[r1], %[r1], 14 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r8] \n\t" - "addu %[r3], %[r3], %[r1] \n\t" - "subu %[r9], %[r9], %[r3] \n\t" - "addu %[r0], %[r0], %[r2] \n\t" - "sra %[r1], %[r9], 16 \n\t" - "andi %[r9], %[r9], 0xFFFF \n\t" - "mul %[r1], %[r1], %[r5] \n\t" - "mul %[r9], %[r9], %[r5] \n\t" - "sra %[r2], %[r0], 16 \n\t" - "andi %[r0], %[r0], 0xFFFF \n\t" - "mul %[r2], %[r2], %[r5] \n\t" - "mul %[r0], %[r0], %[r5] \n\t" - "sll %[r1], %[r1], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r9], %[r9], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r9], %[r9], 0x400 \n\t" - "sra %[r9], %[r9], 11 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r1], %[r1], %[r9] \n\t" - "sll %[r2], %[r2], 5 \n\t" -#if defined(MIPS_DSP_R1_LE) - "shra_r.w %[r0], %[r0], 11 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 0x400 \n\t" - "sra %[r0], %[r0], 11 \n\t" -#endif // #if defined(MIPS_DSP_R1_LE) - "addu %[r0], %[r0], %[r2] \n\t" - "sw %[r1], 0(%[outre1]) \n\t" - "addiu %[outre1], %[outre1], 4 \n\t" - "sw %[r0], 0(%[outre2]) \n\t" - "bgtz %[k], 2b \n\t" - " addiu %[outre2], %[outre2], 4 \n\t" -#endif // #if defined(MIPS_DSP_R2_LE) - "3: \n\t" - ".set pop \n\t" - : [k] "+r" (k), [r0] "=&r" (r0), [r1] "=&r" (r1), - [r2] "=&r" (r2), [r3] "=&r" (r3), [r4] "=&r" (r4), - [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7), - [r8] "=&r" (r8), [r9] "=&r" 
(r9), [max1] "=&r" (max1), - [inre] "=&r" (inre), [inim] "=&r" (inim), - [outre1] "=&r" (outre1), [outre2] "=&r" (outre2) - : [max] "r" (max), [inreQ7] "r" (inreQ7), - [inimQ7] "r" (inimQ7), [cosptr] "r" (cosptr), - [sinptr] "r" (sinptr), [outre1Q16] "r" (outre1Q16), - [outre2Q16] "r" (outre2Q16) - : "hi", "lo", "memory" -#if defined(MIPS_DSP_R2_LE) - , "$ac1hi", "$ac1lo", "$ac2hi", "$ac2lo", "$ac3hi", "$ac3lo" -#endif // #if defined(MIPS_DSP_R2_LE) - ); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/transform_neon.c b/modules/audio_coding/codecs/isac/fix/source/transform_neon.c deleted file mode 100644 index 79dadc4600..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/transform_neon.c +++ /dev/null @@ -1,479 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "modules/audio_coding/codecs/isac/fix/source/fft.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -// Tables are defined in transform_tables.c file. -// Cosine table 1 in Q14. -extern const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2]; -// Sine table 1 in Q14. -extern const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2]; -// Sine table 2 in Q14. 
-extern const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4]; - -static inline int32_t ComplexMulAndFindMaxNeon(int16_t* inre1Q9, - int16_t* inre2Q9, - int32_t* outreQ16, - int32_t* outimQ16) { - int k; - const int16_t* kCosTab = &WebRtcIsacfix_kCosTab1[0]; - const int16_t* kSinTab = &WebRtcIsacfix_kSinTab1[0]; - // 0.5 / sqrt(240) in Q19 is round((.5 / sqrt(240)) * (2^19)) = 16921. - // Use "16921 << 5" and vqdmulh, instead of ">> 26" as in the C code. - int32_t fact = 16921 << 5; - int32x4_t factq = vdupq_n_s32(fact); - uint32x4_t max_r = vdupq_n_u32(0); - uint32x4_t max_i = vdupq_n_u32(0); - - for (k = 0; k < FRAMESAMPLES/2; k += 8) { - int16x8_t tmpr = vld1q_s16(kCosTab); - int16x8_t tmpi = vld1q_s16(kSinTab); - int16x8_t inre1 = vld1q_s16(inre1Q9); - int16x8_t inre2 = vld1q_s16(inre2Q9); - kCosTab += 8; - kSinTab += 8; - inre1Q9 += 8; - inre2Q9 += 8; - - // Use ">> 26", instead of ">> 7", ">> 16" and then ">> 3" as in the C code. - int32x4_t tmp0 = vmull_s16(vget_low_s16(tmpr), vget_low_s16(inre1)); - int32x4_t tmp1 = vmull_s16(vget_low_s16(tmpr), vget_low_s16(inre2)); - tmp0 = vmlal_s16(tmp0, vget_low_s16(tmpi), vget_low_s16(inre2)); - tmp1 = vmlsl_s16(tmp1, vget_low_s16(tmpi), vget_low_s16(inre1)); -#if defined(WEBRTC_ARCH_ARM64) - int32x4_t tmp2 = vmull_high_s16(tmpr, inre1); - int32x4_t tmp3 = vmull_high_s16(tmpr, inre2); - tmp2 = vmlal_high_s16(tmp2, tmpi, inre2); - tmp3 = vmlsl_high_s16(tmp3, tmpi, inre1); -#else - int32x4_t tmp2 = vmull_s16(vget_high_s16(tmpr), vget_high_s16(inre1)); - int32x4_t tmp3 = vmull_s16(vget_high_s16(tmpr), vget_high_s16(inre2)); - tmp2 = vmlal_s16(tmp2, vget_high_s16(tmpi), vget_high_s16(inre2)); - tmp3 = vmlsl_s16(tmp3, vget_high_s16(tmpi), vget_high_s16(inre1)); -#endif - - int32x4_t outr_0 = vqdmulhq_s32(tmp0, factq); - int32x4_t outr_1 = vqdmulhq_s32(tmp2, factq); - int32x4_t outi_0 = vqdmulhq_s32(tmp1, factq); - int32x4_t outi_1 = vqdmulhq_s32(tmp3, factq); - vst1q_s32(outreQ16, outr_0); - outreQ16 += 4; - 
vst1q_s32(outreQ16, outr_1); - outreQ16 += 4; - vst1q_s32(outimQ16, outi_0); - outimQ16 += 4; - vst1q_s32(outimQ16, outi_1); - outimQ16 += 4; - - // Find the absolute maximum in the vectors. - tmp0 = vabsq_s32(outr_0); - tmp1 = vabsq_s32(outr_1); - tmp2 = vabsq_s32(outi_0); - tmp3 = vabsq_s32(outi_1); - // vabs doesn't change the value of 0x80000000. - // Use u32 so we don't lose the value 0x80000000. - max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp0)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp2)); - max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp1)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp3)); - } - - max_r = vmaxq_u32(max_r, max_i); -#if defined(WEBRTC_ARCH_ARM64) - uint32_t maximum = vmaxvq_u32(max_r); -#else - uint32x2_t max32x2_r = vmax_u32(vget_low_u32(max_r), vget_high_u32(max_r)); - max32x2_r = vpmax_u32(max32x2_r, max32x2_r); - uint32_t maximum = vget_lane_u32(max32x2_r, 0); -#endif - - return (int32_t)maximum; -} - -static inline void PreShiftW32toW16Neon(int32_t* inre, - int32_t* inim, - int16_t* outre, - int16_t* outim, - int32_t sh) { - int k; - int32x4_t sh32x4 = vdupq_n_s32(sh); - for (k = 0; k < FRAMESAMPLES/2; k += 16) { - int32x4x4_t inre32x4x4 = vld4q_s32(inre); - int32x4x4_t inim32x4x4 = vld4q_s32(inim); - inre += 16; - inim += 16; - inre32x4x4.val[0] = vrshlq_s32(inre32x4x4.val[0], sh32x4); - inre32x4x4.val[1] = vrshlq_s32(inre32x4x4.val[1], sh32x4); - inre32x4x4.val[2] = vrshlq_s32(inre32x4x4.val[2], sh32x4); - inre32x4x4.val[3] = vrshlq_s32(inre32x4x4.val[3], sh32x4); - inim32x4x4.val[0] = vrshlq_s32(inim32x4x4.val[0], sh32x4); - inim32x4x4.val[1] = vrshlq_s32(inim32x4x4.val[1], sh32x4); - inim32x4x4.val[2] = vrshlq_s32(inim32x4x4.val[2], sh32x4); - inim32x4x4.val[3] = vrshlq_s32(inim32x4x4.val[3], sh32x4); - int16x4x4_t outre16x4x4; - int16x4x4_t outim16x4x4; - outre16x4x4.val[0] = vmovn_s32(inre32x4x4.val[0]); - outre16x4x4.val[1] = vmovn_s32(inre32x4x4.val[1]); - outre16x4x4.val[2] = 
vmovn_s32(inre32x4x4.val[2]); - outre16x4x4.val[3] = vmovn_s32(inre32x4x4.val[3]); - outim16x4x4.val[0] = vmovn_s32(inim32x4x4.val[0]); - outim16x4x4.val[1] = vmovn_s32(inim32x4x4.val[1]); - outim16x4x4.val[2] = vmovn_s32(inim32x4x4.val[2]); - outim16x4x4.val[3] = vmovn_s32(inim32x4x4.val[3]); - vst4_s16(outre, outre16x4x4); - vst4_s16(outim, outim16x4x4); - outre += 16; - outim += 16; - } -} - -static inline void PostShiftAndSeparateNeon(int16_t* inre, - int16_t* inim, - int16_t* outre, - int16_t* outim, - int32_t sh) { - int k; - int16_t* inre1 = inre; - int16_t* inre2 = &inre[FRAMESAMPLES/2 - 4]; - int16_t* inim1 = inim; - int16_t* inim2 = &inim[FRAMESAMPLES/2 - 4]; - int16_t* outre1 = outre; - int16_t* outre2 = &outre[FRAMESAMPLES/2 - 4]; - int16_t* outim1 = outim; - int16_t* outim2 = &outim[FRAMESAMPLES/2 - 4]; - const int16_t* kSinTab1 = &WebRtcIsacfix_kSinTab2[0]; - const int16_t* kSinTab2 = &WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 -4]; - // By vshl, we effectively did "<< (-sh - 23)", instead of "<< (-sh)", - // ">> 14" and then ">> 9" as in the C code. 
- int32x4_t shift = vdupq_n_s32(-sh - 23); - - for (k = 0; k < FRAMESAMPLES/4; k += 4) { - int16x4_t tmpi = vld1_s16(kSinTab1); - kSinTab1 += 4; - int16x4_t tmpr = vld1_s16(kSinTab2); - kSinTab2 -= 4; - int16x4_t inre_0 = vld1_s16(inre1); - inre1 += 4; - int16x4_t inre_1 = vld1_s16(inre2); - inre2 -= 4; - int16x4_t inim_0 = vld1_s16(inim1); - inim1 += 4; - int16x4_t inim_1 = vld1_s16(inim2); - inim2 -= 4; - tmpr = vneg_s16(tmpr); - inre_1 = vrev64_s16(inre_1); - inim_1 = vrev64_s16(inim_1); - tmpr = vrev64_s16(tmpr); - - int16x4_t xr = vqadd_s16(inre_0, inre_1); - int16x4_t xi = vqsub_s16(inim_0, inim_1); - int16x4_t yr = vqadd_s16(inim_0, inim_1); - int16x4_t yi = vqsub_s16(inre_1, inre_0); - - int32x4_t outr0 = vmull_s16(tmpr, xr); - int32x4_t outi0 = vmull_s16(tmpi, xr); - int32x4_t outr1 = vmull_s16(tmpi, yr); - int32x4_t outi1 = vmull_s16(tmpi, yi); - outr0 = vmlsl_s16(outr0, tmpi, xi); - outi0 = vmlal_s16(outi0, tmpr, xi); - outr1 = vmlal_s16(outr1, tmpr, yi); - outi1 = vmlsl_s16(outi1, tmpr, yr); - - outr0 = vshlq_s32(outr0, shift); - outi0 = vshlq_s32(outi0, shift); - outr1 = vshlq_s32(outr1, shift); - outi1 = vshlq_s32(outi1, shift); - outr1 = vnegq_s32(outr1); - - int16x4_t outre_0 = vmovn_s32(outr0); - int16x4_t outim_0 = vmovn_s32(outi0); - int16x4_t outre_1 = vmovn_s32(outr1); - int16x4_t outim_1 = vmovn_s32(outi1); - outre_1 = vrev64_s16(outre_1); - outim_1 = vrev64_s16(outim_1); - - vst1_s16(outre1, outre_0); - outre1 += 4; - vst1_s16(outim1, outim_0); - outim1 += 4; - vst1_s16(outre2, outre_1); - outre2 -= 4; - vst1_s16(outim2, outim_1); - outim2 -= 4; - } -} - -void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9, - int16_t* inre2Q9, - int16_t* outreQ7, - int16_t* outimQ7) { - int32_t tmpreQ16[FRAMESAMPLES/2], tmpimQ16[FRAMESAMPLES/2]; - int32_t max; - int32_t sh; - - // Multiply with complex exponentials and combine into one complex vector. - // And find the maximum. 
- max = ComplexMulAndFindMaxNeon(inre1Q9, inre2Q9, tmpreQ16, tmpimQ16); - - sh = (int32_t)WebRtcSpl_NormW32(max); - sh = sh - 24; - - // If sh becomes >= 0, then we should shift sh steps to the left, - // and the domain will become Q(16 + sh). - // If sh becomes < 0, then we should shift -sh steps to the right, - // and the domain will become Q(16 + sh). - PreShiftW32toW16Neon(tmpreQ16, tmpimQ16, inre1Q9, inre2Q9, sh); - - // Get DFT. - WebRtcIsacfix_FftRadix16Fastest(inre1Q9, inre2Q9, -1); - - // If sh >= 0, shift sh steps to the right, - // If sh < 0, shift -sh steps to the left. - // Use symmetry to separate into two complex vectors - // and center frames in time around zero. - PostShiftAndSeparateNeon(inre1Q9, inre2Q9, outreQ7, outimQ7, sh); -} - -static inline int32_t TransformAndFindMaxNeon(int16_t* inre, - int16_t* inim, - int32_t* outre, - int32_t* outim) { - int k; - int16_t* inre1 = inre; - int16_t* inre2 = &inre[FRAMESAMPLES/2 - 4]; - int16_t* inim1 = inim; - int16_t* inim2 = &inim[FRAMESAMPLES/2 - 4]; - int32_t* outre1 = outre; - int32_t* outre2 = &outre[FRAMESAMPLES/2 - 4]; - int32_t* outim1 = outim; - int32_t* outim2 = &outim[FRAMESAMPLES/2 - 4]; - const int16_t* kSinTab1 = &WebRtcIsacfix_kSinTab2[0]; - const int16_t* kSinTab2 = &WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 - 4]; - uint32x4_t max_r = vdupq_n_u32(0); - uint32x4_t max_i = vdupq_n_u32(0); - - // Use ">> 5", instead of "<< 9" and then ">> 14" as in the C code. 
- for (k = 0; k < FRAMESAMPLES/4; k += 4) { - int16x4_t tmpi = vld1_s16(kSinTab1); - kSinTab1 += 4; - int16x4_t tmpr = vld1_s16(kSinTab2); - kSinTab2 -= 4; - int16x4_t inre_0 = vld1_s16(inre1); - inre1 += 4; - int16x4_t inre_1 = vld1_s16(inre2); - inre2 -= 4; - int16x4_t inim_0 = vld1_s16(inim1); - inim1 += 4; - int16x4_t inim_1 = vld1_s16(inim2); - inim2 -= 4; - tmpr = vneg_s16(tmpr); - inre_1 = vrev64_s16(inre_1); - inim_1 = vrev64_s16(inim_1); - tmpr = vrev64_s16(tmpr); - - int32x4_t xr = vmull_s16(tmpr, inre_0); - int32x4_t xi = vmull_s16(tmpr, inim_0); - int32x4_t yr = vmull_s16(tmpr, inim_1); - int32x4_t yi = vmull_s16(tmpi, inim_1); - xr = vmlal_s16(xr, tmpi, inim_0); - xi = vmlsl_s16(xi, tmpi, inre_0); - yr = vmlal_s16(yr, tmpi, inre_1); - yi = vmlsl_s16(yi, tmpr, inre_1); - yr = vnegq_s32(yr); - - xr = vshrq_n_s32(xr, 5); - xi = vshrq_n_s32(xi, 5); - yr = vshrq_n_s32(yr, 5); - yi = vshrq_n_s32(yi, 5); - - int32x4_t outr0 = vsubq_s32(xr, yi); - int32x4_t outr1 = vaddq_s32(xr, yi); - int32x4_t outi0 = vaddq_s32(xi, yr); - int32x4_t outi1 = vsubq_s32(yr, xi); - - // Find the absolute maximum in the vectors. - int32x4_t tmp0 = vabsq_s32(outr0); - int32x4_t tmp1 = vabsq_s32(outr1); - int32x4_t tmp2 = vabsq_s32(outi0); - int32x4_t tmp3 = vabsq_s32(outi1); - // vabs doesn't change the value of 0x80000000. - // Use u32 so we don't lose the value 0x80000000. - max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp0)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp2)); - max_r = vmaxq_u32(max_r, vreinterpretq_u32_s32(tmp1)); - max_i = vmaxq_u32(max_i, vreinterpretq_u32_s32(tmp3)); - - // Store the vectors. 
- outr1 = vrev64q_s32(outr1); - outi1 = vrev64q_s32(outi1); - int32x4_t outr_1 = vcombine_s32(vget_high_s32(outr1), vget_low_s32(outr1)); - int32x4_t outi_1 = vcombine_s32(vget_high_s32(outi1), vget_low_s32(outi1)); - - vst1q_s32(outre1, outr0); - outre1 += 4; - vst1q_s32(outim1, outi0); - outim1 += 4; - vst1q_s32(outre2, outr_1); - outre2 -= 4; - vst1q_s32(outim2, outi_1); - outim2 -= 4; - } - - max_r = vmaxq_u32(max_r, max_i); -#if defined(WEBRTC_ARCH_ARM64) - uint32_t maximum = vmaxvq_u32(max_r); -#else - uint32x2_t max32x2_r = vmax_u32(vget_low_u32(max_r), vget_high_u32(max_r)); - max32x2_r = vpmax_u32(max32x2_r, max32x2_r); - uint32_t maximum = vget_lane_u32(max32x2_r, 0); -#endif - - return (int32_t)maximum; -} - -static inline void PostShiftAndDivideAndDemodulateNeon(int16_t* inre, - int16_t* inim, - int32_t* outre1, - int32_t* outre2, - int32_t sh) { - int k; - int16_t* p_inre = inre; - int16_t* p_inim = inim; - int32_t* p_outre1 = outre1; - int32_t* p_outre2 = outre2; - const int16_t* kCosTab = &WebRtcIsacfix_kCosTab1[0]; - const int16_t* kSinTab = &WebRtcIsacfix_kSinTab1[0]; - int32x4_t shift = vdupq_n_s32(-sh - 16); - // Divide through by the normalizing constant: - // scale all values with 1/240, i.e. with 273 in Q16. - // 273/65536 ~= 0.0041656 - // 1/240 ~= 0.0041666 - int16x8_t scale = vdupq_n_s16(273); - // Sqrt(240) in Q11 is round(15.49193338482967 * 2048) = 31727. - int factQ19 = 31727 << 16; - int32x4_t fact = vdupq_n_s32(factQ19); - - for (k = 0; k < FRAMESAMPLES/2; k += 8) { - int16x8_t inre16x8 = vld1q_s16(p_inre); - int16x8_t inim16x8 = vld1q_s16(p_inim); - p_inre += 8; - p_inim += 8; - int16x8_t tmpr = vld1q_s16(kCosTab); - int16x8_t tmpi = vld1q_s16(kSinTab); - kCosTab += 8; - kSinTab += 8; - // By vshl and vmull, we effectively did "<< (-sh - 16)", - // instead of "<< (-sh)" and ">> 16" as in the C code. 
- int32x4_t outre1_0 = vmull_s16(vget_low_s16(inre16x8), vget_low_s16(scale)); - int32x4_t outre2_0 = vmull_s16(vget_low_s16(inim16x8), vget_low_s16(scale)); -#if defined(WEBRTC_ARCH_ARM64) - int32x4_t outre1_1 = vmull_high_s16(inre16x8, scale); - int32x4_t outre2_1 = vmull_high_s16(inim16x8, scale); -#else - int32x4_t outre1_1 = vmull_s16(vget_high_s16(inre16x8), - vget_high_s16(scale)); - int32x4_t outre2_1 = vmull_s16(vget_high_s16(inim16x8), - vget_high_s16(scale)); -#endif - - outre1_0 = vshlq_s32(outre1_0, shift); - outre1_1 = vshlq_s32(outre1_1, shift); - outre2_0 = vshlq_s32(outre2_0, shift); - outre2_1 = vshlq_s32(outre2_1, shift); - - // Demodulate and separate. - int32x4_t tmpr_0 = vmovl_s16(vget_low_s16(tmpr)); - int32x4_t tmpi_0 = vmovl_s16(vget_low_s16(tmpi)); -#if defined(WEBRTC_ARCH_ARM64) - int32x4_t tmpr_1 = vmovl_high_s16(tmpr); - int32x4_t tmpi_1 = vmovl_high_s16(tmpi); -#else - int32x4_t tmpr_1 = vmovl_s16(vget_high_s16(tmpr)); - int32x4_t tmpi_1 = vmovl_s16(vget_high_s16(tmpi)); -#endif - - int64x2_t xr0 = vmull_s32(vget_low_s32(tmpr_0), vget_low_s32(outre1_0)); - int64x2_t xi0 = vmull_s32(vget_low_s32(tmpr_0), vget_low_s32(outre2_0)); - int64x2_t xr2 = vmull_s32(vget_low_s32(tmpr_1), vget_low_s32(outre1_1)); - int64x2_t xi2 = vmull_s32(vget_low_s32(tmpr_1), vget_low_s32(outre2_1)); - xr0 = vmlsl_s32(xr0, vget_low_s32(tmpi_0), vget_low_s32(outre2_0)); - xi0 = vmlal_s32(xi0, vget_low_s32(tmpi_0), vget_low_s32(outre1_0)); - xr2 = vmlsl_s32(xr2, vget_low_s32(tmpi_1), vget_low_s32(outre2_1)); - xi2 = vmlal_s32(xi2, vget_low_s32(tmpi_1), vget_low_s32(outre1_1)); - -#if defined(WEBRTC_ARCH_ARM64) - int64x2_t xr1 = vmull_high_s32(tmpr_0, outre1_0); - int64x2_t xi1 = vmull_high_s32(tmpr_0, outre2_0); - int64x2_t xr3 = vmull_high_s32(tmpr_1, outre1_1); - int64x2_t xi3 = vmull_high_s32(tmpr_1, outre2_1); - xr1 = vmlsl_high_s32(xr1, tmpi_0, outre2_0); - xi1 = vmlal_high_s32(xi1, tmpi_0, outre1_0); - xr3 = vmlsl_high_s32(xr3, tmpi_1, outre2_1); - xi3 = 
vmlal_high_s32(xi3, tmpi_1, outre1_1); -#else - int64x2_t xr1 = vmull_s32(vget_high_s32(tmpr_0), vget_high_s32(outre1_0)); - int64x2_t xi1 = vmull_s32(vget_high_s32(tmpr_0), vget_high_s32(outre2_0)); - int64x2_t xr3 = vmull_s32(vget_high_s32(tmpr_1), vget_high_s32(outre1_1)); - int64x2_t xi3 = vmull_s32(vget_high_s32(tmpr_1), vget_high_s32(outre2_1)); - xr1 = vmlsl_s32(xr1, vget_high_s32(tmpi_0), vget_high_s32(outre2_0)); - xi1 = vmlal_s32(xi1, vget_high_s32(tmpi_0), vget_high_s32(outre1_0)); - xr3 = vmlsl_s32(xr3, vget_high_s32(tmpi_1), vget_high_s32(outre2_1)); - xi3 = vmlal_s32(xi3, vget_high_s32(tmpi_1), vget_high_s32(outre1_1)); -#endif - - outre1_0 = vcombine_s32(vrshrn_n_s64(xr0, 10), vrshrn_n_s64(xr1, 10)); - outre2_0 = vcombine_s32(vrshrn_n_s64(xi0, 10), vrshrn_n_s64(xi1, 10)); - outre1_1 = vcombine_s32(vrshrn_n_s64(xr2, 10), vrshrn_n_s64(xr3, 10)); - outre2_1 = vcombine_s32(vrshrn_n_s64(xi2, 10), vrshrn_n_s64(xi3, 10)); - outre1_0 = vqdmulhq_s32(outre1_0, fact); - outre2_0 = vqdmulhq_s32(outre2_0, fact); - outre1_1 = vqdmulhq_s32(outre1_1, fact); - outre2_1 = vqdmulhq_s32(outre2_1, fact); - - vst1q_s32(p_outre1, outre1_0); - p_outre1 += 4; - vst1q_s32(p_outre1, outre1_1); - p_outre1 += 4; - vst1q_s32(p_outre2, outre2_0); - p_outre2 += 4; - vst1q_s32(p_outre2, outre2_1); - p_outre2 += 4; - } -} - -void WebRtcIsacfix_Spec2TimeNeon(int16_t* inreQ7, - int16_t* inimQ7, - int32_t* outre1Q16, - int32_t* outre2Q16) { - int32_t max; - int32_t sh; - - max = TransformAndFindMaxNeon(inreQ7, inimQ7, outre1Q16, outre2Q16); - - - sh = (int32_t)WebRtcSpl_NormW32(max); - sh = sh - 24; - // If sh becomes >= 0, then we should shift sh steps to the left, - // and the domain will become Q(16 + sh). - // If sh becomes < 0, then we should shift -sh steps to the right, - // and the domain will become Q(16 + sh). - - // "Fastest" vectors. - PreShiftW32toW16Neon(outre1Q16, outre2Q16, inreQ7, inimQ7, sh); - - // Get IDFT. 
- WebRtcIsacfix_FftRadix16Fastest(inreQ7, inimQ7, 1); - - PostShiftAndDivideAndDemodulateNeon(inreQ7, inimQ7, outre1Q16, outre2Q16, sh); -} diff --git a/modules/audio_coding/codecs/isac/fix/source/transform_tables.c b/modules/audio_coding/codecs/isac/fix/source/transform_tables.c deleted file mode 100644 index e661effdde..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/transform_tables.c +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * This file contains trigonometric functions look-up tables used in - * transform functions WebRtcIsacfix_Time2Spec and WebRtcIsacfix_Spec2Time. - */ - -#include - -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" - -/* Cosine table 1 in Q14. 
*/ -const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2] = { - 16384, 16383, 16378, 16371, 16362, 16349, 16333, 16315, 16294, 16270, - 16244, 16214, 16182, 16147, 16110, 16069, 16026, 15980, 15931, 15880, - 15826, 15769, 15709, 15647, 15582, 15515, 15444, 15371, 15296, 15218, - 15137, 15053, 14968, 14879, 14788, 14694, 14598, 14500, 14399, 14295, - 14189, 14081, 13970, 13856, 13741, 13623, 13502, 13380, 13255, 13128, - 12998, 12867, 12733, 12597, 12458, 12318, 12176, 12031, 11885, 11736, - 11585, 11433, 11278, 11121, 10963, 10803, 10641, 10477, 10311, 10143, - 9974, 9803, 9630, 9456, 9280, 9102, 8923, 8743, 8561, 8377, - 8192, 8006, 7818, 7629, 7438, 7246, 7053, 6859, 6664, 6467, - 6270, 6071, 5872, 5671, 5469, 5266, 5063, 4859, 4653, 4447, - 4240, 4033, 3825, 3616, 3406, 3196, 2986, 2775, 2563, 2351, - 2139, 1926, 1713, 1499, 1285, 1072, 857, 643, 429, 214, - 0, -214, -429, -643, -857, -1072, -1285, -1499, -1713, -1926, - -2139, -2351, -2563, -2775, -2986, -3196, -3406, -3616, -3825, -4033, - -4240, -4447, -4653, -4859, -5063, -5266, -5469, -5671, -5872, -6071, - -6270, -6467, -6664, -6859, -7053, -7246, -7438, -7629, -7818, -8006, - -8192, -8377, -8561, -8743, -8923, -9102, -9280, -9456, -9630, -9803, - -9974, -10143, -10311, -10477, -10641, -10803, -10963, -11121, -11278, -11433, - -11585, -11736, -11885, -12031, -12176, -12318, -12458, -12597, -12733, - -12867, -12998, -13128, -13255, -13380, -13502, -13623, -13741, -13856, - -13970, -14081, -14189, -14295, -14399, -14500, -14598, -14694, -14788, - -14879, -14968, -15053, -15137, -15218, -15296, -15371, -15444, -15515, - -15582, -15647, -15709, -15769, -15826, -15880, -15931, -15980, -16026, - -16069, -16110, -16147, -16182, -16214, -16244, -16270, -16294, -16315, - -16333, -16349, -16362, -16371, -16378, -16383 -}; - -/* Sine table 1 in Q14. 
*/ -const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2] = { - 0, 214, 429, 643, 857, 1072, 1285, 1499, 1713, 1926, - 2139, 2351, 2563, 2775, 2986, 3196, 3406, 3616, 3825, 4033, - 4240, 4447, 4653, 4859, 5063, 5266, 5469, 5671, 5872, 6071, - 6270, 6467, 6664, 6859, 7053, 7246, 7438, 7629, 7818, 8006, - 8192, 8377, 8561, 8743, 8923, 9102, 9280, 9456, 9630, 9803, - 9974, 10143, 10311, 10477, 10641, 10803, 10963, 11121, 11278, 11433, - 11585, 11736, 11885, 12031, 12176, 12318, 12458, 12597, 12733, 12867, - 12998, 13128, 13255, 13380, 13502, 13623, 13741, 13856, 13970, 14081, - 14189, 14295, 14399, 14500, 14598, 14694, 14788, 14879, 14968, 15053, - 15137, 15218, 15296, 15371, 15444, 15515, 15582, 15647, 15709, 15769, - 15826, 15880, 15931, 15980, 16026, 16069, 16110, 16147, 16182, 16214, - 16244, 16270, 16294, 16315, 16333, 16349, 16362, 16371, 16378, 16383, - 16384, 16383, 16378, 16371, 16362, 16349, 16333, 16315, 16294, 16270, - 16244, 16214, 16182, 16147, 16110, 16069, 16026, 15980, 15931, 15880, - 15826, 15769, 15709, 15647, 15582, 15515, 15444, 15371, 15296, 15218, - 15137, 15053, 14968, 14879, 14788, 14694, 14598, 14500, 14399, 14295, - 14189, 14081, 13970, 13856, 13741, 13623, 13502, 13380, 13255, 13128, - 12998, 12867, 12733, 12597, 12458, 12318, 12176, 12031, 11885, 11736, - 11585, 11433, 11278, 11121, 10963, 10803, 10641, 10477, 10311, 10143, - 9974, 9803, 9630, 9456, 9280, 9102, 8923, 8743, 8561, 8377, - 8192, 8006, 7818, 7629, 7438, 7246, 7053, 6859, 6664, 6467, - 6270, 6071, 5872, 5671, 5469, 5266, 5063, 4859, 4653, 4447, - 4240, 4033, 3825, 3616, 3406, 3196, 2986, 2775, 2563, 2351, - 2139, 1926, 1713, 1499, 1285, 1072, 857, 643, 429, 214 -}; - - -/* Sine table 2 in Q14. 
*/ -const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4] = { - 16384, -16381, 16375, -16367, 16356, -16342, 16325, -16305, 16283, -16257, - 16229, -16199, 16165, -16129, 16090, -16048, 16003, -15956, 15906, -15853, - 15798, -15739, 15679, -15615, 15549, -15480, 15408, -15334, 15257, -15178, - 15095, -15011, 14924, -14834, 14741, -14647, 14549, -14449, 14347, -14242, - 14135, -14025, 13913, -13799, 13682, -13563, 13441, -13318, 13192, -13063, - 12933, -12800, 12665, -12528, 12389, -12247, 12104, -11958, 11810, -11661, - 11509, -11356, 11200, -11042, 10883, -10722, 10559, -10394, 10227, -10059, - 9889, -9717, 9543, -9368, 9191, -9013, 8833, -8652, 8469, -8285, - 8099, -7912, 7723, -7534, 7342, -7150, 6957, -6762, 6566, -6369, - 6171, -5971, 5771, -5570, 5368, -5165, 4961, -4756, 4550, -4344, - 4137, -3929, 3720, -3511, 3301, -3091, 2880, -2669, 2457, -2245, - 2032, -1819, 1606, -1392, 1179, -965, 750, -536, 322, -107 -}; - -#if defined(MIPS32_LE) -/* Cosine table 2 in Q14. Used only on MIPS platforms. 
*/ -const int16_t WebRtcIsacfix_kCosTab2[FRAMESAMPLES/4] = { - 107, -322, 536, -750, 965, -1179, 1392, -1606, 1819, -2032, - 2245, -2457, 2669, -2880, 3091, -3301, 3511, -3720, 3929, -4137, - 4344, -4550, 4756, -4961, 5165, -5368, 5570, -5771, 5971, -6171, - 6369, -6566, 6762, -6957, 7150, -7342, 7534, -7723, 7912, -8099, - 8285, -8469, 8652, -8833, 9013, -9191, 9368, -9543, 9717, -9889, - 10059, -10227, 10394, -10559, 10722, -10883, 11042, -11200, 11356, -11509, - 11661, -11810, 11958, -12104, 12247, -12389, 12528, -12665, 12800, -12933, - 13063, -13192, 13318, -13441, 13563, -13682, 13799, -13913, 14025, -14135, - 14242, -14347, 14449, -14549, 14647, -14741, 14834, -14924, 15011, -15095, - 15178, -15257, 15334, -15408, 15480, -15549, 15615, -15679, 15739, -15798, - 15853, -15906, 15956, -16003, 16048, -16090, 16129, -16165, 16199, -16229, - 16257, -16283, 16305, -16325, 16342, -16356, 16367, -16375, 16381, -16384 -}; -#endif diff --git a/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc b/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc deleted file mode 100644 index 433ec534fe..0000000000 --- a/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/fix/source/codec.h" -#include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/gtest.h" - -static const int kSamples = FRAMESAMPLES / 2; -static const int32_t spec2time_out_expected_1[kSamples] = { - -3366470, -2285227, -3415765, -2310215, -3118030, -2222470, -3030254, - -2192091, -3423170, -2216041, -3305541, -2171936, -3195767, -2095779, - -3153304, -2157560, -3071167, -2032108, -3101190, -1972016, -3103824, - -2089118, -3139811, -1898337, -3102801, -2055082, -3029665, -1854140, - -2962586, -1966454, -3071167, -1894588, -2851743, -1917315, -2848087, - -1594932, -2799242, -1462184, -2845887, -1437599, -2691776, -1329637, - -2770659, -1268491, -2625161, -1578991, -2460299, -1186385, -2365613, - -1039354, -2322608, -958518, -2271749, -789860, -2254538, -850308, - -2384436, -850959, -2133734, -587678, -2093316, -495115, -1973364, - -475177, -1801282, -173507, -1848516, -158015, -1792018, -62648, - -1643313, 214746, -1500758, 267077, -1450193, 560521, -1521579, - 675283, -1345408, 857559, -1300822, 1116332, -1294533, 1241117, - -1070027, 1263503, -983816, 1529821, -1019586, 1910421, -955420, - 2073688, -836459, 2401105, -653905, 2690474, -731425, 2930131, - -935234, 3299500, -875978, 3523432, -878906, 3924822, -1081630, - 4561267, -1203023, 5105274, -1510983, 6052762, -2294646, 7021597, - -3108053, 8826736, -4935222, 11678789, -8442713, 18725700, -21526692, - 25420577, 19589811, -28108666, 12634054, -14483066, 6263217, -9979706, - 3665661, -7909736, 2531530, -6434896, 1700772, -5525393, 1479473, - -4894262, 1231760, -4353044, 1032940, -3786590, 941152, -3331614, - 665090, -2851619, 830696, -2762201, 958007, -2483118, 788233, - -2184965, 804825, -1967306, 1007255, -1862474, 920889, -1457506, - 755406, -1405841, 890230, -1302124, 1161599, -701867, 1154163, - -1083366, 1204743, -513581, 1547264, -650636, 1493384, -285543, - 1771863, -277906, 1841343, -9078, 1751863, 230222, 1819578, - 207170, 1978972, 
398137, 2106468, 552155, 1997624, 685213, - 2129520, 601078, 2238736, 944591, 2441879, 1194178, 2355280, - 986124, 2393328, 1049005, 2417944, 1208368, 2489516, 1352023, - 2572118, 1445283, 2856081, 1532997, 2742279, 1615877, 2915274, - 1808036, 2856871, 1806936, 3241747, 1622461, 2978558, 1841297, - 3010378, 1923666, 3271367, 2126700, 3070935, 1956958, 3107588, - 2128405, 3288872, 2114911, 3315952, 2406651, 3344038, 2370199, - 3368980, 2144361, 3305030, 2183803, 3401450, 2523102, 3405463, - 2452475, 3463355, 2421678, 3551968, 2431949, 3477251, 2148125, - 3244489, 2174090}; -static const int32_t spec2time_out_expected_2[kSamples] = { - 1691694, -2499988, -2035547, 1060469, 988634, -2044502, -306271, - 2041000, 201454, -2289456, 93694, 2129427, -369152, -1887834, - 860796, 2089102, -929424, -1673956, 1395291, 1785651, -1619673, - -1380109, 1963449, 1093311, -2111007, -840456, 2372786, 578119, - -2242702, 89774, 2463304, -132717, -2121480, 643634, 2277636, - -1125999, -1995858, 1543748, 2227861, -1483779, -1495491, 2102642, - 1833876, -1920568, -958378, 2485101, 772261, -2454257, -24942, - 2918714, 136838, -2500453, 816118, 3039735, -746560, -2365815, - 1586396, 2714951, -1511696, -1942334, 2571792, 2182827, -2325335, - -1311543, 3055970, 1367220, -2737182, -110626, 3889222, 631008, - -3280879, 853066, 4122279, -706638, -3334449, 2148311, 3993512, - -1846301, -3004894, 3426779, 3329522, -3165264, -2242423, 4756866, - 2557711, -4131280, -805259, 5702711, 1120592, -4852821, 743664, - 6476444, -621186, -5465828, 2815787, 6768835, -3017442, -5338409, - 5658126, 6838454, -5492288, -4682382, 8874947, 6153814, -8832561, - -2649251, 12817398, 4237692, -13000247, 1190661, 18986363, -115738, - -19693978, 9908367, 30660381, -10632635, -37962068, 47022884, 89744622, - -42087632, 40279224, -88869341, -47542383, 38572364, 10441576, -30339718, - -9926740, 19896578, 28009, -18886612, -1124047, 13232498, -4150304, - -12770551, 2637074, 9051831, -6162211, -8713972, 4557937, 5489716, - 
-6862312, -5532349, 5415449, 2791310, -6999367, -2790102, 5375806, - 546222, -6486452, -821261, 4994973, -1278840, -5645501, 1060484, - 3996285, -2503954, -4653629, 2220549, 3036977, -3282133, -3318585, - 2780636, 1789880, -4004589, -2041031, 3105373, 574819, -3992722, - -971004, 3001703, -676739, -3841508, 417284, 2897970, -1427018, - -3058480, 1189948, 2210960, -2268992, -2603272, 1949785, 1576172, - -2720404, -1891738, 2309456, 769178, -2975646, -707150, 2424652, - -88039, -2966660, -65452, 2320780, -957557, -2798978, 744640, - 1879794, -1672081, -2365319, 1253309, 1366383, -2204082, -1544367, - 1801452, 613828, -2531994, -983847, 2064842, 118326, -2613790, - -203220, 2219635, -730341, -2641861, 563557, 1765434, -1329916, - -2272927, 1037138, 1266725, -1939220, -1588643, 1754528, 816552, - -2376303, -1099167, 1864999, 122477, -2422762, -400027, 1889228, - -579916, -2490353, 287139, 2011318, -1176657, -2502978, 812896, - 1116502, -1940211}; -static const int16_t time2spec_out_expected_1[kSamples] = { - 20342, 23889, -10063, -9419, 3242, 7280, -2012, -5029, 332, 4478, - -97, -3244, -891, 3117, 773, -2204, -1335, 2009, 1236, -1469, - -1562, 1277, 1366, -815, -1619, 599, 1449, -177, -1507, 116, - 1294, 263, -1338, -244, 1059, 553, -1045, -549, 829, 826, - -731, -755, 516, 909, -427, -853, 189, 1004, -184, -828, - -108, 888, 72, -700, -280, 717, 342, -611, -534, 601, - 534, -374, -646, 399, 567, -171, -720, 234, 645, -11, - -712, -26, 593, 215, -643, -172, 536, 361, -527, -403, - 388, 550, -361, -480, 208, 623, -206, -585, 41, 578, - 12, -504, -182, 583, 218, -437, -339, 499, 263, -354, - -450, 347, 456, -193, -524, 212, 475, -74, -566, 94, - 511, 112, -577, -201, 408, 217, -546, -295, 338, 387, - -13, 4, -46, 2, -76, 103, -83, 108, -55, 100, - -150, 131, -156, 141, -171, 179, -190, 128, -227, 172, - -214, 215, -189, 265, -244, 322, -335, 337, -352, 358, - -368, 362, -355, 366, -381, 403, -395, 411, -392, 446, - -458, 504, -449, 507, -464, 452, -491, 481, -534, 486, 
- -516, 560, -535, 525, -537, 559, -554, 570, -616, 591, - -585, 627, -509, 588, -584, 547, -610, 580, -614, 635, - -620, 655, -554, 546, -591, 642, -590, 660, -656, 629, - -604, 620, -580, 617, -645, 648, -573, 612, -604, 584, - -571, 597, -562, 627, -550, 560, -606, 529, -584, 568, - -503, 532, -463, 512, -440, 399, -457, 437, -349, 278, - -317, 257, -220, 163, -8, -61, 18, -161, 367, -1306}; -static const int16_t time2spec_out_expected_2[kSamples] = { - 14283, -11552, -15335, 6626, 7554, -2150, -6309, 1307, 4523, -4, - -3908, -314, 3001, 914, -2715, -1042, 2094, 1272, -1715, -1399, - 1263, 1508, -1021, -1534, 735, 1595, -439, -1447, 155, 1433, - 22, -1325, -268, 1205, 424, -1030, -608, 950, 643, -733, - -787, 661, 861, -502, -888, 331, 852, -144, -849, 19, - 833, 99, -826, -154, 771, 368, -735, -459, 645, 513, - -491, -604, 431, 630, -314, -598, 183, 622, -78, -612, - -48, 641, 154, -645, -257, 610, 281, -529, -444, 450, - 441, -327, -506, 274, 476, -232, -570, 117, 554, -86, - -531, -21, 572, 151, -606, -221, 496, 322, -407, -388, - 407, 394, -268, -428, 280, 505, -115, -588, 19, 513, - -29, -539, -109, 468, 173, -501, -242, 442, 278, -478, - -680, 656, -659, 656, -669, 602, -688, 612, -667, 612, - -642, 627, -648, 653, -676, 596, -680, 655, -649, 678, - -672, 587, -608, 637, -645, 637, -620, 556, -580, 553, - -635, 518, -599, 583, -501, 536, -544, 473, -552, 583, - -511, 541, -532, 563, -486, 461, -453, 486, -388, 424, - -416, 432, -374, 399, -462, 364, -346, 293, -329, 331, - -313, 281, -247, 309, -337, 241, -190, 207, -194, 179, - -163, 155, -156, 117, -135, 107, -126, 29, -22, 81, - -8, 17, -61, -10, 8, -37, 80, -44, 72, -88, - 65, -89, 130, -114, 181, -215, 189, -245, 260, -288, - 294, -339, 344, -396, 407, -429, 438, -439, 485, -556, - 629, -612, 637, -645, 661, -737, 829, -830, 831, -1041}; - -class TransformTest : public ::testing::Test { - protected: - // Pass a function pointer to the Tester function. 
- void Time2SpecTester(Time2Spec Time2SpecFunction) { - // WebRtcIsacfix_Time2Spec functions hard coded the buffer lengths. It's a - // large buffer but we have to test it here. - int16_t data_in_1[kSamples] = {0}; - int16_t data_in_2[kSamples] = {0}; - int16_t data_out_1[kSamples] = {0}; - int16_t data_out_2[kSamples] = {0}; - - for (int i = 0; i < kSamples; i++) { - data_in_1[i] = i * i + 1777; - data_in_2[i] = WEBRTC_SPL_WORD16_MAX / (i + 1) + 17; - } - - Time2SpecFunction(data_in_1, data_in_2, data_out_1, data_out_2); - - for (int i = 0; i < kSamples; i++) { - // We don't require bit-exact for ARM assembly code. - EXPECT_LE(abs(time2spec_out_expected_1[i] - data_out_1[i]), 1); - EXPECT_LE(abs(time2spec_out_expected_2[i] - data_out_2[i]), 1); - } - } - - // Pass a function pointer to the Tester function. - void Spec2TimeTester(Spec2Time Spec2TimeFunction) { - // WebRtcIsacfix_Spec2Time functions hard coded the buffer lengths. It's a - // large buffer but we have to test it here. - int16_t data_in_1[kSamples] = {0}; - int16_t data_in_2[kSamples] = {0}; - int32_t data_out_1[kSamples] = {0}; - int32_t data_out_2[kSamples] = {0}; - for (int i = 0; i < kSamples; i++) { - data_in_1[i] = i * i + 1777; - data_in_2[i] = WEBRTC_SPL_WORD16_MAX / (i + 1) + 17; - } - - Spec2TimeFunction(data_in_1, data_in_2, data_out_1, data_out_2); - - for (int i = 0; i < kSamples; i++) { - // We don't require bit-exact for ARM assembly code. 
- EXPECT_LE(abs(spec2time_out_expected_1[i] - data_out_1[i]), 16); - EXPECT_LE(abs(spec2time_out_expected_2[i] - data_out_2[i]), 16); - } - } -}; - -TEST_F(TransformTest, Time2SpecTest) { - Time2SpecTester(WebRtcIsacfix_Time2SpecC); -#if defined(WEBRTC_HAS_NEON) - Time2SpecTester(WebRtcIsacfix_Time2SpecNeon); -#endif -} - -TEST_F(TransformTest, Spec2TimeTest) { - Spec2TimeTester(WebRtcIsacfix_Spec2TimeC); -#if defined(WEBRTC_HAS_NEON) - Spec2TimeTester(WebRtcIsacfix_Spec2TimeNeon); -#endif -} diff --git a/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc b/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc deleted file mode 100644 index 903ac64aff..0000000000 --- a/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" -#include "modules/audio_coding/codecs/isac/fix/source/settings.h" -#include "modules/audio_coding/codecs/tools/audio_codec_speed_test.h" -#include "rtc_base/checks.h" - -using std::string; - -namespace webrtc { - -static const int kIsacBlockDurationMs = 30; -static const int kIsacInputSamplingKhz = 16; -static const int kIsacOutputSamplingKhz = 16; - -class IsacSpeedTest : public AudioCodecSpeedTest { - protected: - IsacSpeedTest(); - void SetUp() override; - void TearDown() override; - float EncodeABlock(int16_t* in_data, - uint8_t* bit_stream, - size_t max_bytes, - size_t* encoded_bytes) override; - float DecodeABlock(const uint8_t* bit_stream, - size_t encoded_bytes, - int16_t* out_data) override; - ISACFIX_MainStruct* ISACFIX_main_inst_; -}; - -IsacSpeedTest::IsacSpeedTest() - : AudioCodecSpeedTest(kIsacBlockDurationMs, - kIsacInputSamplingKhz, - kIsacOutputSamplingKhz), - ISACFIX_main_inst_(NULL) {} - -void IsacSpeedTest::SetUp() { - AudioCodecSpeedTest::SetUp(); - - // Check whether the allocated buffer for the bit stream is large enough. - EXPECT_GE(max_bytes_, static_cast(STREAM_MAXW16_60MS)); - - // Create encoder memory. - EXPECT_EQ(0, WebRtcIsacfix_Create(&ISACFIX_main_inst_)); - EXPECT_EQ(0, WebRtcIsacfix_EncoderInit(ISACFIX_main_inst_, 1)); - WebRtcIsacfix_DecoderInit(ISACFIX_main_inst_); - // Set bitrate and block length. - EXPECT_EQ(0, WebRtcIsacfix_Control(ISACFIX_main_inst_, bit_rate_, - block_duration_ms_)); -} - -void IsacSpeedTest::TearDown() { - AudioCodecSpeedTest::TearDown(); - // Free memory. 
- EXPECT_EQ(0, WebRtcIsacfix_Free(ISACFIX_main_inst_)); -} - -float IsacSpeedTest::EncodeABlock(int16_t* in_data, - uint8_t* bit_stream, - size_t max_bytes, - size_t* encoded_bytes) { - // ISAC takes 10 ms everycall - const int subblocks = block_duration_ms_ / 10; - const int subblock_length = 10 * input_sampling_khz_; - int value = 0; - - clock_t clocks = clock(); - size_t pointer = 0; - for (int idx = 0; idx < subblocks; idx++, pointer += subblock_length) { - value = - WebRtcIsacfix_Encode(ISACFIX_main_inst_, &in_data[pointer], bit_stream); - if (idx == subblocks - 1) - EXPECT_GT(value, 0); - else - EXPECT_EQ(0, value); - } - clocks = clock() - clocks; - *encoded_bytes = static_cast(value); - RTC_DCHECK_LE(*encoded_bytes, max_bytes); - return 1000.0 * clocks / CLOCKS_PER_SEC; -} - -float IsacSpeedTest::DecodeABlock(const uint8_t* bit_stream, - size_t encoded_bytes, - int16_t* out_data) { - int value; - int16_t audio_type; - clock_t clocks = clock(); - value = WebRtcIsacfix_Decode(ISACFIX_main_inst_, bit_stream, encoded_bytes, - out_data, &audio_type); - clocks = clock() - clocks; - EXPECT_EQ(output_length_sample_, static_cast(value)); - return 1000.0 * clocks / CLOCKS_PER_SEC; -} - -TEST_P(IsacSpeedTest, IsacEncodeDecodeTest) { - size_t kDurationSec = 400; // Test audio length in second. - EncodeDecode(kDurationSec); -} - -const coding_param param_set[] = { - std::make_tuple(1, - 32000, - string("audio_coding/speech_mono_16kHz"), - string("pcm"), - true)}; - -INSTANTIATE_TEST_SUITE_P(AllTest, - IsacSpeedTest, - ::testing::ValuesIn(param_set)); - -} // namespace webrtc diff --git a/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc b/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc deleted file mode 100644 index cafca75e46..0000000000 --- a/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc +++ /dev/null @@ -1,346 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/array_view.h" -#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" -#include "modules/audio_coding/test/PCMFile.h" -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { -namespace { - -constexpr int kPayloadType = 42; - -enum class IsacImpl { kFixed, kFloat }; - -absl::string_view IsacImplToString(IsacImpl impl) { - switch (impl) { - case IsacImpl::kFixed: - return "fixed"; - case IsacImpl::kFloat: - return "float"; - } -} - -std::unique_ptr GetPcmTestFileReader(int sample_rate_hz) { - std::string filename; - switch (sample_rate_hz) { - case 16000: - filename = test::ResourcePath("audio_coding/testfile16kHz", "pcm"); - break; - case 32000: - filename = test::ResourcePath("audio_coding/testfile32kHz", "pcm"); - break; - default: - RTC_DCHECK_NOTREACHED() - << "No test file available for " << sample_rate_hz << " Hz."; - } - auto pcm_file = std::make_unique(); - pcm_file->ReadStereo(false); - pcm_file->Open(filename, sample_rate_hz, "rb", /*auto_rewind=*/true); - pcm_file->FastForward(/*num_10ms_blocks=*/100); // Skip initial silence. - RTC_CHECK(!pcm_file->EndOfFile()); - return pcm_file; -} - -// Returns a view to the interleaved samples of an AudioFrame object. 
-rtc::ArrayView AudioFrameToView(const AudioFrame& audio_frame) { - return {audio_frame.data(), - audio_frame.samples_per_channel() * audio_frame.num_channels()}; -} - -std::unique_ptr CreateEncoder(IsacImpl impl, - int sample_rate_hz, - int frame_size_ms, - int bitrate_bps) { - RTC_CHECK(sample_rate_hz == 16000 || sample_rate_hz == 32000); - RTC_CHECK(frame_size_ms == 30 || frame_size_ms == 60); - RTC_CHECK_GT(bitrate_bps, 0); - switch (impl) { - case IsacImpl::kFixed: { - AudioEncoderIsacFix::Config config; - config.bit_rate = bitrate_bps; - config.frame_size_ms = frame_size_ms; - RTC_CHECK_EQ(16000, sample_rate_hz); - return AudioEncoderIsacFix::MakeAudioEncoder(config, kPayloadType); - } - case IsacImpl::kFloat: { - AudioEncoderIsacFloat::Config config; - config.bit_rate = bitrate_bps; - config.frame_size_ms = frame_size_ms; - config.sample_rate_hz = sample_rate_hz; - return AudioEncoderIsacFloat::MakeAudioEncoder(config, kPayloadType); - } - } -} - -std::unique_ptr CreateDecoder(IsacImpl impl, int sample_rate_hz) { - RTC_CHECK(sample_rate_hz == 16000 || sample_rate_hz == 32000); - switch (impl) { - case IsacImpl::kFixed: { - webrtc::AudioDecoderIsacFix::Config config; - RTC_CHECK_EQ(16000, sample_rate_hz); - return webrtc::AudioDecoderIsacFix::MakeAudioDecoder(config); - } - case IsacImpl::kFloat: { - webrtc::AudioDecoderIsacFloat::Config config; - config.sample_rate_hz = sample_rate_hz; - return webrtc::AudioDecoderIsacFloat::MakeAudioDecoder(config); - } - } -} - -struct EncoderTestParams { - IsacImpl impl; - int sample_rate_hz; - int frame_size_ms; -}; - -class EncoderTest : public testing::TestWithParam { - protected: - EncoderTest() = default; - IsacImpl GetIsacImpl() const { return GetParam().impl; } - int GetSampleRateHz() const { return GetParam().sample_rate_hz; } - int GetFrameSizeMs() const { return GetParam().frame_size_ms; } -}; - -TEST_P(EncoderTest, TestConfig) { - for (int bitrate_bps : {10000, 21000, 32000}) { - SCOPED_TRACE(bitrate_bps); - 
auto encoder = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), bitrate_bps); - EXPECT_EQ(GetSampleRateHz(), encoder->SampleRateHz()); - EXPECT_EQ(size_t{1}, encoder->NumChannels()); - EXPECT_EQ(bitrate_bps, encoder->GetTargetBitrate()); - } -} - -// Encodes an input audio sequence with a low and a high target bitrate and -// checks that the number of produces bytes in the first case is less than that -// of the second case. -TEST_P(EncoderTest, TestDifferentBitrates) { - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - constexpr int kLowBps = 20000; - constexpr int kHighBps = 25000; - auto encoder_low = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), kLowBps); - auto encoder_high = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), kHighBps); - int num_bytes_low = 0; - int num_bytes_high = 0; - constexpr int kNumFrames = 12; - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer low, high; - encoder_low->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &low); - encoder_high->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &high); - num_bytes_low += low.size(); - num_bytes_high += high.size(); - } - EXPECT_LT(num_bytes_low, num_bytes_high); -} - -// Encodes an input audio sequence first with a low, then with a high target -// bitrate *using the same encoder* and checks that the number of emitted bytes -// in the first case is less than in the second case. -TEST_P(EncoderTest, TestDynamicBitrateChange) { - constexpr int kLowBps = 20000; - constexpr int kHighBps = 25000; - constexpr int kStartBps = 30000; - auto encoder = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), - GetFrameSizeMs(), kStartBps); - std::map num_bytes; - constexpr int kNumFrames = 200; // 2 seconds. 
- for (int bitrate_bps : {kLowBps, kHighBps}) { - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - encoder->OnReceivedTargetAudioBitrate(bitrate_bps); - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer buf; - encoder->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &buf); - num_bytes[bitrate_bps] += buf.size(); - } - } - // kHighBps / kLowBps == 1.25, so require the high-bitrate run to produce at - // least 1.195 times the number of bytes. - EXPECT_LT(1.195 * num_bytes[kLowBps], num_bytes[kHighBps]); -} - -// Checks that, given a target bitrate, the encoder does not overshoot too much. -TEST_P(EncoderTest, DoNotOvershootTargetBitrate) { - for (int bitrate_bps : {10000, 15000, 20000, 26000, 32000}) { - SCOPED_TRACE(bitrate_bps); - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - auto e = CreateEncoder(GetIsacImpl(), GetSampleRateHz(), GetFrameSizeMs(), - bitrate_bps); - int num_bytes = 0; - constexpr int kNumFrames = 200; // 2 seconds. - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer encoded; - e->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &encoded); - num_bytes += encoded.size(); - } - // Inverse of the duration of `kNumFrames` 10 ms frames (unit: seconds^-1). - constexpr float kAudioDurationInv = 100.f / kNumFrames; - const int measured_bitrate_bps = 8 * num_bytes * kAudioDurationInv; - EXPECT_LT(measured_bitrate_bps, bitrate_bps + 2250); // Max 2250 bps extra. - } -} - -// Creates tests for different encoder configurations and implementations. 
-INSTANTIATE_TEST_SUITE_P( - IsacApiTest, - EncoderTest, - ::testing::ValuesIn([] { - std::vector cases; - for (IsacImpl impl : {IsacImpl::kFloat, IsacImpl::kFixed}) { - for (int frame_size_ms : {30, 60}) { - cases.push_back({impl, 16000, frame_size_ms}); - } - } - cases.push_back({IsacImpl::kFloat, 32000, 30}); - return cases; - }()), - [](const ::testing::TestParamInfo& info) { - rtc::StringBuilder b; - const auto& p = info.param; - b << IsacImplToString(p.impl) << "_" << p.sample_rate_hz << "_" - << p.frame_size_ms; - return b.Release(); - }); - -struct DecoderTestParams { - IsacImpl impl; - int sample_rate_hz; -}; - -class DecoderTest : public testing::TestWithParam { - protected: - DecoderTest() = default; - IsacImpl GetIsacImpl() const { return GetParam().impl; } - int GetSampleRateHz() const { return GetParam().sample_rate_hz; } -}; - -TEST_P(DecoderTest, TestConfig) { - auto decoder = CreateDecoder(GetIsacImpl(), GetSampleRateHz()); - EXPECT_EQ(GetSampleRateHz(), decoder->SampleRateHz()); - EXPECT_EQ(size_t{1}, decoder->Channels()); -} - -// Creates tests for different decoder configurations and implementations. 
-INSTANTIATE_TEST_SUITE_P( - IsacApiTest, - DecoderTest, - ::testing::ValuesIn({DecoderTestParams{IsacImpl::kFixed, 16000}, - DecoderTestParams{IsacImpl::kFloat, 16000}, - DecoderTestParams{IsacImpl::kFloat, 32000}}), - [](const ::testing::TestParamInfo& info) { - const auto& p = info.param; - return (rtc::StringBuilder() - << IsacImplToString(p.impl) << "_" << p.sample_rate_hz) - .Release(); - }); - -struct EncoderDecoderPairTestParams { - int sample_rate_hz; - int frame_size_ms; - IsacImpl encoder_impl; - IsacImpl decoder_impl; -}; - -class EncoderDecoderPairTest - : public testing::TestWithParam { - protected: - EncoderDecoderPairTest() = default; - int GetSampleRateHz() const { return GetParam().sample_rate_hz; } - int GetEncoderFrameSizeMs() const { return GetParam().frame_size_ms; } - IsacImpl GetEncoderIsacImpl() const { return GetParam().encoder_impl; } - IsacImpl GetDecoderIsacImpl() const { return GetParam().decoder_impl; } - int GetEncoderFrameSize() const { - return GetEncoderFrameSizeMs() * GetSampleRateHz() / 1000; - } -}; - -// Checks that the number of encoded and decoded samples match. -TEST_P(EncoderDecoderPairTest, EncodeDecode) { - auto pcm_file = GetPcmTestFileReader(GetSampleRateHz()); - auto encoder = CreateEncoder(GetEncoderIsacImpl(), GetSampleRateHz(), - GetEncoderFrameSizeMs(), /*bitrate_bps=*/20000); - auto decoder = CreateDecoder(GetDecoderIsacImpl(), GetSampleRateHz()); - const int encoder_frame_size = GetEncoderFrameSize(); - std::vector out(encoder_frame_size); - size_t num_encoded_samples = 0; - size_t num_decoded_samples = 0; - constexpr int kNumFrames = 12; - for (int i = 0; i < kNumFrames; ++i) { - AudioFrame in; - pcm_file->Read10MsData(in); - rtc::Buffer encoded; - encoder->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &encoded); - num_encoded_samples += in.samples_per_channel(); - if (encoded.empty()) { - continue; - } - // Decode. 
- const std::vector parse_result = - decoder->ParsePayload(std::move(encoded), /*timestamp=*/0); - EXPECT_EQ(parse_result.size(), size_t{1}); - auto decode_result = parse_result[0].frame->Decode(out); - EXPECT_TRUE(decode_result.has_value()); - EXPECT_EQ(out.size(), decode_result->num_decoded_samples); - num_decoded_samples += decode_result->num_decoded_samples; - } - EXPECT_EQ(num_encoded_samples, num_decoded_samples); -} - -// Creates tests for different encoder frame sizes and different -// encoder/decoder implementations. -INSTANTIATE_TEST_SUITE_P( - IsacApiTest, - EncoderDecoderPairTest, - ::testing::ValuesIn([] { - std::vector cases; - for (int frame_size_ms : {30, 60}) { - for (IsacImpl enc : {IsacImpl::kFloat, IsacImpl::kFixed}) { - for (IsacImpl dec : {IsacImpl::kFloat, IsacImpl::kFixed}) { - cases.push_back({16000, frame_size_ms, enc, dec}); - } - } - } - cases.push_back({32000, 30, IsacImpl::kFloat, IsacImpl::kFloat}); - return cases; - }()), - [](const ::testing::TestParamInfo& info) { - rtc::StringBuilder b; - const auto& p = info.param; - b << p.sample_rate_hz << "_" << p.frame_size_ms << "_" - << IsacImplToString(p.encoder_impl) << "_" - << IsacImplToString(p.decoder_impl); - return b.Release(); - }); - -} // namespace -} // namespace webrtc diff --git a/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h b/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h deleted file mode 100644 index fae2f3d4a7..0000000000 --- a/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_DECODER_ISAC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_DECODER_ISAC_H_ - -#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_float_type.h" - -namespace webrtc { - -using AudioDecoderIsacFloatImpl = AudioDecoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ diff --git a/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h b/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h deleted file mode 100644 index dc32bcdde6..0000000000 --- a/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_float_type.h" - -namespace webrtc { - -using AudioEncoderIsacFloatImpl = AudioEncoderIsacT; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_AUDIO_ENCODER_ISAC_H_ diff --git a/modules/audio_coding/codecs/isac/main/include/isac.h b/modules/audio_coding/codecs/isac/main/include/isac.h deleted file mode 100644 index 3b05a8bcda..0000000000 --- a/modules/audio_coding/codecs/isac/main/include/isac.h +++ /dev/null @@ -1,617 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_ISAC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_ISAC_H_ - -#include - -#include "modules/audio_coding/codecs/isac/bandwidth_info.h" - -typedef struct WebRtcISACStruct ISACStruct; - -#if defined(__cplusplus) -extern "C" { -#endif - -/****************************************************************************** - * WebRtcIsac_Create(...) - * - * This function creates an ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - *ISAC_main_inst : a pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_Free(...) 
- * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : an ISAC instance. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_EncoderInit(...) - * - * This function initializes an ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 -> Bit rate and frame length are - * automatically adjusted to available bandwidth - * on transmission channel, just valid if codec - * is created to work in wideband mode. - * 1 -> User sets a frame length and a target bit - * rate which is taken as the maximum - * short-term average bit rate. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_EncoderInit(ISACStruct* ISAC_main_inst, int16_t CodingMode); - -/****************************************************************************** - * WebRtcIsac_Encode(...) - * - * This function encodes 10ms audio blocks and inserts it into a package. - * Input speech length has 160 samples if operating at 16 kHz sampling - * rate, or 320 if operating at 32 kHz sampling rate. The encoder buffers the - * input audio until the whole frame is buffered then proceeds with encoding. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. - * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : 0 - The buffer didn't reach the chosen - * frame-size so it keeps buffering speech - * samples. - * : -1 - Error - */ - -int WebRtcIsac_Encode(ISACStruct* ISAC_main_inst, - const int16_t* speechIn, - uint8_t* encoded); - -/****************************************************************************** - * WebRtcIsac_DecoderInit(...) - * - * This function initializes an ISAC instance prior to the decoder calls. 
- * - * Input: - * - ISAC_main_inst : ISAC instance. - */ - -void WebRtcIsac_DecoderInit(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - packet_size : size of the packet. - * - rtp_seq_number : the RTP number of the packet. - * - send_ts : the RTP send timestamp, given in samples - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts); - -/****************************************************************************** - * WebRtcIsac_Decode(...) - * - * This function decodes an ISAC frame. At 16 kHz sampling rate, the length - * of the output audio could be either 480 or 960 samples, equivalent to - * 30 or 60 ms respectively. At 32 kHz sampling rate, the length of the - * output audio is 960 samples, which is 30 ms. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). - * - len : bytes in encoded vector. - * - * Output: - * - decoded : The decoded vector. - * - * Return value : >0 - number of samples in decoded vector. - * -1 - Error. - */ - -int WebRtcIsac_Decode(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); - -/****************************************************************************** - * WebRtcIsac_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s). Output speech length - * will be a multiple of frames, i.e. multiples of 30 ms audio. 
Therefore, - * the output is multiple of 480 samples if operating at 16 kHz and multiple - * of 960 if operating at 32 kHz. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - noOfLostFrames : Number of PLC frames to produce. - * - * Output: - * - decoded : The decoded vector. - * - * Return value : Number of samples in decoded PLC vector - */ - -size_t WebRtcIsac_DecodePlc(ISACStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames); - -/****************************************************************************** - * WebRtcIsac_Control(...) - * - * This function sets the limit on the short-term average bit-rate and the - * frame length. Should be used only in Instantaneous mode. At 16 kHz sampling - * rate, an average bit-rate between 10000 to 32000 bps is valid and a - * frame-size of 30 or 60 ms is acceptable. At 32 kHz, an average bit-rate - * between 10000 to 56000 is acceptable, and the valid frame-size is 30 ms. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rate : limit on the short-term average bit rate, - * in bits/second. - * - framesize : frame-size in millisecond. - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst, - int32_t rate, - int framesize); - -void WebRtcIsac_SetInitialBweBottleneck(ISACStruct* ISAC_main_inst, - int bottleneck_bits_per_second); - -/****************************************************************************** - * WebRtcIsac_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. Therefore, this API is not - * applicable if the codec is created to operate in super-wideband mode. - * - * Through this API, users can enforce a frame-size for all values of - * bottleneck. Then iSAC will not automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 56000 is accepted - * For default bottleneck set rateBPS = 0 - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through - * out the adaptation process, 0 to let iSAC - * change the frame-size if required. - * - * Return value : 0 - ok - * -1 - Error - */ - -int16_t WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst, - int32_t rateBPS, - int frameSizeMs, - int16_t enforceFrameSize); - -/****************************************************************************** - * WebRtcIsac_ReadFrameLen(...) - * - * This function returns the length of the frame represented in the packet. - * - * Input: - * - encoded : Encoded bit-stream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ - -int16_t WebRtcIsac_ReadFrameLen(const ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - int16_t* frameLength); - -/****************************************************************************** - * WebRtcIsac_version(...) - * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ - -void WebRtcIsac_version(char* version); - -/****************************************************************************** - * WebRtcIsac_GetErrorCode(...) - * - * This function can be used to check the error code of an iSAC instance. When - * a function returns -1 a error code will be set for that instance. The - * function below extract the code of the last error that occurred in the - * specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ - -int16_t WebRtcIsac_GetErrorCode(ISACStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsac_GetUplinkBw(...) - * - * This function outputs the target bottleneck of the codec. 
In - * channel-adaptive mode, the target bottleneck is specified through in-band - * signalling retreived by bandwidth estimator. - * In channel-independent, also called instantaneous mode, the target - * bottleneck is provided to the encoder by calling xxx_control(...). If - * xxx_control is never called the default values is returned. The default - * value for bottleneck at 16 kHz encoder sampling rate is 32000 bits/sec, - * and it is 56000 bits/sec for 32 kHz sampling rate. - * Note that the output is the iSAC internal operating bottleneck which might - * differ slightly from the one provided through xxx_control(). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Output: - * - *bottleneck : bottleneck in bits/sec - * - * Return value : -1 if error happens - * 0 bit-rates computed correctly. - */ - -int16_t WebRtcIsac_GetUplinkBw(ISACStruct* ISAC_main_inst, int32_t* bottleneck); - -/****************************************************************************** - * WebRtcIsac_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 ms packets. If the encoder sampling rate - * is 16 kHz the maximum payload size is between 120 and 400 bytes. If the - * encoder sampling rate is 32 kHz the maximum payload size is between 120 - * and 600 bytes. - * - * If an out of range limit is used, the function returns -1, but the closest - * valid value will be applied. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. 
min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, i.e. min(170, 300). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 120 and 400 bytes - * if encoder sampling rate is 16 kHz. For - * 32 kHz encoder sampling rate valid values - * are between 120 and 600 bytes. - * - * Return value : 0 if successful - * -1 if error happens - */ - -int16_t WebRtcIsac_SetMaxPayloadSize(ISACStruct* ISAC_main_inst, - int16_t maxPayloadBytes); - -/****************************************************************************** - * WebRtcIsac_SetMaxRate(...) - * - * This function sets the maximum rate which the codec may not exceed for - * any signal packet. The maximum rate is defined and payload-size per - * frame-size in bits per second. - * - * The codec has a maximum rate of 53400 bits per second (200 bytes per 30 - * ms) if the encoder sampling rate is 16kHz, and 160 kbps (600 bytes/30 ms) - * if the encoder sampling rate is 32 kHz. - * - * It is possible to set a maximum rate between 32000 and 53400 bits/sec - * in wideband mode, and 32000 to 160000 bits/sec in super-wideband mode. - * - * If an out of range limit is used, the function returns -1, but the closest - * valid value will be applied. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, min(170, 300). 
- * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRate : maximum rate in bits per second, - * valid values are 32000 to 53400 bits/sec in - * wideband mode, and 32000 to 160000 bits/sec in - * super-wideband mode. - * - * Return value : 0 if successful - * -1 if error happens - */ - -int16_t WebRtcIsac_SetMaxRate(ISACStruct* ISAC_main_inst, int32_t maxRate); - -/****************************************************************************** - * WebRtcIsac_DecSampRate() - * Return the sampling rate of the decoded audio. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling frequency in Hertz. - * - */ - -uint16_t WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_EncSampRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling rate in Hertz. - * - */ - -uint16_t WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst); - -/****************************************************************************** - * WebRtcIsac_SetDecSampRate() - * Set the sampling rate of the decoder. Initialization of the decoder WILL - * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz - * which is set when the instance is created. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sampRate : sampling rate in Hertz. - * - * Return value : 0 if successful - * -1 if failed. - */ - -int16_t WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst, - uint16_t samp_rate_hz); - -/****************************************************************************** - * WebRtcIsac_SetEncSampRate() - * Set the sampling rate of the encoder. Initialization of the encoder WILL - * NOT overwrite the sampling rate of the encoder. The default value is 16 kHz - * which is set when the instance is created. 
The encoding-mode and the - * bottleneck remain unchanged by this call, however, the maximum rate and - * maximum payload-size will reset to their default value. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sampRate : sampling rate in Hertz. - * - * Return value : 0 if successful - * -1 if failed. - */ - -int16_t WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst, - uint16_t sample_rate_hz); - -/****************************************************************************** - * WebRtcIsac_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. If the rate is set to a value less than bottleneck of codec - * the new bistream will be re-encoded with the given target rate. - * It should always return a complete packet, i.e. only called once - * even for 60 msec frames. - * - * NOTE 1! This function does not write in the ISACStruct, it is not allowed. - * NOTE 2! Currently not implemented for SWB mode. - * NOTE 3! Rates larger than the bottleneck of the codec will be limited - * to the current bottleneck. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - bweIndex : Index of bandwidth estimate to put in new - * bitstream - * - rate : target rate of the transcoder is bits/sec. - * Valid values are the accepted rate in iSAC, - * i.e. 10000 to 56000. - * - isRCU : if the new bit-stream is an RCU - * stream. Note that the rate parameter always indicates the target rate of the - * main payload, regardless of 'isRCU' value. - * - * Output: - * - encoded : The encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * -1 - Error or called in SWB mode - * NOTE! No error code is written to - * the struct since it is only allowed to read - * the struct. 
- */ -int16_t WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst, - int16_t bweIndex, - int16_t jitterInfo, - int32_t rate, - uint8_t* encoded, - int16_t isRCU); - -/**************************************************************************** - * WebRtcIsac_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * other side to this side. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - bweIndex : Bandwidth estimate to transmit to other side. - * - */ - -int16_t WebRtcIsac_GetDownLinkBwIndex(ISACStruct* ISAC_main_inst, - int16_t* bweIndex, - int16_t* jitterInfo); - -/**************************************************************************** - * WebRtcIsac_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - bweIndex : Bandwidth estimate from other side. - * - */ - -int16_t WebRtcIsac_UpdateUplinkBw(ISACStruct* ISAC_main_inst, int16_t bweIndex); - -/**************************************************************************** - * WebRtcIsac_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the bitstream. - * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - bweIndex : Bandwidth estimate in bitstream - * - */ - -int16_t WebRtcIsac_ReadBwIndex(const uint8_t* encoded, int16_t* bweIndex); - -/******************************************************************************* - * WebRtcIsac_GetNewFrameLen(...) - * - * returns the frame lenght (in samples) of the next packet. 
In the case of - * channel-adaptive mode, iSAC decides on its frame lenght based on the - * estimated bottleneck this allows a user to prepare for the next packet (at - * the encoder) - * - * The primary usage is in CE to make the iSAC works in channel-adaptive mode - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Return Value : frame lenght in samples - * - */ - -int16_t WebRtcIsac_GetNewFrameLen(ISACStruct* ISAC_main_inst); - -/**************************************************************************** - * WebRtcIsac_GetRedPayload(...) - * - * Populates "encoded" with the redundant payload of the recently encoded - * frame. This function has to be called once that WebRtcIsac_Encode(...) - * returns a positive value. Regardless of the frame-size this function will - * be called only once after encoding is completed. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - encoded : the encoded data vector - * - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : -1 - Error - * - * - */ -int16_t WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst, uint8_t* encoded); - -/**************************************************************************** - * WebRtcIsac_DecodeRcu(...) - * - * This function decodes a redundant (RCU) iSAC frame. Function is called in - * NetEq with a stored RCU payload i case of packet loss. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - encoded : encoded ISAC RCU frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ -int WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); - -/* If `inst` is a decoder but not an encoder: tell it what sample rate the - encoder is using, for bandwidth estimation purposes. */ -void WebRtcIsac_SetEncSampRateInDecoder(ISACStruct* inst, int sample_rate_hz); - -#if defined(__cplusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_INCLUDE_ISAC_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/arith_routines.c b/modules/audio_coding/codecs/isac/main/source/arith_routines.c deleted file mode 100644 index 9d5c6930b1..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/arith_routines.c +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - - -/* - * terminate and return byte stream; - * returns the number of bytes in the stream - */ -int WebRtcIsac_EncTerminate(Bitstr *streamdata) /* in-/output struct containing bitstream */ -{ - uint8_t *stream_ptr; - - - /* point to the right place in the stream buffer */ - stream_ptr = streamdata->stream + streamdata->stream_index; - - /* find minimum length (determined by current interval width) */ - if ( streamdata->W_upper > 0x01FFFFFF ) - { - streamdata->streamval += 0x01000000; - /* add carry to buffer */ - if (streamdata->streamval < 0x01000000) - { - /* propagate carry */ - while ( !(++(*--stream_ptr)) ); - /* put pointer back to the old value */ - stream_ptr = streamdata->stream + streamdata->stream_index; - } - /* write remaining data to bitstream */ - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - } - else - { - streamdata->streamval += 0x00010000; - /* add carry to buffer */ - if (streamdata->streamval < 0x00010000) - { - /* propagate carry */ - while ( !(++(*--stream_ptr)) ); - /* put pointer back to the old value */ - stream_ptr = streamdata->stream + streamdata->stream_index; - } - /* write remaining data to bitstream */ - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - *stream_ptr++ = (uint8_t) ((streamdata->streamval >> 16) & 0x00FF); - } - - /* calculate stream length */ - return (int)(stream_ptr - streamdata->stream); -} diff --git a/modules/audio_coding/codecs/isac/main/source/arith_routines.h b/modules/audio_coding/codecs/isac/main/source/arith_routines.h deleted file mode 100644 index 3f9f6de7bb..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/arith_routines.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routines.h - * - * Functions for arithmetic coding. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -int WebRtcIsac_EncLogisticMulti2( - Bitstr* streamdata, /* in-/output struct containing bitstream */ - int16_t* dataQ7, /* input: data vector */ - const uint16_t* - env, /* input: side info vector defining the width of the pdf */ - int N, /* input: data vector length */ - int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ - -/* returns the number of bytes in the stream */ -int WebRtcIsac_EncTerminate( - Bitstr* streamdata); /* in-/output struct containing bitstream */ - -/* returns the number of bytes in the stream so far */ -int WebRtcIsac_DecLogisticMulti2( - int16_t* data, /* output: data vector */ - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const uint16_t* - env, /* input: side info vector defining the width of the pdf */ - const int16_t* dither, /* input: dither vector */ - int N, /* input: data vector length */ - int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ - -void WebRtcIsac_EncHistMulti( - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const int* data, /* input: data vector */ - const uint16_t* const* cdf, /* input: array of cdf arrays */ - int N); /* input: data vector length */ - -int WebRtcIsac_DecHistBisectMulti( - int* data, /* output: data vector */ - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const uint16_t* const* 
cdf, /* input: array of cdf arrays */ - const uint16_t* - cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */ - int N); /* input: data vector length */ - -int WebRtcIsac_DecHistOneStepMulti( - int* data, /* output: data vector */ - Bitstr* streamdata, /* in-/output struct containing bitstream */ - const uint16_t* const* cdf, /* input: array of cdf arrays */ - const uint16_t* - init_index, /* input: vector of initial cdf table search entries */ - int N); /* input: data vector length */ - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c b/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c deleted file mode 100644 index e948979fd7..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" - - -/* - * code symbols into arithmetic bytestream - */ -void WebRtcIsac_EncHistMulti(Bitstr *streamdata, /* in-/output struct containing bitstream */ - const int *data, /* input: data vector */ - const uint16_t *const *cdf, /* input: array of cdf arrays */ - const int N) /* input: data vector length */ -{ - uint32_t W_lower, W_upper; - uint32_t W_upper_LSB, W_upper_MSB; - uint8_t *stream_ptr; - uint8_t *stream_ptr_carry; - uint32_t cdf_lo, cdf_hi; - int k; - - - /* point to beginning of stream buffer */ - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - - for (k=N; k>0; k--) - { - /* fetch cdf_lower and cdf_upper from cdf tables */ - cdf_lo = (uint32_t) *(*cdf + *data); - cdf_hi = (uint32_t) *(*cdf++ + *data++ + 1); - - /* update interval */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - W_lower = W_upper_MSB * cdf_lo; - W_lower += (W_upper_LSB * cdf_lo) >> 16; - W_upper = W_upper_MSB * cdf_hi; - W_upper += (W_upper_LSB * cdf_hi) >> 16; - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamdata->streamval += W_lower; - - /* handle carry */ - if (streamdata->streamval < W_lower) - { - /* propagate carry */ - stream_ptr_carry = stream_ptr; - while (!(++(*--stream_ptr_carry))); - } - - /* renormalize interval, store most significant byte of streamval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - W_upper <<= 8; - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - streamdata->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - - return; -} - - - -/* - * function to decode more symbols from the arithmetic bytestream, using method 
of bisection - * cdf tables should be of size 2^k-1 (which corresponds to an alphabet size of 2^k-2) - */ -int WebRtcIsac_DecHistBisectMulti(int *data, /* output: data vector */ - Bitstr *streamdata, /* in-/output struct containing bitstream */ - const uint16_t *const *cdf, /* input: array of cdf arrays */ - const uint16_t *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */ - const int N) /* input: data vector length */ -{ - uint32_t W_lower, W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB, W_upper_MSB; - uint32_t streamval; - const uint8_t *stream_ptr; - const uint16_t *cdf_ptr; - int size_tmp; - int k; - - W_lower = 0; //to remove warning -DH - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - if (W_upper == 0) - /* Should not be possible in normal operation */ - return -2; - - if (streamdata->stream_index == 0) /* first time decoder is called for this stream */ - { - /* read first word from bytestream */ - streamval = *stream_ptr << 24; - streamval |= *++stream_ptr << 16; - streamval |= *++stream_ptr << 8; - streamval |= *++stream_ptr; - } else { - streamval = streamdata->streamval; - } - - for (k=N; k>0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* start halfway the cdf range */ - size_tmp = *cdf_size++ >> 1; - cdf_ptr = *cdf + (size_tmp - 1); - - /* method of bisection */ - for ( ;; ) - { - W_tmp = W_upper_MSB * *cdf_ptr; - W_tmp += (W_upper_LSB * *cdf_ptr) >> 16; - size_tmp >>= 1; - if (size_tmp == 0) break; - if (streamval > W_tmp) - { - W_lower = W_tmp; - cdf_ptr += size_tmp; - } else { - W_upper = W_tmp; - cdf_ptr -= size_tmp; - } - } - if (streamval > W_tmp) - { - W_lower = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++); - } else { - W_upper = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++ - 1); - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add 
integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - /* read next byte from stream */ - streamval = (streamval << 8) | *++stream_ptr; - W_upper <<= 8; - } - - if (W_upper == 0) - /* Should not be possible in normal operation */ - return -2; - - - } - - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - streamdata->streamval = streamval; - - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return streamdata->stream_index - 2; - else - return streamdata->stream_index - 1; -} - - - -/* - * function to decode more symbols from the arithmetic bytestream, taking single step up or - * down at a time - * cdf tables can be of arbitrary size, but large tables may take a lot of iterations - */ -int WebRtcIsac_DecHistOneStepMulti(int *data, /* output: data vector */ - Bitstr *streamdata, /* in-/output struct containing bitstream */ - const uint16_t *const *cdf, /* input: array of cdf arrays */ - const uint16_t *init_index, /* input: vector of initial cdf table search entries */ - const int N) /* input: data vector length */ -{ - uint32_t W_lower, W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB, W_upper_MSB; - uint32_t streamval; - const uint8_t *stream_ptr; - const uint16_t *cdf_ptr; - int k; - - - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - if (W_upper == 0) - /* Should not be possible in normal operation */ - return -2; - - if (streamdata->stream_index == 0) /* first time decoder is called for this stream */ - { - /* read first word from bytestream */ - streamval = (uint32_t)(*stream_ptr) << 24; - streamval |= (uint32_t)(*++stream_ptr) << 16; - streamval |= (uint32_t)(*++stream_ptr) << 8; - streamval |= (uint32_t)(*++stream_ptr); - } else { - streamval = streamdata->streamval; - } - - - for (k=N; 
k>0; k--) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* start at the specified table entry */ - cdf_ptr = *cdf + (*init_index++); - W_tmp = W_upper_MSB * *cdf_ptr; - W_tmp += (W_upper_LSB * *cdf_ptr) >> 16; - if (streamval > W_tmp) - { - for ( ;; ) - { - W_lower = W_tmp; - if (cdf_ptr[0]==65535) - /* range check */ - return -3; - W_tmp = W_upper_MSB * *++cdf_ptr; - W_tmp += (W_upper_LSB * *cdf_ptr) >> 16; - if (streamval <= W_tmp) break; - } - W_upper = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++ - 1); - } else { - for ( ;; ) - { - W_upper = W_tmp; - --cdf_ptr; - if (cdf_ptr<*cdf) { - /* range check */ - return -3; - } - W_tmp = W_upper_MSB * *cdf_ptr; - W_tmp += (W_upper_LSB * *cdf_ptr) >> 16; - if (streamval > W_tmp) break; - } - W_lower = W_tmp; - *data++ = (int)(cdf_ptr - *cdf++); - } - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - /* read next byte from stream */ - streamval = (streamval << 8) | *++stream_ptr; - W_upper <<= 8; - } - } - - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - streamdata->streamval = streamval; - - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return streamdata->stream_index - 2; - else - return streamdata->stream_index - 1; -} diff --git a/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c b/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c deleted file mode 100644 index 777780f54f..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * arith_routines.h - * - * This file contains functions for arithmatically encoding and - * decoding DFT coefficients. - * - */ - - -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" - - - -static const int32_t kHistEdgesQ15[51] = { - -327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716, - -196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858, -91751, -78644, - -65536, -52429, -39322, -26215, -13108, 0, 13107, 26214, 39321, 52428, - 65536, 78643, 91750, 104857, 117964, 131072, 144179, 157286, 170393, 183500, - 196608, 209715, 222822, 235929, 249036, 262144, 275251, 288358, 301465, 314572, - 327680}; - - -static const int kCdfSlopeQ0[51] = { /* Q0 */ - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 5, 5, 13, 23, 47, 87, 154, 315, 700, 1088, - 2471, 6064, 14221, 21463, 36634, 36924, 19750, 13270, 5806, 2312, - 1095, 660, 316, 145, 86, 41, 32, 5, 5, 5, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 2, 0}; - - -static const int kCdfQ16[51] = { /* Q16 */ - 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, - 20, 22, 24, 29, 38, 57, 92, 153, 279, 559, - 994, 1983, 4408, 10097, 18682, 33336, 48105, 56005, 61313, 63636, - 64560, 64998, 65262, 65389, 65447, 65481, 65497, 65510, 65512, 65514, - 65516, 65518, 65520, 65522, 65524, 65526, 65528, 65530, 65532, 65534, - 65535}; - - - -/* function to be converted to fixed point */ -static __inline uint32_t piecewise(int32_t xinQ15) { - - int32_t ind, qtmp1, qtmp2, qtmp3; - uint32_t tmpUW32; - - - qtmp2 = xinQ15; - - if (qtmp2 < kHistEdgesQ15[0]) { - qtmp2 = kHistEdgesQ15[0]; - } - if (qtmp2 > kHistEdgesQ15[50]) { - qtmp2 = kHistEdgesQ15[50]; - } - - qtmp1 = qtmp2 
- kHistEdgesQ15[0]; /* Q15 - Q15 = Q15 */ - ind = (qtmp1 * 5) >> 16; /* 2^16 / 5 = 0.4 in Q15 */ - /* Q15 -> Q0 */ - qtmp1 = qtmp2 - kHistEdgesQ15[ind]; /* Q15 - Q15 = Q15 */ - qtmp2 = kCdfSlopeQ0[ind] * qtmp1; /* Q0 * Q15 = Q15 */ - qtmp3 = qtmp2>>15; /* Q15 -> Q0 */ - - tmpUW32 = kCdfQ16[ind] + qtmp3; /* Q0 + Q0 = Q0 */ - return tmpUW32; -} - - - -int WebRtcIsac_EncLogisticMulti2( - Bitstr *streamdata, /* in-/output struct containing bitstream */ - int16_t *dataQ7, /* input: data vector */ - const uint16_t *envQ8, /* input: side info vector defining the width of the pdf */ - const int N, /* input: data vector length / 2 */ - const int16_t isSWB12kHz) -{ - uint32_t W_lower, W_upper; - uint32_t W_upper_LSB, W_upper_MSB; - uint8_t *stream_ptr; - uint8_t *maxStreamPtr; - uint8_t *stream_ptr_carry; - uint32_t cdf_lo, cdf_hi; - int k; - - /* point to beginning of stream buffer */ - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - - maxStreamPtr = streamdata->stream + STREAM_SIZE_MAX_60 - 1; - for (k = 0; k < N; k++) - { - /* compute cdf_lower and cdf_upper by evaluating the piecewise linear cdf */ - cdf_lo = piecewise((*dataQ7 - 64) * *envQ8); - cdf_hi = piecewise((*dataQ7 + 64) * *envQ8); - - /* test and clip if probability gets too small */ - while (cdf_lo+1 >= cdf_hi) { - /* clip */ - if (*dataQ7 > 0) { - *dataQ7 -= 128; - cdf_hi = cdf_lo; - cdf_lo = piecewise((*dataQ7 - 64) * *envQ8); - } else { - *dataQ7 += 128; - cdf_lo = cdf_hi; - cdf_hi = piecewise((*dataQ7 + 64) * *envQ8); - } - } - - dataQ7++; - // increment only once per 4 iterations for SWB-16kHz or WB - // increment only once per 2 iterations for SWB-12kHz - envQ8 += (isSWB12kHz)? 
(k & 1):((k & 1) & (k >> 1)); - - - /* update interval */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - W_lower = W_upper_MSB * cdf_lo; - W_lower += (W_upper_LSB * cdf_lo) >> 16; - W_upper = W_upper_MSB * cdf_hi; - W_upper += (W_upper_LSB * cdf_hi) >> 16; - - /* shift interval such that it begins at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamdata->streamval += W_lower; - - /* handle carry */ - if (streamdata->streamval < W_lower) - { - /* propagate carry */ - stream_ptr_carry = stream_ptr; - while (!(++(*--stream_ptr_carry))); - } - - /* renormalize interval, store most significant byte of streamval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - W_upper <<= 8; - *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24); - - if(stream_ptr > maxStreamPtr) - { - return -ISAC_DISALLOWED_BITSTREAM_LENGTH; - } - streamdata->streamval <<= 8; - } - } - - /* calculate new stream_index */ - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - - return 0; -} - - - -int WebRtcIsac_DecLogisticMulti2( - int16_t *dataQ7, /* output: data vector */ - Bitstr *streamdata, /* in-/output struct containing bitstream */ - const uint16_t *envQ8, /* input: side info vector defining the width of the pdf */ - const int16_t *ditherQ7,/* input: dither vector */ - const int N, /* input: data vector length */ - const int16_t isSWB12kHz) -{ - uint32_t W_lower, W_upper; - uint32_t W_tmp; - uint32_t W_upper_LSB, W_upper_MSB; - uint32_t streamval; - const uint8_t *stream_ptr; - uint32_t cdf_tmp; - int16_t candQ7; - int k; - - // Position just past the end of the stream. STREAM_SIZE_MAX_60 instead of - // STREAM_SIZE_MAX (which is the size of the allocated buffer) because that's - // the limit to how much data is filled in. 
- const uint8_t* const stream_end = streamdata->stream + STREAM_SIZE_MAX_60; - - stream_ptr = streamdata->stream + streamdata->stream_index; - W_upper = streamdata->W_upper; - if (streamdata->stream_index == 0) /* first time decoder is called for this stream */ - { - /* read first word from bytestream */ - if (stream_ptr + 3 >= stream_end) - return -1; // Would read out of bounds. Malformed input? - streamval = *stream_ptr << 24; - streamval |= *++stream_ptr << 16; - streamval |= *++stream_ptr << 8; - streamval |= *++stream_ptr; - } else { - streamval = streamdata->streamval; - } - - - for (k = 0; k < N; k++) - { - /* find the integer *data for which streamval lies in [W_lower+1, W_upper] */ - W_upper_LSB = W_upper & 0x0000FFFF; - W_upper_MSB = W_upper >> 16; - - /* find first candidate by inverting the logistic cdf */ - candQ7 = - *ditherQ7 + 64; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - if (streamval > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - while (streamval > W_tmp) - { - W_lower = W_tmp; - candQ7 += 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - - /* error check */ - if (W_lower == W_tmp) return -1; - } - W_upper = W_tmp; - - /* another sample decoded */ - *dataQ7 = candQ7 - 64; - } - else - { - W_upper = W_tmp; - candQ7 -= 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - while ( !(streamval > W_tmp) ) - { - W_upper = W_tmp; - candQ7 -= 128; - cdf_tmp = piecewise(candQ7 * *envQ8); - - W_tmp = W_upper_MSB * cdf_tmp; - W_tmp += (W_upper_LSB * cdf_tmp) >> 16; - - /* error check */ - if (W_upper == W_tmp) return -1; - } - W_lower = W_tmp; - - /* another sample decoded */ - *dataQ7 = candQ7 + 64; - } - ditherQ7++; - 
dataQ7++; - // increment only once per 4 iterations for SWB-16kHz or WB - // increment only once per 2 iterations for SWB-12kHz - envQ8 += (isSWB12kHz)? (k & 1):((k & 1) & (k >> 1)); - - /* shift interval to start at zero */ - W_upper -= ++W_lower; - - /* add integer to bitstream */ - streamval -= W_lower; - - /* renormalize interval and update streamval */ - while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */ - { - /* read next byte from stream */ - if (stream_ptr + 1 >= stream_end) - return -1; // Would read out of bounds. Malformed input? - streamval = (streamval << 8) | *++stream_ptr; - W_upper <<= 8; - } - } - - streamdata->stream_index = (int)(stream_ptr - streamdata->stream); - streamdata->W_upper = W_upper; - streamdata->streamval = streamval; - - /* find number of bytes in original stream (determined by current interval width) */ - if ( W_upper > 0x01FFFFFF ) - return streamdata->stream_index - 2; - else - return streamdata->stream_index - 1; -} diff --git a/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc b/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc deleted file mode 100644 index b7f2c0b1af..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" - -#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h" - -namespace webrtc { - -// Explicit instantiation: -template class AudioEncoderIsacT; - -} // namespace webrtc diff --git a/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc b/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc deleted file mode 100644 index 07bab055e1..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/audio_encoder_isac_unittest.cc +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" - -#include - -#include "test/gtest.h" - -namespace webrtc { - -namespace { - -void TestBadConfig(const AudioEncoderIsacFloatImpl::Config& config) { - EXPECT_FALSE(config.IsOk()); -} - -void TestGoodConfig(const AudioEncoderIsacFloatImpl::Config& config) { - EXPECT_TRUE(config.IsOk()); - AudioEncoderIsacFloatImpl aei(config); -} - -// Wrap subroutine calls that test things in this, so that the error messages -// will be accompanied by stack traces that make it possible to tell which -// subroutine invocation caused the failure. -#define S(x) \ - do { \ - SCOPED_TRACE(#x); \ - x; \ - } while (0) - -} // namespace - -TEST(AudioEncoderIsacTest, TestConfigBitrate) { - AudioEncoderIsacFloatImpl::Config config; - - // The default value is some real, positive value. - EXPECT_GT(config.bit_rate, 1); - S(TestGoodConfig(config)); - - // 0 is another way to ask for the default value. 
- config.bit_rate = 0; - S(TestGoodConfig(config)); - - // Try some unreasonable values and watch them fail. - config.bit_rate = -1; - S(TestBadConfig(config)); - config.bit_rate = 1; - S(TestBadConfig(config)); - config.bit_rate = std::numeric_limits::max(); - S(TestBadConfig(config)); -} - -} // namespace webrtc diff --git a/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c b/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c deleted file mode 100644 index 486cd95914..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c +++ /dev/null @@ -1,1013 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * BwEstimator.c - * - * This file contains the code for the Bandwidth Estimator designed - * for iSAC. 
- * - */ - -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "rtc_base/checks.h" - -/* array of quantization levels for bottle neck info; Matlab code: */ -/* sprintf('%4.1ff, ', logspace(log10(5000), log10(40000), 12)) */ -static const float kQRateTableWb[12] = -{ - 10000.0f, 11115.3f, 12355.1f, 13733.1f, 15264.8f, 16967.3f, - 18859.8f, 20963.3f, 23301.4f, 25900.3f, 28789.0f, 32000.0f}; - - -static const float kQRateTableSwb[24] = -{ - 10000.0f, 11115.3f, 12355.1f, 13733.1f, 15264.8f, 16967.3f, - 18859.8f, 20963.3f, 23153.1f, 25342.9f, 27532.7f, 29722.5f, - 31912.3f, 34102.1f, 36291.9f, 38481.7f, 40671.4f, 42861.2f, - 45051.0f, 47240.8f, 49430.6f, 51620.4f, 53810.2f, 56000.0f, -}; - - - - -int32_t WebRtcIsac_InitBandwidthEstimator( - BwEstimatorstr* bwest_str, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate) -{ - switch(encoderSampRate) - { - case kIsacWideband: - { - bwest_str->send_bw_avg = INIT_BN_EST_WB; - break; - } - case kIsacSuperWideband: - { - bwest_str->send_bw_avg = INIT_BN_EST_SWB; - break; - } - } - - switch(decoderSampRate) - { - case kIsacWideband: - { - bwest_str->prev_frame_length = INIT_FRAME_LEN_WB; - bwest_str->rec_bw_inv = 1.0f / - (INIT_BN_EST_WB + INIT_HDR_RATE_WB); - bwest_str->rec_bw = (int32_t)INIT_BN_EST_WB; - bwest_str->rec_bw_avg_Q = INIT_BN_EST_WB; - bwest_str->rec_bw_avg = INIT_BN_EST_WB + INIT_HDR_RATE_WB; - bwest_str->rec_header_rate = INIT_HDR_RATE_WB; - break; - } - case kIsacSuperWideband: - { - bwest_str->prev_frame_length = INIT_FRAME_LEN_SWB; - bwest_str->rec_bw_inv = 1.0f / - (INIT_BN_EST_SWB + INIT_HDR_RATE_SWB); - bwest_str->rec_bw = (int32_t)INIT_BN_EST_SWB; - bwest_str->rec_bw_avg_Q = INIT_BN_EST_SWB; - bwest_str->rec_bw_avg = INIT_BN_EST_SWB + INIT_HDR_RATE_SWB; - bwest_str->rec_header_rate = 
INIT_HDR_RATE_SWB; - break; - } - } - - bwest_str->prev_rec_rtp_number = 0; - bwest_str->prev_rec_arr_ts = 0; - bwest_str->prev_rec_send_ts = 0; - bwest_str->prev_rec_rtp_rate = 1.0f; - bwest_str->last_update_ts = 0; - bwest_str->last_reduction_ts = 0; - bwest_str->count_tot_updates_rec = -9; - bwest_str->rec_jitter = 10.0f; - bwest_str->rec_jitter_short_term = 0.0f; - bwest_str->rec_jitter_short_term_abs = 5.0f; - bwest_str->rec_max_delay = 10.0f; - bwest_str->rec_max_delay_avg_Q = 10.0f; - bwest_str->num_pkts_rec = 0; - - bwest_str->send_max_delay_avg = 10.0f; - - bwest_str->hsn_detect_rec = 0; - - bwest_str->num_consec_rec_pkts_over_30k = 0; - - bwest_str->hsn_detect_snd = 0; - - bwest_str->num_consec_snt_pkts_over_30k = 0; - - bwest_str->in_wait_period = 0; - - bwest_str->change_to_WB = 0; - - bwest_str->numConsecLatePkts = 0; - bwest_str->consecLatency = 0; - bwest_str->inWaitLatePkts = 0; - bwest_str->senderTimestamp = 0; - bwest_str->receiverTimestamp = 0; - - bwest_str->external_bw_info.in_use = 0; - - return 0; -} - -/* This function updates both bottle neck rates */ -/* Parameters: */ -/* rtp_number - value from RTP packet, from NetEq */ -/* frame length - length of signal frame in ms, from iSAC decoder */ -/* send_ts - value in RTP header giving send time in samples */ -/* arr_ts - value given by timeGetTime() time of arrival in samples of packet from NetEq */ -/* pksize - size of packet in bytes, from NetEq */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsac_UpdateBandwidthEstimator( - BwEstimatorstr* bwest_str, - const uint16_t rtp_number, - const int32_t frame_length, - const uint32_t send_ts, - const uint32_t arr_ts, - const size_t pksize - /*, const uint16_t Index*/) -{ - float weight = 0.0f; - float curr_bw_inv = 0.0f; - float rec_rtp_rate; - float t_diff_proj; - float arr_ts_diff; - float send_ts_diff; - float arr_time_noise; 
- float arr_time_noise_abs; - - float delay_correction_factor = 1; - float late_diff = 0.0f; - int immediate_set = 0; - int num_pkts_expected; - - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - // We have to adjust the header-rate if the first packet has a - // frame-size different than the initialized value. - if ( frame_length != bwest_str->prev_frame_length ) - { - bwest_str->rec_header_rate = (float)HEADER_SIZE * 8.0f * - 1000.0f / (float)frame_length; /* bits/s */ - } - - /* UPDATE ESTIMATES ON THIS SIDE */ - /* compute far-side transmission rate */ - rec_rtp_rate = ((float)pksize * 8.0f * 1000.0f / (float)frame_length) + - bwest_str->rec_header_rate; - // rec_rtp_rate packet bits/s + header bits/s - - /* check for timer wrap-around */ - if (arr_ts < bwest_str->prev_rec_arr_ts) - { - bwest_str->prev_rec_arr_ts = arr_ts; - bwest_str->last_update_ts = arr_ts; - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->num_pkts_rec = 0; - - /* store frame length */ - bwest_str->prev_frame_length = frame_length; - - /* store far-side transmission rate */ - bwest_str->prev_rec_rtp_rate = rec_rtp_rate; - - /* store far-side RTP time stamp */ - bwest_str->prev_rec_rtp_number = rtp_number; - - return 0; - } - - bwest_str->num_pkts_rec++; - - /* check that it's not one of the first 9 packets */ - if ( bwest_str->count_tot_updates_rec > 0 ) - { - if(bwest_str->in_wait_period > 0 ) - { - bwest_str->in_wait_period--; - } - - bwest_str->inWaitLatePkts -= ((bwest_str->inWaitLatePkts > 0)? 
1:0); - send_ts_diff = (float)(send_ts - bwest_str->prev_rec_send_ts); - - if (send_ts_diff <= (16 * frame_length)*2) - //doesn't allow for a dropped packet, not sure necessary to be - // that strict -DH - { - /* if not been updated for a long time, reduce the BN estimate */ - if((uint32_t)(arr_ts - bwest_str->last_update_ts) * - 1000.0f / FS > 3000) - { - //how many frames should have been received since the last - // update if too many have been dropped or there have been - // big delays won't allow this reduction may no longer need - // the send_ts_diff here - num_pkts_expected = (int)(((float)(arr_ts - - bwest_str->last_update_ts) * 1000.0f /(float) FS) / - (float)frame_length); - - if(((float)bwest_str->num_pkts_rec/(float)num_pkts_expected) > - 0.9) - { - float inv_bitrate = (float) pow( 0.99995, - (double)((uint32_t)(arr_ts - - bwest_str->last_reduction_ts)*1000.0f/FS) ); - - if ( inv_bitrate ) - { - bwest_str->rec_bw_inv /= inv_bitrate; - - //precautionary, likely never necessary - if (bwest_str->hsn_detect_snd && - bwest_str->hsn_detect_rec) - { - if (bwest_str->rec_bw_inv > 0.000066f) - { - bwest_str->rec_bw_inv = 0.000066f; - } - } - } - else - { - bwest_str->rec_bw_inv = 1.0f / - (INIT_BN_EST_WB + INIT_HDR_RATE_WB); - } - /* reset time-since-update counter */ - bwest_str->last_reduction_ts = arr_ts; - } - else - //reset here? 
- { - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->last_update_ts = arr_ts; - bwest_str->num_pkts_rec = 0; - } - } - } - else - { - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->last_update_ts = arr_ts; - bwest_str->num_pkts_rec = 0; - } - - - /* temporarily speed up adaptation if frame length has changed */ - if ( frame_length != bwest_str->prev_frame_length ) - { - bwest_str->count_tot_updates_rec = 10; - bwest_str->rec_header_rate = (float)HEADER_SIZE * 8.0f * - 1000.0f / (float)frame_length; /* bits/s */ - - bwest_str->rec_bw_inv = 1.0f /((float)bwest_str->rec_bw + - bwest_str->rec_header_rate); - } - - //////////////////////// - arr_ts_diff = (float)(arr_ts - bwest_str->prev_rec_arr_ts); - - if (send_ts_diff > 0 ) - { - late_diff = arr_ts_diff - send_ts_diff; - } - else - { - late_diff = arr_ts_diff - (float)(16 * frame_length); - } - - if((late_diff > 0) && !bwest_str->inWaitLatePkts) - { - bwest_str->numConsecLatePkts++; - bwest_str->consecLatency += late_diff; - } - else - { - bwest_str->numConsecLatePkts = 0; - bwest_str->consecLatency = 0; - } - if(bwest_str->numConsecLatePkts > 50) - { - float latencyMs = bwest_str->consecLatency/(FS/1000); - float averageLatencyMs = latencyMs / bwest_str->numConsecLatePkts; - delay_correction_factor = frame_length / (frame_length + averageLatencyMs); - immediate_set = 1; - bwest_str->inWaitLatePkts = (int16_t)((bwest_str->consecLatency/(FS/1000)) / 30);// + 150; - bwest_str->start_wait_period = arr_ts; - } - /////////////////////////////////////////////// - - - - /* update only if previous packet was not lost */ - if ( rtp_number == bwest_str->prev_rec_rtp_number + 1 ) - { - - - if (!(bwest_str->hsn_detect_snd && bwest_str->hsn_detect_rec)) - { - if ((arr_ts_diff > (float)(16 * frame_length))) - { - //1/2 second - if ((late_diff > 8000.0f) && !bwest_str->in_wait_period) - { - delay_correction_factor = 0.7f; - bwest_str->in_wait_period = 55; - bwest_str->start_wait_period = arr_ts; - 
immediate_set = 1; - } - //320 ms - else if (late_diff > 5120.0f && !bwest_str->in_wait_period) - { - delay_correction_factor = 0.8f; - immediate_set = 1; - bwest_str->in_wait_period = 44; - bwest_str->start_wait_period = arr_ts; - } - } - } - - - if ((bwest_str->prev_rec_rtp_rate > bwest_str->rec_bw_avg) && - (rec_rtp_rate > bwest_str->rec_bw_avg) && - !bwest_str->in_wait_period) - { - /* test if still in initiation period and increment counter */ - if (bwest_str->count_tot_updates_rec++ > 99) - { - /* constant weight after initiation part */ - weight = 0.01f; - } - else - { - /* weight decreases with number of updates */ - weight = 1.0f / (float) bwest_str->count_tot_updates_rec; - } - /* Bottle Neck Estimation */ - - /* limit outliers */ - /* if more than 25 ms too much */ - if (arr_ts_diff > frame_length * FS/1000 + 400.0f) - { - // in samples, why 25ms?? - arr_ts_diff = frame_length * FS/1000 + 400.0f; - } - if(arr_ts_diff < (frame_length * FS/1000) - 160.0f) - { - /* don't allow it to be less than frame rate - 10 ms */ - arr_ts_diff = (float)frame_length * FS/1000 - 160.0f; - } - - /* compute inverse receiving rate for last packet */ - curr_bw_inv = arr_ts_diff / ((float)(pksize + HEADER_SIZE) * - 8.0f * FS); // (180+35)*8*16000 = 27.5 Mbit.... 
- - - if(curr_bw_inv < - (1.0f / (MAX_ISAC_BW + bwest_str->rec_header_rate))) - { - // don't allow inv rate to be larger than MAX - curr_bw_inv = (1.0f / - (MAX_ISAC_BW + bwest_str->rec_header_rate)); - } - - /* update bottle neck rate estimate */ - bwest_str->rec_bw_inv = weight * curr_bw_inv + - (1.0f - weight) * bwest_str->rec_bw_inv; - - /* reset time-since-update counter */ - bwest_str->last_update_ts = arr_ts; - bwest_str->last_reduction_ts = arr_ts + 3 * FS; - bwest_str->num_pkts_rec = 0; - - /* Jitter Estimation */ - /* projected difference between arrival times */ - t_diff_proj = ((float)(pksize + HEADER_SIZE) * 8.0f * - 1000.0f) / bwest_str->rec_bw_avg; - - - // difference between projected and actual - // arrival time differences - arr_time_noise = (float)(arr_ts_diff*1000.0f/FS) - - t_diff_proj; - arr_time_noise_abs = (float) fabs( arr_time_noise ); - - /* long term averaged absolute jitter */ - bwest_str->rec_jitter = weight * arr_time_noise_abs + - (1.0f - weight) * bwest_str->rec_jitter; - if (bwest_str->rec_jitter > 10.0f) - { - bwest_str->rec_jitter = 10.0f; - } - /* short term averaged absolute jitter */ - bwest_str->rec_jitter_short_term_abs = 0.05f * - arr_time_noise_abs + 0.95f * - bwest_str->rec_jitter_short_term_abs; - - /* short term averaged jitter */ - bwest_str->rec_jitter_short_term = 0.05f * arr_time_noise + - 0.95f * bwest_str->rec_jitter_short_term; - } - } - } - else - { - // reset time-since-update counter when - // receiving the first 9 packets - bwest_str->last_update_ts = arr_ts; - bwest_str->last_reduction_ts = arr_ts + 3*FS; - bwest_str->num_pkts_rec = 0; - - bwest_str->count_tot_updates_rec++; - } - - /* limit minimum bottle neck rate */ - if (bwest_str->rec_bw_inv > 1.0f / ((float)MIN_ISAC_BW + - bwest_str->rec_header_rate)) - { - bwest_str->rec_bw_inv = 1.0f / ((float)MIN_ISAC_BW + - bwest_str->rec_header_rate); - } - - // limit maximum bitrate - if (bwest_str->rec_bw_inv < 1.0f / ((float)MAX_ISAC_BW + - 
bwest_str->rec_header_rate)) - { - bwest_str->rec_bw_inv = 1.0f / ((float)MAX_ISAC_BW + - bwest_str->rec_header_rate); - } - - /* store frame length */ - bwest_str->prev_frame_length = frame_length; - - /* store far-side transmission rate */ - bwest_str->prev_rec_rtp_rate = rec_rtp_rate; - - /* store far-side RTP time stamp */ - bwest_str->prev_rec_rtp_number = rtp_number; - - // Replace bwest_str->rec_max_delay by the new - // value (atomic operation) - bwest_str->rec_max_delay = 3.0f * bwest_str->rec_jitter; - - /* store send and arrival time stamp */ - bwest_str->prev_rec_arr_ts = arr_ts ; - bwest_str->prev_rec_send_ts = send_ts; - - /* Replace bwest_str->rec_bw by the new value (atomic operation) */ - bwest_str->rec_bw = (int32_t)(1.0f / bwest_str->rec_bw_inv - - bwest_str->rec_header_rate); - - if (immediate_set) - { - bwest_str->rec_bw = (int32_t) (delay_correction_factor * - (float) bwest_str->rec_bw); - - if (bwest_str->rec_bw < (int32_t) MIN_ISAC_BW) - { - bwest_str->rec_bw = (int32_t) MIN_ISAC_BW; - } - - bwest_str->rec_bw_avg = bwest_str->rec_bw + - bwest_str->rec_header_rate; - - bwest_str->rec_bw_avg_Q = (float) bwest_str->rec_bw; - - bwest_str->rec_jitter_short_term = 0.0f; - - bwest_str->rec_bw_inv = 1.0f / (bwest_str->rec_bw + - bwest_str->rec_header_rate); - - bwest_str->count_tot_updates_rec = 1; - - immediate_set = 0; - bwest_str->consecLatency = 0; - bwest_str->numConsecLatePkts = 0; - } - - return 0; -} - - -/* This function updates the send bottle neck rate */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsac_UpdateUplinkBwImpl( - BwEstimatorstr* bwest_str, - int16_t index, - enum IsacSamplingRate encoderSamplingFreq) -{ - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - if((index < 0) || (index > 23)) - { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - if(encoderSamplingFreq == 
kIsacWideband) - { - if(index > 11) - { - index -= 12; - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MAX_ISAC_MD; - } - else - { - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MIN_ISAC_MD; - } - - /* compute the BN estimate as decoded on the other side */ - bwest_str->send_bw_avg = 0.9f * bwest_str->send_bw_avg + - 0.1f * kQRateTableWb[index]; - } - else - { - /* compute the BN estimate as decoded on the other side */ - bwest_str->send_bw_avg = 0.9f * bwest_str->send_bw_avg + - 0.1f * kQRateTableSwb[index]; - } - - if (bwest_str->send_bw_avg > (float) 28000 && !bwest_str->hsn_detect_snd) - { - bwest_str->num_consec_snt_pkts_over_30k++; - - if (bwest_str->num_consec_snt_pkts_over_30k >= 66) - { - //approx 2 seconds with 30ms frames - bwest_str->hsn_detect_snd = 1; - } - } - else if (!bwest_str->hsn_detect_snd) - { - bwest_str->num_consec_snt_pkts_over_30k = 0; - } - return 0; -} - -// called when there is upper-band bit-stream to update jitter -// statistics. 
-int16_t WebRtcIsac_UpdateUplinkJitter( - BwEstimatorstr* bwest_str, - int32_t index) -{ - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - if((index < 0) || (index > 23)) - { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - if(index > 0) - { - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MAX_ISAC_MD; - } - else - { - /* compute the jitter estimate as decoded on the other side */ - bwest_str->send_max_delay_avg = 0.9f * bwest_str->send_max_delay_avg + - 0.1f * (float)MIN_ISAC_MD; - } - - return 0; -} - - - -// Returns the bandwidth/jitter estimation code (integer 0...23) -// to put in the sending iSAC payload -void -WebRtcIsac_GetDownlinkBwJitIndexImpl( - BwEstimatorstr* bwest_str, - int16_t* bottleneckIndex, - int16_t* jitterInfo, - enum IsacSamplingRate decoderSamplingFreq) -{ - float MaxDelay; - //uint16_t MaxDelayBit; - - float rate; - float r; - float e1, e2; - const float weight = 0.1f; - const float* ptrQuantizationTable; - int16_t addJitterInfo; - int16_t minInd; - int16_t maxInd; - int16_t midInd; - - if (bwest_str->external_bw_info.in_use) { - *bottleneckIndex = bwest_str->external_bw_info.bottleneck_idx; - *jitterInfo = bwest_str->external_bw_info.jitter_info; - return; - } - - /* Get Max Delay Bit */ - /* get unquantized max delay */ - MaxDelay = (float)WebRtcIsac_GetDownlinkMaxDelay(bwest_str); - - if ( ((1.f - weight) * bwest_str->rec_max_delay_avg_Q + weight * - MAX_ISAC_MD - MaxDelay) > (MaxDelay - (1.f-weight) * - bwest_str->rec_max_delay_avg_Q - weight * MIN_ISAC_MD) ) - { - jitterInfo[0] = 0; - /* update quantized average */ - bwest_str->rec_max_delay_avg_Q = - (1.f - weight) * bwest_str->rec_max_delay_avg_Q + weight * - (float)MIN_ISAC_MD; - } - else - { - jitterInfo[0] = 1; - /* update quantized average */ - bwest_str->rec_max_delay_avg_Q = - (1.f-weight) * bwest_str->rec_max_delay_avg_Q + weight * - (float)MAX_ISAC_MD; - } - - // Get 
unquantized rate. - rate = (float)WebRtcIsac_GetDownlinkBandwidth(bwest_str); - - /* Get Rate Index */ - if(decoderSamplingFreq == kIsacWideband) - { - ptrQuantizationTable = kQRateTableWb; - addJitterInfo = 1; - maxInd = 11; - } - else - { - ptrQuantizationTable = kQRateTableSwb; - addJitterInfo = 0; - maxInd = 23; - } - - minInd = 0; - while(maxInd > minInd + 1) - { - midInd = (maxInd + minInd) >> 1; - if(rate > ptrQuantizationTable[midInd]) - { - minInd = midInd; - } - else - { - maxInd = midInd; - } - } - // Chose the index which gives results an average which is closest - // to rate - r = (1 - weight) * bwest_str->rec_bw_avg_Q - rate; - e1 = weight * ptrQuantizationTable[minInd] + r; - e2 = weight * ptrQuantizationTable[maxInd] + r; - e1 = (e1 > 0)? e1:-e1; - e2 = (e2 > 0)? e2:-e2; - if(e1 < e2) - { - bottleneckIndex[0] = minInd; - } - else - { - bottleneckIndex[0] = maxInd; - } - - bwest_str->rec_bw_avg_Q = (1 - weight) * bwest_str->rec_bw_avg_Q + - weight * ptrQuantizationTable[bottleneckIndex[0]]; - bottleneckIndex[0] += jitterInfo[0] * 12 * addJitterInfo; - - bwest_str->rec_bw_avg = (1 - weight) * bwest_str->rec_bw_avg + weight * - (rate + bwest_str->rec_header_rate); -} - - - -/* get the bottle neck rate from far side to here, as estimated on this side */ -int32_t WebRtcIsac_GetDownlinkBandwidth( const BwEstimatorstr *bwest_str) -{ - int32_t rec_bw; - float jitter_sign; - float bw_adjust; - - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - /* create a value between -1.0 and 1.0 indicating "average sign" of jitter */ - jitter_sign = bwest_str->rec_jitter_short_term / - bwest_str->rec_jitter_short_term_abs; - - /* adjust bw proportionally to negative average jitter sign */ - bw_adjust = 1.0f - jitter_sign * (0.15f + 0.15f * jitter_sign * jitter_sign); - - /* adjust Rate if jitter sign is mostly constant */ - rec_bw = (int32_t)(bwest_str->rec_bw * bw_adjust); - - /* limit range of bottle neck rate */ - if (rec_bw < MIN_ISAC_BW) - { - rec_bw = 
MIN_ISAC_BW; - } - else if (rec_bw > MAX_ISAC_BW) - { - rec_bw = MAX_ISAC_BW; - } - return rec_bw; -} - -/* Returns the max delay (in ms) */ -int32_t -WebRtcIsac_GetDownlinkMaxDelay(const BwEstimatorstr *bwest_str) -{ - int32_t rec_max_delay; - - RTC_DCHECK(!bwest_str->external_bw_info.in_use); - - rec_max_delay = (int32_t)(bwest_str->rec_max_delay); - - /* limit range of jitter estimate */ - if (rec_max_delay < MIN_ISAC_MD) - { - rec_max_delay = MIN_ISAC_MD; - } - else if (rec_max_delay > MAX_ISAC_MD) - { - rec_max_delay = MAX_ISAC_MD; - } - return rec_max_delay; -} - -/* Clamp val to the closed interval [min,max]. */ -static int32_t clamp(int32_t val, int32_t min, int32_t max) { - RTC_DCHECK_LE(min, max); - return val < min ? min : (val > max ? max : val); -} - -int32_t WebRtcIsac_GetUplinkBandwidth(const BwEstimatorstr* bwest_str) { - return bwest_str->external_bw_info.in_use - ? bwest_str->external_bw_info.send_bw_avg - : clamp(bwest_str->send_bw_avg, MIN_ISAC_BW, MAX_ISAC_BW); -} - -int32_t WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str) { - return bwest_str->external_bw_info.in_use - ? 
bwest_str->external_bw_info.send_max_delay_avg - : clamp(bwest_str->send_max_delay_avg, MIN_ISAC_MD, MAX_ISAC_MD); -} - -/* - * update long-term average bitrate and amount of data in buffer - * returns minimum payload size (bytes) - */ -int WebRtcIsac_GetMinBytes( - RateModel* State, - int StreamSize, /* bytes in bitstream */ - const int FrameSamples, /* samples per frame */ - const double BottleNeck, /* bottle neck rate; excl headers (bps) */ - const double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ - enum ISACBandwidth bandwidth - /*,int16_t frequentLargePackets*/) -{ - double MinRate = 0.0; - int MinBytes; - double TransmissionTime; - int burstInterval = BURST_INTERVAL; - - // first 10 packets @ low rate, then INIT_BURST_LEN packets @ - // fixed rate of INIT_RATE bps - if (State->InitCounter > 0) - { - if (State->InitCounter-- <= INIT_BURST_LEN) - { - if(bandwidth == isac8kHz) - { - MinRate = INIT_RATE_WB; - } - else - { - MinRate = INIT_RATE_SWB; - } - } - else - { - MinRate = 0; - } - } - else - { - /* handle burst */ - if (State->BurstCounter) - { - if (State->StillBuffered < (1.0 - 1.0/BURST_LEN) * DelayBuildUp) - { - /* max bps derived from BottleNeck and DelayBuildUp values */ - MinRate = (1.0 + (FS/1000) * DelayBuildUp / - (double)(BURST_LEN * FrameSamples)) * BottleNeck; - } - else - { - // max bps derived from StillBuffered and DelayBuildUp - // values - MinRate = (1.0 + (FS/1000) * (DelayBuildUp - - State->StillBuffered) / (double)FrameSamples) * BottleNeck; - if (MinRate < 1.04 * BottleNeck) - { - MinRate = 1.04 * BottleNeck; - } - } - State->BurstCounter--; - } - } - - - /* convert rate from bits/second to bytes/packet */ - MinBytes = (int) (MinRate * FrameSamples / (8.0 * FS)); - - /* StreamSize will be adjusted if less than MinBytes */ - if (StreamSize < MinBytes) - { - StreamSize = MinBytes; - } - - /* keep track of when bottle neck was last exceeded by at least 1% */ - if (StreamSize * 8.0 * FS / FrameSamples > 1.01 * 
BottleNeck) { - if (State->PrevExceed) { - /* bottle_neck exceded twice in a row, decrease ExceedAgo */ - State->ExceedAgo -= /*BURST_INTERVAL*/ burstInterval / (BURST_LEN - 1); - if (State->ExceedAgo < 0) - State->ExceedAgo = 0; - } - else - { - State->ExceedAgo += (FrameSamples * 1000) / FS; /* ms */ - State->PrevExceed = 1; - } - } - else - { - State->PrevExceed = 0; - State->ExceedAgo += (FrameSamples * 1000) / FS; /* ms */ - } - - /* set burst flag if bottle neck not exceeded for long time */ - if ((State->ExceedAgo > burstInterval) && - (State->BurstCounter == 0)) - { - if (State->PrevExceed) - { - State->BurstCounter = BURST_LEN - 1; - } - else - { - State->BurstCounter = BURST_LEN; - } - } - - - /* Update buffer delay */ - TransmissionTime = StreamSize * 8.0 * 1000.0 / BottleNeck; /* ms */ - State->StillBuffered += TransmissionTime; - State->StillBuffered -= (FrameSamples * 1000) / FS; /* ms */ - if (State->StillBuffered < 0.0) - { - State->StillBuffered = 0.0; - } - - return MinBytes; -} - - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsac_UpdateRateModel( - RateModel *State, - int StreamSize, /* bytes in bitstream */ - const int FrameSamples, /* samples per frame */ - const double BottleNeck) /* bottle neck rate; excl headers (bps) */ -{ - double TransmissionTime; - - /* avoid the initial "high-rate" burst */ - State->InitCounter = 0; - - /* Update buffer delay */ - TransmissionTime = StreamSize * 8.0 * 1000.0 / BottleNeck; /* ms */ - State->StillBuffered += TransmissionTime; - State->StillBuffered -= (FrameSamples * 1000) / FS; /* ms */ - if (State->StillBuffered < 0.0) - State->StillBuffered = 0.0; - -} - - -void WebRtcIsac_InitRateModel( - RateModel *State) -{ - State->PrevExceed = 0; /* boolean */ - State->ExceedAgo = 0; /* ms */ - State->BurstCounter = 0; /* packets */ - State->InitCounter = INIT_BURST_LEN + 10; /* packets */ - State->StillBuffered = 1.0; /* ms */ -} - -int WebRtcIsac_GetNewFrameLength( - 
double bottle_neck, - int current_framesamples) -{ - int new_framesamples; - - const int Thld_20_30 = 20000; - - //const int Thld_30_20 = 30000; - const int Thld_30_20 = 1000000; // disable 20 ms frames - - const int Thld_30_60 = 18000; - //const int Thld_30_60 = 0; // disable 60 ms frames - - const int Thld_60_30 = 27000; - - - new_framesamples = current_framesamples; - - /* find new framelength */ - switch(current_framesamples) { - case 320: - if (bottle_neck < Thld_20_30) - new_framesamples = 480; - break; - case 480: - if (bottle_neck < Thld_30_60) - new_framesamples = 960; - else if (bottle_neck > Thld_30_20) - new_framesamples = 320; - break; - case 960: - if (bottle_neck >= Thld_60_30) - new_framesamples = 480; - break; - } - - return new_framesamples; -} - -double WebRtcIsac_GetSnr( - double bottle_neck, - int framesamples) -{ - double s2nr; - - const double a_20 = -30.0; - const double b_20 = 0.8; - const double c_20 = 0.0; - - const double a_30 = -23.0; - const double b_30 = 0.48; - const double c_30 = 0.0; - - const double a_60 = -23.0; - const double b_60 = 0.53; - const double c_60 = 0.0; - - - /* find new SNR value */ - switch(framesamples) { - case 320: - s2nr = a_20 + b_20 * bottle_neck * 0.001 + c_20 * bottle_neck * - bottle_neck * 0.000001; - break; - case 480: - s2nr = a_30 + b_30 * bottle_neck * 0.001 + c_30 * bottle_neck * - bottle_neck * 0.000001; - break; - case 960: - s2nr = a_60 + b_60 * bottle_neck * 0.001 + c_60 * bottle_neck * - bottle_neck * 0.000001; - break; - default: - s2nr = 0; - } - - return s2nr; - -} diff --git a/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h b/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h deleted file mode 100644 index 5f4550a3a5..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * bandwidth_estimator.h - * - * This header file contains the API for the Bandwidth Estimator - * designed for iSAC. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -#define MIN_ISAC_BW 10000 -#define MIN_ISAC_BW_LB 10000 -#define MIN_ISAC_BW_UB 25000 - -#define MAX_ISAC_BW 56000 -#define MAX_ISAC_BW_UB 32000 -#define MAX_ISAC_BW_LB 32000 - -#define MIN_ISAC_MD 5 -#define MAX_ISAC_MD 25 - -// assumed header size, in bytes; we don't know the exact number -// (header compression may be used) -#define HEADER_SIZE 35 - -// Initial Frame-Size, in ms, for Wideband & Super-Wideband Mode -#define INIT_FRAME_LEN_WB 60 -#define INIT_FRAME_LEN_SWB 30 - -// Initial Bottleneck Estimate, in bits/sec, for -// Wideband & Super-wideband mode -#define INIT_BN_EST_WB 20e3f -#define INIT_BN_EST_SWB 56e3f - -// Initial Header rate (header rate depends on frame-size), -// in bits/sec, for Wideband & Super-Wideband mode. 
-#define INIT_HDR_RATE_WB \ - ((float)HEADER_SIZE * 8.0f * 1000.0f / (float)INIT_FRAME_LEN_WB) -#define INIT_HDR_RATE_SWB \ - ((float)HEADER_SIZE * 8.0f * 1000.0f / (float)INIT_FRAME_LEN_SWB) - -// number of packets in a row for a high rate burst -#define BURST_LEN 3 - -// ms, max time between two full bursts -#define BURST_INTERVAL 500 - -// number of packets in a row for initial high rate burst -#define INIT_BURST_LEN 5 - -// bits/s, rate for the first BURST_LEN packets -#define INIT_RATE_WB INIT_BN_EST_WB -#define INIT_RATE_SWB INIT_BN_EST_SWB - -#if defined(__cplusplus) -extern "C" { -#endif - -/* This function initializes the struct */ -/* to be called before using the struct for anything else */ -/* returns 0 if everything went fine, -1 otherwise */ -int32_t WebRtcIsac_InitBandwidthEstimator( - BwEstimatorstr* bwest_str, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate); - -/* This function updates the receiving estimate */ -/* Parameters: */ -/* rtp_number - value from RTP packet, from NetEq */ -/* frame length - length of signal frame in ms, from iSAC decoder */ -/* send_ts - value in RTP header giving send time in samples */ -/* arr_ts - value given by timeGetTime() time of arrival in samples of - * packet from NetEq */ -/* pksize - size of packet in bytes, from NetEq */ -/* Index - integer (range 0...23) indicating bottle neck & jitter as - * estimated by other side */ -/* returns 0 if everything went fine, -1 otherwise */ -int16_t WebRtcIsac_UpdateBandwidthEstimator(BwEstimatorstr* bwest_str, - uint16_t rtp_number, - int32_t frame_length, - uint32_t send_ts, - uint32_t arr_ts, - size_t pksize); - -/* Update receiving estimates. Used when we only receive BWE index, no iSAC data - * packet. 
*/ -int16_t WebRtcIsac_UpdateUplinkBwImpl( - BwEstimatorstr* bwest_str, - int16_t Index, - enum IsacSamplingRate encoderSamplingFreq); - -/* Returns the bandwidth/jitter estimation code (integer 0...23) to put in the - * sending iSAC payload */ -void WebRtcIsac_GetDownlinkBwJitIndexImpl( - BwEstimatorstr* bwest_str, - int16_t* bottleneckIndex, - int16_t* jitterInfo, - enum IsacSamplingRate decoderSamplingFreq); - -/* Returns the bandwidth estimation (in bps) */ -int32_t WebRtcIsac_GetDownlinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the max delay (in ms) */ -int32_t WebRtcIsac_GetDownlinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* Returns the bandwidth that iSAC should send with in bps */ -int32_t WebRtcIsac_GetUplinkBandwidth(const BwEstimatorstr* bwest_str); - -/* Returns the max delay value from the other side in ms */ -int32_t WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); - -/* - * update amount of data in bottle neck buffer and burst handling - * returns minimum payload size (bytes) - */ -int WebRtcIsac_GetMinBytes( - RateModel* State, - int StreamSize, /* bytes in bitstream */ - int FrameLen, /* ms per frame */ - double BottleNeck, /* bottle neck rate; excl headers (bps) */ - double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ - enum ISACBandwidth bandwidth - /*,int16_t frequentLargePackets*/); - -/* - * update long-term average bitrate and amount of data in buffer - */ -void WebRtcIsac_UpdateRateModel( - RateModel* State, - int StreamSize, /* bytes in bitstream */ - int FrameSamples, /* samples per frame */ - double BottleNeck); /* bottle neck rate; excl headers (bps) */ - -void WebRtcIsac_InitRateModel(RateModel* State); - -/* Returns the new framelength value (input argument: bottle_neck) */ -int WebRtcIsac_GetNewFrameLength(double bottle_neck, int current_framelength); - -/* Returns the new SNR value (input argument: bottle_neck) */ -double WebRtcIsac_GetSnr(double bottle_neck, int new_framelength); - 
-int16_t WebRtcIsac_UpdateUplinkJitter(BwEstimatorstr* bwest_str, int32_t index); - -#if defined(__cplusplus) -} -#endif - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_BANDWIDTH_ESTIMATOR_H_ \ - */ diff --git a/modules/audio_coding/codecs/isac/main/source/codec.h b/modules/audio_coding/codecs/isac/main/source/codec.h deleted file mode 100644 index a7c7ddc14a..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/codec.h +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * codec.h - * - * This header file contains the calls to the internal encoder - * and decoder functions. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/third_party/fft/fft.h" - -void WebRtcIsac_ResetBitstream(Bitstr* bit_stream); - -int WebRtcIsac_EstimateBandwidth(BwEstimatorstr* bwest_str, - Bitstr* streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate); - -int WebRtcIsac_DecodeLb(const TransformTables* transform_tables, - float* signal_out, - ISACLBDecStruct* ISACdec_obj, - int16_t* current_framesamples, - int16_t isRCUPayload); - -int WebRtcIsac_DecodeRcuLb(float* signal_out, - ISACLBDecStruct* ISACdec_obj, - int16_t* current_framesamples); - -int WebRtcIsac_EncodeLb(const TransformTables* transform_tables, - float* in, - ISACLBEncStruct* ISACencLB_obj, - int16_t codingMode, - int16_t bottleneckIndex); - -int WebRtcIsac_EncodeStoredDataLb(const IsacSaveEncoderData* ISACSavedEnc_obj, - Bitstr* ISACBitStr_obj, - int BWnumber, - float scale); - -int WebRtcIsac_EncodeStoredDataUb( - const ISACUBSaveEncDataStruct* ISACSavedEnc_obj, - Bitstr* bitStream, - int32_t jitterInfo, - float scale, - enum ISACBandwidth bandwidth); - -int16_t WebRtcIsac_GetRedPayloadUb( - const ISACUBSaveEncDataStruct* ISACSavedEncObj, - Bitstr* bitStreamObj, - enum ISACBandwidth bandwidth); - -/****************************************************************************** - * WebRtcIsac_RateAllocation() - * Internal function to perform a rate-allocation for upper and lower-band, - * given a total rate. - * - * Input: - * - inRateBitPerSec : a total bit-rate in bits/sec. - * - * Output: - * - rateLBBitPerSec : a bit-rate allocated to the lower-band - * in bits/sec. - * - rateUBBitPerSec : a bit-rate allocated to the upper-band - * in bits/sec. 
- * - * Return value : 0 if rate allocation has been successful. - * -1 if failed to allocate rates. - */ - -int16_t WebRtcIsac_RateAllocation(int32_t inRateBitPerSec, - double* rateLBBitPerSec, - double* rateUBBitPerSec, - enum ISACBandwidth* bandwidthKHz); - -/****************************************************************************** - * WebRtcIsac_DecodeUb16() - * - * Decode the upper-band if the codec is in 0-16 kHz mode. - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band decoder object. The - * bit-stream is stored inside the decoder object. - * - * Output: - * -signal_out : decoded audio, 480 samples 30 ms. - * - * Return value : >0 number of decoded bytes. - * <0 if an error occurred. - */ -int WebRtcIsac_DecodeUb16(const TransformTables* transform_tables, - float* signal_out, - ISACUBDecStruct* ISACdec_obj, - int16_t isRCUPayload); - -/****************************************************************************** - * WebRtcIsac_DecodeUb12() - * - * Decode the upper-band if the codec is in 0-12 kHz mode. - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band decoder object. The - * bit-stream is stored inside the decoder object. - * - * Output: - * -signal_out : decoded audio, 480 samples 30 ms. - * - * Return value : >0 number of decoded bytes. - * <0 if an error occurred. - */ -int WebRtcIsac_DecodeUb12(const TransformTables* transform_tables, - float* signal_out, - ISACUBDecStruct* ISACdec_obj, - int16_t isRCUPayload); - -/****************************************************************************** - * WebRtcIsac_EncodeUb16() - * - * Encode the upper-band if the codec is in 0-16 kHz mode. - * - * Input: - * -in : upper-band audio, 160 samples (10 ms). - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band encoder object. The - * bit-stream is stored inside the encoder object. - * - * Return value : >0 number of encoded bytes. - * <0 if an error occurred. 
- */ -int WebRtcIsac_EncodeUb16(const TransformTables* transform_tables, - float* in, - ISACUBEncStruct* ISACenc_obj, - int32_t jitterInfo); - -/****************************************************************************** - * WebRtcIsac_EncodeUb12() - * - * Encode the upper-band if the codec is in 0-12 kHz mode. - * - * Input: - * -in : upper-band audio, 160 samples (10 ms). - * - * Input/Output: - * -ISACdec_obj : pointer to the upper-band encoder object. The - * bit-stream is stored inside the encoder object. - * - * Return value : >0 number of encoded bytes. - * <0 if an error occurred. - */ -int WebRtcIsac_EncodeUb12(const TransformTables* transform_tables, - float* in, - ISACUBEncStruct* ISACenc_obj, - int32_t jitterInfo); - -/************************** initialization functions *************************/ - -void WebRtcIsac_InitMasking(MaskFiltstr* maskdata); - -void WebRtcIsac_InitPostFilterbank(PostFiltBankstr* postfiltdata); - -/**************************** transform functions ****************************/ - -void WebRtcIsac_InitTransform(TransformTables* tables); - -void WebRtcIsac_Time2Spec(const TransformTables* tables, - double* inre1, - double* inre2, - int16_t* outre, - int16_t* outim, - FFTstr* fftstr_obj); - -void WebRtcIsac_Spec2time(const TransformTables* tables, - double* inre, - double* inim, - double* outre1, - double* outre2, - FFTstr* fftstr_obj); - -/***************************** filterbank functions **************************/ - -void WebRtcIsac_FilterAndCombineFloat(float* InLP, - float* InHP, - float* Out, - PostFiltBankstr* postfiltdata); - -/************************* normalized lattice filters ************************/ - -void WebRtcIsac_NormLatticeFilterMa(int orderCoef, - float* stateF, - float* stateG, - float* lat_in, - double* filtcoeflo, - double* lat_out); - -void WebRtcIsac_NormLatticeFilterAr(int orderCoef, - float* stateF, - float* stateG, - double* lat_in, - double* lo_filt_coef, - float* lat_out); - -void 
WebRtcIsac_Dir2Lat(double* a, int orderCoef, float* sth, float* cth); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CODEC_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/crc.c b/modules/audio_coding/codecs/isac/main/source/crc.c deleted file mode 100644 index 1bb0827031..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/crc.c +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/crc.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" - -#define POLYNOMIAL 0x04c11db7L - - -static const uint32_t kCrcTable[256] = { - 0, 0x4c11db7, 0x9823b6e, 0xd4326d9, 0x130476dc, 0x17c56b6b, - 0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, - 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7, - 0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, - 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3, - 0x709f7b7a, 0x745e66cd, 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, - 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, 0xbe2b5b58, 0xbaea46ef, - 0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, - 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb, - 0xceb42022, 0xca753d95, 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, - 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, 0x34867077, 0x30476dc0, - 0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, - 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x18aeb13, 0x54bf6a4, 
- 0x808d07d, 0xcc9cdca, 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, - 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, 0x5e9f46bf, 0x5a5e5b08, - 0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, - 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc, - 0xb6238b25, 0xb2e29692, 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, - 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, 0xe0b41de7, 0xe4750050, - 0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, - 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34, - 0xdc3abded, 0xd8fba05a, 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, - 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, 0x4f040d56, 0x4bc510e1, - 0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, - 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5, - 0x3f9b762c, 0x3b5a6b9b, 0x315d626, 0x7d4cb91, 0xa97ed48, 0xe56f0ff, - 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, 0xf12f560e, 0xf5ee4bb9, - 0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, - 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd, - 0xcda1f604, 0xc960ebb3, 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, - 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, 0x9b3660c6, 0x9ff77d71, - 0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, - 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2, - 0x470cdd2b, 0x43cdc09c, 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, - 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, 0x119b4be9, 0x155a565e, - 0x18197087, 0x1cd86d30, 0x29f3d35, 0x65e2082, 0xb1d065b, 0xfdc1bec, - 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a, - 0x2d15ebe3, 0x29d4f654, 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, - 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, 0xe3a1cbc1, 0xe760d676, - 0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, - 
0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662, - 0x933eb0bb, 0x97ffad0c, 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, - 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 -}; - - - - -/**************************************************************************** - * WebRtcIsac_GetCrc(...) - * - * This function returns a 32 bit CRC checksum of a bit stream - * - * Input: - * - bitstream : payload bitstream - * - len_bitstream_in_bytes : number of 8-bit words in the bit stream - * - * Output: - * - crc : checksum - * - * Return value : 0 - Ok - * -1 - Error - */ - -int WebRtcIsac_GetCrc(const int16_t* bitstream, - int len_bitstream_in_bytes, - uint32_t* crc) -{ - uint8_t* bitstream_ptr_uw8; - uint32_t crc_state; - int byte_cntr; - int crc_tbl_indx; - - /* Sanity Check. */ - if (bitstream == NULL) { - return -1; - } - /* cast to UWord8 pointer */ - bitstream_ptr_uw8 = (uint8_t *)bitstream; - - /* initialize */ - crc_state = 0xFFFFFFFF; - - for (byte_cntr = 0; byte_cntr < len_bitstream_in_bytes; byte_cntr++) { - crc_tbl_indx = (WEBRTC_SPL_RSHIFT_U32(crc_state, 24) ^ - bitstream_ptr_uw8[byte_cntr]) & 0xFF; - crc_state = (crc_state << 8) ^ kCrcTable[crc_tbl_indx]; - } - - *crc = ~crc_state; - return 0; -} diff --git a/modules/audio_coding/codecs/isac/main/source/crc.h b/modules/audio_coding/codecs/isac/main/source/crc.h deleted file mode 100644 index f031019ed3..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/crc.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * crc.h - * - * Checksum functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_ - -#include - -/**************************************************************************** - * WebRtcIsac_GetCrc(...) - * - * This function returns a 32 bit CRC checksum of a bit stream - * - * Input: - * - encoded : payload bit stream - * - no_of_word8s : number of 8-bit words in the bit stream - * - * Output: - * - crc : checksum - * - * Return value : 0 - Ok - * -1 - Error - */ - -int WebRtcIsac_GetCrc(const int16_t* encoded, int no_of_word8s, uint32_t* crc); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_CRC_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/decode.c b/modules/audio_coding/codecs/isac/main/source/decode.c deleted file mode 100644 index 6e114e4a2b..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/decode.c +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * decode_B.c - * - * This file contains definition of funtions for decoding. - * Decoding of lower-band, including normal-decoding and RCU decoding. - * Decoding of upper-band, including 8-12 kHz, when the bandwidth is - * 0-12 kHz, and 8-16 kHz, when the bandwidth is 0-16 kHz. 
- * - */ - -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_filter.h" - -/* - * function to decode the bitstream - * returns the total number of bytes in the stream - */ -int WebRtcIsac_DecodeLb(const TransformTables* transform_tables, - float* signal_out, ISACLBDecStruct* ISACdecLB_obj, - int16_t* current_framesamples, - int16_t isRCUPayload) { - int k; - int len, err; - int16_t bandwidthInd; - - float LP_dec_float[FRAMESAMPLES_HALF]; - float HP_dec_float[FRAMESAMPLES_HALF]; - - double LPw[FRAMESAMPLES_HALF]; - double HPw[FRAMESAMPLES_HALF]; - double LPw_pf[FRAMESAMPLES_HALF]; - - double lo_filt_coef[(ORDERLO + 1)*SUBFRAMES]; - double hi_filt_coef[(ORDERHI + 1)*SUBFRAMES]; - - double real_f[FRAMESAMPLES_HALF]; - double imag_f[FRAMESAMPLES_HALF]; - - double PitchLags[4]; - double PitchGains[4]; - double AvgPitchGain; - int16_t PitchGains_Q12[4]; - int16_t AvgPitchGain_Q12; - - float gain; - - int frame_nb; /* counter */ - int frame_mode; /* 0 30ms, 1 for 60ms */ - /* Processed_samples: 480 (30, 60 ms). Cannot take other values. 
*/ - - WebRtcIsac_ResetBitstream(&(ISACdecLB_obj->bitstr_obj)); - - len = 0; - - /* Decode framelength and BW estimation - not used, - only for stream pointer*/ - err = WebRtcIsac_DecodeFrameLen(&ISACdecLB_obj->bitstr_obj, - current_framesamples); - if (err < 0) { - return err; - } - - /* Frame_mode: - * 0: indicates 30 ms frame (480 samples) - * 1: indicates 60 ms frame (960 samples) */ - frame_mode = *current_framesamples / MAX_FRAMESAMPLES; - - err = WebRtcIsac_DecodeSendBW(&ISACdecLB_obj->bitstr_obj, &bandwidthInd); - if (err < 0) { - return err; - } - - /* One loop if it's one frame (20 or 30ms), 2 loops if 2 frames - bundled together (60ms). */ - for (frame_nb = 0; frame_nb <= frame_mode; frame_nb++) { - /* Decode & de-quantize pitch parameters */ - err = WebRtcIsac_DecodePitchGain(&ISACdecLB_obj->bitstr_obj, - PitchGains_Q12); - if (err < 0) { - return err; - } - - err = WebRtcIsac_DecodePitchLag(&ISACdecLB_obj->bitstr_obj, PitchGains_Q12, - PitchLags); - if (err < 0) { - return err; - } - - AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] + - PitchGains_Q12[2] + PitchGains_Q12[3]) >> 2; - - /* Decode & de-quantize filter coefficients. */ - err = WebRtcIsac_DecodeLpc(&ISACdecLB_obj->bitstr_obj, lo_filt_coef, - hi_filt_coef); - if (err < 0) { - return err; - } - /* Decode & de-quantize spectrum. */ - len = WebRtcIsac_DecodeSpec(&ISACdecLB_obj->bitstr_obj, AvgPitchGain_Q12, - kIsacLowerBand, real_f, imag_f); - if (len < 0) { - return len; - } - - /* Inverse transform. */ - WebRtcIsac_Spec2time(transform_tables, real_f, imag_f, LPw, HPw, - &ISACdecLB_obj->fftstr_obj); - - /* Convert PitchGains back to float for pitchfilter_post */ - for (k = 0; k < 4; k++) { - PitchGains[k] = ((float)PitchGains_Q12[k]) / 4096; - } - if (isRCUPayload) { - for (k = 0; k < 240; k++) { - LPw[k] *= RCU_TRANSCODING_SCALE_INVERSE; - HPw[k] *= RCU_TRANSCODING_SCALE_INVERSE; - } - } - - /* Inverse pitch filter. 
*/ - WebRtcIsac_PitchfilterPost(LPw, LPw_pf, &ISACdecLB_obj->pitchfiltstr_obj, - PitchLags, PitchGains); - /* Convert AvgPitchGain back to float for computation of gain. */ - AvgPitchGain = ((float)AvgPitchGain_Q12) / 4096; - gain = 1.0f - 0.45f * (float)AvgPitchGain; - - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - /* Reduce gain to compensate for pitch enhancer. */ - LPw_pf[k] *= gain; - } - - if (isRCUPayload) { - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - /* Compensation for transcoding gain changes. */ - LPw_pf[k] *= RCU_TRANSCODING_SCALE; - HPw[k] *= RCU_TRANSCODING_SCALE; - } - } - /* Perceptual post-filtering (using normalized lattice filter). */ - WebRtcIsac_NormLatticeFilterAr( - ORDERLO, ISACdecLB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecLB_obj->maskfiltstr_obj).PostStateLoG, LPw_pf, lo_filt_coef, - LP_dec_float); - WebRtcIsac_NormLatticeFilterAr( - ORDERHI, ISACdecLB_obj->maskfiltstr_obj.PostStateHiF, - (ISACdecLB_obj->maskfiltstr_obj).PostStateHiG, HPw, hi_filt_coef, - HP_dec_float); - - /* Recombine the 2 bands. */ - WebRtcIsac_FilterAndCombineFloat(LP_dec_float, HP_dec_float, - signal_out + frame_nb * FRAMESAMPLES, - &ISACdecLB_obj->postfiltbankstr_obj); - } - return len; -} - - -/* - * This decode function is called when the codec is operating in 16 kHz - * bandwidth to decode the upperband, i.e. 8-16 kHz. - * - * Contrary to lower-band, the upper-band (8-16 kHz) is not split in - * frequency, but split to 12 sub-frames, i.e. twice as lower-band. - */ -int WebRtcIsac_DecodeUb16(const TransformTables* transform_tables, - float* signal_out, ISACUBDecStruct* ISACdecUB_obj, - int16_t isRCUPayload) { - int len, err; - - double halfFrameFirst[FRAMESAMPLES_HALF]; - double halfFrameSecond[FRAMESAMPLES_HALF]; - - double percepFilterParam[(UB_LPC_ORDER + 1) * (SUBFRAMES << 1) + - (UB_LPC_ORDER + 1)]; - - double real_f[FRAMESAMPLES_HALF]; - double imag_f[FRAMESAMPLES_HALF]; - const int16_t kAveragePitchGain = 0; /* No pitch-gain for upper-band. 
*/ - len = 0; - - /* Decode & de-quantize filter coefficients. */ - memset(percepFilterParam, 0, sizeof(percepFilterParam)); - err = WebRtcIsac_DecodeInterpolLpcUb(&ISACdecUB_obj->bitstr_obj, - percepFilterParam, isac16kHz); - if (err < 0) { - return err; - } - - /* Decode & de-quantize spectrum. */ - len = WebRtcIsac_DecodeSpec(&ISACdecUB_obj->bitstr_obj, kAveragePitchGain, - kIsacUpperBand16, real_f, imag_f); - if (len < 0) { - return len; - } - if (isRCUPayload) { - int n; - for (n = 0; n < 240; n++) { - real_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - imag_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - } - } - /* Inverse transform. */ - WebRtcIsac_Spec2time(transform_tables, - real_f, imag_f, halfFrameFirst, halfFrameSecond, - &ISACdecUB_obj->fftstr_obj); - - /* Perceptual post-filtering (using normalized lattice filter). */ - WebRtcIsac_NormLatticeFilterAr( - UB_LPC_ORDER, ISACdecUB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameFirst, - &percepFilterParam[(UB_LPC_ORDER + 1)], signal_out); - - WebRtcIsac_NormLatticeFilterAr( - UB_LPC_ORDER, ISACdecUB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, halfFrameSecond, - &percepFilterParam[(UB_LPC_ORDER + 1) * SUBFRAMES + (UB_LPC_ORDER + 1)], - &signal_out[FRAMESAMPLES_HALF]); - - return len; -} - -/* - * This decode function is called when the codec operates at 0-12 kHz - * bandwidth to decode the upperband, i.e. 8-12 kHz. - * - * At the encoder the upper-band is split into two band, 8-12 kHz & 12-16 - * kHz, and only 8-12 kHz is encoded. At the decoder, 8-12 kHz band is - * reconstructed and 12-16 kHz replaced with zeros. Then two bands - * are combined, to reconstruct the upperband 8-16 kHz. 
- */ -int WebRtcIsac_DecodeUb12(const TransformTables* transform_tables, - float* signal_out, ISACUBDecStruct* ISACdecUB_obj, - int16_t isRCUPayload) { - int len, err; - - float LP_dec_float[FRAMESAMPLES_HALF]; - float HP_dec_float[FRAMESAMPLES_HALF]; - - double LPw[FRAMESAMPLES_HALF]; - double HPw[FRAMESAMPLES_HALF]; - - double percepFilterParam[(UB_LPC_ORDER + 1)*SUBFRAMES]; - - double real_f[FRAMESAMPLES_HALF]; - double imag_f[FRAMESAMPLES_HALF]; - const int16_t kAveragePitchGain = 0; /* No pitch-gain for upper-band. */ - len = 0; - - /* Decode & dequantize filter coefficients. */ - err = WebRtcIsac_DecodeInterpolLpcUb(&ISACdecUB_obj->bitstr_obj, - percepFilterParam, isac12kHz); - if (err < 0) { - return err; - } - - /* Decode & de-quantize spectrum. */ - len = WebRtcIsac_DecodeSpec(&ISACdecUB_obj->bitstr_obj, kAveragePitchGain, - kIsacUpperBand12, real_f, imag_f); - if (len < 0) { - return len; - } - - if (isRCUPayload) { - int n; - for (n = 0; n < 240; n++) { - real_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - imag_f[n] *= RCU_TRANSCODING_SCALE_UB_INVERSE; - } - } - /* Inverse transform. */ - WebRtcIsac_Spec2time(transform_tables, - real_f, imag_f, LPw, HPw, &ISACdecUB_obj->fftstr_obj); - /* perceptual post-filtering (using normalized lattice filter) */ - WebRtcIsac_NormLatticeFilterAr(UB_LPC_ORDER, - ISACdecUB_obj->maskfiltstr_obj.PostStateLoF, - (ISACdecUB_obj->maskfiltstr_obj).PostStateLoG, - LPw, percepFilterParam, LP_dec_float); - /* Zero for 12-16 kHz. */ - memset(HP_dec_float, 0, sizeof(float) * (FRAMESAMPLES_HALF)); - /* Recombine the 2 bands. 
*/ - WebRtcIsac_FilterAndCombineFloat(HP_dec_float, LP_dec_float, signal_out, - &ISACdecUB_obj->postfiltbankstr_obj); - return len; -} diff --git a/modules/audio_coding/codecs/isac/main/source/decode_bwe.c b/modules/audio_coding/codecs/isac/main/source/decode_bwe.c deleted file mode 100644 index 89d970fc75..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/decode_bwe.c +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" - - -int -WebRtcIsac_EstimateBandwidth( - BwEstimatorstr* bwest_str, - Bitstr* streamdata, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts, - enum IsacSamplingRate encoderSampRate, - enum IsacSamplingRate decoderSampRate) -{ - int16_t index; - int16_t frame_samples; - uint32_t sendTimestampIn16kHz; - uint32_t arrivalTimestampIn16kHz; - uint32_t diffSendTime; - uint32_t diffArrivalTime; - int err; - - /* decode framelength and BW estimation */ - err = WebRtcIsac_DecodeFrameLen(streamdata, &frame_samples); - if(err < 0) // error check - { - return err; - } - err = WebRtcIsac_DecodeSendBW(streamdata, &index); - if(err < 0) // error check - { - return err; - } - - /* UPDATE ESTIMATES FROM OTHER SIDE */ - err = WebRtcIsac_UpdateUplinkBwImpl(bwest_str, index, encoderSampRate); - if(err < 0) - { - return err; - } - - // We like BWE to 
work at 16 kHz sampling rate, - // therefore, we have to change the timestamps accordingly. - // translate the send timestamp if required - diffSendTime = (uint32_t)((uint32_t)send_ts - - (uint32_t)bwest_str->senderTimestamp); - bwest_str->senderTimestamp = send_ts; - - diffArrivalTime = (uint32_t)((uint32_t)arr_ts - - (uint32_t)bwest_str->receiverTimestamp); - bwest_str->receiverTimestamp = arr_ts; - - if(decoderSampRate == kIsacSuperWideband) - { - diffArrivalTime = (uint32_t)diffArrivalTime >> 1; - diffSendTime = (uint32_t)diffSendTime >> 1; - } - - // arrival timestamp in 16 kHz - arrivalTimestampIn16kHz = (uint32_t)((uint32_t) - bwest_str->prev_rec_arr_ts + (uint32_t)diffArrivalTime); - // send timestamp in 16 kHz - sendTimestampIn16kHz = (uint32_t)((uint32_t) - bwest_str->prev_rec_send_ts + (uint32_t)diffSendTime); - - err = WebRtcIsac_UpdateBandwidthEstimator(bwest_str, rtp_seq_number, - (frame_samples * 1000) / FS, sendTimestampIn16kHz, - arrivalTimestampIn16kHz, packet_size); - // error check - if(err < 0) - { - return err; - } - - return 0; -} diff --git a/modules/audio_coding/codecs/isac/main/source/encode.c b/modules/audio_coding/codecs/isac/main/source/encode.c deleted file mode 100644 index bf92d02c53..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/encode.c +++ /dev/null @@ -1,1260 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * encode.c - * - * This file contains definition of funtions for encoding. - * Decoding of upper-band, including 8-12 kHz, when the bandwidth is - * 0-12 kHz, and 8-16 kHz, when the bandwidth is 0-16 kHz. 
- * - */ - -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_analysis.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_filter.h" - - -#define UB_LOOKAHEAD 24 - - -/* - Rate allocation tables of lower and upper-band bottleneck for - 12kHz & 16kHz bandwidth. - - 12 kHz bandwidth - ----------------- - The overall bottleneck of the coder is between 38 kbps and 45 kbps. We have - considered 7 enteries, uniformly distributed in this interval, i.e. 38, - 39.17, 40.33, 41.5, 42.67, 43.83 and 45. For every entery, the lower-band - and the upper-band bottlenecks are specified in - 'kLowerBandBitRate12' and 'kUpperBandBitRate12' - tables, respectively. E.g. the overall rate of 41.5 kbps corresponts to a - bottleneck of 31 kbps for lower-band and 27 kbps for upper-band. Given an - overall bottleneck of the codec, we use linear interpolation to get - lower-band and upper-band bottlenecks. 
- - 16 kHz bandwidth - ----------------- - The overall bottleneck of the coder is between 50 kbps and 56 kbps. We have - considered 7 enteries, uniformly distributed in this interval, i.e. 50, 51.2, - 52.4, 53.6, 54.8 and 56. For every entery, the lower-band and the upper-band - bottlenecks are specified in 'kLowerBandBitRate16' and - 'kUpperBandBitRate16' tables, respectively. E.g. the overall rate - of 53.6 kbps corresponts to a bottleneck of 32 kbps for lower-band and 30 - kbps for upper-band. Given an overall bottleneck of the codec, we use linear - interpolation to get lower-band and upper-band bottlenecks. - - */ - -/* 38 39.17 40.33 41.5 42.67 43.83 45 */ -static const int16_t kLowerBandBitRate12[7] = { - 29000, 30000, 30000, 31000, 31000, 32000, 32000 }; -static const int16_t kUpperBandBitRate12[7] = { - 25000, 25000, 27000, 27000, 29000, 29000, 32000 }; - -/* 50 51.2 52.4 53.6 54.8 56 */ -static const int16_t kLowerBandBitRate16[6] = { - 31000, 31000, 32000, 32000, 32000, 32000 }; -static const int16_t kUpperBandBitRate16[6] = { - 28000, 29000, 29000, 30000, 31000, 32000 }; - -/****************************************************************************** - * WebRtcIsac_RateAllocation() - * Internal function to perform a rate-allocation for upper and lower-band, - * given a total rate. - * - * Input: - * - inRateBitPerSec : a total bottleneck in bits/sec. - * - * Output: - * - rateLBBitPerSec : a bottleneck allocated to the lower-band - * in bits/sec. - * - rateUBBitPerSec : a bottleneck allocated to the upper-band - * in bits/sec. - * - * Return value : 0 if rate allocation has been successful. - * -1 if failed to allocate rates. 
- */ - -int16_t WebRtcIsac_RateAllocation(int32_t inRateBitPerSec, - double* rateLBBitPerSec, - double* rateUBBitPerSec, - enum ISACBandwidth* bandwidthKHz) { - int16_t idx; - double idxD; - double idxErr; - if (inRateBitPerSec < 38000) { - /* If the given overall bottleneck is less than 38000 then - * then codec has to operate in wideband mode, i.e. 8 kHz - * bandwidth. */ - *rateLBBitPerSec = (int16_t)((inRateBitPerSec > 32000) ? - 32000 : inRateBitPerSec); - *rateUBBitPerSec = 0; - *bandwidthKHz = isac8kHz; - } else if ((inRateBitPerSec >= 38000) && (inRateBitPerSec < 50000)) { - /* At a bottleneck between 38 and 50 kbps the codec is operating - * at 12 kHz bandwidth. Using xxxBandBitRate12[] to calculates - * upper/lower bottleneck */ - - /* Find the bottlenecks by linear interpolation, - * step is (45000 - 38000)/6.0 we use the inverse of it. */ - const double stepSizeInv = 8.5714286e-4; - idxD = (inRateBitPerSec - 38000) * stepSizeInv; - idx = (idxD >= 6) ? 6 : ((int16_t)idxD); - idxErr = idxD - idx; - *rateLBBitPerSec = kLowerBandBitRate12[idx]; - *rateUBBitPerSec = kUpperBandBitRate12[idx]; - - if (idx < 6) { - *rateLBBitPerSec += (int16_t)( - idxErr * (kLowerBandBitRate12[idx + 1] - kLowerBandBitRate12[idx])); - *rateUBBitPerSec += (int16_t)( - idxErr * (kUpperBandBitRate12[idx + 1] - kUpperBandBitRate12[idx])); - } - *bandwidthKHz = isac12kHz; - } else if ((inRateBitPerSec >= 50000) && (inRateBitPerSec <= 56000)) { - /* A bottleneck between 50 and 56 kbps corresponds to bandwidth - * of 16 kHz. Using xxxBandBitRate16[] to calculates - * upper/lower bottleneck. */ - - /* Find the bottlenecks by linear interpolation - * step is (56000 - 50000)/5 we use the inverse of it. */ - const double stepSizeInv = 8.3333333e-4; - idxD = (inRateBitPerSec - 50000) * stepSizeInv; - idx = (idxD >= 5) ? 
5 : ((int16_t)idxD); - idxErr = idxD - idx; - *rateLBBitPerSec = kLowerBandBitRate16[idx]; - *rateUBBitPerSec = kUpperBandBitRate16[idx]; - - if (idx < 5) { - *rateLBBitPerSec += (int16_t)(idxErr * - (kLowerBandBitRate16[idx + 1] - - kLowerBandBitRate16[idx])); - - *rateUBBitPerSec += (int16_t)(idxErr * - (kUpperBandBitRate16[idx + 1] - - kUpperBandBitRate16[idx])); - } - *bandwidthKHz = isac16kHz; - } else { - /* Out-of-range botlteneck value. */ - return -1; - } - - /* limit the values. */ - *rateLBBitPerSec = (*rateLBBitPerSec > 32000) ? 32000 : *rateLBBitPerSec; - *rateUBBitPerSec = (*rateUBBitPerSec > 32000) ? 32000 : *rateUBBitPerSec; - return 0; -} - - -void WebRtcIsac_ResetBitstream(Bitstr* bit_stream) { - bit_stream->W_upper = 0xFFFFFFFF; - bit_stream->stream_index = 0; - bit_stream->streamval = 0; -} - -int WebRtcIsac_EncodeLb(const TransformTables* transform_tables, - float* in, ISACLBEncStruct* ISACencLB_obj, - int16_t codingMode, - int16_t bottleneckIndex) { - int stream_length = 0; - int err; - int k; - int iterCntr; - - double lofilt_coef[(ORDERLO + 1)*SUBFRAMES]; - double hifilt_coef[(ORDERHI + 1)*SUBFRAMES]; - float LP[FRAMESAMPLES_HALF]; - float HP[FRAMESAMPLES_HALF]; - - double LP_lookahead[FRAMESAMPLES_HALF]; - double HP_lookahead[FRAMESAMPLES_HALF]; - double LP_lookahead_pf[FRAMESAMPLES_HALF + QLOOKAHEAD]; - double LPw[FRAMESAMPLES_HALF]; - - double HPw[FRAMESAMPLES_HALF]; - double LPw_pf[FRAMESAMPLES_HALF]; - int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */ - int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */ - - double PitchLags[4]; - double PitchGains[4]; - int16_t PitchGains_Q12[4]; - int16_t AvgPitchGain_Q12; - - int frame_mode; /* 0 for 30ms, 1 for 60ms */ - int status = 0; - int my_index; - transcode_obj transcodingParam; - double bytesLeftSpecCoding; - uint16_t payloadLimitBytes; - - /* Copy new frame-length and bottleneck rate only for the first 10 ms data */ - if (ISACencLB_obj->buffer_index == 0) { - /* Set the framelength for the next packet. 
*/ - ISACencLB_obj->current_framesamples = ISACencLB_obj->new_framelength; - } - /* 'frame_mode' is 0 (30 ms) or 1 (60 ms). */ - frame_mode = ISACencLB_obj->current_framesamples / MAX_FRAMESAMPLES; - - /* buffer speech samples (by 10ms packet) until the frame-length */ - /* is reached (30 or 60 ms). */ - /*****************************************************************/ - - /* fill the buffer with 10ms input data */ - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - ISACencLB_obj->data_buffer_float[k + ISACencLB_obj->buffer_index] = in[k]; - } - - /* If buffersize is not equal to current framesize then increase index - * and return. We do no encoding untill we have enough audio. */ - if (ISACencLB_obj->buffer_index + FRAMESAMPLES_10ms != FRAMESAMPLES) { - ISACencLB_obj->buffer_index += FRAMESAMPLES_10ms; - return 0; - } - /* If buffer reached the right size, reset index and continue with - * encoding the frame. */ - ISACencLB_obj->buffer_index = 0; - - /* End of buffer function. */ - /**************************/ - - /* Encoding */ - /************/ - - if (frame_mode == 0 || ISACencLB_obj->frame_nb == 0) { - /* This is to avoid Linux warnings until we change 'int' to 'Word32' - * at all places. */ - int intVar; - /* reset bitstream */ - WebRtcIsac_ResetBitstream(&(ISACencLB_obj->bitstr_obj)); - - if ((codingMode == 0) && (frame_mode == 0) && - (ISACencLB_obj->enforceFrameSize == 0)) { - ISACencLB_obj->new_framelength = WebRtcIsac_GetNewFrameLength( - ISACencLB_obj->bottleneck, ISACencLB_obj->current_framesamples); - } - - ISACencLB_obj->s2nr = WebRtcIsac_GetSnr( - ISACencLB_obj->bottleneck, ISACencLB_obj->current_framesamples); - - /* Encode frame length. */ - status = WebRtcIsac_EncodeFrameLen( - ISACencLB_obj->current_framesamples, &ISACencLB_obj->bitstr_obj); - if (status < 0) { - /* Wrong frame size. */ - return status; - } - /* Save framelength for multiple packets memory. 
*/ - ISACencLB_obj->SaveEnc_obj.framelength = - ISACencLB_obj->current_framesamples; - - /* To be used for Redundant Coding. */ - ISACencLB_obj->lastBWIdx = bottleneckIndex; - intVar = (int)bottleneckIndex; - WebRtcIsac_EncodeReceiveBw(&intVar, &ISACencLB_obj->bitstr_obj); - } - - /* Split signal in two bands. */ - WebRtcIsac_SplitAndFilterFloat(ISACencLB_obj->data_buffer_float, LP, HP, - LP_lookahead, HP_lookahead, - &ISACencLB_obj->prefiltbankstr_obj); - - /* estimate pitch parameters and pitch-filter lookahead signal */ - WebRtcIsac_PitchAnalysis(LP_lookahead, LP_lookahead_pf, - &ISACencLB_obj->pitchanalysisstr_obj, PitchLags, - PitchGains); - - /* Encode in FIX Q12. */ - - /* Convert PitchGain to Fixed point. */ - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchGains_Q12[k] = (int16_t)(PitchGains[k] * 4096.0); - } - - /* Set where to store data in multiple packets memory. */ - if (frame_mode == 0 || ISACencLB_obj->frame_nb == 0) { - ISACencLB_obj->SaveEnc_obj.startIdx = 0; - } else { - ISACencLB_obj->SaveEnc_obj.startIdx = 1; - } - - /* Quantize & encode pitch parameters. */ - WebRtcIsac_EncodePitchGain(PitchGains_Q12, &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - WebRtcIsac_EncodePitchLag(PitchLags, PitchGains_Q12, - &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - AvgPitchGain_Q12 = (PitchGains_Q12[0] + PitchGains_Q12[1] + - PitchGains_Q12[2] + PitchGains_Q12[3]) >> 2; - - /* Find coefficients for perceptual pre-filters. */ - WebRtcIsac_GetLpcCoefLb(LP_lookahead_pf, HP_lookahead, - &ISACencLB_obj->maskfiltstr_obj, ISACencLB_obj->s2nr, - PitchGains_Q12, lofilt_coef, hifilt_coef); - - /* Code LPC model and shape - gains not quantized yet. */ - WebRtcIsac_EncodeLpcLb(lofilt_coef, hifilt_coef, &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - /* Convert PitchGains back to FLOAT for pitchfilter_pre. 
*/ - for (k = 0; k < 4; k++) { - PitchGains[k] = ((float)PitchGains_Q12[k]) / 4096; - } - - /* Store the state of arithmetic coder before coding LPC gains. */ - transcodingParam.W_upper = ISACencLB_obj->bitstr_obj.W_upper; - transcodingParam.stream_index = ISACencLB_obj->bitstr_obj.stream_index; - transcodingParam.streamval = ISACencLB_obj->bitstr_obj.streamval; - transcodingParam.stream[0] = - ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index - - 2]; - transcodingParam.stream[1] = - ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index - - 1]; - transcodingParam.stream[2] = - ISACencLB_obj->bitstr_obj.stream[ISACencLB_obj->bitstr_obj.stream_index]; - - /* Store LPC Gains before encoding them. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam.loFiltGain[k] = lofilt_coef[(LPC_LOBAND_ORDER + 1) * k]; - transcodingParam.hiFiltGain[k] = hifilt_coef[(LPC_HIBAND_ORDER + 1) * k]; - } - - /* Code gains */ - WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef, - &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - /* Get the correct value for the payload limit and calculate the - * number of bytes left for coding the spectrum. */ - if ((frame_mode == 1) && (ISACencLB_obj->frame_nb == 0)) { - /* It is a 60ms and we are in the first 30ms then the limit at - * this point should be half of the assigned value. */ - payloadLimitBytes = ISACencLB_obj->payloadLimitBytes60 >> 1; - } else if (frame_mode == 0) { - /* It is a 30ms frame */ - /* Subract 3 because termination process may add 3 bytes. */ - payloadLimitBytes = ISACencLB_obj->payloadLimitBytes30 - 3; - } else { - /* This is the second half of a 60ms frame. */ - /* Subract 3 because termination process may add 3 bytes. */ - payloadLimitBytes = ISACencLB_obj->payloadLimitBytes60 - 3; - } - bytesLeftSpecCoding = payloadLimitBytes - transcodingParam.stream_index; - - /* Perceptual pre-filtering (using normalized lattice filter). */ - /* Low-band filtering. 
*/ - WebRtcIsac_NormLatticeFilterMa(ORDERLO, - ISACencLB_obj->maskfiltstr_obj.PreStateLoF, - ISACencLB_obj->maskfiltstr_obj.PreStateLoG, - LP, lofilt_coef, LPw); - /* High-band filtering. */ - WebRtcIsac_NormLatticeFilterMa(ORDERHI, - ISACencLB_obj->maskfiltstr_obj.PreStateHiF, - ISACencLB_obj->maskfiltstr_obj.PreStateHiG, - HP, hifilt_coef, HPw); - /* Pitch filter. */ - WebRtcIsac_PitchfilterPre(LPw, LPw_pf, &ISACencLB_obj->pitchfiltstr_obj, - PitchLags, PitchGains); - /* Transform */ - WebRtcIsac_Time2Spec(transform_tables, - LPw_pf, HPw, fre, fim, &ISACencLB_obj->fftstr_obj); - - /* Save data for multiple packets memory. */ - my_index = ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF; - memcpy(&ISACencLB_obj->SaveEnc_obj.fre[my_index], fre, sizeof(fre)); - memcpy(&ISACencLB_obj->SaveEnc_obj.fim[my_index], fim, sizeof(fim)); - - ISACencLB_obj->SaveEnc_obj.AvgPitchGain[ISACencLB_obj->SaveEnc_obj.startIdx] = - AvgPitchGain_Q12; - - /* Quantization and loss-less coding. */ - err = WebRtcIsac_EncodeSpec(fre, fim, AvgPitchGain_Q12, kIsacLowerBand, - &ISACencLB_obj->bitstr_obj); - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large payload - (we can cure too large payload). */ - if (frame_mode == 1 && ISACencLB_obj->frame_nb == 1) { - /* If this is the second 30ms of a 60ms frame reset - this such that in the next call encoder starts fresh. */ - ISACencLB_obj->frame_nb = 0; - } - return err; - } - iterCntr = 0; - while ((ISACencLB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - double bytesSpecCoderUsed; - double transcodeScale; - - if (iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) { - /* We were not able to limit the payload size */ - if ((frame_mode == 1) && (ISACencLB_obj->frame_nb == 0)) { - /* This was the first 30ms of a 60ms frame. Although - the payload is larger than it should be but we let - the second 30ms be encoded. 
Maybe together we - won't exceed the limit. */ - ISACencLB_obj->frame_nb = 1; - return 0; - } else if ((frame_mode == 1) && (ISACencLB_obj->frame_nb == 1)) { - ISACencLB_obj->frame_nb = 0; - } - - if (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH) { - return -ISAC_PAYLOAD_LARGER_THAN_LIMIT; - } else { - return status; - } - } - - if (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH) { - bytesSpecCoderUsed = STREAM_SIZE_MAX; - /* Being conservative */ - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed * 0.5; - } else { - bytesSpecCoderUsed = ISACencLB_obj->bitstr_obj.stream_index - - transcodingParam.stream_index; - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed; - } - - /* To be safe, we reduce the scale depending on - the number of iterations. */ - transcodeScale *= (1.0 - (0.9 * (double)iterCntr / - (double)MAX_PAYLOAD_LIMIT_ITERATION)); - - /* Scale the LPC Gains. */ - for (k = 0; k < SUBFRAMES; k++) { - lofilt_coef[(LPC_LOBAND_ORDER + 1) * k] = - transcodingParam.loFiltGain[k] * transcodeScale; - hifilt_coef[(LPC_HIBAND_ORDER + 1) * k] = - transcodingParam.hiFiltGain[k] * transcodeScale; - transcodingParam.loFiltGain[k] = lofilt_coef[(LPC_LOBAND_ORDER + 1) * k]; - transcodingParam.hiFiltGain[k] = hifilt_coef[(LPC_HIBAND_ORDER + 1) * k]; - } - - /* Scale DFT coefficients. */ - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - fre[k] = (int16_t)(fre[k] * transcodeScale); - fim[k] = (int16_t)(fim[k] * transcodeScale); - } - - /* Save data for multiple packets memory. */ - my_index = ISACencLB_obj->SaveEnc_obj.startIdx * FRAMESAMPLES_HALF; - memcpy(&ISACencLB_obj->SaveEnc_obj.fre[my_index], fre, sizeof(fre)); - memcpy(&ISACencLB_obj->SaveEnc_obj.fim[my_index], fim, sizeof(fim)); - - /* Re-store the state of arithmetic coder before coding LPC gains. 
*/ - ISACencLB_obj->bitstr_obj.W_upper = transcodingParam.W_upper; - ISACencLB_obj->bitstr_obj.stream_index = transcodingParam.stream_index; - ISACencLB_obj->bitstr_obj.streamval = transcodingParam.streamval; - ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index - 2] = - transcodingParam.stream[0]; - ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index - 1] = - transcodingParam.stream[1]; - ISACencLB_obj->bitstr_obj.stream[transcodingParam.stream_index] = - transcodingParam.stream[2]; - - /* Code gains. */ - WebRtcIsac_EncodeLpcGainLb(lofilt_coef, hifilt_coef, - &ISACencLB_obj->bitstr_obj, - &ISACencLB_obj->SaveEnc_obj); - - /* Update the number of bytes left for encoding the spectrum. */ - bytesLeftSpecCoding = payloadLimitBytes - transcodingParam.stream_index; - - /* Encode the spectrum. */ - err = WebRtcIsac_EncodeSpec(fre, fim, AvgPitchGain_Q12, kIsacLowerBand, - &ISACencLB_obj->bitstr_obj); - - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large - payload (we can cure too large payload). */ - if (frame_mode == 1 && ISACencLB_obj->frame_nb == 1) { - /* If this is the second 30 ms of a 60 ms frame reset - this such that in the next call encoder starts fresh. */ - ISACencLB_obj->frame_nb = 0; - } - return err; - } - iterCntr++; - } - - /* If 60 ms frame-size and just processed the first 30 ms, */ - /* go back to main function to buffer the other 30 ms speech frame. */ - if (frame_mode == 1) { - if (ISACencLB_obj->frame_nb == 0) { - ISACencLB_obj->frame_nb = 1; - return 0; - } else if (ISACencLB_obj->frame_nb == 1) { - ISACencLB_obj->frame_nb = 0; - /* Also update the frame-length for next packet, - in Adaptive mode only. 
*/ - if (codingMode == 0 && (ISACencLB_obj->enforceFrameSize == 0)) { - ISACencLB_obj->new_framelength = - WebRtcIsac_GetNewFrameLength(ISACencLB_obj->bottleneck, - ISACencLB_obj->current_framesamples); - } - } - } else { - ISACencLB_obj->frame_nb = 0; - } - - /* Complete arithmetic coding. */ - stream_length = WebRtcIsac_EncTerminate(&ISACencLB_obj->bitstr_obj); - return stream_length; -} - - - -static int LimitPayloadUb(ISACUBEncStruct* ISACencUB_obj, - uint16_t payloadLimitBytes, - double bytesLeftSpecCoding, - transcode_obj* transcodingParam, - int16_t* fre, int16_t* fim, - double* lpcGains, enum ISACBand band, int status) { - - int iterCntr = 0; - int k; - double bytesSpecCoderUsed; - double transcodeScale; - const int16_t kAveragePitchGain = 0.0; - - do { - if (iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) { - /* We were not able to limit the payload size. */ - return -ISAC_PAYLOAD_LARGER_THAN_LIMIT; - } - - if (status == -ISAC_DISALLOWED_BITSTREAM_LENGTH) { - bytesSpecCoderUsed = STREAM_SIZE_MAX; - /* Being conservative. */ - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed * 0.5; - } else { - bytesSpecCoderUsed = ISACencUB_obj->bitstr_obj.stream_index - - transcodingParam->stream_index; - transcodeScale = bytesLeftSpecCoding / bytesSpecCoderUsed; - } - - /* To be safe, we reduce the scale depending on the - number of iterations. */ - transcodeScale *= (1.0 - (0.9 * (double)iterCntr / - (double)MAX_PAYLOAD_LIMIT_ITERATION)); - - /* Scale the LPC Gains. */ - if (band == kIsacUpperBand16) { - /* Two sets of coefficients if 16 kHz. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam->loFiltGain[k] *= transcodeScale; - transcodingParam->hiFiltGain[k] *= transcodeScale; - } - } else { - /* One sets of coefficients if 12 kHz. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam->loFiltGain[k] *= transcodeScale; - } - } - - /* Scale DFT coefficients. 
*/ - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - fre[k] = (int16_t)(fre[k] * transcodeScale + 0.5); - fim[k] = (int16_t)(fim[k] * transcodeScale + 0.5); - } - /* Store FFT coefficients for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.realFFT, fre, - sizeof(ISACencUB_obj->SaveEnc_obj.realFFT)); - memcpy(ISACencUB_obj->SaveEnc_obj.imagFFT, fim, - sizeof(ISACencUB_obj->SaveEnc_obj.imagFFT)); - - /* Store the state of arithmetic coder before coding LPC gains */ - ISACencUB_obj->bitstr_obj.W_upper = transcodingParam->W_upper; - ISACencUB_obj->bitstr_obj.stream_index = transcodingParam->stream_index; - ISACencUB_obj->bitstr_obj.streamval = transcodingParam->streamval; - ISACencUB_obj->bitstr_obj.stream[transcodingParam->stream_index - 2] = - transcodingParam->stream[0]; - ISACencUB_obj->bitstr_obj.stream[transcodingParam->stream_index - 1] = - transcodingParam->stream[1]; - ISACencUB_obj->bitstr_obj.stream[transcodingParam->stream_index] = - transcodingParam->stream[2]; - - /* Store the gains for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, - SUBFRAMES * sizeof(double)); - /* Entropy Code lpc-gains, indices are stored for a later use.*/ - WebRtcIsac_EncodeLpcGainUb(transcodingParam->loFiltGain, - &ISACencUB_obj->bitstr_obj, - ISACencUB_obj->SaveEnc_obj.lpcGainIndex); - - /* If 16kHz should do one more set. */ - if (band == kIsacUpperBand16) { - /* Store the gains for multiple encoding. */ - memcpy(&ISACencUB_obj->SaveEnc_obj.lpcGain[SUBFRAMES], - &lpcGains[SUBFRAMES], SUBFRAMES * sizeof(double)); - /* Entropy Code lpc-gains, indices are stored for a later use.*/ - WebRtcIsac_EncodeLpcGainUb( - transcodingParam->hiFiltGain, &ISACencUB_obj->bitstr_obj, - &ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]); - } - - /* Update the number of bytes left for encoding the spectrum. */ - bytesLeftSpecCoding = payloadLimitBytes - - ISACencUB_obj->bitstr_obj.stream_index; - - /* Save the bit-stream object at this point for FEC. 
*/ - memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj, - &ISACencUB_obj->bitstr_obj, sizeof(Bitstr)); - - /* Encode the spectrum. */ - status = WebRtcIsac_EncodeSpec(fre, fim, kAveragePitchGain, - band, &ISACencUB_obj->bitstr_obj); - if ((status < 0) && (status != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large payload - (we can cure too large payload). */ - return status; - } - iterCntr++; - } while ((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (status == -ISAC_DISALLOWED_BITSTREAM_LENGTH)); - return 0; -} - -int WebRtcIsac_EncodeUb16(const TransformTables* transform_tables, - float* in, ISACUBEncStruct* ISACencUB_obj, - int32_t jitterInfo) { - int err; - int k; - - double lpcVecs[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - double percepFilterParams[(1 + UB_LPC_ORDER) * (SUBFRAMES << 1) + - (1 + UB_LPC_ORDER)]; - - double LP_lookahead[FRAMESAMPLES]; - int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */ - int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */ - - int status = 0; - - double varscale[2]; - double corr[SUBFRAMES << 1][UB_LPC_ORDER + 1]; - double lpcGains[SUBFRAMES << 1]; - transcode_obj transcodingParam; - uint16_t payloadLimitBytes; - double s2nr; - const int16_t kAveragePitchGain = 0.0; - int bytesLeftSpecCoding; - - /* Buffer speech samples (by 10ms packet) until the frame-length is */ - /* reached (30 ms). */ - /*********************************************************************/ - - /* fill the buffer with 10ms input data */ - memcpy(&ISACencUB_obj->data_buffer_float[ISACencUB_obj->buffer_index], in, - FRAMESAMPLES_10ms * sizeof(float)); - - /* If buffer size is not equal to current frame-size, and end of file is - * not reached yet, we don't do encoding unless we have the whole frame. */ - if (ISACencUB_obj->buffer_index + FRAMESAMPLES_10ms < FRAMESAMPLES) { - ISACencUB_obj->buffer_index += FRAMESAMPLES_10ms; - return 0; - } - - /* End of buffer function. 
*/ - /**************************/ - - /* Encoding */ - /************/ - - /* Reset bit-stream */ - WebRtcIsac_ResetBitstream(&(ISACencUB_obj->bitstr_obj)); - - /* Encoding of bandwidth information. */ - WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj); - - status = WebRtcIsac_EncodeBandwidth(isac16kHz, &ISACencUB_obj->bitstr_obj); - if (status < 0) { - return status; - } - - s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck, FRAMESAMPLES); - - memcpy(lpcVecs, ISACencUB_obj->lastLPCVec, UB_LPC_ORDER * sizeof(double)); - - for (k = 0; k < FRAMESAMPLES; k++) { - LP_lookahead[k] = ISACencUB_obj->data_buffer_float[UB_LOOKAHEAD + k]; - } - - /* Find coefficients for perceptual pre-filters. */ - WebRtcIsac_GetLpcCoefUb(LP_lookahead, &ISACencUB_obj->maskfiltstr_obj, - &lpcVecs[UB_LPC_ORDER], corr, varscale, isac16kHz); - - memcpy(ISACencUB_obj->lastLPCVec, - &lpcVecs[(UB16_LPC_VEC_PER_FRAME - 1) * (UB_LPC_ORDER)], - sizeof(double) * UB_LPC_ORDER); - - /* Code LPC model and shape - gains not quantized yet. */ - WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj, - percepFilterParams, isac16kHz, - &ISACencUB_obj->SaveEnc_obj); - - /* the first set of lpc parameters are from the last sub-frame of - * the previous frame. so we don't care about them. 
*/ - WebRtcIsac_GetLpcGain(s2nr, &percepFilterParams[UB_LPC_ORDER + 1], - (SUBFRAMES << 1), lpcGains, corr, varscale); - - /* Store the state of arithmetic coder before coding LPC gains */ - transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index; - transcodingParam.W_upper = ISACencUB_obj->bitstr_obj.W_upper; - transcodingParam.streamval = ISACencUB_obj->bitstr_obj.streamval; - transcodingParam.stream[0] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 2]; - transcodingParam.stream[1] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 1]; - transcodingParam.stream[2] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index]; - - /* Store LPC Gains before encoding them. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam.loFiltGain[k] = lpcGains[k]; - transcodingParam.hiFiltGain[k] = lpcGains[SUBFRAMES + k]; - } - - /* Store the gains for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, - (SUBFRAMES << 1) * sizeof(double)); - - WebRtcIsac_EncodeLpcGainUb(lpcGains, &ISACencUB_obj->bitstr_obj, - ISACencUB_obj->SaveEnc_obj.lpcGainIndex); - WebRtcIsac_EncodeLpcGainUb( - &lpcGains[SUBFRAMES], &ISACencUB_obj->bitstr_obj, - &ISACencUB_obj->SaveEnc_obj.lpcGainIndex[SUBFRAMES]); - - /* Get the correct value for the payload limit and calculate the number of - bytes left for coding the spectrum. It is a 30ms frame - Subract 3 because termination process may add 3 bytes */ - payloadLimitBytes = ISACencUB_obj->maxPayloadSizeBytes - - ISACencUB_obj->numBytesUsed - 3; - bytesLeftSpecCoding = payloadLimitBytes - - ISACencUB_obj->bitstr_obj.stream_index; - - for (k = 0; k < (SUBFRAMES << 1); k++) { - percepFilterParams[k * (UB_LPC_ORDER + 1) + (UB_LPC_ORDER + 1)] = - lpcGains[k]; - } - - /* LPC filtering (using normalized lattice filter), */ - /* first half-frame. 
*/ - WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER, - ISACencUB_obj->maskfiltstr_obj.PreStateLoF, - ISACencUB_obj->maskfiltstr_obj.PreStateLoG, - &ISACencUB_obj->data_buffer_float[0], - &percepFilterParams[UB_LPC_ORDER + 1], - &LP_lookahead[0]); - - /* Second half-frame filtering. */ - WebRtcIsac_NormLatticeFilterMa( - UB_LPC_ORDER, ISACencUB_obj->maskfiltstr_obj.PreStateLoF, - ISACencUB_obj->maskfiltstr_obj.PreStateLoG, - &ISACencUB_obj->data_buffer_float[FRAMESAMPLES_HALF], - &percepFilterParams[(UB_LPC_ORDER + 1) + SUBFRAMES * (UB_LPC_ORDER + 1)], - &LP_lookahead[FRAMESAMPLES_HALF]); - - WebRtcIsac_Time2Spec(transform_tables, - &LP_lookahead[0], &LP_lookahead[FRAMESAMPLES_HALF], - fre, fim, &ISACencUB_obj->fftstr_obj); - - /* Store FFT coefficients for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.realFFT, fre, sizeof(fre)); - memcpy(ISACencUB_obj->SaveEnc_obj.imagFFT, fim, sizeof(fim)); - - /* Prepare the audio buffer for the next packet - * move the last 3 ms to the beginning of the buffer. */ - memcpy(ISACencUB_obj->data_buffer_float, - &ISACencUB_obj->data_buffer_float[FRAMESAMPLES], - LB_TOTAL_DELAY_SAMPLES * sizeof(float)); - /* start writing with 3 ms delay to compensate for the delay - * of the lower-band. */ - ISACencUB_obj->buffer_index = LB_TOTAL_DELAY_SAMPLES; - - /* Save the bit-stream object at this point for FEC. */ - memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj, &ISACencUB_obj->bitstr_obj, - sizeof(Bitstr)); - - /* Qantization and lossless coding */ - /* Note that there is no pitch-gain for this band so kAveragePitchGain = 0 - * is passed to the function. In fact, the function ignores the 3rd parameter - * for this band. 
*/ - err = WebRtcIsac_EncodeSpec(fre, fim, kAveragePitchGain, kIsacUpperBand16, - &ISACencUB_obj->bitstr_obj); - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - return err; - } - - if ((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - err = LimitPayloadUb(ISACencUB_obj, payloadLimitBytes, bytesLeftSpecCoding, - &transcodingParam, fre, fim, lpcGains, - kIsacUpperBand16, err); - } - if (err < 0) { - return err; - } - /* Complete arithmetic coding. */ - return WebRtcIsac_EncTerminate(&ISACencUB_obj->bitstr_obj); -} - - -int WebRtcIsac_EncodeUb12(const TransformTables* transform_tables, - float* in, ISACUBEncStruct* ISACencUB_obj, - int32_t jitterInfo) { - int err; - int k; - - double lpcVecs[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - - double percepFilterParams[(1 + UB_LPC_ORDER) * SUBFRAMES]; - float LP[FRAMESAMPLES_HALF]; - float HP[FRAMESAMPLES_HALF]; - - double LP_lookahead[FRAMESAMPLES_HALF]; - double HP_lookahead[FRAMESAMPLES_HALF]; - double LPw[FRAMESAMPLES_HALF]; - - double HPw[FRAMESAMPLES_HALF]; - int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */ - int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */ - - int status = 0; - - double varscale[1]; - - double corr[UB_LPC_GAIN_DIM][UB_LPC_ORDER + 1]; - double lpcGains[SUBFRAMES]; - transcode_obj transcodingParam; - uint16_t payloadLimitBytes; - double s2nr; - const int16_t kAveragePitchGain = 0.0; - double bytesLeftSpecCoding; - - /* Buffer speech samples (by 10ms packet) until the framelength is */ - /* reached (30 ms). */ - /********************************************************************/ - - /* Fill the buffer with 10ms input data. */ - memcpy(&ISACencUB_obj->data_buffer_float[ISACencUB_obj->buffer_index], in, - FRAMESAMPLES_10ms * sizeof(float)); - - /* if buffer-size is not equal to current frame-size then increase the - index and return. We do the encoding when we have enough audio. 
*/ - if (ISACencUB_obj->buffer_index + FRAMESAMPLES_10ms < FRAMESAMPLES) { - ISACencUB_obj->buffer_index += FRAMESAMPLES_10ms; - return 0; - } - /* If buffer reached the right size, reset index and continue - with encoding the frame */ - ISACencUB_obj->buffer_index = 0; - - /* End of buffer function */ - /**************************/ - - /* Encoding */ - /************/ - - /* Reset bit-stream. */ - WebRtcIsac_ResetBitstream(&(ISACencUB_obj->bitstr_obj)); - - /* Encoding bandwidth information. */ - WebRtcIsac_EncodeJitterInfo(jitterInfo, &ISACencUB_obj->bitstr_obj); - status = WebRtcIsac_EncodeBandwidth(isac12kHz, &ISACencUB_obj->bitstr_obj); - if (status < 0) { - return status; - } - - s2nr = WebRtcIsac_GetSnr(ISACencUB_obj->bottleneck, FRAMESAMPLES); - - /* Split signal in two bands. */ - WebRtcIsac_SplitAndFilterFloat(ISACencUB_obj->data_buffer_float, HP, LP, - HP_lookahead, LP_lookahead, - &ISACencUB_obj->prefiltbankstr_obj); - - /* Find coefficients for perceptual pre-filters. */ - WebRtcIsac_GetLpcCoefUb(LP_lookahead, &ISACencUB_obj->maskfiltstr_obj, - lpcVecs, corr, varscale, isac12kHz); - - /* Code LPC model and shape - gains not quantized yet. */ - WebRtcIsac_EncodeLpcUB(lpcVecs, &ISACencUB_obj->bitstr_obj, - percepFilterParams, isac12kHz, - &ISACencUB_obj->SaveEnc_obj); - - WebRtcIsac_GetLpcGain(s2nr, percepFilterParams, SUBFRAMES, lpcGains, corr, - varscale); - - /* Store the state of arithmetic coder before coding LPC gains. 
*/ - transcodingParam.W_upper = ISACencUB_obj->bitstr_obj.W_upper; - transcodingParam.stream_index = ISACencUB_obj->bitstr_obj.stream_index; - transcodingParam.streamval = ISACencUB_obj->bitstr_obj.streamval; - transcodingParam.stream[0] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 2]; - transcodingParam.stream[1] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index - - 1]; - transcodingParam.stream[2] = - ISACencUB_obj->bitstr_obj.stream[ISACencUB_obj->bitstr_obj.stream_index]; - - /* Store LPC Gains before encoding them. */ - for (k = 0; k < SUBFRAMES; k++) { - transcodingParam.loFiltGain[k] = lpcGains[k]; - } - - /* Store the gains for multiple encoding. */ - memcpy(ISACencUB_obj->SaveEnc_obj.lpcGain, lpcGains, SUBFRAMES * - sizeof(double)); - - WebRtcIsac_EncodeLpcGainUb(lpcGains, &ISACencUB_obj->bitstr_obj, - ISACencUB_obj->SaveEnc_obj.lpcGainIndex); - - for (k = 0; k < SUBFRAMES; k++) { - percepFilterParams[k * (UB_LPC_ORDER + 1)] = lpcGains[k]; - } - - /* perceptual pre-filtering (using normalized lattice filter) */ - /* low-band filtering */ - WebRtcIsac_NormLatticeFilterMa(UB_LPC_ORDER, - ISACencUB_obj->maskfiltstr_obj.PreStateLoF, - ISACencUB_obj->maskfiltstr_obj.PreStateLoG, LP, - percepFilterParams, LPw); - - /* Get the correct value for the payload limit and calculate the number - of bytes left for coding the spectrum. It is a 30ms frame Subract 3 - because termination process may add 3 bytes */ - payloadLimitBytes = ISACencUB_obj->maxPayloadSizeBytes - - ISACencUB_obj->numBytesUsed - 3; - bytesLeftSpecCoding = payloadLimitBytes - - ISACencUB_obj->bitstr_obj.stream_index; - - memset(HPw, 0, sizeof(HPw)); - - /* Transform */ - WebRtcIsac_Time2Spec(transform_tables, - LPw, HPw, fre, fim, &ISACencUB_obj->fftstr_obj); - - /* Store FFT coefficients for multiple encoding. 
*/ - memcpy(ISACencUB_obj->SaveEnc_obj.realFFT, fre, - sizeof(ISACencUB_obj->SaveEnc_obj.realFFT)); - memcpy(ISACencUB_obj->SaveEnc_obj.imagFFT, fim, - sizeof(ISACencUB_obj->SaveEnc_obj.imagFFT)); - - /* Save the bit-stream object at this point for FEC. */ - memcpy(&ISACencUB_obj->SaveEnc_obj.bitStreamObj, - &ISACencUB_obj->bitstr_obj, sizeof(Bitstr)); - - /* Quantization and loss-less coding */ - /* The 4th parameter to this function is pitch-gain, which is only used - * when encoding 0-8 kHz band, and irrelevant in this function, therefore, - * we insert zero here. */ - err = WebRtcIsac_EncodeSpec(fre, fim, kAveragePitchGain, kIsacUpperBand12, - &ISACencUB_obj->bitstr_obj); - if ((err < 0) && (err != -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - /* There has been an error but it was not too large - payload (we can cure too large payload) */ - return err; - } - - if ((ISACencUB_obj->bitstr_obj.stream_index > payloadLimitBytes) || - (err == -ISAC_DISALLOWED_BITSTREAM_LENGTH)) { - err = LimitPayloadUb(ISACencUB_obj, payloadLimitBytes, bytesLeftSpecCoding, - &transcodingParam, fre, fim, lpcGains, - kIsacUpperBand12, err); - } - if (err < 0) { - return err; - } - /* Complete arithmetic coding. */ - return WebRtcIsac_EncTerminate(&ISACencUB_obj->bitstr_obj); -} - - - - - - -/* This function is used to create a new bit-stream with new BWE. - The same data as previously encoded with the function WebRtcIsac_Encoder(). - The data needed is taken from the structure, where it was stored - when calling the encoder. 
*/ - -int WebRtcIsac_EncodeStoredDataLb(const IsacSaveEncoderData* ISACSavedEnc_obj, - Bitstr* ISACBitStr_obj, int BWnumber, - float scale) { - int ii; - int status; - int BWno = BWnumber; - - const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1]; - const uint16_t** cdf; - - double tmpLPCcoeffs_lo[(ORDERLO + 1)*SUBFRAMES * 2]; - double tmpLPCcoeffs_hi[(ORDERHI + 1)*SUBFRAMES * 2]; - int tmpLPCindex_g[12 * 2]; - int16_t tmp_fre[FRAMESAMPLES], tmp_fim[FRAMESAMPLES]; - const int kModel = 0; - - /* Sanity Check - possible values for BWnumber is 0 - 23. */ - if ((BWnumber < 0) || (BWnumber > 23)) { - return -ISAC_RANGE_ERROR_BW_ESTIMATOR; - } - - /* Reset bit-stream. */ - WebRtcIsac_ResetBitstream(ISACBitStr_obj); - - /* Encode frame length */ - status = WebRtcIsac_EncodeFrameLen(ISACSavedEnc_obj->framelength, - ISACBitStr_obj); - if (status < 0) { - /* Wrong frame size. */ - return status; - } - - /* Transcoding */ - if ((scale > 0.0) && (scale < 1.0)) { - /* Compensate LPC gain. */ - for (ii = 0; - ii < ((ORDERLO + 1)* SUBFRAMES * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmpLPCcoeffs_lo[ii] = scale * ISACSavedEnc_obj->LPCcoeffs_lo[ii]; - } - for (ii = 0; - ii < ((ORDERHI + 1) * SUBFRAMES * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmpLPCcoeffs_hi[ii] = scale * ISACSavedEnc_obj->LPCcoeffs_hi[ii]; - } - /* Scale DFT. */ - for (ii = 0; - ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmp_fre[ii] = (int16_t)((scale) * (float)ISACSavedEnc_obj->fre[ii]); - tmp_fim[ii] = (int16_t)((scale) * (float)ISACSavedEnc_obj->fim[ii]); - } - } else { - for (ii = 0; - ii < (KLT_ORDER_GAIN * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmpLPCindex_g[ii] = ISACSavedEnc_obj->LPCindex_g[ii]; - } - for (ii = 0; - ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx)); - ii++) { - tmp_fre[ii] = ISACSavedEnc_obj->fre[ii]; - tmp_fim[ii] = ISACSavedEnc_obj->fim[ii]; - } - } - - /* Encode bandwidth estimate. 
*/ - WebRtcIsac_EncodeReceiveBw(&BWno, ISACBitStr_obj); - - /* Loop over number of 30 msec */ - for (ii = 0; ii <= ISACSavedEnc_obj->startIdx; ii++) { - /* Encode pitch gains. */ - *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf; - WebRtcIsac_EncHistMulti(ISACBitStr_obj, - &ISACSavedEnc_obj->pitchGain_index[ii], - WebRtcIsac_kQPitchGainCdf_ptr, 1); - - /* Entropy coding of quantization pitch lags */ - /* Voicing classification. */ - if (ISACSavedEnc_obj->meanGain[ii] < 0.2) { - cdf = WebRtcIsac_kQPitchLagCdfPtrLo; - } else if (ISACSavedEnc_obj->meanGain[ii] < 0.4) { - cdf = WebRtcIsac_kQPitchLagCdfPtrMid; - } else { - cdf = WebRtcIsac_kQPitchLagCdfPtrHi; - } - WebRtcIsac_EncHistMulti(ISACBitStr_obj, - &ISACSavedEnc_obj->pitchIndex[PITCH_SUBFRAMES * ii], - cdf, PITCH_SUBFRAMES); - - /* LPC */ - /* Only one model exists. The entropy coding is done only for backward - * compatibility. */ - WebRtcIsac_EncHistMulti(ISACBitStr_obj, &kModel, - WebRtcIsac_kQKltModelCdfPtr, 1); - /* Entropy coding of quantization indices - LPC shape only. */ - WebRtcIsac_EncHistMulti(ISACBitStr_obj, - &ISACSavedEnc_obj->LPCindex_s[KLT_ORDER_SHAPE * ii], - WebRtcIsac_kQKltCdfPtrShape, - KLT_ORDER_SHAPE); - - /* If transcoding, get new LPC gain indices */ - if (scale < 1.0) { - WebRtcIsac_TranscodeLPCCoef( - &tmpLPCcoeffs_lo[(ORDERLO + 1) * SUBFRAMES * ii], - &tmpLPCcoeffs_hi[(ORDERHI + 1)*SUBFRAMES * ii], - &tmpLPCindex_g[KLT_ORDER_GAIN * ii]); - } - - /* Entropy coding of quantization indices - LPC gain. */ - WebRtcIsac_EncHistMulti(ISACBitStr_obj, &tmpLPCindex_g[KLT_ORDER_GAIN * ii], - WebRtcIsac_kQKltCdfPtrGain, KLT_ORDER_GAIN); - - /* Quantization and loss-less coding. */ - status = WebRtcIsac_EncodeSpec(&tmp_fre[ii * FRAMESAMPLES_HALF], - &tmp_fim[ii * FRAMESAMPLES_HALF], - ISACSavedEnc_obj->AvgPitchGain[ii], - kIsacLowerBand, ISACBitStr_obj); - if (status < 0) { - return status; - } - } - /* Complete arithmetic coding. 
*/ - return WebRtcIsac_EncTerminate(ISACBitStr_obj); -} - - -int WebRtcIsac_EncodeStoredDataUb( - const ISACUBSaveEncDataStruct* ISACSavedEnc_obj, - Bitstr* bitStream, - int32_t jitterInfo, - float scale, - enum ISACBandwidth bandwidth) { - int n; - int err; - double lpcGain[SUBFRAMES]; - int16_t realFFT[FRAMESAMPLES_HALF]; - int16_t imagFFT[FRAMESAMPLES_HALF]; - const uint16_t** shape_cdf; - int shape_len; - const int16_t kAveragePitchGain = 0.0; - enum ISACBand band; - /* Reset bitstream. */ - WebRtcIsac_ResetBitstream(bitStream); - - /* Encode jitter index. */ - WebRtcIsac_EncodeJitterInfo(jitterInfo, bitStream); - - err = WebRtcIsac_EncodeBandwidth(bandwidth, bitStream); - if (err < 0) { - return err; - } - - /* Encode LPC-shape. */ - if (bandwidth == isac12kHz) { - shape_cdf = WebRtcIsac_kLpcShapeCdfMatUb12; - shape_len = UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME; - band = kIsacUpperBand12; - } else { - shape_cdf = WebRtcIsac_kLpcShapeCdfMatUb16; - shape_len = UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME; - band = kIsacUpperBand16; - } - WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->indexLPCShape, - shape_cdf, shape_len); - - if ((scale <= 0.0) || (scale >= 1.0)) { - /* We only consider scales between zero and one. */ - WebRtcIsac_EncHistMulti(bitStream, ISACSavedEnc_obj->lpcGainIndex, - WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM); - if (bandwidth == isac16kHz) { - /* Store gain indices of the second half. */ - WebRtcIsac_EncHistMulti(bitStream, - &ISACSavedEnc_obj->lpcGainIndex[SUBFRAMES], - WebRtcIsac_kLpcGainCdfMat, UB_LPC_GAIN_DIM); - } - /* Store FFT coefficients. */ - err = WebRtcIsac_EncodeSpec(ISACSavedEnc_obj->realFFT, - ISACSavedEnc_obj->imagFFT, kAveragePitchGain, - band, bitStream); - } else { - /* Scale LPC gain and FFT coefficients. */ - for (n = 0; n < SUBFRAMES; n++) { - lpcGain[n] = scale * ISACSavedEnc_obj->lpcGain[n]; - } - /* Store LPC gains. 
*/ - WebRtcIsac_StoreLpcGainUb(lpcGain, bitStream); - - if (bandwidth == isac16kHz) { - /* Scale and code the gains of the second half of the frame, if 16kHz. */ - for (n = 0; n < SUBFRAMES; n++) { - lpcGain[n] = scale * ISACSavedEnc_obj->lpcGain[n + SUBFRAMES]; - } - WebRtcIsac_StoreLpcGainUb(lpcGain, bitStream); - } - - for (n = 0; n < FRAMESAMPLES_HALF; n++) { - realFFT[n] = (int16_t)(scale * (float)ISACSavedEnc_obj->realFFT[n] + - 0.5f); - imagFFT[n] = (int16_t)(scale * (float)ISACSavedEnc_obj->imagFFT[n] + - 0.5f); - } - /* Store FFT coefficients. */ - err = WebRtcIsac_EncodeSpec(realFFT, imagFFT, kAveragePitchGain, - band, bitStream); - } - if (err < 0) { - /* Error happened while encoding FFT coefficients. */ - return err; - } - - /* Complete arithmetic coding. */ - return WebRtcIsac_EncTerminate(bitStream); -} - -int16_t WebRtcIsac_GetRedPayloadUb( - const ISACUBSaveEncDataStruct* ISACSavedEncObj, - Bitstr* bitStreamObj, - enum ISACBandwidth bandwidth) { - int n; - int16_t status; - int16_t realFFT[FRAMESAMPLES_HALF]; - int16_t imagFFT[FRAMESAMPLES_HALF]; - enum ISACBand band; - const int16_t kAveragePitchGain = 0.0; - /* Store bit-stream object. */ - memcpy(bitStreamObj, &ISACSavedEncObj->bitStreamObj, sizeof(Bitstr)); - - /* Scale FFT coefficients. */ - for (n = 0; n < FRAMESAMPLES_HALF; n++) { - realFFT[n] = (int16_t)((float)ISACSavedEncObj->realFFT[n] * - RCU_TRANSCODING_SCALE_UB + 0.5); - imagFFT[n] = (int16_t)((float)ISACSavedEncObj->imagFFT[n] * - RCU_TRANSCODING_SCALE_UB + 0.5); - } - - band = (bandwidth == isac12kHz) ? 
kIsacUpperBand12 : kIsacUpperBand16; - status = WebRtcIsac_EncodeSpec(realFFT, imagFFT, kAveragePitchGain, band, - bitStreamObj); - if (status < 0) { - return status; - } else { - /* Terminate entropy coding */ - return WebRtcIsac_EncTerminate(bitStreamObj); - } -} diff --git a/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c b/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c deleted file mode 100644 index 7b02e64a01..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c +++ /dev/null @@ -1,706 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * code_LPC_UB.c - * - * This file contains definition of functions used to - * encode LPC parameters (Shape & gain) of the upper band. - * - */ - -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/****************************************************************************** - * WebRtcIsac_RemoveLarMean() - * - * Remove the means from LAR coefficients. - * - * Input: - * -lar : pointer to lar vectors. LAR vectors are - * concatenated. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -lar : pointer to mean-removed LAR:s. 
- * - * - */ -int16_t -WebRtcIsac_RemoveLarMean( - double* lar, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t vecCntr; - int16_t numVec; - const double* meanLAR; - switch(bandwidth) - { - case isac12kHz: - { - numVec = UB_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb12; - break; - } - case isac16kHz: - { - numVec = UB16_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb16; - break; - } - default: - return -1; - } - - for(vecCntr = 0; vecCntr < numVec; vecCntr++) - { - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - // REMOVE MEAN - *lar++ -= meanLAR[coeffCntr]; - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_DecorrelateIntraVec() - * - * Remove the correlation amonge the components of LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from left. - * - * Input: - * -inLar : pointer to mean-removed LAR vecrtors. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. - */ -int16_t -WebRtcIsac_DecorrelateIntraVec( - const double* data, - double* out, - int16_t bandwidth) -{ - const double* ptrData; - const double* ptrRow; - int16_t rowCntr; - int16_t colCntr; - int16_t larVecCntr; - int16_t numVec; - const double* decorrMat; - switch(bandwidth) - { - case isac12kHz: - { - decorrMat = &WebRtcIsac_kIntraVecDecorrMatUb12[0][0]; - numVec = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - decorrMat = &WebRtcIsac_kIintraVecDecorrMatUb16[0][0]; - numVec = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // decorrMat * data - // - // data is assumed to contain 'numVec' of LAR - // vectors (mean removed) each of dimension 'UB_LPC_ORDER' - // concatenated one after the other. 
- // - - ptrData = data; - for(larVecCntr = 0; larVecCntr < numVec; larVecCntr++) - { - for(rowCntr = 0; rowCntr < UB_LPC_ORDER; rowCntr++) - { - ptrRow = &decorrMat[rowCntr * UB_LPC_ORDER]; - *out = 0; - for(colCntr = 0; colCntr < UB_LPC_ORDER; colCntr++) - { - *out += ptrData[colCntr] * ptrRow[colCntr]; - } - out++; - } - ptrData += UB_LPC_ORDER; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_DecorrelateInterVec() - * - * Remover the correlation among mean-removed LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from right. - * - * Input: - * -data : pointer to matrix of LAR vectors. The matrix - * is stored column-wise. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. - */ -int16_t -WebRtcIsac_DecorrelateInterVec( - const double* data, - double* out, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t rowCntr; - int16_t colCntr; - const double* decorrMat; - int16_t interVecDim; - - switch(bandwidth) - { - case isac12kHz: - { - decorrMat = &WebRtcIsac_kInterVecDecorrMatUb12[0][0]; - interVecDim = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - decorrMat = &WebRtcIsac_kInterVecDecorrMatUb16[0][0]; - interVecDim = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // data * decorrMat - // - // data is of size 'interVecDim' * 'UB_LPC_ORDER' - // That is 'interVecDim' of LAR vectors (mean removed) - // in columns each of dimension 'UB_LPC_ORDER'. - // matrix is stored column-wise. 
- // - - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - for(colCntr = 0; colCntr < interVecDim; colCntr++) - { - out[coeffCntr + colCntr * UB_LPC_ORDER] = 0; - for(rowCntr = 0; rowCntr < interVecDim; rowCntr++) - { - out[coeffCntr + colCntr * UB_LPC_ORDER] += - data[coeffCntr + rowCntr * UB_LPC_ORDER] * - decorrMat[rowCntr * interVecDim + colCntr]; - } - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_QuantizeUncorrLar() - * - * Quantize the uncorrelated parameters. - * - * Input: - * -data : uncorrelated LAR vectors. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : quantized version of the input. - * -idx : pointer to quantization indices. - */ -double -WebRtcIsac_QuantizeUncorrLar( - double* data, - int* recIdx, - int16_t bandwidth) -{ - int16_t cntr; - int32_t idx; - int16_t interVecDim; - const double* leftRecPoint; - double quantizationStepSize; - const int16_t* numQuantCell; - switch(bandwidth) - { - case isac12kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb12; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb12; - numQuantCell = WebRtcIsac_kLpcShapeNumRecPointUb12; - interVecDim = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb16; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb16; - numQuantCell = WebRtcIsac_kLpcShapeNumRecPointUb16; - interVecDim = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // Quantize the parametrs. 
- // - for(cntr = 0; cntr < UB_LPC_ORDER * interVecDim; cntr++) - { - idx = (int32_t)floor((*data - leftRecPoint[cntr]) / - quantizationStepSize + 0.5); - if(idx < 0) - { - idx = 0; - } - else if(idx >= numQuantCell[cntr]) - { - idx = numQuantCell[cntr] - 1; - } - - *data++ = leftRecPoint[cntr] + idx * quantizationStepSize; - *recIdx++ = idx; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcParam() - * - * Get the quantized value of uncorrelated LARs given the quantization indices. - * - * Input: - * -idx : pointer to quantiztion indices. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : pointer to quantized values. - */ -int16_t -WebRtcIsac_DequantizeLpcParam( - const int* idx, - double* out, - int16_t bandwidth) -{ - int16_t cntr; - int16_t interVecDim; - const double* leftRecPoint; - double quantizationStepSize; - - switch(bandwidth) - { - case isac12kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb12; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb12; - interVecDim = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: - { - leftRecPoint = WebRtcIsac_kLpcShapeLeftRecPointUb16; - quantizationStepSize = WebRtcIsac_kLpcShapeQStepSizeUb16; - interVecDim = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - // - // Dequantize given the quantization indices - // - - for(cntr = 0; cntr < UB_LPC_ORDER * interVecDim; cntr++) - { - *out++ = leftRecPoint[cntr] + *idx++ * quantizationStepSize; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_CorrelateIntraVec() - * - * This is the inverse of WebRtcIsac_DecorrelateIntraVec(). - * - * Input: - * -data : uncorrelated parameters. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. 
- */ -int16_t -WebRtcIsac_CorrelateIntraVec( - const double* data, - double* out, - int16_t bandwidth) -{ - int16_t vecCntr; - int16_t rowCntr; - int16_t colCntr; - int16_t numVec; - const double* ptrData; - const double* intraVecDecorrMat; - - switch(bandwidth) - { - case isac12kHz: - { - numVec = UB_LPC_VEC_PER_FRAME; - intraVecDecorrMat = &WebRtcIsac_kIntraVecDecorrMatUb12[0][0]; - break; - } - case isac16kHz: - { - numVec = UB16_LPC_VEC_PER_FRAME; - intraVecDecorrMat = &WebRtcIsac_kIintraVecDecorrMatUb16[0][0]; - break; - } - default: - return -1; - } - - - ptrData = data; - for(vecCntr = 0; vecCntr < numVec; vecCntr++) - { - for(colCntr = 0; colCntr < UB_LPC_ORDER; colCntr++) - { - *out = 0; - for(rowCntr = 0; rowCntr < UB_LPC_ORDER; rowCntr++) - { - *out += ptrData[rowCntr] * - intraVecDecorrMat[rowCntr * UB_LPC_ORDER + colCntr]; - } - out++; - } - ptrData += UB_LPC_ORDER; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_CorrelateInterVec() - * - * This is the inverse of WebRtcIsac_DecorrelateInterVec(). - * - * Input: - * -data - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. 
- */ -int16_t -WebRtcIsac_CorrelateInterVec( - const double* data, - double* out, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t rowCntr; - int16_t colCntr; - int16_t interVecDim; - double myVec[UB16_LPC_VEC_PER_FRAME] = {0.0}; - const double* interVecDecorrMat; - - switch(bandwidth) - { - case isac12kHz: - { - interVecDim = UB_LPC_VEC_PER_FRAME; - interVecDecorrMat = &WebRtcIsac_kInterVecDecorrMatUb12[0][0]; - break; - } - case isac16kHz: - { - interVecDim = UB16_LPC_VEC_PER_FRAME; - interVecDecorrMat = &WebRtcIsac_kInterVecDecorrMatUb16[0][0]; - break; - } - default: - return -1; - } - - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - for(rowCntr = 0; rowCntr < interVecDim; rowCntr++) - { - myVec[rowCntr] = 0; - for(colCntr = 0; colCntr < interVecDim; colCntr++) - { - myVec[rowCntr] += data[coeffCntr + colCntr * UB_LPC_ORDER] * //*ptrData * - interVecDecorrMat[rowCntr * interVecDim + colCntr]; - //ptrData += UB_LPC_ORDER; - } - } - - for(rowCntr = 0; rowCntr < interVecDim; rowCntr++) - { - out[coeffCntr + rowCntr * UB_LPC_ORDER] = myVec[rowCntr]; - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_AddLarMean() - * - * This is the inverse of WebRtcIsac_RemoveLarMean() - * - * Input: - * -data : pointer to mean-removed LAR:s. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : pointer to LARs. 
- */ -int16_t -WebRtcIsac_AddLarMean( - double* data, - int16_t bandwidth) -{ - int16_t coeffCntr; - int16_t vecCntr; - int16_t numVec; - const double* meanLAR; - - switch(bandwidth) - { - case isac12kHz: - { - numVec = UB_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb12; - break; - } - case isac16kHz: - { - numVec = UB16_LPC_VEC_PER_FRAME; - meanLAR = WebRtcIsac_kMeanLarUb16; - break; - } - default: - return -1; - } - - for(vecCntr = 0; vecCntr < numVec; vecCntr++) - { - for(coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) - { - *data++ += meanLAR[coeffCntr]; - } - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_ToLogDomainRemoveMean() - * - * Transform the LPC gain to log domain then remove the mean value. - * - * Input: - * -lpcGain : pointer to LPC Gain, expecting 6 LPC gains - * - * Output: - * -lpcGain : mean-removed in log domain. - */ -int16_t -WebRtcIsac_ToLogDomainRemoveMean( - double* data) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - data[coeffCntr] = log(data[coeffCntr]) - WebRtcIsac_kMeanLpcGain; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_DecorrelateLPGain() - * - * Decorrelate LPC gains. There are 6 LPC Gains per frame. This is like - * multiplying gain vector with decorrelating matrix. - * - * Input: - * -data : LPC gain in log-domain with mean removed. - * - * Output: - * -out : decorrelated parameters. 
- */ -int16_t WebRtcIsac_DecorrelateLPGain( - const double* data, - double* out) -{ - int16_t rowCntr; - int16_t colCntr; - - for(colCntr = 0; colCntr < UB_LPC_GAIN_DIM; colCntr++) - { - *out = 0; - for(rowCntr = 0; rowCntr < UB_LPC_GAIN_DIM; rowCntr++) - { - *out += data[rowCntr] * WebRtcIsac_kLpcGainDecorrMat[rowCntr][colCntr]; - } - out++; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_QuantizeLpcGain() - * - * Quantize the decorrelated log-domain gains. - * - * Input: - * -lpcGain : uncorrelated LPC gains. - * - * Output: - * -idx : quantization indices - * -lpcGain : quantized value of the inpt. - */ -double WebRtcIsac_QuantizeLpcGain( - double* data, - int* idx) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - *idx = (int)floor((*data - WebRtcIsac_kLeftRecPointLpcGain[coeffCntr]) / - WebRtcIsac_kQSizeLpcGain + 0.5); - - if(*idx < 0) - { - *idx = 0; - } - else if(*idx >= WebRtcIsac_kNumQCellLpcGain[coeffCntr]) - { - *idx = WebRtcIsac_kNumQCellLpcGain[coeffCntr] - 1; - } - *data = WebRtcIsac_kLeftRecPointLpcGain[coeffCntr] + *idx * - WebRtcIsac_kQSizeLpcGain; - - data++; - idx++; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcGain() - * - * Get the quantized values given the quantization indices. - * - * Input: - * -idx : pointer to quantization indices. - * - * Output: - * -lpcGains : quantized values of the given parametes. 
- */ -int16_t WebRtcIsac_DequantizeLpcGain( - const int* idx, - double* out) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - *out = WebRtcIsac_kLeftRecPointLpcGain[coeffCntr] + *idx * - WebRtcIsac_kQSizeLpcGain; - out++; - idx++; - } - return 0; -} - -/****************************************************************************** - * WebRtcIsac_CorrelateLpcGain() - * - * This is the inverse of WebRtcIsac_DecorrelateLPGain(). - * - * Input: - * -data : decorrelated parameters. - * - * Output: - * -out : correlated parameters. - */ -int16_t WebRtcIsac_CorrelateLpcGain( - const double* data, - double* out) -{ - int16_t rowCntr; - int16_t colCntr; - - for(rowCntr = 0; rowCntr < UB_LPC_GAIN_DIM; rowCntr++) - { - *out = 0; - for(colCntr = 0; colCntr < UB_LPC_GAIN_DIM; colCntr++) - { - *out += WebRtcIsac_kLpcGainDecorrMat[rowCntr][colCntr] * data[colCntr]; - } - out++; - } - - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_AddMeanToLinearDomain() - * - * This is the inverse of WebRtcIsac_ToLogDomainRemoveMean(). - * - * Input: - * -lpcGain : LPC gain in log-domain & mean removed - * - * Output: - * -lpcGain : LPC gain in normal domain. - */ -int16_t WebRtcIsac_AddMeanToLinearDomain( - double* lpcGains) -{ - int16_t coeffCntr; - for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++) - { - lpcGains[coeffCntr] = exp(lpcGains[coeffCntr] + WebRtcIsac_kMeanLpcGain); - } - return 0; -} diff --git a/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h b/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h deleted file mode 100644 index 8bc3d752c3..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * encode_lpc_swb.h - * - * This file contains declaration of functions used to - * encode LPC parameters (Shape & gain) of the upper band. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -/****************************************************************************** - * WebRtcIsac_RemoveLarMean() - * - * Remove the means from LAR coefficients. - * - * Input: - * -lar : pointer to lar vectors. LAR vectors are - * concatenated. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -lar : pointer to mean-removed LAR:s. - * - * - */ -int16_t WebRtcIsac_RemoveLarMean(double* lar, int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_DecorrelateIntraVec() - * - * Remove the correlation amonge the components of LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from left. - * - * Input: - * -inLar : pointer to mean-removed LAR vecrtors. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. 
- */ -int16_t WebRtcIsac_DecorrelateIntraVec(const double* inLAR, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_DecorrelateInterVec() - * - * Remover the correlation among mean-removed LAR vectors. If LAR vectors - * of one frame are put in a matrix where each column is a LAR vector of a - * sub-frame, then this is equivalent to multiplying the LAR matrix with - * a decorrelting mtrix from right. - * - * Input: - * -data : pointer to matrix of LAR vectors. The matrix - * is stored column-wise. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : decorrelated LAR vectors. - */ -int16_t WebRtcIsac_DecorrelateInterVec(const double* data, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_QuantizeUncorrLar() - * - * Quantize the uncorrelated parameters. - * - * Input: - * -data : uncorrelated LAR vectors. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : quantized version of the input. - * -idx : pointer to quantization indices. - */ -double WebRtcIsac_QuantizeUncorrLar(double* data, int* idx, int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_CorrelateIntraVec() - * - * This is the inverse of WebRtcIsac_DecorrelateIntraVec(). - * - * Input: - * -data : uncorrelated parameters. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. - */ -int16_t WebRtcIsac_CorrelateIntraVec(const double* data, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_CorrelateInterVec() - * - * This is the inverse of WebRtcIsac_DecorrelateInterVec(). 
- * - * Input: - * -data - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : correlated parametrs. - */ -int16_t WebRtcIsac_CorrelateInterVec(const double* data, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_AddLarMean() - * - * This is the inverse of WebRtcIsac_RemoveLarMean() - * - * Input: - * -data : pointer to mean-removed LAR:s. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -data : pointer to LARs. - */ -int16_t WebRtcIsac_AddLarMean(double* data, int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcParam() - * - * Get the quantized value of uncorrelated LARs given the quantization indices. - * - * Input: - * -idx : pointer to quantiztion indices. - * -bandwidth : indicates if the given LAR vectors belong - * to SWB-12kHz or SWB-16kHz. - * - * Output: - * -out : pointer to quantized values. - */ -int16_t WebRtcIsac_DequantizeLpcParam(const int* idx, - double* out, - int16_t bandwidth); - -/****************************************************************************** - * WebRtcIsac_ToLogDomainRemoveMean() - * - * Transform the LPC gain to log domain then remove the mean value. - * - * Input: - * -lpcGain : pointer to LPC Gain, expecting 6 LPC gains - * - * Output: - * -lpcGain : mean-removed in log domain. - */ -int16_t WebRtcIsac_ToLogDomainRemoveMean(double* lpGains); - -/****************************************************************************** - * WebRtcIsac_DecorrelateLPGain() - * - * Decorrelate LPC gains. There are 6 LPC Gains per frame. This is like - * multiplying gain vector with decorrelating matrix. - * - * Input: - * -data : LPC gain in log-domain with mean removed. - * - * Output: - * -out : decorrelated parameters. 
- */ -int16_t WebRtcIsac_DecorrelateLPGain(const double* data, double* out); - -/****************************************************************************** - * WebRtcIsac_QuantizeLpcGain() - * - * Quantize the decorrelated log-domain gains. - * - * Input: - * -lpcGain : uncorrelated LPC gains. - * - * Output: - * -idx : quantization indices - * -lpcGain : quantized value of the inpt. - */ -double WebRtcIsac_QuantizeLpcGain(double* lpGains, int* idx); - -/****************************************************************************** - * WebRtcIsac_DequantizeLpcGain() - * - * Get the quantized values given the quantization indices. - * - * Input: - * -idx : pointer to quantization indices. - * - * Output: - * -lpcGains : quantized values of the given parametes. - */ -int16_t WebRtcIsac_DequantizeLpcGain(const int* idx, double* lpGains); - -/****************************************************************************** - * WebRtcIsac_CorrelateLpcGain() - * - * This is the inverse of WebRtcIsac_DecorrelateLPGain(). - * - * Input: - * -data : decorrelated parameters. - * - * Output: - * -out : correlated parameters. - */ -int16_t WebRtcIsac_CorrelateLpcGain(const double* data, double* out); - -/****************************************************************************** - * WebRtcIsac_AddMeanToLinearDomain() - * - * This is the inverse of WebRtcIsac_ToLogDomainRemoveMean(). - * - * Input: - * -lpcGain : LPC gain in log-domain & mean removed - * - * Output: - * -lpcGain : LPC gain in normal domain. 
- */ -int16_t WebRtcIsac_AddMeanToLinearDomain(double* lpcGains); - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENCODE_LPC_SWB_H_ diff --git a/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/modules/audio_coding/codecs/isac/main/source/entropy_coding.c deleted file mode 100644 index 188c8f6b86..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/entropy_coding.c +++ /dev/null @@ -1,2066 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * entropy_coding.c - * - * This header file defines all of the functions used to arithmetically - * encode the iSAC bistream - * - */ - - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/arith_routines.h" -#include "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" - 
-#include -#include - -static const uint16_t kLpcVecPerSegmentUb12 = 5; -static const uint16_t kLpcVecPerSegmentUb16 = 4; - -/* CDF array for encoder bandwidth (12 vs 16 kHz) indicator. */ -static const uint16_t kOneBitEqualProbCdf[3] = { - 0, 32768, 65535 }; - -/* Pointer to cdf array for encoder bandwidth (12 vs 16 kHz) indicator. */ -static const uint16_t* const kOneBitEqualProbCdf_ptr[1] = { - kOneBitEqualProbCdf }; - -/* - * Initial cdf index for decoder of encoded bandwidth - * (12 vs 16 kHz) indicator. - */ -static const uint16_t kOneBitEqualProbInitIndex[1] = { 1 }; - - -static const int kIsSWB12 = 1; - -/* compute correlation from power spectrum */ -static void FindCorrelation(int32_t* PSpecQ12, int32_t* CorrQ7) { - int32_t summ[FRAMESAMPLES / 8]; - int32_t diff[FRAMESAMPLES / 8]; - const int16_t* CS_ptrQ9; - int32_t sum; - int k, n; - - for (k = 0; k < FRAMESAMPLES / 8; k++) { - summ[k] = (PSpecQ12[k] + PSpecQ12[FRAMESAMPLES_QUARTER - 1 - k] + 16) >> 5; - diff[k] = (PSpecQ12[k] - PSpecQ12[FRAMESAMPLES_QUARTER - 1 - k] + 16) >> 5; - } - - sum = 2; - for (n = 0; n < FRAMESAMPLES / 8; n++) { - sum += summ[n]; - } - CorrQ7[0] = sum; - - for (k = 0; k < AR_ORDER; k += 2) { - sum = 0; - CS_ptrQ9 = WebRtcIsac_kCos[k]; - for (n = 0; n < FRAMESAMPLES / 8; n++) - sum += (CS_ptrQ9[n] * diff[n] + 256) >> 9; - CorrQ7[k + 1] = sum; - } - - for (k = 1; k < AR_ORDER; k += 2) { - sum = 0; - CS_ptrQ9 = WebRtcIsac_kCos[k]; - for (n = 0; n < FRAMESAMPLES / 8; n++) - sum += (CS_ptrQ9[n] * summ[n] + 256) >> 9; - CorrQ7[k + 1] = sum; - } -} - -/* compute inverse AR power spectrum */ -/* Changed to the function used in iSAC FIX for compatibility reasons */ -static void FindInvArSpec(const int16_t* ARCoefQ12, - const int32_t gainQ10, - int32_t* CurveQ16) { - int32_t CorrQ11[AR_ORDER + 1]; - int64_t sum, tmpGain; - int32_t diffQ16[FRAMESAMPLES / 8]; - const int16_t* CS_ptrQ9; - int k, n; - int16_t round, shftVal = 0, sh; - - sum = 0; - for (n = 0; n < AR_ORDER + 1; n++) { - sum += 
WEBRTC_SPL_MUL(ARCoefQ12[n], ARCoefQ12[n]); /* Q24 */ - } - sum = ((sum >> 6) * 65 + 32768) >> 16; /* Q8 */ - CorrQ11[0] = (sum * gainQ10 + 256) >> 9; - - /* To avoid overflow, we shift down gainQ10 if it is large. - * We will not lose any precision */ - if (gainQ10 > 400000) { - tmpGain = gainQ10 >> 3; - round = 32; - shftVal = 6; - } else { - tmpGain = gainQ10; - round = 256; - shftVal = 9; - } - - for (k = 1; k < AR_ORDER + 1; k++) { - sum = 16384; - for (n = k; n < AR_ORDER + 1; n++) - sum += WEBRTC_SPL_MUL(ARCoefQ12[n - k], ARCoefQ12[n]); /* Q24 */ - sum >>= 15; - CorrQ11[k] = (sum * tmpGain + round) >> shftVal; - } - sum = CorrQ11[0] << 7; - for (n = 0; n < FRAMESAMPLES / 8; n++) { - CurveQ16[n] = sum; - } - for (k = 1; k < AR_ORDER; k += 2) { - for (n = 0; n < FRAMESAMPLES / 8; n++) { - CurveQ16[n] += (WebRtcIsac_kCos[k][n] * CorrQ11[k + 1] + 2) >> 2; - } - } - - CS_ptrQ9 = WebRtcIsac_kCos[0]; - - /* If CorrQ11[1] too large we avoid getting overflow in the - * calculation by shifting */ - sh = WebRtcSpl_NormW32(CorrQ11[1]); - if (CorrQ11[1] == 0) { /* Use next correlation */ - sh = WebRtcSpl_NormW32(CorrQ11[2]); - } - if (sh < 9) { - shftVal = 9 - sh; - } else { - shftVal = 0; - } - for (n = 0; n < FRAMESAMPLES / 8; n++) { - diffQ16[n] = (CS_ptrQ9[n] * (CorrQ11[1] >> shftVal) + 2) >> 2; - } - for (k = 2; k < AR_ORDER; k += 2) { - CS_ptrQ9 = WebRtcIsac_kCos[k]; - for (n = 0; n < FRAMESAMPLES / 8; n++) { - diffQ16[n] += (CS_ptrQ9[n] * (CorrQ11[k + 1] >> shftVal) + 2) >> 2; - } - } - - for (k = 0; k < FRAMESAMPLES / 8; k++) { - int32_t diff_q16_shifted = (int32_t)((uint32_t)(diffQ16[k]) << shftVal); - CurveQ16[FRAMESAMPLES_QUARTER - 1 - k] = CurveQ16[k] - diff_q16_shifted; - CurveQ16[k] += diff_q16_shifted; - } -} - -/* Generate array of dither samples in Q7. 
*/ -static void GenerateDitherQ7Lb(int16_t* bufQ7, uint32_t seed, - int length, int16_t AvgPitchGain_Q12) { - int k, shft; - int16_t dither1_Q7, dither2_Q7, dither_gain_Q14; - - /* This threshold should be equal to that in decode_spec(). */ - if (AvgPitchGain_Q12 < 614) { - for (k = 0; k < length - 2; k += 3) { - /* New random unsigned int. */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64 (Q7). */ - /* dither = seed * 128 / 4294967295 */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* New random unsigned int. */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64. */ - dither2_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - shft = (seed >> 25) & 15; - if (shft < 5) { - bufQ7[k] = dither1_Q7; - bufQ7[k + 1] = dither2_Q7; - bufQ7[k + 2] = 0; - } else if (shft < 10) { - bufQ7[k] = dither1_Q7; - bufQ7[k + 1] = 0; - bufQ7[k + 2] = dither2_Q7; - } else { - bufQ7[k] = 0; - bufQ7[k + 1] = dither1_Q7; - bufQ7[k + 2] = dither2_Q7; - } - } - } else { - dither_gain_Q14 = (int16_t)(22528 - 10 * AvgPitchGain_Q12); - - /* Dither on half of the coefficients. */ - for (k = 0; k < length - 1; k += 2) { - /* New random unsigned int */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64. */ - dither1_Q7 = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* Dither sample is placed in either even or odd index. */ - shft = (seed >> 25) & 1; /* Either 0 or 1 */ - - bufQ7[k + shft] = (((dither_gain_Q14 * dither1_Q7) + 8192) >> 14); - bufQ7[k + 1 - shft] = 0; - } - } -} - - - -/****************************************************************************** - * GenerateDitherQ7LbUB() - * - * generate array of dither samples in Q7 There are less zeros in dither - * vector compared to GenerateDitherQ7Lb. 
- * - * A uniform random number generator with the range of [-64 64] is employed - * but the generated dithers are scaled by 0.35, a heuristic scaling. - * - * Input: - * -seed : the initial seed for the random number generator. - * -length : the number of dither values to be generated. - * - * Output: - * -bufQ7 : pointer to a buffer where dithers are written to. - */ -static void GenerateDitherQ7LbUB( - int16_t* bufQ7, - uint32_t seed, - int length) { - int k; - for (k = 0; k < length; k++) { - /* new random unsigned int */ - seed = (seed * 196314165) + 907633515; - - /* Fixed-point dither sample between -64 and 64 (Q7). */ - /* bufQ7 = seed * 128 / 4294967295 */ - bufQ7[k] = (int16_t)(((int32_t)(seed + 16777216)) >> 25); - - /* Scale by 0.35. */ - bufQ7[k] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(bufQ7[k], 2048, 13); - } -} - -/* - * Function to decode the complex spectrum from the bit stream - * returns the total number of bytes in the stream. - */ -int WebRtcIsac_DecodeSpec(Bitstr* streamdata, int16_t AvgPitchGain_Q12, - enum ISACBand band, double* fr, double* fi) { - int16_t DitherQ7[FRAMESAMPLES]; - int16_t data[FRAMESAMPLES]; - int32_t invARSpec2_Q16[FRAMESAMPLES_QUARTER]; - uint16_t invARSpecQ8[FRAMESAMPLES_QUARTER]; - int16_t ARCoefQ12[AR_ORDER + 1]; - int16_t RCQ15[AR_ORDER]; - int16_t gainQ10; - int32_t gain2_Q10, res; - int32_t in_sqrt; - int32_t newRes; - int k, len, i; - int is_12khz = !kIsSWB12; - int num_dft_coeff = FRAMESAMPLES; - /* Create dither signal. */ - if (band == kIsacLowerBand) { - GenerateDitherQ7Lb(DitherQ7, streamdata->W_upper, FRAMESAMPLES, - AvgPitchGain_Q12); - } else { - GenerateDitherQ7LbUB(DitherQ7, streamdata->W_upper, FRAMESAMPLES); - if (band == kIsacUpperBand12) { - is_12khz = kIsSWB12; - num_dft_coeff = FRAMESAMPLES_HALF; - } - } - - /* Decode model parameters. 
*/ - if (WebRtcIsac_DecodeRc(streamdata, RCQ15) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - if (WebRtcIsac_DecodeGain2(streamdata, &gain2_Q10) < 0) - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - - /* Compute inverse AR power spectrum. */ - FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16); - - /* Convert to magnitude spectrum, - * by doing square-roots (modified from SPLIB). */ - res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1); - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - in_sqrt = invARSpec2_Q16[k]; - i = 10; - - /* Negative values make no sense for a real sqrt-function. */ - if (in_sqrt < 0) - in_sqrt = -in_sqrt; - - newRes = (in_sqrt / res + res) >> 1; - do { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - invARSpecQ8[k] = (int16_t)newRes; - } - - len = WebRtcIsac_DecLogisticMulti2(data, streamdata, invARSpecQ8, DitherQ7, - num_dft_coeff, is_12khz); - /* Arithmetic decoding of spectrum. */ - if (len < 1) { - return -ISAC_RANGE_ERROR_DECODE_SPECTRUM; - } - - switch (band) { - case kIsacLowerBand: { - /* Scale down spectral samples with low SNR. 
*/ - int32_t p1; - int32_t p2; - if (AvgPitchGain_Q12 <= 614) { - p1 = 30 << 10; - p2 = 32768 + (33 << 16); - } else { - p1 = 36 << 10; - p2 = 32768 + (40 << 16); - } - for (k = 0; k < FRAMESAMPLES; k += 4) { - gainQ10 = WebRtcSpl_DivW32W16ResW16(p1, (int16_t)( - (invARSpec2_Q16[k >> 2] + p2) >> 16)); - *fr++ = (double)((data[ k ] * gainQ10 + 512) >> 10) / 128.0; - *fi++ = (double)((data[k + 1] * gainQ10 + 512) >> 10) / 128.0; - *fr++ = (double)((data[k + 2] * gainQ10 + 512) >> 10) / 128.0; - *fi++ = (double)((data[k + 3] * gainQ10 + 512) >> 10) / 128.0; - } - break; - } - case kIsacUpperBand12: { - for (k = 0, i = 0; k < FRAMESAMPLES_HALF; k += 4) { - fr[i] = (double)data[ k ] / 128.0; - fi[i] = (double)data[k + 1] / 128.0; - i++; - fr[i] = (double)data[k + 2] / 128.0; - fi[i] = (double)data[k + 3] / 128.0; - i++; - } - /* The second half of real and imaginary coefficients is zero. This is - * due to using the old FFT module which requires two signals as input - * while in 0-12 kHz mode we only have 8-12 kHz band, and the second - * signal is set to zero. 
*/ - memset(&fr[FRAMESAMPLES_QUARTER], 0, FRAMESAMPLES_QUARTER * - sizeof(double)); - memset(&fi[FRAMESAMPLES_QUARTER], 0, FRAMESAMPLES_QUARTER * - sizeof(double)); - break; - } - case kIsacUpperBand16: { - for (i = 0, k = 0; k < FRAMESAMPLES; k += 4, i++) { - fr[i] = (double)data[ k ] / 128.0; - fi[i] = (double)data[k + 1] / 128.0; - fr[(FRAMESAMPLES_HALF) - 1 - i] = (double)data[k + 2] / 128.0; - fi[(FRAMESAMPLES_HALF) - 1 - i] = (double)data[k + 3] / 128.0; - } - break; - } - } - return len; -} - - -int WebRtcIsac_EncodeSpec(const int16_t* fr, const int16_t* fi, - int16_t AvgPitchGain_Q12, enum ISACBand band, - Bitstr* streamdata) { - int16_t ditherQ7[FRAMESAMPLES]; - int16_t dataQ7[FRAMESAMPLES]; - int32_t PSpec[FRAMESAMPLES_QUARTER]; - int32_t invARSpec2_Q16[FRAMESAMPLES_QUARTER]; - uint16_t invARSpecQ8[FRAMESAMPLES_QUARTER]; - int32_t CorrQ7[AR_ORDER + 1]; - int32_t CorrQ7_norm[AR_ORDER + 1]; - int16_t RCQ15[AR_ORDER]; - int16_t ARCoefQ12[AR_ORDER + 1]; - int32_t gain2_Q10; - int16_t val; - int32_t nrg, res; - uint32_t sum; - int32_t in_sqrt; - int32_t newRes; - int16_t err; - uint32_t nrg_u32; - int shift_var; - int k, n, j, i; - int is_12khz = !kIsSWB12; - int num_dft_coeff = FRAMESAMPLES; - - /* Create dither signal. 
*/ - if (band == kIsacLowerBand) { - GenerateDitherQ7Lb(ditherQ7, streamdata->W_upper, FRAMESAMPLES, - AvgPitchGain_Q12); - } else { - GenerateDitherQ7LbUB(ditherQ7, streamdata->W_upper, FRAMESAMPLES); - if (band == kIsacUpperBand12) { - is_12khz = kIsSWB12; - num_dft_coeff = FRAMESAMPLES_HALF; - } - } - - /* add dither and quantize, and compute power spectrum */ - switch (band) { - case kIsacLowerBand: { - for (k = 0; k < FRAMESAMPLES; k += 4) { - val = ((*fr++ + ditherQ7[k] + 64) & 0xFF80) - ditherQ7[k]; - dataQ7[k] = val; - sum = val * val; - - val = ((*fi++ + ditherQ7[k + 1] + 64) & 0xFF80) - ditherQ7[k + 1]; - dataQ7[k + 1] = val; - sum += val * val; - - val = ((*fr++ + ditherQ7[k + 2] + 64) & 0xFF80) - ditherQ7[k + 2]; - dataQ7[k + 2] = val; - sum += val * val; - - val = ((*fi++ + ditherQ7[k + 3] + 64) & 0xFF80) - ditherQ7[k + 3]; - dataQ7[k + 3] = val; - sum += val * val; - - PSpec[k >> 2] = sum >> 2; - } - break; - } - case kIsacUpperBand12: { - for (k = 0, j = 0; k < FRAMESAMPLES_HALF; k += 4) { - val = ((*fr++ + ditherQ7[k] + 64) & 0xFF80) - ditherQ7[k]; - dataQ7[k] = val; - sum = val * val; - - val = ((*fi++ + ditherQ7[k + 1] + 64) & 0xFF80) - ditherQ7[k + 1]; - dataQ7[k + 1] = val; - sum += val * val; - - PSpec[j++] = sum >> 1; - - val = ((*fr++ + ditherQ7[k + 2] + 64) & 0xFF80) - ditherQ7[k + 2]; - dataQ7[k + 2] = val; - sum = val * val; - - val = ((*fi++ + ditherQ7[k + 3] + 64) & 0xFF80) - ditherQ7[k + 3]; - dataQ7[k + 3] = val; - sum += val * val; - - PSpec[j++] = sum >> 1; - } - break; - } - case kIsacUpperBand16: { - for (j = 0, k = 0; k < FRAMESAMPLES; k += 4, j++) { - val = ((fr[j] + ditherQ7[k] + 64) & 0xFF80) - ditherQ7[k]; - dataQ7[k] = val; - sum = val * val; - - val = ((fi[j] + ditherQ7[k + 1] + 64) & 0xFF80) - ditherQ7[k + 1]; - dataQ7[k + 1] = val; - sum += val * val; - - val = ((fr[(FRAMESAMPLES_HALF) - 1 - j] + ditherQ7[k + 2] + 64) & - 0xFF80) - ditherQ7[k + 2]; - dataQ7[k + 2] = val; - sum += val * val; - - val = 
((fi[(FRAMESAMPLES_HALF) - 1 - j] + ditherQ7[k + 3] + 64) & - 0xFF80) - ditherQ7[k + 3]; - dataQ7[k + 3] = val; - sum += val * val; - - PSpec[k >> 2] = sum >> 2; - } - break; - } - } - - /* compute correlation from power spectrum */ - FindCorrelation(PSpec, CorrQ7); - - /* Find AR coefficients */ - /* Aumber of bit shifts to 14-bit normalize CorrQ7[0] - * (leaving room for sign) */ - shift_var = WebRtcSpl_NormW32(CorrQ7[0]) - 18; - - if (shift_var > 0) { - for (k = 0; k < AR_ORDER + 1; k++) { - CorrQ7_norm[k] = CorrQ7[k] << shift_var; - } - } else { - for (k = 0; k < AR_ORDER + 1; k++) { - CorrQ7_norm[k] = CorrQ7[k] >> (-shift_var); - } - } - - /* Find RC coefficients. */ - WebRtcSpl_AutoCorrToReflCoef(CorrQ7_norm, AR_ORDER, RCQ15); - - /* Quantize & code RC Coefficient. */ - WebRtcIsac_EncodeRc(RCQ15, streamdata); - - /* RC -> AR coefficients */ - WebRtcSpl_ReflCoefToLpc(RCQ15, AR_ORDER, ARCoefQ12); - - /* Compute ARCoef' * Corr * ARCoef in Q19. */ - nrg = 0; - for (j = 0; j <= AR_ORDER; j++) { - for (n = 0; n <= j; n++) { - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[j - n] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - } - for (n = j + 1; n <= AR_ORDER; n++) { - nrg += (ARCoefQ12[j] * ((CorrQ7_norm[n - j] * ARCoefQ12[n] + 256) >> 9) + - 4) >> 3; - } - } - - nrg_u32 = (uint32_t)nrg; - if (shift_var > 0) { - nrg_u32 = nrg_u32 >> shift_var; - } else { - nrg_u32 = nrg_u32 << (-shift_var); - } - if (nrg_u32 > 0x7FFFFFFF) { - nrg = 0x7FFFFFFF; - } else { - nrg = (int32_t)nrg_u32; - } - /* Also shifts 31 bits to the left! */ - gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES_QUARTER, nrg); - - /* Quantize & code gain2_Q10. */ - if (WebRtcIsac_EncodeGain2(&gain2_Q10, streamdata)) { - return -1; - } - - /* Compute inverse AR power spectrum. */ - FindInvArSpec(ARCoefQ12, gain2_Q10, invARSpec2_Q16); - /* Convert to magnitude spectrum, by doing square-roots - * (modified from SPLIB). 
*/ - res = 1 << (WebRtcSpl_GetSizeInBits(invARSpec2_Q16[0]) >> 1); - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - in_sqrt = invARSpec2_Q16[k]; - i = 10; - /* Negative values make no sense for a real sqrt-function. */ - if (in_sqrt < 0) { - in_sqrt = -in_sqrt; - } - newRes = (in_sqrt / res + res) >> 1; - do { - res = newRes; - newRes = (in_sqrt / res + res) >> 1; - } while (newRes != res && i-- > 0); - - invARSpecQ8[k] = (int16_t)newRes; - } - /* arithmetic coding of spectrum */ - err = WebRtcIsac_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8, - num_dft_coeff, is_12khz); - if (err < 0) { - return (err); - } - return 0; -} - - -/* step-up */ -void WebRtcIsac_Rc2Poly(double* RC, int N, double* a) { - int m, k; - double tmp[MAX_AR_MODEL_ORDER]; - - a[0] = 1.0; - tmp[0] = 1.0; - for (m = 1; m <= N; m++) { - /* copy */ - memcpy(&tmp[1], &a[1], (m - 1) * sizeof(double)); - a[m] = RC[m - 1]; - for (k = 1; k < m; k++) { - a[k] += RC[m - 1] * tmp[m - k]; - } - } - return; -} - -/* step-down */ -void WebRtcIsac_Poly2Rc(double* a, int N, double* RC) { - int m, k; - double tmp[MAX_AR_MODEL_ORDER]; - double tmp_inv; - - RC[N - 1] = a[N]; - for (m = N - 1; m > 0; m--) { - tmp_inv = 1.0 / (1.0 - RC[m] * RC[m]); - for (k = 1; k <= m; k++) { - tmp[k] = (a[k] - RC[m] * a[m - k + 1]) * tmp_inv; - } - - memcpy(&a[1], &tmp[1], (m - 1) * sizeof(double)); - RC[m - 1] = tmp[m]; - } - return; -} - - -#define MAX_ORDER 100 - -/* Matlab's LAR definition */ -void WebRtcIsac_Rc2Lar(const double* refc, double* lar, int order) { - int k; - for (k = 0; k < order; k++) { - lar[k] = log((1 + refc[k]) / (1 - refc[k])); - } -} - - -void WebRtcIsac_Lar2Rc(const double* lar, double* refc, int order) { - int k; - double tmp; - - for (k = 0; k < order; k++) { - tmp = exp(lar[k]); - refc[k] = (tmp - 1) / (tmp + 1); - } -} - -void WebRtcIsac_Poly2Lar(double* lowband, int orderLo, double* hiband, - int orderHi, int Nsub, double* lars) { - int k; - double rc[MAX_ORDER], *inpl, *inph, *outp; - - inpl = 
lowband; - inph = hiband; - outp = lars; - for (k = 0; k < Nsub; k++) { - /* gains */ - outp[0] = inpl[0]; - outp[1] = inph[0]; - outp += 2; - - /* Low band */ - inpl[0] = 1.0; - WebRtcIsac_Poly2Rc(inpl, orderLo, rc); - WebRtcIsac_Rc2Lar(rc, outp, orderLo); - outp += orderLo; - - /* High band */ - inph[0] = 1.0; - WebRtcIsac_Poly2Rc(inph, orderHi, rc); - WebRtcIsac_Rc2Lar(rc, outp, orderHi); - outp += orderHi; - - inpl += orderLo + 1; - inph += orderHi + 1; - } -} - - -int16_t WebRtcIsac_Poly2LarUB(double* lpcVecs, int16_t bandwidth) { - double poly[MAX_ORDER]; - double rc[MAX_ORDER]; - double* ptrIO; - int16_t vecCntr; - int16_t vecSize; - int16_t numVec; - - vecSize = UB_LPC_ORDER; - switch (bandwidth) { - case isac12kHz: { - numVec = UB_LPC_VEC_PER_FRAME; - break; - } - case isac16kHz: { - numVec = UB16_LPC_VEC_PER_FRAME; - break; - } - default: - return -1; - } - - ptrIO = lpcVecs; - poly[0] = 1.0; - for (vecCntr = 0; vecCntr < numVec; vecCntr++) { - memcpy(&poly[1], ptrIO, sizeof(double) * vecSize); - WebRtcIsac_Poly2Rc(poly, vecSize, rc); - WebRtcIsac_Rc2Lar(rc, ptrIO, vecSize); - ptrIO += vecSize; - } - return 0; -} - - -void WebRtcIsac_Lar2Poly(double* lars, double* lowband, int orderLo, - double* hiband, int orderHi, int Nsub) { - int k, orderTot; - double rc[MAX_ORDER], *outpl, *outph, *inp; - - orderTot = (orderLo + orderHi + 2); - outpl = lowband; - outph = hiband; - /* First two elements of 'inp' store gains*/ - inp = lars; - for (k = 0; k < Nsub; k++) { - /* Low band */ - WebRtcIsac_Lar2Rc(&inp[2], rc, orderLo); - WebRtcIsac_Rc2Poly(rc, orderLo, outpl); - - /* High band */ - WebRtcIsac_Lar2Rc(&inp[orderLo + 2], rc, orderHi); - WebRtcIsac_Rc2Poly(rc, orderHi, outph); - - /* gains */ - outpl[0] = inp[0]; - outph[0] = inp[1]; - - outpl += orderLo + 1; - outph += orderHi + 1; - inp += orderTot; - } -} - -/* - * assumes 2 LAR vectors interpolates to 'numPolyVec' A-polynomials - * Note: 'numPolyVecs' includes the first and the last point of the interval - 
*/ -void WebRtcIsac_Lar2PolyInterpolUB(double* larVecs, double* percepFilterParams, - int numPolyVecs) { - int polyCntr, coeffCntr; - double larInterpol[UB_LPC_ORDER]; - double rc[UB_LPC_ORDER]; - double delta[UB_LPC_ORDER]; - - /* calculate the step-size for linear interpolation coefficients */ - for (coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) { - delta[coeffCntr] = (larVecs[UB_LPC_ORDER + coeffCntr] - - larVecs[coeffCntr]) / (numPolyVecs - 1); - } - - for (polyCntr = 0; polyCntr < numPolyVecs; polyCntr++) { - for (coeffCntr = 0; coeffCntr < UB_LPC_ORDER; coeffCntr++) { - larInterpol[coeffCntr] = larVecs[coeffCntr] + - delta[coeffCntr] * polyCntr; - } - WebRtcIsac_Lar2Rc(larInterpol, rc, UB_LPC_ORDER); - - /* convert to A-polynomial, the following function returns A[0] = 1; - * which is written where gains had to be written. Then we write the - * gain (outside this function). This way we say a memcpy. */ - WebRtcIsac_Rc2Poly(rc, UB_LPC_ORDER, percepFilterParams); - percepFilterParams += (UB_LPC_ORDER + 1); - } -} - -int WebRtcIsac_DecodeLpc(Bitstr* streamdata, double* LPCCoef_lo, - double* LPCCoef_hi) { - double lars[KLT_ORDER_GAIN + KLT_ORDER_SHAPE]; - int err; - - err = WebRtcIsac_DecodeLpcCoef(streamdata, lars); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_LPC; - } - WebRtcIsac_Lar2Poly(lars, LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, - SUBFRAMES); - return 0; -} - -int16_t WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata, - double* percepFilterParams, - int16_t bandwidth) { - double lpcCoeff[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int err; - int interpolCntr; - int subframeCntr; - int16_t numSegments; - int16_t numVecPerSegment; - int16_t numGains; - - double percepFilterGains[SUBFRAMES << 1]; - double* ptrOutParam = percepFilterParams; - - err = WebRtcIsac_DecodeLpcCoefUB(streamdata, lpcCoeff, percepFilterGains, - bandwidth); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_LPC; - } - - switch (bandwidth) { - case isac12kHz: { - numGains 
= SUBFRAMES; - numSegments = UB_LPC_VEC_PER_FRAME - 1; - numVecPerSegment = kLpcVecPerSegmentUb12; - break; - } - case isac16kHz: { - numGains = SUBFRAMES << 1; - numSegments = UB16_LPC_VEC_PER_FRAME - 1; - numVecPerSegment = kLpcVecPerSegmentUb16; - break; - } - default: - return -1; - } - - for (interpolCntr = 0; interpolCntr < numSegments; interpolCntr++) { - WebRtcIsac_Lar2PolyInterpolUB(&lpcCoeff[interpolCntr * UB_LPC_ORDER], - ptrOutParam, numVecPerSegment + 1); - ptrOutParam += (numVecPerSegment * (UB_LPC_ORDER + 1)); - } - - ptrOutParam = percepFilterParams; - - if (bandwidth == isac16kHz) { - ptrOutParam += (1 + UB_LPC_ORDER); - } - - for (subframeCntr = 0; subframeCntr < numGains; subframeCntr++) { - *ptrOutParam = percepFilterGains[subframeCntr]; - ptrOutParam += (1 + UB_LPC_ORDER); - } - return 0; -} - - -/* decode & dequantize LPC Coef */ -int WebRtcIsac_DecodeLpcCoef(Bitstr* streamdata, double* LPCCoef) { - int j, k, n, pos, pos2, posg, poss, offsg, offss, offs2; - int index_g[KLT_ORDER_GAIN], index_s[KLT_ORDER_SHAPE]; - double tmpcoeffs_g[KLT_ORDER_GAIN], tmpcoeffs_s[KLT_ORDER_SHAPE]; - double tmpcoeffs2_g[KLT_ORDER_GAIN], tmpcoeffs2_s[KLT_ORDER_SHAPE]; - double sum; - int err; - int model = 1; - - /* entropy decoding of model number */ - /* We are keeping this for backward compatibility of bit-streams. */ - err = WebRtcIsac_DecHistOneStepMulti(&model, streamdata, - WebRtcIsac_kQKltModelCdfPtr, - WebRtcIsac_kQKltModelInitIndex, 1); - if (err < 0) { - return err; - } - /* Only accepted value of model is 0. It is kept in bit-stream for backward - * compatibility. 
*/ - if (model != 0) { - return -ISAC_DISALLOWED_LPC_MODEL; - } - - /* entropy decoding of quantization indices */ - err = WebRtcIsac_DecHistOneStepMulti( - index_s, streamdata, WebRtcIsac_kQKltCdfPtrShape, - WebRtcIsac_kQKltInitIndexShape, KLT_ORDER_SHAPE); - if (err < 0) { - return err; - } - err = WebRtcIsac_DecHistOneStepMulti( - index_g, streamdata, WebRtcIsac_kQKltCdfPtrGain, - WebRtcIsac_kQKltInitIndexGain, KLT_ORDER_GAIN); - if (err < 0) { - return err; - } - - /* find quantization levels for coefficients */ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - tmpcoeffs_s[k] = - WebRtcIsac_kQKltLevelsShape[WebRtcIsac_kQKltOffsetShape[k] + - index_s[k]]; - } - for (k = 0; k < KLT_ORDER_GAIN; k++) { - tmpcoeffs_g[k] = WebRtcIsac_kQKltLevelsGain[WebRtcIsac_kQKltOffsetGain[k] + - index_g[k]]; - } - - /* Inverse KLT */ - - /* Left transform, transpose matrix! */ - offsg = 0; - offss = 0; - posg = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - offs2 = 0; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = offs2; - for (n = 0; n < LPC_GAIN_ORDER; n++) { - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2++]; - } - tmpcoeffs2_g[posg++] = sum; - offs2 += LPC_GAIN_ORDER; - } - offs2 = 0; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = offss; - pos2 = offs2; - for (n = 0; n < LPC_SHAPE_ORDER; n++) { - sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[pos2++]; - } - tmpcoeffs2_s[poss++] = sum; - offs2 += LPC_SHAPE_ORDER; - } - offsg += LPC_GAIN_ORDER; - offss += LPC_SHAPE_ORDER; - } - - /* Right transform, transpose matrix */ - offsg = 0; - offss = 0; - posg = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2]; - pos += LPC_GAIN_ORDER; - pos2 += SUBFRAMES; - - } - tmpcoeffs_g[posg++] = sum; - } - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) 
{ - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[pos2]; - pos += LPC_SHAPE_ORDER; - pos2 += SUBFRAMES; - } - tmpcoeffs_s[poss++] = sum; - } - offsg += LPC_GAIN_ORDER; - offss += LPC_SHAPE_ORDER; - } - - /* scaling, mean addition, and gain restoration */ - posg = 0; - poss = 0; - pos = 0; - for (k = 0; k < SUBFRAMES; k++) { - /* log gains */ - LPCCoef[pos] = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef[pos] = exp(LPCCoef[pos]); - pos++; - posg++; - LPCCoef[pos] = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef[pos] = exp(LPCCoef[pos]); - pos++; - posg++; - - /* Low-band LAR coefficients. */ - for (n = 0; n < LPC_LOBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_LOBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - - /* High-band LAR coefficients. */ - for (n = 0; n < LPC_HIBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_HIBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - } - return 0; -} - -/* Encode LPC in LAR domain. */ -void WebRtcIsac_EncodeLar(double* LPCCoef, Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int j, k, n, pos, pos2, poss, offss, offs2; - int index_s[KLT_ORDER_SHAPE]; - int index_ovr_s[KLT_ORDER_SHAPE]; - double tmpcoeffs_s[KLT_ORDER_SHAPE]; - double tmpcoeffs2_s[KLT_ORDER_SHAPE]; - double sum; - const int kModel = 0; - - /* Mean removal and scaling. */ - poss = 0; - pos = 0; - for (k = 0; k < SUBFRAMES; k++) { - /* First two element are gains, move over them. */ - pos += 2; - - /* Low-band LAR coefficients. */ - for (n = 0; n < LPC_LOBAND_ORDER; n++, poss++, pos++) { - tmpcoeffs_s[poss] = LPCCoef[pos] - WebRtcIsac_kLpcMeansShape[poss]; - tmpcoeffs_s[poss] *= LPC_LOBAND_SCALE; - } - - /* High-band LAR coefficients. 
*/ - for (n = 0; n < LPC_HIBAND_ORDER; n++, poss++, pos++) { - tmpcoeffs_s[poss] = LPCCoef[pos] - WebRtcIsac_kLpcMeansShape[poss]; - tmpcoeffs_s[poss] *= LPC_HIBAND_SCALE; - } - } - - /* KLT */ - - /* Left transform. */ - offss = 0; - for (j = 0; j < SUBFRAMES; j++) { - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = offss; - pos2 = k; - for (n = 0; n < LPC_SHAPE_ORDER; n++) { - sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[pos2]; - pos2 += LPC_SHAPE_ORDER; - } - tmpcoeffs2_s[poss++] = sum; - } - offss += LPC_SHAPE_ORDER; - } - - /* Right transform. */ - offss = 0; - offs2 = 0; - for (j = 0; j < SUBFRAMES; j++) { - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = k; - pos2 = offs2; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[pos2++]; - pos += LPC_SHAPE_ORDER; - } - tmpcoeffs_s[poss++] = sum; - } - offs2 += SUBFRAMES; - offss += LPC_SHAPE_ORDER; - } - - /* Quantize coefficients. */ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - index_s[k] = (WebRtcIsac_lrint(tmpcoeffs_s[k] / KLT_STEPSIZE)) + - WebRtcIsac_kQKltQuantMinShape[k]; - if (index_s[k] < 0) { - index_s[k] = 0; - } else if (index_s[k] > WebRtcIsac_kQKltMaxIndShape[k]) { - index_s[k] = WebRtcIsac_kQKltMaxIndShape[k]; - } - index_ovr_s[k] = WebRtcIsac_kQKltOffsetShape[k] + index_s[k]; - } - - - /* Only one model remains in this version of the code, kModel = 0. We - * are keeping for bit-streams to be backward compatible. */ - /* entropy coding of model number */ - WebRtcIsac_EncHistMulti(streamdata, &kModel, WebRtcIsac_kQKltModelCdfPtr, 1); - - /* Save data for creation of multiple bit streams */ - /* Entropy coding of quantization indices - shape only. */ - WebRtcIsac_EncHistMulti(streamdata, index_s, WebRtcIsac_kQKltCdfPtrShape, - KLT_ORDER_SHAPE); - - /* Save data for creation of multiple bit streams. 
*/ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - encData->LPCindex_s[KLT_ORDER_SHAPE * encData->startIdx + k] = index_s[k]; - } - - /* Find quantization levels for shape coefficients. */ - for (k = 0; k < KLT_ORDER_SHAPE; k++) { - tmpcoeffs_s[k] = WebRtcIsac_kQKltLevelsShape[index_ovr_s[k]]; - } - /* Inverse KLT. */ - /* Left transform, transpose matrix.! */ - offss = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - offs2 = 0; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = offss; - pos2 = offs2; - for (n = 0; n < LPC_SHAPE_ORDER; n++) { - sum += tmpcoeffs_s[pos++] * WebRtcIsac_kKltT1Shape[pos2++]; - } - tmpcoeffs2_s[poss++] = sum; - offs2 += LPC_SHAPE_ORDER; - } - offss += LPC_SHAPE_ORDER; - } - - /* Right transform, Transpose matrix */ - offss = 0; - poss = 0; - for (j = 0; j < SUBFRAMES; j++) { - poss = offss; - for (k = 0; k < LPC_SHAPE_ORDER; k++) { - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_s[pos] * WebRtcIsac_kKltT2Shape[pos2]; - pos += LPC_SHAPE_ORDER; - pos2 += SUBFRAMES; - } - tmpcoeffs_s[poss++] = sum; - } - offss += LPC_SHAPE_ORDER; - } - - /* Scaling, mean addition, and gain restoration. */ - poss = 0; - pos = 0; - for (k = 0; k < SUBFRAMES; k++) { - /* Ignore gains. */ - pos += 2; - - /* Low band LAR coefficients. */ - for (n = 0; n < LPC_LOBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_LOBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - - /* High band LAR coefficients. 
*/ - for (n = 0; n < LPC_HIBAND_ORDER; n++, pos++, poss++) { - LPCCoef[pos] = tmpcoeffs_s[poss] / LPC_HIBAND_SCALE; - LPCCoef[pos] += WebRtcIsac_kLpcMeansShape[poss]; - } - } -} - - -void WebRtcIsac_EncodeLpcLb(double* LPCCoef_lo, double* LPCCoef_hi, - Bitstr* streamdata, IsacSaveEncoderData* encData) { - double lars[KLT_ORDER_GAIN + KLT_ORDER_SHAPE]; - int k; - - WebRtcIsac_Poly2Lar(LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, SUBFRAMES, - lars); - WebRtcIsac_EncodeLar(lars, streamdata, encData); - WebRtcIsac_Lar2Poly(lars, LPCCoef_lo, ORDERLO, LPCCoef_hi, ORDERHI, - SUBFRAMES); - /* Save data for creation of multiple bit streams (and transcoding). */ - for (k = 0; k < (ORDERLO + 1)*SUBFRAMES; k++) { - encData->LPCcoeffs_lo[(ORDERLO + 1)*SUBFRAMES * encData->startIdx + k] = - LPCCoef_lo[k]; - } - for (k = 0; k < (ORDERHI + 1)*SUBFRAMES; k++) { - encData->LPCcoeffs_hi[(ORDERHI + 1)*SUBFRAMES * encData->startIdx + k] = - LPCCoef_hi[k]; - } -} - - -int16_t WebRtcIsac_EncodeLpcUB(double* lpcVecs, Bitstr* streamdata, - double* interpolLPCCoeff, - int16_t bandwidth, - ISACUBSaveEncDataStruct* encData) { - double U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int idx[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int interpolCntr; - - WebRtcIsac_Poly2LarUB(lpcVecs, bandwidth); - WebRtcIsac_RemoveLarMean(lpcVecs, bandwidth); - WebRtcIsac_DecorrelateIntraVec(lpcVecs, U, bandwidth); - WebRtcIsac_DecorrelateInterVec(U, lpcVecs, bandwidth); - WebRtcIsac_QuantizeUncorrLar(lpcVecs, idx, bandwidth); - - WebRtcIsac_CorrelateInterVec(lpcVecs, U, bandwidth); - WebRtcIsac_CorrelateIntraVec(U, lpcVecs, bandwidth); - WebRtcIsac_AddLarMean(lpcVecs, bandwidth); - - switch (bandwidth) { - case isac12kHz: { - /* Store the indices to be used for multiple encoding. 
*/ - memcpy(encData->indexLPCShape, idx, UB_LPC_ORDER * - UB_LPC_VEC_PER_FRAME * sizeof(int)); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcShapeCdfMatUb12, - UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME); - for (interpolCntr = 0; interpolCntr < UB_INTERPOL_SEGMENTS; - interpolCntr++) { - WebRtcIsac_Lar2PolyInterpolUB(lpcVecs, interpolLPCCoeff, - kLpcVecPerSegmentUb12 + 1); - lpcVecs += UB_LPC_ORDER; - interpolLPCCoeff += (kLpcVecPerSegmentUb12 * (UB_LPC_ORDER + 1)); - } - break; - } - case isac16kHz: { - /* Store the indices to be used for multiple encoding. */ - memcpy(encData->indexLPCShape, idx, UB_LPC_ORDER * - UB16_LPC_VEC_PER_FRAME * sizeof(int)); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcShapeCdfMatUb16, - UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME); - for (interpolCntr = 0; interpolCntr < UB16_INTERPOL_SEGMENTS; - interpolCntr++) { - WebRtcIsac_Lar2PolyInterpolUB(lpcVecs, interpolLPCCoeff, - kLpcVecPerSegmentUb16 + 1); - lpcVecs += UB_LPC_ORDER; - interpolLPCCoeff += (kLpcVecPerSegmentUb16 * (UB_LPC_ORDER + 1)); - } - break; - } - default: - return -1; - } - return 0; -} - -void WebRtcIsac_EncodeLpcGainLb(double* LPCCoef_lo, double* LPCCoef_hi, - Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int j, k, n, pos, pos2, posg, offsg, offs2; - int index_g[KLT_ORDER_GAIN]; - int index_ovr_g[KLT_ORDER_GAIN]; - double tmpcoeffs_g[KLT_ORDER_GAIN]; - double tmpcoeffs2_g[KLT_ORDER_GAIN]; - double sum; - /* log gains, mean removal and scaling */ - posg = 0; - for (k = 0; k < SUBFRAMES; k++) { - tmpcoeffs_g[posg] = log(LPCCoef_lo[(LPC_LOBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - tmpcoeffs_g[posg] = log(LPCCoef_hi[(LPC_HIBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - } - - /* KLT */ - - /* Left transform. 
*/ - offsg = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = k; - for (n = 0; n < LPC_GAIN_ORDER; n++) { - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2]; - pos2 += LPC_GAIN_ORDER; - } - tmpcoeffs2_g[posg++] = sum; - } - offsg += LPC_GAIN_ORDER; - } - - /* Right transform. */ - offsg = 0; - offs2 = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = offs2; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2++]; - pos += LPC_GAIN_ORDER; - } - tmpcoeffs_g[posg++] = sum; - } - offs2 += SUBFRAMES; - offsg += LPC_GAIN_ORDER; - } - - /* Quantize coefficients. */ - for (k = 0; k < KLT_ORDER_GAIN; k++) { - /* Get index. */ - pos2 = WebRtcIsac_lrint(tmpcoeffs_g[k] / KLT_STEPSIZE); - index_g[k] = (pos2) + WebRtcIsac_kQKltQuantMinGain[k]; - if (index_g[k] < 0) { - index_g[k] = 0; - } else if (index_g[k] > WebRtcIsac_kQKltMaxIndGain[k]) { - index_g[k] = WebRtcIsac_kQKltMaxIndGain[k]; - } - index_ovr_g[k] = WebRtcIsac_kQKltOffsetGain[k] + index_g[k]; - - /* Find quantization levels for coefficients. */ - tmpcoeffs_g[k] = WebRtcIsac_kQKltLevelsGain[index_ovr_g[k]]; - - /* Save data for creation of multiple bit streams. */ - encData->LPCindex_g[KLT_ORDER_GAIN * encData->startIdx + k] = index_g[k]; - } - - /* Entropy coding of quantization indices - gain. */ - WebRtcIsac_EncHistMulti(streamdata, index_g, WebRtcIsac_kQKltCdfPtrGain, - KLT_ORDER_GAIN); - - /* Find quantization levels for coefficients. */ - /* Left transform. 
*/ - offsg = 0; - posg = 0; - for (j = 0; j < SUBFRAMES; j++) { - offs2 = 0; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = offs2; - for (n = 0; n < LPC_GAIN_ORDER; n++) - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2++]; - tmpcoeffs2_g[posg++] = sum; - offs2 += LPC_GAIN_ORDER; - } - offsg += LPC_GAIN_ORDER; - } - - /* Right transform, transpose matrix. */ - offsg = 0; - posg = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = j; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2]; - pos += LPC_GAIN_ORDER; - pos2 += SUBFRAMES; - } - tmpcoeffs_g[posg++] = sum; - } - offsg += LPC_GAIN_ORDER; - } - - - /* Scaling, mean addition, and gain restoration. */ - posg = 0; - for (k = 0; k < SUBFRAMES; k++) { - sum = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - sum += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef_lo[k * (LPC_LOBAND_ORDER + 1)] = exp(sum); - pos++; - posg++; - sum = tmpcoeffs_g[posg] / LPC_GAIN_SCALE; - sum += WebRtcIsac_kLpcMeansGain[posg]; - LPCCoef_hi[k * (LPC_HIBAND_ORDER + 1)] = exp(sum); - pos++; - posg++; - } - -} - -void WebRtcIsac_EncodeLpcGainUb(double* lpGains, Bitstr* streamdata, - int* lpcGainIndex) { - double U[UB_LPC_GAIN_DIM]; - int idx[UB_LPC_GAIN_DIM]; - WebRtcIsac_ToLogDomainRemoveMean(lpGains); - WebRtcIsac_DecorrelateLPGain(lpGains, U); - WebRtcIsac_QuantizeLpcGain(U, idx); - /* Store the index for re-encoding for FEC. 
*/ - memcpy(lpcGainIndex, idx, UB_LPC_GAIN_DIM * sizeof(int)); - WebRtcIsac_CorrelateLpcGain(U, lpGains); - WebRtcIsac_AddMeanToLinearDomain(lpGains); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcGainCdfMat, - UB_LPC_GAIN_DIM); -} - - -void WebRtcIsac_StoreLpcGainUb(double* lpGains, Bitstr* streamdata) { - double U[UB_LPC_GAIN_DIM]; - int idx[UB_LPC_GAIN_DIM]; - WebRtcIsac_ToLogDomainRemoveMean(lpGains); - WebRtcIsac_DecorrelateLPGain(lpGains, U); - WebRtcIsac_QuantizeLpcGain(U, idx); - WebRtcIsac_EncHistMulti(streamdata, idx, WebRtcIsac_kLpcGainCdfMat, - UB_LPC_GAIN_DIM); -} - - - -int16_t WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata) { - double U[UB_LPC_GAIN_DIM]; - int idx[UB_LPC_GAIN_DIM]; - int err; - err = WebRtcIsac_DecHistOneStepMulti(idx, streamdata, - WebRtcIsac_kLpcGainCdfMat, - WebRtcIsac_kLpcGainEntropySearch, - UB_LPC_GAIN_DIM); - if (err < 0) { - return -1; - } - WebRtcIsac_DequantizeLpcGain(idx, U); - WebRtcIsac_CorrelateLpcGain(U, lpGains); - WebRtcIsac_AddMeanToLinearDomain(lpGains); - return 0; -} - - - -/* decode & dequantize RC */ -int WebRtcIsac_DecodeRc(Bitstr* streamdata, int16_t* RCQ15) { - int k, err; - int index[AR_ORDER]; - - /* entropy decoding of quantization indices */ - err = WebRtcIsac_DecHistOneStepMulti(index, streamdata, - WebRtcIsac_kQArRcCdfPtr, - WebRtcIsac_kQArRcInitIndex, AR_ORDER); - if (err < 0) - return err; - - /* find quantization levels for reflection coefficients */ - for (k = 0; k < AR_ORDER; k++) { - RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]); - } - return 0; -} - - -/* quantize & code RC */ -void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata) { - int k; - int index[AR_ORDER]; - - /* quantize reflection coefficients (add noise feedback?) */ - for (k = 0; k < AR_ORDER; k++) { - index[k] = WebRtcIsac_kQArRcInitIndex[k]; - // The safe-guards in following while conditions are to suppress gcc 4.8.3 - // warnings, Issue 2888. 
Otherwise, first and last elements of - // `WebRtcIsac_kQArBoundaryLevels` are such that the following search - // *never* cause an out-of-boundary read. - if (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k]]) { - while (index[k] + 1 < NUM_AR_RC_QUANT_BAUNDARY && - RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) { - index[k]++; - } - } else { - while (index[k] > 0 && - RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ; - } - RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]); - } - - /* entropy coding of quantization indices */ - WebRtcIsac_EncHistMulti(streamdata, index, WebRtcIsac_kQArRcCdfPtr, AR_ORDER); -} - - -/* decode & dequantize squared Gain */ -int WebRtcIsac_DecodeGain2(Bitstr* streamdata, int32_t* gainQ10) { - int index, err; - - /* entropy decoding of quantization index */ - err = WebRtcIsac_DecHistOneStepMulti(&index, streamdata, - WebRtcIsac_kQGainCdf_ptr, - WebRtcIsac_kQGainInitIndex, 1); - if (err < 0) { - return err; - } - /* find quantization level */ - *gainQ10 = WebRtcIsac_kQGain2Levels[index]; - return 0; -} - - -/* quantize & code squared Gain */ -int WebRtcIsac_EncodeGain2(int32_t* gainQ10, Bitstr* streamdata) { - int index; - - /* find quantization index */ - index = WebRtcIsac_kQGainInitIndex[0]; - if (*gainQ10 > WebRtcIsac_kQGain2BoundaryLevels[index]) { - while (*gainQ10 > WebRtcIsac_kQGain2BoundaryLevels[index + 1]) { - index++; - } - } else { - while (*gainQ10 < WebRtcIsac_kQGain2BoundaryLevels[--index]) ; - } - /* De-quantize */ - *gainQ10 = WebRtcIsac_kQGain2Levels[index]; - - /* entropy coding of quantization index */ - WebRtcIsac_EncHistMulti(streamdata, &index, WebRtcIsac_kQGainCdf_ptr, 1); - return 0; -} - - -/* code and decode Pitch Gains and Lags functions */ - -/* decode & dequantize Pitch Gains */ -int WebRtcIsac_DecodePitchGain(Bitstr* streamdata, - int16_t* PitchGains_Q12) { - int index_comb, err; - const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1]; - - /* Entropy decoding of quantization indices */ - 
*WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf; - err = WebRtcIsac_DecHistBisectMulti(&index_comb, streamdata, - WebRtcIsac_kQPitchGainCdf_ptr, - WebRtcIsac_kQCdfTableSizeGain, 1); - /* Error check, Q_mean_Gain.. tables are of size 144 */ - if ((err < 0) || (index_comb < 0) || (index_comb >= 144)) { - return -ISAC_RANGE_ERROR_DECODE_PITCH_GAIN; - } - /* De-quantize back to pitch gains by table look-up. */ - PitchGains_Q12[0] = WebRtcIsac_kQMeanGain1Q12[index_comb]; - PitchGains_Q12[1] = WebRtcIsac_kQMeanGain2Q12[index_comb]; - PitchGains_Q12[2] = WebRtcIsac_kQMeanGain3Q12[index_comb]; - PitchGains_Q12[3] = WebRtcIsac_kQMeanGain4Q12[index_comb]; - return 0; -} - - -/* Quantize & code Pitch Gains. */ -void WebRtcIsac_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int k, j; - double C; - double S[PITCH_SUBFRAMES]; - int index[3]; - int index_comb; - const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1]; - double PitchGains[PITCH_SUBFRAMES] = {0, 0, 0, 0}; - - /* Take the asin. */ - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchGains[k] = ((float)PitchGains_Q12[k]) / 4096; - S[k] = asin(PitchGains[k]); - } - - /* Find quantization index; only for the first three - * transform coefficients. */ - for (k = 0; k < 3; k++) { - /* transform */ - C = 0.0; - for (j = 0; j < PITCH_SUBFRAMES; j++) { - C += WebRtcIsac_kTransform[k][j] * S[j]; - } - /* Quantize */ - index[k] = WebRtcIsac_lrint(C / PITCH_GAIN_STEPSIZE); - - /* Check that the index is not outside the boundaries of the table. */ - if (index[k] < WebRtcIsac_kIndexLowerLimitGain[k]) { - index[k] = WebRtcIsac_kIndexLowerLimitGain[k]; - } else if (index[k] > WebRtcIsac_kIndexUpperLimitGain[k]) { - index[k] = WebRtcIsac_kIndexUpperLimitGain[k]; - } - index[k] -= WebRtcIsac_kIndexLowerLimitGain[k]; - } - - /* Calculate unique overall index. 
*/ - index_comb = WebRtcIsac_kIndexMultsGain[0] * index[0] + - WebRtcIsac_kIndexMultsGain[1] * index[1] + index[2]; - - /* unquantize back to pitch gains by table look-up */ - PitchGains_Q12[0] = WebRtcIsac_kQMeanGain1Q12[index_comb]; - PitchGains_Q12[1] = WebRtcIsac_kQMeanGain2Q12[index_comb]; - PitchGains_Q12[2] = WebRtcIsac_kQMeanGain3Q12[index_comb]; - PitchGains_Q12[3] = WebRtcIsac_kQMeanGain4Q12[index_comb]; - - /* entropy coding of quantization pitch gains */ - *WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf; - WebRtcIsac_EncHistMulti(streamdata, &index_comb, - WebRtcIsac_kQPitchGainCdf_ptr, 1); - encData->pitchGain_index[encData->startIdx] = index_comb; -} - - - -/* Pitch LAG */ -/* Decode & de-quantize Pitch Lags. */ -int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, int16_t* PitchGain_Q12, - double* PitchLags) { - int k, err; - double StepSize; - double C; - int index[PITCH_SUBFRAMES]; - double mean_gain; - const double* mean_val2, *mean_val3, *mean_val4; - const int16_t* lower_limit; - const uint16_t* init_index; - const uint16_t* cdf_size; - const uint16_t** cdf; - double PitchGain[4] = {0, 0, 0, 0}; - - /* compute mean pitch gain */ - mean_gain = 0.0; - for (k = 0; k < 4; k++) { - PitchGain[k] = ((float)PitchGain_Q12[k]) / 4096; - mean_gain += PitchGain[k]; - } - mean_gain /= 4.0; - - /* voicing classification. 
*/ - if (mean_gain < 0.2) { - StepSize = WebRtcIsac_kQPitchLagStepsizeLo; - cdf = WebRtcIsac_kQPitchLagCdfPtrLo; - cdf_size = WebRtcIsac_kQPitchLagCdfSizeLo; - mean_val2 = WebRtcIsac_kQMeanLag2Lo; - mean_val3 = WebRtcIsac_kQMeanLag3Lo; - mean_val4 = WebRtcIsac_kQMeanLag4Lo; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagLo; - init_index = WebRtcIsac_kQInitIndexLagLo; - } else if (mean_gain < 0.4) { - StepSize = WebRtcIsac_kQPitchLagStepsizeMid; - cdf = WebRtcIsac_kQPitchLagCdfPtrMid; - cdf_size = WebRtcIsac_kQPitchLagCdfSizeMid; - mean_val2 = WebRtcIsac_kQMeanLag2Mid; - mean_val3 = WebRtcIsac_kQMeanLag3Mid; - mean_val4 = WebRtcIsac_kQMeanLag4Mid; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagMid; - init_index = WebRtcIsac_kQInitIndexLagMid; - } else { - StepSize = WebRtcIsac_kQPitchLagStepsizeHi; - cdf = WebRtcIsac_kQPitchLagCdfPtrHi; - cdf_size = WebRtcIsac_kQPitchLagCdfSizeHi; - mean_val2 = WebRtcIsac_kQMeanLag2Hi; - mean_val3 = WebRtcIsac_kQMeanLag3Hi; - mean_val4 = WebRtcIsac_kQMeanLag4Hi; - lower_limit = WebRtcIsac_kQindexLowerLimitLagHi; - init_index = WebRtcIsac_kQInitIndexLagHi; - } - - /* Entropy decoding of quantization indices. */ - err = WebRtcIsac_DecHistBisectMulti(index, streamdata, cdf, cdf_size, 1); - if ((err < 0) || (index[0] < 0)) { - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - } - err = WebRtcIsac_DecHistOneStepMulti(index + 1, streamdata, cdf + 1, - init_index, 3); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_PITCH_LAG; - } - - /* Unquantize back to transform coefficients and do the inverse transform: - * S = T'*C. 
*/ - C = (index[0] + lower_limit[0]) * StepSize; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] = WebRtcIsac_kTransformTranspose[k][0] * C; - } - C = mean_val2[index[1]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][1] * C; - } - C = mean_val3[index[2]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][2] * C; - } - C = mean_val4[index[3]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][3] * C; - } - return 0; -} - - - -/* Quantize & code pitch lags. */ -void WebRtcIsac_EncodePitchLag(double* PitchLags, int16_t* PitchGain_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData) { - int k, j; - double StepSize; - double C; - int index[PITCH_SUBFRAMES]; - double mean_gain; - const double* mean_val2, *mean_val3, *mean_val4; - const int16_t* lower_limit, *upper_limit; - const uint16_t** cdf; - double PitchGain[4] = {0, 0, 0, 0}; - - /* compute mean pitch gain */ - mean_gain = 0.0; - for (k = 0; k < 4; k++) { - PitchGain[k] = ((float)PitchGain_Q12[k]) / 4096; - mean_gain += PitchGain[k]; - } - mean_gain /= 4.0; - - /* Save data for creation of multiple bit streams */ - encData->meanGain[encData->startIdx] = mean_gain; - - /* Voicing classification. 
*/ - if (mean_gain < 0.2) { - StepSize = WebRtcIsac_kQPitchLagStepsizeLo; - cdf = WebRtcIsac_kQPitchLagCdfPtrLo; - mean_val2 = WebRtcIsac_kQMeanLag2Lo; - mean_val3 = WebRtcIsac_kQMeanLag3Lo; - mean_val4 = WebRtcIsac_kQMeanLag4Lo; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagLo; - upper_limit = WebRtcIsac_kQIndexUpperLimitLagLo; - } else if (mean_gain < 0.4) { - StepSize = WebRtcIsac_kQPitchLagStepsizeMid; - cdf = WebRtcIsac_kQPitchLagCdfPtrMid; - mean_val2 = WebRtcIsac_kQMeanLag2Mid; - mean_val3 = WebRtcIsac_kQMeanLag3Mid; - mean_val4 = WebRtcIsac_kQMeanLag4Mid; - lower_limit = WebRtcIsac_kQIndexLowerLimitLagMid; - upper_limit = WebRtcIsac_kQIndexUpperLimitLagMid; - } else { - StepSize = WebRtcIsac_kQPitchLagStepsizeHi; - cdf = WebRtcIsac_kQPitchLagCdfPtrHi; - mean_val2 = WebRtcIsac_kQMeanLag2Hi; - mean_val3 = WebRtcIsac_kQMeanLag3Hi; - mean_val4 = WebRtcIsac_kQMeanLag4Hi; - lower_limit = WebRtcIsac_kQindexLowerLimitLagHi; - upper_limit = WebRtcIsac_kQindexUpperLimitLagHi; - } - - /* find quantization index */ - for (k = 0; k < 4; k++) { - /* transform */ - C = 0.0; - for (j = 0; j < PITCH_SUBFRAMES; j++) { - C += WebRtcIsac_kTransform[k][j] * PitchLags[j]; - } - /* quantize */ - index[k] = WebRtcIsac_lrint(C / StepSize); - - /* check that the index is not outside the boundaries of the table */ - if (index[k] < lower_limit[k]) { - index[k] = lower_limit[k]; - } else if (index[k] > upper_limit[k]) index[k] = upper_limit[k]; { - index[k] -= lower_limit[k]; - } - /* Save data for creation of multiple bit streams */ - encData->pitchIndex[PITCH_SUBFRAMES * encData->startIdx + k] = index[k]; - } - - /* Un-quantize back to transform coefficients and do the inverse transform: - * S = T'*C */ - C = (index[0] + lower_limit[0]) * StepSize; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] = WebRtcIsac_kTransformTranspose[k][0] * C; - } - C = mean_val2[index[1]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][1] * C; - } 
- C = mean_val3[index[2]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][2] * C; - } - C = mean_val4[index[3]]; - for (k = 0; k < PITCH_SUBFRAMES; k++) { - PitchLags[k] += WebRtcIsac_kTransformTranspose[k][3] * C; - } - /* entropy coding of quantization pitch lags */ - WebRtcIsac_EncHistMulti(streamdata, index, cdf, PITCH_SUBFRAMES); -} - - - -/* Routines for in-band signaling of bandwidth estimation */ -/* Histograms based on uniform distribution of indices */ -/* Move global variables later! */ - - -/* cdf array for frame length indicator */ -const uint16_t WebRtcIsac_kFrameLengthCdf[4] = { - 0, 21845, 43690, 65535 }; - -/* pointer to cdf array for frame length indicator */ -const uint16_t* WebRtcIsac_kFrameLengthCdf_ptr[1] = { - WebRtcIsac_kFrameLengthCdf }; - -/* initial cdf index for decoder of frame length indicator */ -const uint16_t WebRtcIsac_kFrameLengthInitIndex[1] = { 1 }; - - -int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, int16_t* framesamples) { - int frame_mode, err; - err = 0; - /* entropy decoding of frame length [1:30ms,2:60ms] */ - err = WebRtcIsac_DecHistOneStepMulti(&frame_mode, streamdata, - WebRtcIsac_kFrameLengthCdf_ptr, - WebRtcIsac_kFrameLengthInitIndex, 1); - if (err < 0) - return -ISAC_RANGE_ERROR_DECODE_FRAME_LENGTH; - - switch (frame_mode) { - case 1: - *framesamples = 480; /* 30ms */ - break; - case 2: - *framesamples = 960; /* 60ms */ - break; - default: - err = -ISAC_DISALLOWED_FRAME_MODE_DECODER; - } - return err; -} - -int WebRtcIsac_EncodeFrameLen(int16_t framesamples, Bitstr* streamdata) { - int frame_mode, status; - - status = 0; - frame_mode = 0; - /* entropy coding of frame length [1:480 samples,2:960 samples] */ - switch (framesamples) { - case 480: - frame_mode = 1; - break; - case 960: - frame_mode = 2; - break; - default: - status = - ISAC_DISALLOWED_FRAME_MODE_ENCODER; - } - - if (status < 0) - return status; - - WebRtcIsac_EncHistMulti(streamdata, &frame_mode, - 
WebRtcIsac_kFrameLengthCdf_ptr, 1); - return status; -} - -/* cdf array for estimated bandwidth */ -static const uint16_t kBwCdf[25] = { - 0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037, - 32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074, - 62804, 65535 }; - -/* pointer to cdf array for estimated bandwidth */ -static const uint16_t* const kBwCdfPtr[1] = { kBwCdf }; - -/* initial cdf index for decoder of estimated bandwidth*/ -static const uint16_t kBwInitIndex[1] = { 7 }; - - -int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, int16_t* BWno) { - int BWno32, err; - - /* entropy decoding of sender's BW estimation [0..23] */ - err = WebRtcIsac_DecHistOneStepMulti(&BWno32, streamdata, kBwCdfPtr, - kBwInitIndex, 1); - if (err < 0) { - return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH; - } - *BWno = (int16_t)BWno32; - return err; -} - -void WebRtcIsac_EncodeReceiveBw(int* BWno, Bitstr* streamdata) { - /* entropy encoding of receiver's BW estimation [0..23] */ - WebRtcIsac_EncHistMulti(streamdata, BWno, kBwCdfPtr, 1); -} - - -/* estimate code length of LPC Coef */ -void WebRtcIsac_TranscodeLPCCoef(double* LPCCoef_lo, double* LPCCoef_hi, - int* index_g) { - int j, k, n, pos, pos2, posg, offsg, offs2; - int index_ovr_g[KLT_ORDER_GAIN]; - double tmpcoeffs_g[KLT_ORDER_GAIN]; - double tmpcoeffs2_g[KLT_ORDER_GAIN]; - double sum; - - /* log gains, mean removal and scaling */ - posg = 0; - for (k = 0; k < SUBFRAMES; k++) { - tmpcoeffs_g[posg] = log(LPCCoef_lo[(LPC_LOBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - tmpcoeffs_g[posg] = log(LPCCoef_hi[(LPC_HIBAND_ORDER + 1) * k]); - tmpcoeffs_g[posg] -= WebRtcIsac_kLpcMeansGain[posg]; - tmpcoeffs_g[posg] *= LPC_GAIN_SCALE; - posg++; - } - - /* KLT */ - - /* Left transform. 
*/ - offsg = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = offsg; - pos2 = k; - for (n = 0; n < LPC_GAIN_ORDER; n++) { - sum += tmpcoeffs_g[pos++] * WebRtcIsac_kKltT1Gain[pos2]; - pos2 += LPC_GAIN_ORDER; - } - tmpcoeffs2_g[posg++] = sum; - } - offsg += LPC_GAIN_ORDER; - } - - /* Right transform. */ - offsg = 0; - offs2 = 0; - for (j = 0; j < SUBFRAMES; j++) { - posg = offsg; - for (k = 0; k < LPC_GAIN_ORDER; k++) { - sum = 0; - pos = k; - pos2 = offs2; - for (n = 0; n < SUBFRAMES; n++) { - sum += tmpcoeffs2_g[pos] * WebRtcIsac_kKltT2Gain[pos2++]; - pos += LPC_GAIN_ORDER; - } - tmpcoeffs_g[posg++] = sum; - } - offs2 += SUBFRAMES; - offsg += LPC_GAIN_ORDER; - } - - - /* quantize coefficients */ - for (k = 0; k < KLT_ORDER_GAIN; k++) { - /* Get index. */ - pos2 = WebRtcIsac_lrint(tmpcoeffs_g[k] / KLT_STEPSIZE); - index_g[k] = (pos2) + WebRtcIsac_kQKltQuantMinGain[k]; - if (index_g[k] < 0) { - index_g[k] = 0; - } else if (index_g[k] > WebRtcIsac_kQKltMaxIndGain[k]) { - index_g[k] = WebRtcIsac_kQKltMaxIndGain[k]; - } - index_ovr_g[k] = WebRtcIsac_kQKltOffsetGain[k] + index_g[k]; - - /* find quantization levels for coefficients */ - tmpcoeffs_g[k] = WebRtcIsac_kQKltLevelsGain[index_ovr_g[k]]; - } -} - - -/* Decode & de-quantize LPC Coefficients. */ -int WebRtcIsac_DecodeLpcCoefUB(Bitstr* streamdata, double* lpcVecs, - double* percepFilterGains, - int16_t bandwidth) { - int index_s[KLT_ORDER_SHAPE]; - - double U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - int err; - - /* Entropy decoding of quantization indices. 
*/ - switch (bandwidth) { - case isac12kHz: { - err = WebRtcIsac_DecHistOneStepMulti( - index_s, streamdata, WebRtcIsac_kLpcShapeCdfMatUb12, - WebRtcIsac_kLpcShapeEntropySearchUb12, UB_LPC_ORDER * - UB_LPC_VEC_PER_FRAME); - break; - } - case isac16kHz: { - err = WebRtcIsac_DecHistOneStepMulti( - index_s, streamdata, WebRtcIsac_kLpcShapeCdfMatUb16, - WebRtcIsac_kLpcShapeEntropySearchUb16, UB_LPC_ORDER * - UB16_LPC_VEC_PER_FRAME); - break; - } - default: - return -1; - } - - if (err < 0) { - return err; - } - - WebRtcIsac_DequantizeLpcParam(index_s, lpcVecs, bandwidth); - WebRtcIsac_CorrelateInterVec(lpcVecs, U, bandwidth); - WebRtcIsac_CorrelateIntraVec(U, lpcVecs, bandwidth); - WebRtcIsac_AddLarMean(lpcVecs, bandwidth); - WebRtcIsac_DecodeLpcGainUb(percepFilterGains, streamdata); - - if (bandwidth == isac16kHz) { - /* Decode another set of Gains. */ - WebRtcIsac_DecodeLpcGainUb(&percepFilterGains[SUBFRAMES], streamdata); - } - return 0; -} - -int16_t WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth, - Bitstr* streamData) { - int bandwidthMode; - switch (bandwidth) { - case isac12kHz: { - bandwidthMode = 0; - break; - } - case isac16kHz: { - bandwidthMode = 1; - break; - } - default: - return -ISAC_DISALLOWED_ENCODER_BANDWIDTH; - } - WebRtcIsac_EncHistMulti(streamData, &bandwidthMode, kOneBitEqualProbCdf_ptr, - 1); - return 0; -} - -int16_t WebRtcIsac_DecodeBandwidth(Bitstr* streamData, - enum ISACBandwidth* bandwidth) { - int bandwidthMode; - if (WebRtcIsac_DecHistOneStepMulti(&bandwidthMode, streamData, - kOneBitEqualProbCdf_ptr, - kOneBitEqualProbInitIndex, 1) < 0) { - return -ISAC_RANGE_ERROR_DECODE_BANDWITH; - } - switch (bandwidthMode) { - case 0: { - *bandwidth = isac12kHz; - break; - } - case 1: { - *bandwidth = isac16kHz; - break; - } - default: - return -ISAC_DISALLOWED_BANDWIDTH_MODE_DECODER; - } - return 0; -} - -int16_t WebRtcIsac_EncodeJitterInfo(int32_t jitterIndex, - Bitstr* streamData) { - /* This is to avoid LINUX warning until we change 
'int' to 'Word32'. */ - int intVar; - - if ((jitterIndex < 0) || (jitterIndex > 1)) { - return -1; - } - intVar = (int)(jitterIndex); - /* Use the same CDF table as for bandwidth - * both take two values with equal probability.*/ - WebRtcIsac_EncHistMulti(streamData, &intVar, kOneBitEqualProbCdf_ptr, 1); - return 0; -} - -int16_t WebRtcIsac_DecodeJitterInfo(Bitstr* streamData, - int32_t* jitterInfo) { - int intVar; - /* Use the same CDF table as for bandwidth - * both take two values with equal probability. */ - if (WebRtcIsac_DecHistOneStepMulti(&intVar, streamData, - kOneBitEqualProbCdf_ptr, - kOneBitEqualProbInitIndex, 1) < 0) { - return -ISAC_RANGE_ERROR_DECODE_BANDWITH; - } - *jitterInfo = (int16_t)(intVar); - return 0; -} diff --git a/modules/audio_coding/codecs/isac/main/source/entropy_coding.h b/modules/audio_coding/codecs/isac/main/source/entropy_coding.h deleted file mode 100644 index 6c2b8d3cc1..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/entropy_coding.h +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * entropy_coding.h - * - * This header file declares all of the functions used to arithmetically - * encode the iSAC bistream - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -/****************************************************************************** - * WebRtcIsac_DecodeSpec() - * Decode real and imaginary part of the DFT coefficients, given a bit-stream. - * The decoded DFT coefficient can be transformed to time domain by - * WebRtcIsac_Time2Spec(). - * - * Input: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - AvgPitchGain_Q12 : average pitch-gain of the frame. This is only - * relevant for 0-4 kHz band, and the input value is - * not used in other bands. - * - band : specifies which band's DFT should be decoded. - * - * Output: - * - *fr : pointer to a buffer where the real part of DFT - * coefficients are written to. - * - *fi : pointer to a buffer where the imaginary part - * of DFT coefficients are written to. - * - * Return value : < 0 if an error occures - * 0 if succeeded. - */ -int WebRtcIsac_DecodeSpec(Bitstr* streamdata, - int16_t AvgPitchGain_Q12, - enum ISACBand band, - double* fr, - double* fi); - -/****************************************************************************** - * WebRtcIsac_EncodeSpec() - * Encode real and imaginary part of the DFT coefficients into the given - * bit-stream. - * - * Input: - * - *fr : pointer to a buffer where the real part of DFT - * coefficients are written to. - * - *fi : pointer to a buffer where the imaginary part - * of DFT coefficients are written to. - * - AvgPitchGain_Q12 : average pitch-gain of the frame. 
This is only - * relevant for 0-4 kHz band, and the input value is - * not used in other bands. - * - band : specifies which band's DFT should be decoded. - * - * Output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Return value : < 0 if an error occures - * 0 if succeeded. - */ -int WebRtcIsac_EncodeSpec(const int16_t* fr, - const int16_t* fi, - int16_t AvgPitchGain_Q12, - enum ISACBand band, - Bitstr* streamdata); - -/* decode & dequantize LPC Coef */ -int WebRtcIsac_DecodeLpcCoef(Bitstr* streamdata, double* LPCCoef); -int WebRtcIsac_DecodeLpcCoefUB(Bitstr* streamdata, - double* lpcVecs, - double* percepFilterGains, - int16_t bandwidth); - -int WebRtcIsac_DecodeLpc(Bitstr* streamdata, - double* LPCCoef_lo, - double* LPCCoef_hi); - -/* quantize & code LPC Coef */ -void WebRtcIsac_EncodeLpcLb(double* LPCCoef_lo, - double* LPCCoef_hi, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -void WebRtcIsac_EncodeLpcGainLb(double* LPCCoef_lo, - double* LPCCoef_hi, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -/****************************************************************************** - * WebRtcIsac_EncodeLpcUB() - * Encode LPC parameters, given as A-polynomial, of upper-band. The encoding - * is performed in LAR domain. - * For the upper-band, we compute and encode LPC of some sub-frames, LPC of - * other sub-frames are computed by linear interpolation, in LAR domain. This - * function performs the interpolation and returns the LPC of all sub-frames. - * - * Inputs: - * - lpcCoef : a buffer containing A-polynomials of sub-frames - * (excluding first coefficient that is 1). - * - bandwidth : specifies if the codec is operating at 0-12 kHz - * or 0-16 kHz mode. - * - * Input/output: - * - streamdata : pointer to a structure containing the encoded - * data and the parameters needed for entropy - * coding. 
- * - * Output: - * - interpolLPCCoeff : Decoded and interpolated LPC (A-polynomial) - * of all sub-frames. - * If LP analysis is of order K, and there are N - * sub-frames then this is a buffer of size - * (k + 1) * N, each vector starts with the LPC gain - * of the corresponding sub-frame. The LPC gains - * are encoded and inserted after this function is - * called. The first A-coefficient which is 1 is not - * included. - * - * Return value : 0 if encoding is successful, - * <0 if failed to encode. - */ -int16_t WebRtcIsac_EncodeLpcUB(double* lpcCoeff, - Bitstr* streamdata, - double* interpolLPCCoeff, - int16_t bandwidth, - ISACUBSaveEncDataStruct* encData); - -/****************************************************************************** - * WebRtcIsac_DecodeInterpolLpcUb() - * Decode LPC coefficients and interpolate to get the coefficients fo all - * sub-frmaes. - * - * Inputs: - * - bandwidth : spepecifies if the codec is in 0-12 kHz or - * 0-16 kHz mode. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - percepFilterParam : Decoded and interpolated LPC (A-polynomial) of - * all sub-frames. - * If LP analysis is of order K, and there are N - * sub-frames then this is a buffer of size - * (k + 1) * N, each vector starts with the LPC gain - * of the corresponding sub-frame. The LPC gains - * are encoded and inserted after this function is - * called. The first A-coefficient which is 1 is not - * included. - * - * Return value : 0 if encoding is successful, - * <0 if failed to encode. 
- */ -int16_t WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata, - double* percepFilterParam, - int16_t bandwidth); - -/* Decode & dequantize RC */ -int WebRtcIsac_DecodeRc(Bitstr* streamdata, int16_t* RCQ15); - -/* Quantize & code RC */ -void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata); - -/* Decode & dequantize squared Gain */ -int WebRtcIsac_DecodeGain2(Bitstr* streamdata, int32_t* Gain2); - -/* Quantize & code squared Gain (input is squared gain) */ -int WebRtcIsac_EncodeGain2(int32_t* gain2, Bitstr* streamdata); - -void WebRtcIsac_EncodePitchGain(int16_t* PitchGains_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -void WebRtcIsac_EncodePitchLag(double* PitchLags, - int16_t* PitchGain_Q12, - Bitstr* streamdata, - IsacSaveEncoderData* encData); - -int WebRtcIsac_DecodePitchGain(Bitstr* streamdata, int16_t* PitchGain_Q12); -int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, - int16_t* PitchGain_Q12, - double* PitchLag); - -int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, int16_t* framelength); -int WebRtcIsac_EncodeFrameLen(int16_t framelength, Bitstr* streamdata); -int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, int16_t* BWno); -void WebRtcIsac_EncodeReceiveBw(int* BWno, Bitstr* streamdata); - -/* Step-down */ -void WebRtcIsac_Poly2Rc(double* a, int N, double* RC); - -/* Step-up */ -void WebRtcIsac_Rc2Poly(double* RC, int N, double* a); - -void WebRtcIsac_TranscodeLPCCoef(double* LPCCoef_lo, - double* LPCCoef_hi, - int* index_g); - -/****************************************************************************** - * WebRtcIsac_EncodeLpcGainUb() - * Encode LPC gains of sub-Frames. - * - * Input/outputs: - * - lpGains : a buffer which contains 'SUBFRAME' number of - * LP gains to be encoded. The input values are - * overwritten by the quantized values. - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. 
- * - * Output: - * - lpcGainIndex : quantization indices for lpc gains, these will - * be stored to be used for FEC. - */ -void WebRtcIsac_EncodeLpcGainUb(double* lpGains, - Bitstr* streamdata, - int* lpcGainIndex); - -/****************************************************************************** - * WebRtcIsac_EncodeLpcGainUb() - * Store LPC gains of sub-Frames in 'streamdata'. - * - * Input: - * - lpGains : a buffer which contains 'SUBFRAME' number of - * LP gains to be encoded. - * Input/outputs: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - */ -void WebRtcIsac_StoreLpcGainUb(double* lpGains, Bitstr* streamdata); - -/****************************************************************************** - * WebRtcIsac_DecodeLpcGainUb() - * Decode the LPC gain of sub-frames. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - lpGains : a buffer where decoded LPC gians will be stored. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata); - -/****************************************************************************** - * WebRtcIsac_EncodeBandwidth() - * Encode if the bandwidth of encoded audio is 0-12 kHz or 0-16 kHz. - * - * Input: - * - bandwidth : an enumerator specifying if the codec in is - * 0-12 kHz or 0-16 kHz mode. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth, - Bitstr* streamData); - -/****************************************************************************** - * WebRtcIsac_DecodeBandwidth() - * Decode the bandwidth of the encoded audio, i.e. 
if the bandwidth is 0-12 kHz - * or 0-16 kHz. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - bandwidth : an enumerator specifying if the codec is in - * 0-12 kHz or 0-16 kHz mode. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_DecodeBandwidth(Bitstr* streamData, - enum ISACBandwidth* bandwidth); - -/****************************************************************************** - * WebRtcIsac_EncodeJitterInfo() - * Decode the jitter information. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Input: - * - jitterInfo : one bit of info specifying if the channel is - * in high/low jitter. Zero indicates low jitter - * and one indicates high jitter. - * - * Return value : 0 if succeeded. - * <0 if failed. - */ -int16_t WebRtcIsac_EncodeJitterInfo(int32_t jitterIndex, Bitstr* streamData); - -/****************************************************************************** - * WebRtcIsac_DecodeJitterInfo() - * Decode the jitter information. - * - * Input/output: - * - streamdata : pointer to a stucture containg the encoded - * data and theparameters needed for entropy - * coding. - * - * Output: - * - jitterInfo : one bit of info specifying if the channel is - * in high/low jitter. Zero indicates low jitter - * and one indicates high jitter. - * - * Return value : 0 if succeeded. - * <0 if failed. 
- */ -int16_t WebRtcIsac_DecodeJitterInfo(Bitstr* streamData, int32_t* jitterInfo); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/filterbanks.c b/modules/audio_coding/codecs/isac/main/source/filterbanks.c deleted file mode 100644 index d57b55022d..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/filterbanks.c +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * filterbanks.c - * - * This file contains function WebRtcIsac_AllPassFilter2Float, - * WebRtcIsac_SplitAndFilter, and WebRtcIsac_FilterAndCombine - * which implement filterbanks that produce decimated lowpass and - * highpass versions of a signal, and performs reconstruction. 
- * - */ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" - -/* Combining */ - -/* HPstcoeff_out_1 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -static const float kHpStCoefOut1Float[4] = -{-1.99701049409000f, 0.99714204490000f, 0.01701049409000f, -0.01704204490000f}; - -/* HPstcoeff_out_2 = {a1, a2, b1 - b0 * a1, b2 - b0 * a2}; */ -static const float kHpStCoefOut2Float[4] = -{-1.98645294509837f, 0.98672435560000f, 0.00645294509837f, -0.00662435560000f}; - - -/* Function WebRtcIsac_FilterAndCombine */ -/* This is a decoder function that takes the decimated - length FRAMESAMPLES_HALF input low-pass and - high-pass signals and creates a reconstructed fullband - output signal of length FRAMESAMPLES. WebRtcIsac_FilterAndCombine - is the sibling function of WebRtcIsac_SplitAndFilter */ -/* INPUTS: - inLP: a length FRAMESAMPLES_HALF array of input low-pass - samples. - inHP: a length FRAMESAMPLES_HALF array of input high-pass - samples. - postfiltdata: input data structure containing the filterbank - states from the previous decoding iteration. - OUTPUTS: - Out: a length FRAMESAMPLES array of output reconstructed - samples (fullband) based on the input low-pass and - high-pass signals. - postfiltdata: the input data structure containing the filterbank - states is updated for the next decoding iteration */ -void WebRtcIsac_FilterAndCombineFloat(float *InLP, - float *InHP, - float *Out, - PostFiltBankstr *postfiltdata) -{ - int k; - float tempin_ch1[FRAMESAMPLES+MAX_AR_MODEL_ORDER]; - float tempin_ch2[FRAMESAMPLES+MAX_AR_MODEL_ORDER]; - float ftmp, ftmp2; - - /* Form the polyphase signals*/ - for (k=0;kSTATE_0_UPPER_float); - - /* Now, all-pass filter the new lower channel signal. 
But since all-pass filter factors - at the decoder are swapped from the ones at the encoder, the 'upper' channel - all-pass filter factors (WebRtcIsac_kUpperApFactorsFloat) are used to filter this new - lower channel signal */ - WebRtcIsac_AllPassFilter2Float(tempin_ch2, WebRtcIsac_kUpperApFactorsFloat, - FRAMESAMPLES_HALF, NUMBEROFCHANNELAPSECTIONS,postfiltdata->STATE_0_LOWER_float); - - - /* Merge outputs to form the full length output signal.*/ - for (k=0;kHPstates1_float[0] + - kHpStCoefOut1Float[3] * postfiltdata->HPstates1_float[1]; - ftmp = Out[k] - kHpStCoefOut1Float[0] * postfiltdata->HPstates1_float[0] - - kHpStCoefOut1Float[1] * postfiltdata->HPstates1_float[1]; - postfiltdata->HPstates1_float[1] = postfiltdata->HPstates1_float[0]; - postfiltdata->HPstates1_float[0] = ftmp; - Out[k] = ftmp2; - } - - for (k=0;kHPstates2_float[0] + - kHpStCoefOut2Float[3] * postfiltdata->HPstates2_float[1]; - ftmp = Out[k] - kHpStCoefOut2Float[0] * postfiltdata->HPstates2_float[0] - - kHpStCoefOut2Float[1] * postfiltdata->HPstates2_float[1]; - postfiltdata->HPstates2_float[1] = postfiltdata->HPstates2_float[0]; - postfiltdata->HPstates2_float[0] = ftmp; - Out[k] = ftmp2; - } -} diff --git a/modules/audio_coding/codecs/isac/main/source/intialize.c b/modules/audio_coding/codecs/isac/main/source/intialize.c deleted file mode 100644 index 5c951f6e9d..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/intialize.c +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* encode.c - Encoding function for the iSAC coder */ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" - -void WebRtcIsac_InitMasking(MaskFiltstr *maskdata) { - - int k; - - for (k = 0; k < WINLEN; k++) { - maskdata->DataBufferLo[k] = 0.0; - maskdata->DataBufferHi[k] = 0.0; - } - for (k = 0; k < ORDERLO+1; k++) { - maskdata->CorrBufLo[k] = 0.0; - maskdata->PreStateLoF[k] = 0.0; - maskdata->PreStateLoG[k] = 0.0; - maskdata->PostStateLoF[k] = 0.0; - maskdata->PostStateLoG[k] = 0.0; - } - for (k = 0; k < ORDERHI+1; k++) { - maskdata->CorrBufHi[k] = 0.0; - maskdata->PreStateHiF[k] = 0.0; - maskdata->PreStateHiG[k] = 0.0; - maskdata->PostStateHiF[k] = 0.0; - maskdata->PostStateHiG[k] = 0.0; - } - - maskdata->OldEnergy = 10.0; - return; -} - -void WebRtcIsac_InitPostFilterbank(PostFiltBankstr *postfiltdata) -{ - int k; - - for (k = 0; k < 2*POSTQORDER; k++) { - postfiltdata->STATE_0_LOWER[k] = 0; - postfiltdata->STATE_0_UPPER[k] = 0; - - postfiltdata->STATE_0_LOWER_float[k] = 0; - postfiltdata->STATE_0_UPPER_float[k] = 0; - } - - /* High pass filter states */ - postfiltdata->HPstates1[0] = 0.0; - postfiltdata->HPstates1[1] = 0.0; - - postfiltdata->HPstates2[0] = 0.0; - postfiltdata->HPstates2[1] = 0.0; - - postfiltdata->HPstates1_float[0] = 0.0f; - postfiltdata->HPstates1_float[1] = 0.0f; - - postfiltdata->HPstates2_float[0] = 0.0f; - postfiltdata->HPstates2_float[1] = 0.0f; - - return; -} diff --git a/modules/audio_coding/codecs/isac/main/source/isac.c b/modules/audio_coding/codecs/isac/main/source/isac.c deleted file mode 100644 index 456f447d9a..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/isac.c +++ /dev/null @@ -1,2307 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * isac.c - * - * This C file contains the functions for the ISAC API - * - */ - -#include "modules/audio_coding/codecs/isac/main/include/isac.h" - -#include -#include -#include -#include - -#include "rtc_base/checks.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/crc.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" -#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" -#include "rtc_base/system/arch.h" - -#define BIT_MASK_DEC_INIT 0x0001 -#define BIT_MASK_ENC_INIT 0x0002 - -#define LEN_CHECK_SUM_WORD8 4 -#define MAX_NUM_LAYERS 10 - - -/**************************************************************************** - * UpdatePayloadSizeLimit(...) - * - * Call this function to update the limit on the payload size. The limit on - * payload size might change i) if a user ''directly changes the limit by - * calling xxx_setMaxPayloadSize() or xxx_setMaxRate(), or ii) indirectly - * when bandwidth is changing. The latter might be the result of bandwidth - * adaptation, or direct change of the bottleneck in instantaneous mode. 
- * - * This function takes the current overall limit on payload, and translates it - * to the limits on lower and upper-band. If the codec is in wideband mode, - * then the overall limit and the limit on the lower-band is the same. - * Otherwise, a fraction of the limit should be allocated to lower-band - * leaving some room for the upper-band bit-stream. That is why an update - * of limit is required every time that the bandwidth is changing. - * - */ -static void UpdatePayloadSizeLimit(ISACMainStruct* instISAC) { - int16_t lim30MsPayloadBytes = WEBRTC_SPL_MIN( - (instISAC->maxPayloadSizeBytes), - (instISAC->maxRateBytesPer30Ms)); - int16_t lim60MsPayloadBytes = WEBRTC_SPL_MIN( - (instISAC->maxPayloadSizeBytes), - (instISAC->maxRateBytesPer30Ms << 1)); - - /* The only time that iSAC will have 60 ms - * frame-size is when operating in wideband, so - * there is no upper-band bit-stream. */ - - if (instISAC->bandwidthKHz == isac8kHz) { - /* At 8 kHz there is no upper-band bit-stream, - * therefore, the lower-band limit is the overall limit. */ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes60 = - lim60MsPayloadBytes; - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - lim30MsPayloadBytes; - } else { - /* When in super-wideband, we only have 30 ms frames. - * Do a rate allocation for the given limit. */ - if (lim30MsPayloadBytes > 250) { - /* 4/5 to lower-band the rest for upper-band. */ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - (lim30MsPayloadBytes << 2) / 5; - } else if (lim30MsPayloadBytes > 200) { - /* For the interval of 200 to 250 the share of - * upper-band linearly grows from 20 to 50. */ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - (lim30MsPayloadBytes << 1) / 5 + 100; - } else { - /* Allocate only 20 for upper-band. 
*/ - instISAC->instLB.ISACencLB_obj.payloadLimitBytes30 = - lim30MsPayloadBytes - 20; - } - instISAC->instUB.ISACencUB_obj.maxPayloadSizeBytes = - lim30MsPayloadBytes; - } -} - - -/**************************************************************************** - * UpdateBottleneck(...) - * - * This function updates the bottleneck only if the codec is operating in - * channel-adaptive mode. Furthermore, as the update of bottleneck might - * result in an update of bandwidth, therefore, the bottlenech should be - * updated just right before the first 10ms of a frame is pushed into encoder. - * - */ -static void UpdateBottleneck(ISACMainStruct* instISAC) { - /* Read the bottleneck from bandwidth estimator for the - * first 10 ms audio. This way, if there is a change - * in bandwidth, upper and lower-band will be in sync. */ - if ((instISAC->codingMode == 0) && - (instISAC->instLB.ISACencLB_obj.buffer_index == 0) && - (instISAC->instLB.ISACencLB_obj.frame_nb == 0)) { - int32_t bottleneck = - WebRtcIsac_GetUplinkBandwidth(&instISAC->bwestimator_obj); - - /* Adding hysteresis when increasing signal bandwidth. */ - if ((instISAC->bandwidthKHz == isac8kHz) - && (bottleneck > 37000) - && (bottleneck < 41000)) { - bottleneck = 37000; - } - - /* Switching from 12 kHz to 16 kHz is not allowed at this revision. - * If we let this happen, we have to take care of buffer_index and - * the last LPC vector. */ - if ((instISAC->bandwidthKHz != isac16kHz) && - (bottleneck > 46000)) { - bottleneck = 46000; - } - - /* We might need a rate allocation. */ - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - /* Wideband is the only choice we have here. */ - instISAC->instLB.ISACencLB_obj.bottleneck = - (bottleneck > 32000) ? 32000 : bottleneck; - instISAC->bandwidthKHz = isac8kHz; - } else { - /* Do the rate-allocation and get the new bandwidth. 
*/ - enum ISACBandwidth bandwidth; - WebRtcIsac_RateAllocation(bottleneck, - &(instISAC->instLB.ISACencLB_obj.bottleneck), - &(instISAC->instUB.ISACencUB_obj.bottleneck), - &bandwidth); - if (bandwidth != isac8kHz) { - instISAC->instLB.ISACencLB_obj.new_framelength = 480; - } - if (bandwidth != instISAC->bandwidthKHz) { - /* Bandwidth is changing. */ - instISAC->bandwidthKHz = bandwidth; - UpdatePayloadSizeLimit(instISAC); - if (bandwidth == isac12kHz) { - instISAC->instLB.ISACencLB_obj.buffer_index = 0; - } - /* Currently we don't let the bandwidth to switch to 16 kHz - * if in adaptive mode. If we let this happen, we have to take - * care of buffer_index and the last LPC vector. */ - } - } - } -} - - -/**************************************************************************** - * GetSendBandwidthInfo(...) - * - * This is called to get the bandwidth info. This info is the bandwidth and - * the jitter of 'there-to-here' channel, estimated 'here.' These info - * is signaled in an in-band fashion to the other side. - * - * The call to the bandwidth estimator triggers a recursive averaging which - * has to be synchronized between encoder & decoder, therefore, the call to - * BWE should be once per packet. As the BWE info is inserted into bit-stream - * We need a valid info right before the encodeLB function is going to - * generate a bit-stream. That is when lower-band buffer has already 20ms - * of audio, and the 3rd block of 10ms is going to be injected into encoder. - * - * Inputs: - * - instISAC : iSAC instance. - * - * Outputs: - * - bandwidthIndex : an index which has to be encoded in - * lower-band bit-stream, indicating the - * bandwidth of there-to-here channel. - * - jitterInfo : this indicates if the jitter is high - * or low and it is encoded in upper-band - * bit-stream. 
- * - */ -static void GetSendBandwidthInfo(ISACMainStruct* instISAC, - int16_t* bandwidthIndex, - int16_t* jitterInfo) { - if ((instISAC->instLB.ISACencLB_obj.buffer_index == - (FRAMESAMPLES_10ms << 1)) && - (instISAC->instLB.ISACencLB_obj.frame_nb == 0)) { - /* Bandwidth estimation and coding. */ - WebRtcIsac_GetDownlinkBwJitIndexImpl(&(instISAC->bwestimator_obj), - bandwidthIndex, jitterInfo, - instISAC->decoderSamplingRateKHz); - } -} - - -/**************************************************************************** - * WebRtcIsac_Create(...) - * - * This function creates an ISAC instance, which will contain the state - * information for one coding/decoding channel. - * - * Input: - * - ISAC_main_inst : address of the pointer to the coder instance. - * - * Return value : 0 - Ok - * -1 - Error - */ -int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) { - ISACMainStruct* instISAC; - - if (ISAC_main_inst != NULL) { - instISAC = (ISACMainStruct*)malloc(sizeof(ISACMainStruct)); - *ISAC_main_inst = (ISACStruct*)instISAC; - if (*ISAC_main_inst != NULL) { - instISAC->errorCode = 0; - instISAC->initFlag = 0; - /* Default is wideband. */ - instISAC->bandwidthKHz = isac8kHz; - instISAC->encoderSamplingRateKHz = kIsacWideband; - instISAC->decoderSamplingRateKHz = kIsacWideband; - instISAC->in_sample_rate_hz = 16000; - - WebRtcIsac_InitTransform(&instISAC->transform_tables); - return 0; - } else { - return -1; - } - } else { - return -1; - } -} - - -/**************************************************************************** - * WebRtcIsac_Free(...) - * - * This function frees the ISAC instance created at the beginning. - * - * Input: - * - ISAC_main_inst : a ISAC instance. 
- * - * Return value : 0 - Ok - * -1 - Error - */ -int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - free(instISAC); - return 0; -} - - -/**************************************************************************** - * EncoderInitLb(...) - internal function for initialization of - * Lower Band - * EncoderInitUb(...) - internal function for initialization of - * Upper Band - * WebRtcIsac_EncoderInit(...) - API function - * - * This function initializes a ISAC instance prior to the encoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - CodingMode : 0 -> Bit rate and frame length are automatically - * adjusted to available bandwidth on - * transmission channel, applicable just to - * wideband mode. - * 1 -> User sets a frame length and a target bit - * rate which is taken as the maximum - * short-term average bit rate. - * - * Return value : 0 - Ok - * -1 - Error - */ -static int16_t EncoderInitLb(ISACLBStruct* instLB, - int16_t codingMode, - enum IsacSamplingRate sampRate) { - int16_t statusInit = 0; - int k; - - /* Init stream vector to zero */ - for (k = 0; k < STREAM_SIZE_MAX_60; k++) { - instLB->ISACencLB_obj.bitstr_obj.stream[k] = 0; - } - - if ((codingMode == 1) || (sampRate == kIsacSuperWideband)) { - /* 30 ms frame-size if either in super-wideband or - * instantaneous mode (I-mode). */ - instLB->ISACencLB_obj.new_framelength = 480; - } else { - instLB->ISACencLB_obj.new_framelength = INITIAL_FRAMESAMPLES; - } - - WebRtcIsac_InitMasking(&instLB->ISACencLB_obj.maskfiltstr_obj); - WebRtcIsac_InitPreFilterbank(&instLB->ISACencLB_obj.prefiltbankstr_obj); - WebRtcIsac_InitPitchFilter(&instLB->ISACencLB_obj.pitchfiltstr_obj); - WebRtcIsac_InitPitchAnalysis( - &instLB->ISACencLB_obj.pitchanalysisstr_obj); - - instLB->ISACencLB_obj.buffer_index = 0; - instLB->ISACencLB_obj.frame_nb = 0; - /* Default for I-mode. 
*/ - instLB->ISACencLB_obj.bottleneck = 32000; - instLB->ISACencLB_obj.current_framesamples = 0; - instLB->ISACencLB_obj.s2nr = 0; - instLB->ISACencLB_obj.payloadLimitBytes30 = STREAM_SIZE_MAX_30; - instLB->ISACencLB_obj.payloadLimitBytes60 = STREAM_SIZE_MAX_60; - instLB->ISACencLB_obj.maxPayloadBytes = STREAM_SIZE_MAX_60; - instLB->ISACencLB_obj.maxRateInBytes = STREAM_SIZE_MAX_30; - instLB->ISACencLB_obj.enforceFrameSize = 0; - /* Invalid value prevents getRedPayload to - run before encoder is called. */ - instLB->ISACencLB_obj.lastBWIdx = -1; - return statusInit; -} - -static int16_t EncoderInitUb(ISACUBStruct* instUB, - int16_t bandwidth) { - int16_t statusInit = 0; - int k; - - /* Init stream vector to zero. */ - for (k = 0; k < STREAM_SIZE_MAX_60; k++) { - instUB->ISACencUB_obj.bitstr_obj.stream[k] = 0; - } - - WebRtcIsac_InitMasking(&instUB->ISACencUB_obj.maskfiltstr_obj); - WebRtcIsac_InitPreFilterbank(&instUB->ISACencUB_obj.prefiltbankstr_obj); - - if (bandwidth == isac16kHz) { - instUB->ISACencUB_obj.buffer_index = LB_TOTAL_DELAY_SAMPLES; - } else { - instUB->ISACencUB_obj.buffer_index = 0; - } - /* Default for I-mode. */ - instUB->ISACencUB_obj.bottleneck = 32000; - /* These store the limits for the wideband + super-wideband bit-stream. */ - instUB->ISACencUB_obj.maxPayloadSizeBytes = STREAM_SIZE_MAX_30 << 1; - /* This has to be updated after each lower-band encoding to guarantee - * a correct payload-limitation. 
*/ - instUB->ISACencUB_obj.numBytesUsed = 0; - memset(instUB->ISACencUB_obj.data_buffer_float, 0, - (MAX_FRAMESAMPLES + LB_TOTAL_DELAY_SAMPLES) * sizeof(float)); - - memcpy(&(instUB->ISACencUB_obj.lastLPCVec), - WebRtcIsac_kMeanLarUb16, sizeof(double) * UB_LPC_ORDER); - - return statusInit; -} - - -int16_t WebRtcIsac_EncoderInit(ISACStruct* ISAC_main_inst, - int16_t codingMode) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t status; - - if ((codingMode != 0) && (codingMode != 1)) { - instISAC->errorCode = ISAC_DISALLOWED_CODING_MODE; - return -1; - } - /* Default bottleneck. */ - instISAC->bottleneck = MAX_ISAC_BW; - - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - instISAC->bandwidthKHz = isac8kHz; - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX_60; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX_30; - } else { - instISAC->bandwidthKHz = isac16kHz; - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX; - } - - /* Channel-adaptive = 0; Instantaneous (Channel-independent) = 1. */ - instISAC->codingMode = codingMode; - - WebRtcIsac_InitBandwidthEstimator(&instISAC->bwestimator_obj, - instISAC->encoderSamplingRateKHz, - instISAC->decoderSamplingRateKHz); - - WebRtcIsac_InitRateModel(&instISAC->rate_data_obj); - /* Default for I-mode. */ - instISAC->MaxDelay = 10.0; - - status = EncoderInitLb(&instISAC->instLB, codingMode, - instISAC->encoderSamplingRateKHz); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - /* Initialize encoder filter-bank. 
*/ - memset(instISAC->analysisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->analysisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - - status = EncoderInitUb(&(instISAC->instUB), - instISAC->bandwidthKHz); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - } - /* Initialization is successful, set the flag. */ - instISAC->initFlag |= BIT_MASK_ENC_INIT; - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_Encode(...) - * - * This function encodes 10ms frame(s) and inserts it into a package. - * Input speech length has to be 160 samples (10ms). The encoder buffers those - * 10ms frames until it reaches the chosen Framesize (480 or 960 samples - * corresponding to 30 or 60 ms frames), and then proceeds to the encoding. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - speechIn : input speech vector. - * - * Output: - * - encoded : the encoded data vector - * - * Return value: - * : >0 - Length (in bytes) of coded data - * : 0 - The buffer didn't reach the chosen - * frameSize so it keeps buffering speech - * samples. - * : -1 - Error - */ -int WebRtcIsac_Encode(ISACStruct* ISAC_main_inst, - const int16_t* speechIn, - uint8_t* encoded) { - float inFrame[FRAMESAMPLES_10ms]; - int16_t speechInLB[FRAMESAMPLES_10ms]; - int16_t speechInUB[FRAMESAMPLES_10ms]; - int streamLenLB = 0; - int streamLenUB = 0; - int streamLen = 0; - size_t k = 0; - uint8_t garbageLen = 0; - int32_t bottleneck = 0; - int16_t bottleneckIdx = 0; - int16_t jitterInfo = 0; - - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - ISACLBStruct* instLB = &(instISAC->instLB); - ISACUBStruct* instUB = &(instISAC->instUB); - - /* Check if encoder initiated. 
*/ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - WebRtcSpl_AnalysisQMF(speechIn, SWBFRAMESAMPLES_10ms, speechInLB, - speechInUB, instISAC->analysisFBState1, - instISAC->analysisFBState2); - - /* Convert from fixed to floating point. */ - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - inFrame[k] = (float)speechInLB[k]; - } - } else { - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - inFrame[k] = (float) speechIn[k]; - } - } - - /* Add some noise to avoid denormal numbers. */ - inFrame[0] += (float)1.23455334e-3; - inFrame[1] -= (float)2.04324239e-3; - inFrame[2] += (float)1.90854954e-3; - inFrame[9] += (float)1.84854878e-3; - - /* This function will update the bottleneck if required. */ - UpdateBottleneck(instISAC); - - /* Get the bandwith information which has to be sent to the other side. */ - GetSendBandwidthInfo(instISAC, &bottleneckIdx, &jitterInfo); - - /* Encode lower-band. */ - streamLenLB = WebRtcIsac_EncodeLb(&instISAC->transform_tables, - inFrame, &instLB->ISACencLB_obj, - instISAC->codingMode, bottleneckIdx); - if (streamLenLB < 0) { - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - instUB = &(instISAC->instUB); - - /* Convert to float. */ - for (k = 0; k < FRAMESAMPLES_10ms; k++) { - inFrame[k] = (float) speechInUB[k]; - } - - /* Add some noise to avoid denormal numbers. */ - inFrame[0] += (float)1.23455334e-3; - inFrame[1] -= (float)2.04324239e-3; - inFrame[2] += (float)1.90854954e-3; - inFrame[9] += (float)1.84854878e-3; - - /* Tell to upper-band the number of bytes used so far. - * This is for payload limitation. */ - instUB->ISACencUB_obj.numBytesUsed = - (int16_t)(streamLenLB + 1 + LEN_CHECK_SUM_WORD8); - /* Encode upper-band. 
*/ - switch (instISAC->bandwidthKHz) { - case isac12kHz: { - streamLenUB = WebRtcIsac_EncodeUb12(&instISAC->transform_tables, - inFrame, &instUB->ISACencUB_obj, - jitterInfo); - break; - } - case isac16kHz: { - streamLenUB = WebRtcIsac_EncodeUb16(&instISAC->transform_tables, - inFrame, &instUB->ISACencUB_obj, - jitterInfo); - break; - } - case isac8kHz: { - streamLenUB = 0; - break; - } - } - - if ((streamLenUB < 0) && (streamLenUB != -ISAC_PAYLOAD_LARGER_THAN_LIMIT)) { - /* An error has happened but this is not the error due to a - * bit-stream larger than the limit. */ - return -1; - } - - if (streamLenLB == 0) { - return 0; - } - - /* One byte is allocated for the length. According to older decoders - so the length bit-stream plus one byte for size and - LEN_CHECK_SUM_WORD8 for the checksum should be less than or equal - to 255. */ - if ((streamLenUB > (255 - (LEN_CHECK_SUM_WORD8 + 1))) || - (streamLenUB == -ISAC_PAYLOAD_LARGER_THAN_LIMIT)) { - /* We have got a too long bit-stream we skip the upper-band - * bit-stream for this frame. */ - streamLenUB = 0; - } - - memcpy(encoded, instLB->ISACencLB_obj.bitstr_obj.stream, streamLenLB); - streamLen = streamLenLB; - if (streamLenUB > 0) { - encoded[streamLenLB] = (uint8_t)(streamLenUB + 1 + LEN_CHECK_SUM_WORD8); - memcpy(&encoded[streamLenLB + 1], - instUB->ISACencUB_obj.bitstr_obj.stream, - streamLenUB); - streamLen += encoded[streamLenLB]; - } else { - encoded[streamLenLB] = 0; - } - } else { - if (streamLenLB == 0) { - return 0; - } - memcpy(encoded, instLB->ISACencLB_obj.bitstr_obj.stream, streamLenLB); - streamLenUB = 0; - streamLen = streamLenLB; - } - - /* Add Garbage if required. */ - bottleneck = WebRtcIsac_GetUplinkBandwidth(&instISAC->bwestimator_obj); - if (instISAC->codingMode == 0) { - int minBytes; - int limit; - uint8_t* ptrGarbage; - - instISAC->MaxDelay = (double)WebRtcIsac_GetUplinkMaxDelay( - &instISAC->bwestimator_obj); - - /* Update rate model and get minimum number of bytes in this packet. 
*/ - minBytes = WebRtcIsac_GetMinBytes( - &(instISAC->rate_data_obj), streamLen, - instISAC->instLB.ISACencLB_obj.current_framesamples, bottleneck, - instISAC->MaxDelay, instISAC->bandwidthKHz); - - /* Make sure MinBytes does not exceed packet size limit. */ - if (instISAC->bandwidthKHz == isac8kHz) { - if (instLB->ISACencLB_obj.current_framesamples == FRAMESAMPLES) { - limit = instLB->ISACencLB_obj.payloadLimitBytes30; - } else { - limit = instLB->ISACencLB_obj.payloadLimitBytes60; - } - } else { - limit = instUB->ISACencUB_obj.maxPayloadSizeBytes; - } - minBytes = (minBytes > limit) ? limit : minBytes; - - /* Make sure we don't allow more than 255 bytes of garbage data. - * We store the length of the garbage data in 8 bits in the bitstream, - * 255 is the max garbage length we can signal using 8 bits. */ - if ((instISAC->bandwidthKHz == isac8kHz) || - (streamLenUB == 0)) { - ptrGarbage = &encoded[streamLenLB]; - limit = streamLen + 255; - } else { - ptrGarbage = &encoded[streamLenLB + 1 + streamLenUB]; - limit = streamLen + (255 - encoded[streamLenLB]); - } - minBytes = (minBytes > limit) ? limit : minBytes; - - garbageLen = (minBytes > streamLen) ? (uint8_t)(minBytes - streamLen) : 0; - - /* Save data for creation of multiple bit-streams. */ - /* If bit-stream too short then add garbage at the end. */ - if (garbageLen > 0) { - /* Overwrite the garbage area to avoid leaking possibly sensitive data - over the network. This also makes the output deterministic. */ - memset(ptrGarbage, 0, garbageLen); - - /* For a correct length of the upper-band bit-stream together - * with the garbage. Garbage is embeded in upper-band bit-stream. - * That is the only way to preserve backward compatibility. */ - if ((instISAC->bandwidthKHz == isac8kHz) || - (streamLenUB == 0)) { - encoded[streamLenLB] = garbageLen; - } else { - encoded[streamLenLB] += garbageLen; - /* Write the length of the garbage at the end of the upper-band - * bit-stream, if exists. This helps for sanity check. 
*/ - encoded[streamLenLB + 1 + streamLenUB] = garbageLen; - - } - streamLen += garbageLen; - } - } else { - /* update rate model */ - WebRtcIsac_UpdateRateModel( - &instISAC->rate_data_obj, streamLen, - instISAC->instLB.ISACencLB_obj.current_framesamples, bottleneck); - garbageLen = 0; - } - - /* Generate CRC if required. */ - if ((instISAC->bandwidthKHz != isac8kHz) && (streamLenUB > 0)) { - uint32_t crc; - - WebRtcIsac_GetCrc((int16_t*)(&(encoded[streamLenLB + 1])), - streamLenUB + garbageLen, &crc); -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - encoded[streamLen - LEN_CHECK_SUM_WORD8 + k] = - (uint8_t)(crc >> (24 - k * 8)); - } -#else - memcpy(&encoded[streamLenLB + streamLenUB + 1], &crc, LEN_CHECK_SUM_WORD8); -#endif - } - return streamLen; -} - - -/****************************************************************************** - * WebRtcIsac_GetNewBitStream(...) - * - * This function returns encoded data, with the received bwe-index in the - * stream. If the rate is set to a value less than bottleneck of codec - * the new bistream will be re-encoded with the given target rate. - * It should always return a complete packet, i.e. only called once - * even for 60 msec frames. - * - * NOTE 1! This function does not write in the ISACStruct, it is not allowed. - * NOTE 2! Rates larger than the bottleneck of the codec will be limited - * to the current bottleneck. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - bweIndex : Index of bandwidth estimate to put in new - * bitstream - * - rate : target rate of the transcoder is bits/sec. - * Valid values are the accepted rate in iSAC, - * i.e. 10000 to 56000. - * - * Output: - * - encoded : The encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * -1 - Error or called in SWB mode - * NOTE! No error code is written to - * the struct since it is only allowed to read - * the struct. 
- */ -int16_t WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst, - int16_t bweIndex, - int16_t jitterInfo, - int32_t rate, - uint8_t* encoded, - int16_t isRCU) { - Bitstr iSACBitStreamInst; /* Local struct for bitstream handling */ - int16_t streamLenLB; - int16_t streamLenUB; - int16_t totalStreamLen; - double gain2; - double gain1; - float scale; - enum ISACBandwidth bandwidthKHz; - double rateLB; - double rateUB; - int32_t currentBN; - uint32_t crc; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int16_t k; -#endif - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - return -1; - } - - /* Get the bottleneck of this iSAC and limit the - * given rate to the current bottleneck. */ - WebRtcIsac_GetUplinkBw(ISAC_main_inst, &currentBN); - if (rate > currentBN) { - rate = currentBN; - } - - if (WebRtcIsac_RateAllocation(rate, &rateLB, &rateUB, &bandwidthKHz) < 0) { - return -1; - } - - /* Cannot transcode from 16 kHz to 12 kHz. */ - if ((bandwidthKHz == isac12kHz) && - (instISAC->bandwidthKHz == isac16kHz)) { - return -1; - } - - /* A gain [dB] for the given rate. */ - gain1 = WebRtcIsac_GetSnr( - rateLB, instISAC->instLB.ISACencLB_obj.current_framesamples); - /* The gain [dB] of this iSAC. */ - gain2 = WebRtcIsac_GetSnr( - instISAC->instLB.ISACencLB_obj.bottleneck, - instISAC->instLB.ISACencLB_obj.current_framesamples); - - /* Scale is the ratio of two gains in normal domain. */ - scale = (float)pow(10, (gain1 - gain2) / 20.0); - /* Change the scale if this is a RCU bit-stream. */ - scale = (isRCU) ? (scale * RCU_TRANSCODING_SCALE) : scale; - - streamLenLB = WebRtcIsac_EncodeStoredDataLb( - &instISAC->instLB.ISACencLB_obj.SaveEnc_obj, - &iSACBitStreamInst, bweIndex, scale); - - if (streamLenLB < 0) { - return -1; - } - - /* Convert from bytes to int16_t. 
*/ - memcpy(encoded, iSACBitStreamInst.stream, streamLenLB); - - if (bandwidthKHz == isac8kHz) { - return streamLenLB; - } - - totalStreamLen = streamLenLB; - /* super-wideband is always at 30ms. - * These gains are in dB. - * Gain for the given rate. */ - gain1 = WebRtcIsac_GetSnr(rateUB, FRAMESAMPLES); - /* Gain of this iSAC */ - gain2 = WebRtcIsac_GetSnr(instISAC->instUB.ISACencUB_obj.bottleneck, - FRAMESAMPLES); - - /* Scale is the ratio of two gains in normal domain. */ - scale = (float)pow(10, (gain1 - gain2) / 20.0); - - /* Change the scale if this is a RCU bit-stream. */ - scale = (isRCU)? (scale * RCU_TRANSCODING_SCALE_UB) : scale; - - streamLenUB = WebRtcIsac_EncodeStoredDataUb( - &(instISAC->instUB.ISACencUB_obj.SaveEnc_obj), - &iSACBitStreamInst, jitterInfo, scale, - instISAC->bandwidthKHz); - - if (streamLenUB < 0) { - return -1; - } - - if (streamLenUB + 1 + LEN_CHECK_SUM_WORD8 > 255) { - return streamLenLB; - } - - totalStreamLen = streamLenLB + streamLenUB + 1 + LEN_CHECK_SUM_WORD8; - encoded[streamLenLB] = streamLenUB + 1 + LEN_CHECK_SUM_WORD8; - - memcpy(&encoded[streamLenLB + 1], iSACBitStreamInst.stream, - streamLenUB); - - WebRtcIsac_GetCrc((int16_t*)(&(encoded[streamLenLB + 1])), - streamLenUB, &crc); -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - encoded[totalStreamLen - LEN_CHECK_SUM_WORD8 + k] = - (uint8_t)((crc >> (24 - k * 8)) & 0xFF); - } -#else - memcpy(&encoded[streamLenLB + streamLenUB + 1], &crc, - LEN_CHECK_SUM_WORD8); -#endif - return totalStreamLen; -} - - -/**************************************************************************** - * DecoderInitLb(...) - internal function for initialization of - * Lower Band - * DecoderInitUb(...) - internal function for initialization of - * Upper Band - * WebRtcIsac_DecoderInit(...) - API function - * - * This function initializes a ISAC instance prior to the decoder calls. - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- */ -static void DecoderInitLb(ISACLBStruct* instISAC) { - int i; - /* Initialize stream vector to zero. */ - for (i = 0; i < STREAM_SIZE_MAX_60; i++) { - instISAC->ISACdecLB_obj.bitstr_obj.stream[i] = 0; - } - - WebRtcIsac_InitMasking(&instISAC->ISACdecLB_obj.maskfiltstr_obj); - WebRtcIsac_InitPostFilterbank( - &instISAC->ISACdecLB_obj.postfiltbankstr_obj); - WebRtcIsac_InitPitchFilter(&instISAC->ISACdecLB_obj.pitchfiltstr_obj); -} - -static void DecoderInitUb(ISACUBStruct* instISAC) { - int i; - /* Init stream vector to zero */ - for (i = 0; i < STREAM_SIZE_MAX_60; i++) { - instISAC->ISACdecUB_obj.bitstr_obj.stream[i] = 0; - } - - WebRtcIsac_InitMasking(&instISAC->ISACdecUB_obj.maskfiltstr_obj); - WebRtcIsac_InitPostFilterbank( - &instISAC->ISACdecUB_obj.postfiltbankstr_obj); -} - -void WebRtcIsac_DecoderInit(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - DecoderInitLb(&instISAC->instLB); - if (instISAC->decoderSamplingRateKHz == kIsacSuperWideband) { - memset(instISAC->synthesisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->synthesisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - DecoderInitUb(&(instISAC->instUB)); - } - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != BIT_MASK_ENC_INIT) { - WebRtcIsac_InitBandwidthEstimator(&instISAC->bwestimator_obj, - instISAC->encoderSamplingRateKHz, - instISAC->decoderSamplingRateKHz); - } - instISAC->initFlag |= BIT_MASK_DEC_INIT; - instISAC->resetFlag_8kHz = 0; -} - - -/**************************************************************************** - * WebRtcIsac_UpdateBwEstimate(...) - * - * This function updates the estimate of the bandwidth. - * - * NOTE: - * The estimates of bandwidth is not valid if the sample rate of the far-end - * encoder is set to 48 kHz and send timestamps are increamented according to - * 48 kHz sampling rate. - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC frame(s). 
- * - packet_size : size of the packet. - * - rtp_seq_number : the RTP number of the packet. - * - arr_ts : the arrival time of the packet (from NetEq) - * in samples. - * - * Return value : 0 - Ok - * -1 - Error - */ -int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t packet_size, - uint16_t rtp_seq_number, - uint32_t send_ts, - uint32_t arr_ts) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - Bitstr streamdata; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - int16_t err; - - /* Check if decoder initiated. */ - if ((instISAC->initFlag & BIT_MASK_DEC_INIT) != BIT_MASK_DEC_INIT) { - instISAC->errorCode = ISAC_DECODER_NOT_INITIATED; - return -1; - } - - /* Check that the size of the packet is valid, and if not return without - * updating the bandwidth estimate. A valid size is at least 10 bytes. */ - if (packet_size < 10) { - /* Return error code if the packet length is null. */ - instISAC->errorCode = ISAC_EMPTY_PACKET; - return -1; - } - - WebRtcIsac_ResetBitstream(&(streamdata)); - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < 10; k++) { - uint16_t ek = ((const uint16_t*)encoded)[k >> 1]; - streamdata.stream[k] = (uint8_t)((ek >> ((k & 1) << 3)) & 0xff); - } -#else - memcpy(streamdata.stream, encoded, 10); -#endif - - err = WebRtcIsac_EstimateBandwidth(&instISAC->bwestimator_obj, &streamdata, - packet_size, rtp_seq_number, send_ts, - arr_ts, instISAC->encoderSamplingRateKHz, - instISAC->decoderSamplingRateKHz); - if (err < 0) { - /* Return error code if something went wrong. */ - instISAC->errorCode = -err; - return -1; - } - return 0; -} - -static int Decode(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t lenEncodedBytes, - int16_t* decoded, - int16_t* speechType, - int16_t isRCUPayload) { - /* Number of samples (480 or 960), output from decoder - that were actually used in the encoder/decoder - (determined on the fly). 
*/ - int16_t numSamplesLB; - int16_t numSamplesUB; - int16_t speechIdx; - float outFrame[MAX_FRAMESAMPLES]; - int16_t outFrameLB[MAX_FRAMESAMPLES]; - int16_t outFrameUB[MAX_FRAMESAMPLES]; - int numDecodedBytesLBint; - size_t numDecodedBytesLB; - int numDecodedBytesUB; - size_t lenEncodedLBBytes; - int16_t validChecksum = 1; - int16_t k; - uint16_t numLayer; - size_t totSizeBytes; - int16_t err; - - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - ISACUBDecStruct* decInstUB = &(instISAC->instUB.ISACdecUB_obj); - ISACLBDecStruct* decInstLB = &(instISAC->instLB.ISACdecLB_obj); - - /* Check if decoder initiated. */ - if ((instISAC->initFlag & BIT_MASK_DEC_INIT) != - BIT_MASK_DEC_INIT) { - instISAC->errorCode = ISAC_DECODER_NOT_INITIATED; - return -1; - } - - if (lenEncodedBytes == 0) { - /* return error code if the packet length is null. */ - instISAC->errorCode = ISAC_EMPTY_PACKET; - return -1; - } - - /* The size of the encoded lower-band is bounded by - * STREAM_SIZE_MAX. If a payload with the size larger than STREAM_SIZE_MAX - * is received, it is not considered erroneous. */ - lenEncodedLBBytes = (lenEncodedBytes > STREAM_SIZE_MAX) ? - STREAM_SIZE_MAX : lenEncodedBytes; - - /* Copy to lower-band bit-stream structure. */ - memcpy(instISAC->instLB.ISACdecLB_obj.bitstr_obj.stream, encoded, - lenEncodedLBBytes); - - /* We need to initialize numSamplesLB to something; otherwise, in the test - for whether we should return -1 below, the compiler might generate code - that fools Memcheck (Valgrind) into thinking that the control flow depends - on the uninitialized value in numSamplesLB (since WebRtcIsac_DecodeLb will - not fill it in if it fails and returns -1). */ - numSamplesLB = 0; - - /* Regardless of that the current codec is setup to work in - * wideband or super-wideband, the decoding of the lower-band - * has to be performed. 
*/ - numDecodedBytesLBint = WebRtcIsac_DecodeLb(&instISAC->transform_tables, - outFrame, decInstLB, - &numSamplesLB, isRCUPayload); - numDecodedBytesLB = (size_t)numDecodedBytesLBint; - if ((numDecodedBytesLBint < 0) || - (numDecodedBytesLB > lenEncodedLBBytes) || - (numSamplesLB > MAX_FRAMESAMPLES)) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* Error Check, we accept multi-layer bit-stream This will limit number - * of iterations of the while loop. Even without this the number - * of iterations is limited. */ - numLayer = 1; - totSizeBytes = numDecodedBytesLB; - while (totSizeBytes != lenEncodedBytes) { - if ((totSizeBytes > lenEncodedBytes) || - (encoded[totSizeBytes] == 0) || - (numLayer > MAX_NUM_LAYERS)) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - totSizeBytes += encoded[totSizeBytes]; - numLayer++; - } - - if (instISAC->decoderSamplingRateKHz == kIsacWideband) { - for (k = 0; k < numSamplesLB; k++) { - if (outFrame[k] > 32767) { - decoded[k] = 32767; - } else if (outFrame[k] < -32768) { - decoded[k] = -32768; - } else { - decoded[k] = (int16_t)WebRtcIsac_lrint(outFrame[k]); - } - } - numSamplesUB = 0; - } else { - uint32_t crc; - /* We don't accept larger than 30ms (480 samples at lower-band) - * frame-size. */ - for (k = 0; k < numSamplesLB; k++) { - if (outFrame[k] > 32767) { - outFrameLB[k] = 32767; - } else if (outFrame[k] < -32768) { - outFrameLB[k] = -32768; - } else { - outFrameLB[k] = (int16_t)WebRtcIsac_lrint(outFrame[k]); - } - } - - /* Check for possible error, and if upper-band stream exists. */ - if (numDecodedBytesLB == lenEncodedBytes) { - /* Decoding was successful. No super-wideband bit-stream exists. */ - numSamplesUB = numSamplesLB; - memset(outFrameUB, 0, sizeof(int16_t) * numSamplesUB); - - /* Prepare for the potential increase of signal bandwidth. */ - instISAC->resetFlag_8kHz = 2; - } else { - /* This includes the checksum and the bytes that stores the length. 
*/ - int16_t lenNextStream = encoded[numDecodedBytesLB]; - - /* Is this garbage or valid super-wideband bit-stream? - * Check if checksum is valid. */ - if (lenNextStream <= (LEN_CHECK_SUM_WORD8 + 1)) { - /* Such a small second layer cannot be super-wideband layer. - * It must be a short garbage. */ - validChecksum = 0; - } else { - /* Run CRC to see if the checksum match. */ - WebRtcIsac_GetCrc((int16_t*)(&encoded[numDecodedBytesLB + 1]), - lenNextStream - LEN_CHECK_SUM_WORD8 - 1, &crc); - - validChecksum = 1; - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - validChecksum &= (((crc >> (24 - k * 8)) & 0xFF) == - encoded[numDecodedBytesLB + lenNextStream - - LEN_CHECK_SUM_WORD8 + k]); - } - } - - if (!validChecksum) { - /* This is a garbage, we have received a wideband - * bit-stream with garbage. */ - numSamplesUB = numSamplesLB; - memset(outFrameUB, 0, sizeof(int16_t) * numSamplesUB); - } else { - /* A valid super-wideband biststream exists. */ - enum ISACBandwidth bandwidthKHz; - int32_t maxDelayBit; - - /* If we have super-wideband bit-stream, we cannot - * have 60 ms frame-size. */ - if (numSamplesLB > FRAMESAMPLES) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* The rest of the bit-stream contains the upper-band - * bit-stream curently this is the only thing there, - * however, we might add more layers. */ - - /* Have to exclude one byte where the length is stored - * and last 'LEN_CHECK_SUM_WORD8' bytes where the - * checksum is stored. */ - lenNextStream -= (LEN_CHECK_SUM_WORD8 + 1); - - memcpy(decInstUB->bitstr_obj.stream, - &encoded[numDecodedBytesLB + 1], lenNextStream); - - /* Reset bit-stream object, this is the first decoding. */ - WebRtcIsac_ResetBitstream(&(decInstUB->bitstr_obj)); - - /* Decode jitter information. 
*/ - err = WebRtcIsac_DecodeJitterInfo(&decInstUB->bitstr_obj, &maxDelayBit); - if (err < 0) { - instISAC->errorCode = -err; - return -1; - } - - /* Update jitter info which is in the upper-band bit-stream - * only if the encoder is in super-wideband. Otherwise, - * the jitter info is already embedded in bandwidth index - * and has been updated. */ - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - err = WebRtcIsac_UpdateUplinkJitter( - &(instISAC->bwestimator_obj), maxDelayBit); - if (err < 0) { - instISAC->errorCode = -err; - return -1; - } - } - - /* Decode bandwidth information. */ - err = WebRtcIsac_DecodeBandwidth(&decInstUB->bitstr_obj, - &bandwidthKHz); - if (err < 0) { - instISAC->errorCode = -err; - return -1; - } - - switch (bandwidthKHz) { - case isac12kHz: { - numDecodedBytesUB = WebRtcIsac_DecodeUb12( - &instISAC->transform_tables, outFrame, decInstUB, isRCUPayload); - - /* Hang-over for transient alleviation - - * wait two frames to add the upper band going up from 8 kHz. */ - if (instISAC->resetFlag_8kHz > 0) { - if (instISAC->resetFlag_8kHz == 2) { - /* Silence first and a half frame. */ - memset(outFrame, 0, MAX_FRAMESAMPLES * - sizeof(float)); - } else { - const float rampStep = 2.0f / MAX_FRAMESAMPLES; - float rampVal = 0; - memset(outFrame, 0, (MAX_FRAMESAMPLES >> 1) * - sizeof(float)); - - /* Ramp up second half of second frame. */ - for (k = MAX_FRAMESAMPLES / 2; k < MAX_FRAMESAMPLES; k++) { - outFrame[k] *= rampVal; - rampVal += rampStep; - } - } - instISAC->resetFlag_8kHz -= 1; - } - - break; - } - case isac16kHz: { - numDecodedBytesUB = WebRtcIsac_DecodeUb16( - &instISAC->transform_tables, outFrame, decInstUB, isRCUPayload); - break; - } - default: - return -1; - } - - if (numDecodedBytesUB < 0) { - instISAC->errorCode = numDecodedBytesUB; - return -1; - } - if (numDecodedBytesLB + numDecodedBytesUB > lenEncodedBytes) { - // We have supposedly decoded more bytes than we were given. Likely - // caused by bad input data. 
- instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* It might be less due to garbage. */ - if ((numDecodedBytesUB != lenNextStream) && - (numDecodedBytesLB + 1 + numDecodedBytesUB >= lenEncodedBytes || - numDecodedBytesUB != - (lenNextStream - - encoded[numDecodedBytesLB + 1 + numDecodedBytesUB]))) { - instISAC->errorCode = ISAC_LENGTH_MISMATCH; - return -1; - } - - /* If there is no error Upper-band always decodes - * 30 ms (480 samples). */ - numSamplesUB = FRAMESAMPLES; - - /* Convert to W16. */ - for (k = 0; k < numSamplesUB; k++) { - if (outFrame[k] > 32767) { - outFrameUB[k] = 32767; - } else if (outFrame[k] < -32768) { - outFrameUB[k] = -32768; - } else { - outFrameUB[k] = (int16_t)WebRtcIsac_lrint( - outFrame[k]); - } - } - } - } - - speechIdx = 0; - while (speechIdx < numSamplesLB) { - WebRtcSpl_SynthesisQMF(&outFrameLB[speechIdx], &outFrameUB[speechIdx], - FRAMESAMPLES_10ms, &decoded[(speechIdx << 1)], - instISAC->synthesisFBState1, - instISAC->synthesisFBState2); - - speechIdx += FRAMESAMPLES_10ms; - } - } - *speechType = 0; - return (numSamplesLB + numSamplesUB); -} - - - - - - - -/**************************************************************************** - * WebRtcIsac_Decode(...) - * - * This function decodes a ISAC frame. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the frameSize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - encoded : encoded ISAC frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - -int WebRtcIsac_Decode(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t lenEncodedBytes, - int16_t* decoded, - int16_t* speechType) { - int16_t isRCUPayload = 0; - return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded, - speechType, isRCUPayload); -} - -/**************************************************************************** - * WebRtcIsac_DecodeRcu(...) - * - * This function decodes a redundant (RCU) iSAC frame. Function is called in - * NetEq with a stored RCU payload in case of packet loss. Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the framesize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - encoded : encoded ISAC RCU frame(s) - * - len : bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - * Return value : >0 - number of samples in decoded vector - * -1 - Error - */ - - - -int WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - size_t lenEncodedBytes, - int16_t* decoded, - int16_t* speechType) { - int16_t isRCUPayload = 1; - return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded, - speechType, isRCUPayload); -} - - -/**************************************************************************** - * WebRtcIsac_DecodePlc(...) - * - * This function conducts PLC for ISAC frame(s). Output speech length - * will be a multiple of 480 samples: 480 or 960 samples, - * depending on the frameSize (30 or 60 ms). - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - noOfLostFrames : Number of PLC frames to produce - * - * Output: - * - decoded : The decoded vector - * - * Return value : Number of samples in decoded PLC vector - */ -size_t WebRtcIsac_DecodePlc(ISACStruct* ISAC_main_inst, - int16_t* decoded, - size_t noOfLostFrames) { - size_t numSamples = 0; - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - /* Limit number of frames to two = 60 millisecond. - * Otherwise we exceed data vectors. */ - if (noOfLostFrames > 2) { - noOfLostFrames = 2; - } - - /* Get the number of samples per frame */ - switch (instISAC->decoderSamplingRateKHz) { - case kIsacWideband: { - numSamples = 480 * noOfLostFrames; - break; - } - case kIsacSuperWideband: { - numSamples = 960 * noOfLostFrames; - break; - } - } - - /* Set output samples to zero. */ - memset(decoded, 0, numSamples * sizeof(int16_t)); - return numSamples; -} - - -/**************************************************************************** - * ControlLb(...) - Internal function for controlling Lower Band - * ControlUb(...) - Internal function for controlling Upper Band - * WebRtcIsac_Control(...) - API function - * - * This function sets the limit on the short-term average bit rate and the - * frame length. Should be used only in Instantaneous mode. - * - * Input: - * - ISAC_main_inst : ISAC instance. 
- * - rate : limit on the short-term average bit rate, - * in bits/second (between 10000 and 32000) - * - frameSize : number of milliseconds per frame (30 or 60) - * - * Return value : 0 - ok - * -1 - Error - */ -static int16_t ControlLb(ISACLBStruct* instISAC, double rate, - int16_t frameSize) { - if ((rate >= 10000) && (rate <= 32000)) { - instISAC->ISACencLB_obj.bottleneck = rate; - } else { - return -ISAC_DISALLOWED_BOTTLENECK; - } - - if ((frameSize == 30) || (frameSize == 60)) { - instISAC->ISACencLB_obj.new_framelength = (FS / 1000) * frameSize; - } else { - return -ISAC_DISALLOWED_FRAME_LENGTH; - } - - return 0; -} - -static int16_t ControlUb(ISACUBStruct* instISAC, double rate) { - if ((rate >= 10000) && (rate <= 32000)) { - instISAC->ISACencUB_obj.bottleneck = rate; - } else { - return -ISAC_DISALLOWED_BOTTLENECK; - } - return 0; -} - -int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst, - int32_t bottleneckBPS, - int frameSize) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t status; - double rateLB; - double rateUB; - enum ISACBandwidth bandwidthKHz; - - if (instISAC->codingMode == 0) { - /* In adaptive mode. */ - instISAC->errorCode = ISAC_MODE_MISMATCH; - return -1; - } - - /* Check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - /* If the sampling rate is 16kHz then bandwith should be 8kHz, - * regardless of bottleneck. */ - bandwidthKHz = isac8kHz; - rateLB = (bottleneckBPS > 32000) ? 32000 : bottleneckBPS; - rateUB = 0; - } else { - if (WebRtcIsac_RateAllocation(bottleneckBPS, &rateLB, &rateUB, - &bandwidthKHz) < 0) { - return -1; - } - } - - if ((instISAC->encoderSamplingRateKHz == kIsacSuperWideband) && - (frameSize != 30) && - (bandwidthKHz != isac8kHz)) { - /* Cannot have 60 ms in super-wideband. 
*/ - instISAC->errorCode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - - status = ControlLb(&instISAC->instLB, rateLB, (int16_t)frameSize); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - if (bandwidthKHz != isac8kHz) { - status = ControlUb(&(instISAC->instUB), rateUB); - if (status < 0) { - instISAC->errorCode = -status; - return -1; - } - } - - - /* Check if bandwidth is changing from wideband to super-wideband - * then we have to synch data buffer of lower & upper-band. Also - * clean up the upper-band data buffer. */ - - if ((instISAC->bandwidthKHz == isac8kHz) && (bandwidthKHz != isac8kHz)) { - memset(instISAC->instUB.ISACencUB_obj.data_buffer_float, 0, - sizeof(float) * (MAX_FRAMESAMPLES + LB_TOTAL_DELAY_SAMPLES)); - - if (bandwidthKHz == isac12kHz) { - instISAC->instUB.ISACencUB_obj.buffer_index = - instISAC->instLB.ISACencLB_obj.buffer_index; - } else { - instISAC->instUB.ISACencUB_obj.buffer_index = - LB_TOTAL_DELAY_SAMPLES + instISAC->instLB.ISACencLB_obj.buffer_index; - - memcpy(&(instISAC->instUB.ISACencUB_obj.lastLPCVec), - WebRtcIsac_kMeanLarUb16, sizeof(double) * UB_LPC_ORDER); - } - } - - /* Update the payload limit if the bandwidth is changing. */ - if (instISAC->bandwidthKHz != bandwidthKHz) { - instISAC->bandwidthKHz = bandwidthKHz; - UpdatePayloadSizeLimit(instISAC); - } - instISAC->bottleneck = bottleneckBPS; - return 0; -} - -void WebRtcIsac_SetInitialBweBottleneck(ISACStruct* ISAC_main_inst, - int bottleneck_bits_per_second) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - RTC_DCHECK_GE(bottleneck_bits_per_second, 10000); - RTC_DCHECK_LE(bottleneck_bits_per_second, 32000); - instISAC->bwestimator_obj.send_bw_avg = (float)bottleneck_bits_per_second; -} - -/**************************************************************************** - * WebRtcIsac_ControlBwe(...) - * - * This function sets the initial values of bottleneck and frame-size if - * iSAC is used in channel-adaptive mode. 
Through this API, users can - * enforce a frame-size for all values of bottleneck. Then iSAC will not - * automatically change the frame-size. - * - * - * Input: - * - ISAC_main_inst : ISAC instance. - * - rateBPS : initial value of bottleneck in bits/second - * 10000 <= rateBPS <= 32000 is accepted - * For default bottleneck set rateBPS = 0 - * - frameSizeMs : number of milliseconds per frame (30 or 60) - * - enforceFrameSize : 1 to enforce the given frame-size through out - * the adaptation process, 0 to let iSAC change - * the frame-size if required. - * - * Return value : 0 - ok - * -1 - Error - */ -int16_t WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst, - int32_t bottleneckBPS, - int frameSizeMs, - int16_t enforceFrameSize) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - enum ISACBandwidth bandwidth; - - /* Check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - /* Check that we are in channel-adaptive mode, otherwise, return (-1) */ - if (instISAC->codingMode != 0) { - instISAC->errorCode = ISAC_MODE_MISMATCH; - return -1; - } - if ((frameSizeMs != 30) && - (instISAC->encoderSamplingRateKHz == kIsacSuperWideband)) { - return -1; - } - - /* Set structure variable if enforceFrameSize is set. ISAC will then - * keep the chosen frame size. */ - if (enforceFrameSize != 0) { - instISAC->instLB.ISACencLB_obj.enforceFrameSize = 1; - } else { - instISAC->instLB.ISACencLB_obj.enforceFrameSize = 0; - } - - /* Set the initial rate. If the input value is zero then the default intial - * rate is used. Otehrwise, values between 10 to 32 kbps are accepted. 
*/ - if (bottleneckBPS != 0) { - double rateLB; - double rateUB; - if (WebRtcIsac_RateAllocation(bottleneckBPS, &rateLB, &rateUB, - &bandwidth) < 0) { - return -1; - } - instISAC->bwestimator_obj.send_bw_avg = (float)bottleneckBPS; - instISAC->bandwidthKHz = bandwidth; - } - - /* Set the initial frame-size. If 'enforceFrameSize' is set, the frame-size - * will not change */ - if (frameSizeMs != 0) { - if ((frameSizeMs == 30) || (frameSizeMs == 60)) { - instISAC->instLB.ISACencLB_obj.new_framelength = - (int16_t)((FS / 1000) * frameSizeMs); - } else { - instISAC->errorCode = ISAC_DISALLOWED_FRAME_LENGTH; - return -1; - } - } - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_GetDownLinkBwIndex(...) - * - * This function returns index representing the Bandwidth estimate from - * the other side to this side. - * - * Input: - * - ISAC_main_inst : iSAC structure - * - * Output: - * - bweIndex : Bandwidth estimate to transmit to other side. - * - */ -int16_t WebRtcIsac_GetDownLinkBwIndex(ISACStruct* ISAC_main_inst, - int16_t* bweIndex, - int16_t* jitterInfo) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - /* Check if encoder initialized. */ - if ((instISAC->initFlag & BIT_MASK_DEC_INIT) != - BIT_MASK_DEC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - /* Call function to get Bandwidth Estimate. */ - WebRtcIsac_GetDownlinkBwJitIndexImpl(&(instISAC->bwestimator_obj), bweIndex, - jitterInfo, - instISAC->decoderSamplingRateKHz); - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_UpdateUplinkBw(...) - * - * This function takes an index representing the Bandwidth estimate from - * this side to other side and updates BWE. - * - * Input: - * - ISAC_main_inst : iSAC structure - * - rateIndex : Bandwidth estimate from other side. 
- * - * Return value : 0 - ok - * -1 - index out of range - */ -int16_t WebRtcIsac_UpdateUplinkBw(ISACStruct* ISAC_main_inst, - int16_t bweIndex) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t returnVal; - - /* Check if encoder initiated. */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - /* Call function to get Bandwidth Estimate. */ - returnVal = WebRtcIsac_UpdateUplinkBwImpl( - &(instISAC->bwestimator_obj), bweIndex, - instISAC->encoderSamplingRateKHz); - - if (returnVal < 0) { - instISAC->errorCode = -returnVal; - return -1; - } else { - return 0; - } -} - - -/**************************************************************************** - * WebRtcIsac_ReadBwIndex(...) - * - * This function returns the index of the Bandwidth estimate from the - * bit-stream. - * - * Input: - * - encoded : Encoded bit-stream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - bweIndex : Bandwidth estimate in bit-stream - * - */ -int16_t WebRtcIsac_ReadBwIndex(const uint8_t* encoded, - int16_t* bweIndex) { - Bitstr streamdata; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - int16_t err; - - WebRtcIsac_ResetBitstream(&(streamdata)); - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < 10; k++) { - int16_t ek2 = ((const int16_t*)encoded)[k >> 1]; - streamdata.stream[k] = (uint8_t)((ek2 >> ((k & 1) << 3)) & 0xff); - } -#else - memcpy(streamdata.stream, encoded, 10); -#endif - - /* Decode frame length. */ - err = WebRtcIsac_DecodeFrameLen(&streamdata, bweIndex); - if (err < 0) { - return err; - } - - /* Decode BW estimation. */ - err = WebRtcIsac_DecodeSendBW(&streamdata, bweIndex); - if (err < 0) { - return err; - } - - return 0; -} - - -/**************************************************************************** - * WebRtcIsac_ReadFrameLen(...) 
- * - * This function returns the number of samples the decoder will generate if - * the given payload is decoded. - * - * Input: - * - encoded : Encoded bitstream - * - * Output: - * - frameLength : Length of frame in packet (in samples) - * - */ -int16_t WebRtcIsac_ReadFrameLen(const ISACStruct* ISAC_main_inst, - const uint8_t* encoded, - int16_t* frameLength) { - Bitstr streamdata; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - int16_t err; - ISACMainStruct* instISAC; - - WebRtcIsac_ResetBitstream(&(streamdata)); - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < 10; k++) { - int16_t ek2 = ((const int16_t*)encoded)[k >> 1]; - streamdata.stream[k] = (uint8_t)((ek2 >> ((k & 1) << 3)) & 0xff); - } -#else - memcpy(streamdata.stream, encoded, 10); -#endif - - /* Decode frame length. */ - err = WebRtcIsac_DecodeFrameLen(&streamdata, frameLength); - if (err < 0) { - return -1; - } - instISAC = (ISACMainStruct*)ISAC_main_inst; - - if (instISAC->decoderSamplingRateKHz == kIsacSuperWideband) { - /* The decoded frame length indicates the number of samples in - * lower-band in this case, multiply by 2 to get the total number - * of samples. */ - *frameLength <<= 1; - } - return 0; -} - - -/******************************************************************************* - * WebRtcIsac_GetNewFrameLen(...) - * - * This function returns the frame length (in samples) of the next packet. - * In the case of channel-adaptive mode, iSAC decides on its frame length based - * on the estimated bottleneck, this AOI allows a user to prepare for the next - * packet (at the encoder). - * - * The primary usage is in CE to make the iSAC works in channel-adaptive mode - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Return Value : frame lenght in samples - * - */ -int16_t WebRtcIsac_GetNewFrameLen(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - /* Return new frame length. 
*/ - if (instISAC->in_sample_rate_hz == 16000) - return (instISAC->instLB.ISACencLB_obj.new_framelength); - else /* 32000 Hz */ - return ((instISAC->instLB.ISACencLB_obj.new_framelength) * 2); -} - - -/**************************************************************************** - * WebRtcIsac_GetErrorCode(...) - * - * This function can be used to check the error code of an iSAC instance. - * When a function returns -1 an error code will be set for that instance. - * The function below extracts the code of the last error that occurred in - * the specified instance. - * - * Input: - * - ISAC_main_inst : ISAC instance - * - * Return value : Error code - */ -int16_t WebRtcIsac_GetErrorCode(ISACStruct* ISAC_main_inst) { - return ((ISACMainStruct*)ISAC_main_inst)->errorCode; -} - - -/**************************************************************************** - * WebRtcIsac_GetUplinkBw(...) - * - * This function outputs the target bottleneck of the codec. In - * channel-adaptive mode, the target bottleneck is specified through an in-band - * signalling retrieved by bandwidth estimator. - * In channel-independent, also called instantaneous mode, the target - * bottleneck is provided to the encoder by calling xxx_control(...) (if - * xxx_control is never called, the default values are used.). - * Note that the output is the iSAC internal operating bottleneck which might - * differ slightly from the one provided through xxx_control(). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Output: - * - *bottleneck : bottleneck in bits/sec - * - * Return value : -1 if error happens - * 0 bit-rates computed correctly. - */ -int16_t WebRtcIsac_GetUplinkBw(ISACStruct* ISAC_main_inst, - int32_t* bottleneck) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - - if (instISAC->codingMode == 0) { - /* We are in adaptive mode then get the bottleneck from BWE. 
*/ - *bottleneck = (int32_t)instISAC->bwestimator_obj.send_bw_avg; - } else { - *bottleneck = instISAC->bottleneck; - } - - if ((*bottleneck > 32000) && (*bottleneck < 38000)) { - *bottleneck = 32000; - } else if ((*bottleneck > 45000) && (*bottleneck < 50000)) { - *bottleneck = 45000; - } else if (*bottleneck > 56000) { - *bottleneck = 56000; - } - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_SetMaxPayloadSize(...) - * - * This function sets a limit for the maximum payload size of iSAC. The same - * value is used both for 30 and 60 ms packets. If the encoder sampling rate - * is 16 kHz the maximum payload size is between 120 and 400 bytes. If the - * encoder sampling rate is 32 kHz the maximum payload size is between 120 - * and 600 bytes. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, i.e. min(170, 300). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxPayloadBytes : maximum size of the payload in bytes - * valid values are between 100 and 400 bytes - * if encoder sampling rate is 16 kHz. For - * 32 kHz encoder sampling rate valid values - * are between 100 and 600 bytes. 
- * - * Return value : 0 if successful - * -1 if error happens - */ -int16_t WebRtcIsac_SetMaxPayloadSize(ISACStruct* ISAC_main_inst, - int16_t maxPayloadBytes) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t status = 0; - - /* Check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - - if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) { - /* Sanity check. */ - if (maxPayloadBytes < 120) { - /* 'maxRate' is out of valid range - * set to the acceptable value and return -1. */ - maxPayloadBytes = 120; - status = -1; - } - - /* sanity check */ - if (maxPayloadBytes > STREAM_SIZE_MAX) { - /* maxRate is out of valid range, - * set to the acceptable value and return -1. */ - maxPayloadBytes = STREAM_SIZE_MAX; - status = -1; - } - } else { - if (maxPayloadBytes < 120) { - /* Max payload-size is out of valid range - * set to the acceptable value and return -1. */ - maxPayloadBytes = 120; - status = -1; - } - if (maxPayloadBytes > STREAM_SIZE_MAX_60) { - /* Max payload-size is out of valid range - * set to the acceptable value and return -1. */ - maxPayloadBytes = STREAM_SIZE_MAX_60; - status = -1; - } - } - instISAC->maxPayloadSizeBytes = maxPayloadBytes; - UpdatePayloadSizeLimit(instISAC); - return status; -} - - -/****************************************************************************** - * WebRtcIsac_SetMaxRate(...) - * - * This function sets the maximum rate which the codec may not exceed for - * any signal packet. The maximum rate is defined and payload-size per - * frame-size in bits per second. - * - * The codec has a maximum rate of 53400 bits per second (200 bytes per 30 - * ms) if the encoder sampling rate is 16kHz, and 160 kbps (600 bytes/30 ms) - * if the encoder sampling rate is 32 kHz. 
- * - * It is possible to set a maximum rate between 32000 and 53400 bits/sec - * in wideband mode, and 32000 to 160000 bits/sec in super-wideband mode. - * - * --------------- - * IMPORTANT NOTES - * --------------- - * The size of a packet is limited to the minimum of 'max-payload-size' and - * 'max-rate.' For instance, let's assume the max-payload-size is set to - * 170 bytes, and max-rate is set to 40 kbps. Note that a limit of 40 kbps - * translates to 150 bytes for 30ms frame-size & 300 bytes for 60ms - * frame-size. Then a packet with a frame-size of 30 ms is limited to 150, - * i.e. min(170, 150), and a packet with 60 ms frame-size is limited to - * 170 bytes, min(170, 300). - * - * Input: - * - ISAC_main_inst : iSAC instance - * - maxRate : maximum rate in bits per second, - * valid values are 32000 to 53400 bits/sec in - * wideband mode, and 32000 to 160000 bits/sec in - * super-wideband mode. - * - * Return value : 0 if successful - * -1 if error happens - */ -int16_t WebRtcIsac_SetMaxRate(ISACStruct* ISAC_main_inst, - int32_t maxRate) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - int16_t maxRateInBytesPer30Ms; - int16_t status = 0; - - /* check if encoder initiated */ - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - return -1; - } - /* Calculate maximum number of bytes per 30 msec packets for the - given maximum rate. Multiply with 30/1000 to get number of - bits per 30 ms, divide by 8 to get number of bytes per 30 ms: - maxRateInBytes = floor((maxRate * 30/1000) / 8); */ - maxRateInBytesPer30Ms = (int16_t)(maxRate * 3 / 800); - - if (instISAC->encoderSamplingRateKHz == kIsacWideband) { - if (maxRate < 32000) { - /* 'maxRate' is out of valid range. - * Set to the acceptable value and return -1. */ - maxRateInBytesPer30Ms = 120; - status = -1; - } - - if (maxRate > 53400) { - /* 'maxRate' is out of valid range. - * Set to the acceptable value and return -1. 
*/ - maxRateInBytesPer30Ms = 200; - status = -1; - } - } else { - if (maxRateInBytesPer30Ms < 120) { - /* 'maxRate' is out of valid range - * Set to the acceptable value and return -1. */ - maxRateInBytesPer30Ms = 120; - status = -1; - } - - if (maxRateInBytesPer30Ms > STREAM_SIZE_MAX) { - /* 'maxRate' is out of valid range. - * Set to the acceptable value and return -1. */ - maxRateInBytesPer30Ms = STREAM_SIZE_MAX; - status = -1; - } - } - instISAC->maxRateBytesPer30Ms = maxRateInBytesPer30Ms; - UpdatePayloadSizeLimit(instISAC); - return status; -} - - -/**************************************************************************** - * WebRtcIsac_GetRedPayload(...) - * - * This function populates "encoded" with the redundant payload of the recently - * encodedframe. This function has to be called once that WebRtcIsac_Encode(...) - * returns a positive value. Regardless of the frame-size this function will - * be called only once after encoding is completed. The bit-stream is - * targeted for 16000 bit/sec. - * - * Input: - * - ISAC_main_inst : iSAC struct - * - * Output: - * - encoded : the encoded data vector - * - * - * Return value : >0 - Length (in bytes) of coded data - * : -1 - Error - */ -int16_t WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst, - uint8_t* encoded) { - Bitstr iSACBitStreamInst; - int16_t streamLenLB; - int16_t streamLenUB; - int16_t streamLen; - int16_t totalLenUB; - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - int k; -#endif - - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - instISAC->errorCode = ISAC_ENCODER_NOT_INITIATED; - } - - WebRtcIsac_ResetBitstream(&(iSACBitStreamInst)); - - streamLenLB = WebRtcIsac_EncodeStoredDataLb( - &instISAC->instLB.ISACencLB_obj.SaveEnc_obj, - &iSACBitStreamInst, - instISAC->instLB.ISACencLB_obj.lastBWIdx, - RCU_TRANSCODING_SCALE); - if (streamLenLB < 0) { - return -1; - } - - /* convert from bytes to int16_t. 
*/ - memcpy(encoded, iSACBitStreamInst.stream, streamLenLB); - streamLen = streamLenLB; - if (instISAC->bandwidthKHz == isac8kHz) { - return streamLenLB; - } - - streamLenUB = WebRtcIsac_GetRedPayloadUb( - &instISAC->instUB.ISACencUB_obj.SaveEnc_obj, - &iSACBitStreamInst, instISAC->bandwidthKHz); - if (streamLenUB < 0) { - /* An error has happened but this is not the error due to a - * bit-stream larger than the limit. */ - return -1; - } - - /* We have one byte to write the total length of the upper-band. - * The length includes the bit-stream length, check-sum and the - * single byte where the length is written to. This is according to - * iSAC wideband and how the "garbage" is dealt. */ - totalLenUB = streamLenUB + 1 + LEN_CHECK_SUM_WORD8; - if (totalLenUB > 255) { - streamLenUB = 0; - } - - /* Generate CRC if required. */ - if ((instISAC->bandwidthKHz != isac8kHz) && - (streamLenUB > 0)) { - uint32_t crc; - streamLen += totalLenUB; - encoded[streamLenLB] = (uint8_t)totalLenUB; - memcpy(&encoded[streamLenLB + 1], iSACBitStreamInst.stream, - streamLenUB); - - WebRtcIsac_GetCrc((int16_t*)(&(encoded[streamLenLB + 1])), - streamLenUB, &crc); -#ifndef WEBRTC_ARCH_BIG_ENDIAN - for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) { - encoded[streamLen - LEN_CHECK_SUM_WORD8 + k] = - (uint8_t)((crc >> (24 - k * 8)) & 0xFF); - } -#else - memcpy(&encoded[streamLenLB + streamLenUB + 1], &crc, - LEN_CHECK_SUM_WORD8); -#endif - } - return streamLen; -} - - -/**************************************************************************** - * WebRtcIsac_version(...) - * - * This function returns the version number. - * - * Output: - * - version : Pointer to character string - * - */ -void WebRtcIsac_version(char* version) { - strcpy(version, "4.3.0"); -} - - -/****************************************************************************** - * WebRtcIsac_SetEncSampRate() - * This function sets the sampling rate of the encoder. 
Initialization of the - * encoder WILL NOT overwrite the sampling rate of the encoder. The default - * value is 16 kHz which is set when the instance is created. The encoding-mode - * and the bottleneck remain unchanged by this call, however, the maximum rate - * and maximum payload-size will be reset to their default values. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sample_rate_hz : sampling rate in Hertz, valid values are 16000 - * and 32000. - * - * Return value : 0 if successful - * -1 if failed. - */ -int16_t WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst, - uint16_t sample_rate_hz) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - enum IsacSamplingRate encoder_operational_rate; - - if ((sample_rate_hz != 16000) && (sample_rate_hz != 32000)) { - /* Sampling Frequency is not supported. */ - instISAC->errorCode = ISAC_UNSUPPORTED_SAMPLING_FREQUENCY; - return -1; - } - if (sample_rate_hz == 16000) { - encoder_operational_rate = kIsacWideband; - } else { - encoder_operational_rate = kIsacSuperWideband; - } - - if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != - BIT_MASK_ENC_INIT) { - if (encoder_operational_rate == kIsacWideband) { - instISAC->bandwidthKHz = isac8kHz; - } else { - instISAC->bandwidthKHz = isac16kHz; - } - } else { - ISACUBStruct* instUB = &(instISAC->instUB); - ISACLBStruct* instLB = &(instISAC->instLB); - int32_t bottleneck = instISAC->bottleneck; - int16_t codingMode = instISAC->codingMode; - int16_t frameSizeMs = instLB->ISACencLB_obj.new_framelength / - (FS / 1000); - - if ((encoder_operational_rate == kIsacWideband) && - (instISAC->encoderSamplingRateKHz == kIsacSuperWideband)) { - /* Changing from super-wideband to wideband. - * we don't need to re-initialize the encoder of the lower-band. */ - instISAC->bandwidthKHz = isac8kHz; - if (codingMode == 1) { - ControlLb(instLB, - (bottleneck > 32000) ? 
32000 : bottleneck, FRAMESIZE); - } - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX_60; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX_30; - } else if ((encoder_operational_rate == kIsacSuperWideband) && - (instISAC->encoderSamplingRateKHz == kIsacWideband)) { - double bottleneckLB = 0; - double bottleneckUB = 0; - if (codingMode == 1) { - WebRtcIsac_RateAllocation(bottleneck, &bottleneckLB, &bottleneckUB, - &(instISAC->bandwidthKHz)); - } - - instISAC->bandwidthKHz = isac16kHz; - instISAC->maxPayloadSizeBytes = STREAM_SIZE_MAX; - instISAC->maxRateBytesPer30Ms = STREAM_SIZE_MAX; - - EncoderInitLb(instLB, codingMode, encoder_operational_rate); - EncoderInitUb(instUB, instISAC->bandwidthKHz); - - memset(instISAC->analysisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->analysisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - - if (codingMode == 1) { - instISAC->bottleneck = bottleneck; - ControlLb(instLB, bottleneckLB, - (instISAC->bandwidthKHz == isac8kHz) ? frameSizeMs:FRAMESIZE); - if (instISAC->bandwidthKHz > isac8kHz) { - ControlUb(instUB, bottleneckUB); - } - } else { - instLB->ISACencLB_obj.enforceFrameSize = 0; - instLB->ISACencLB_obj.new_framelength = FRAMESAMPLES; - } - } - } - instISAC->encoderSamplingRateKHz = encoder_operational_rate; - instISAC->in_sample_rate_hz = sample_rate_hz; - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_SetDecSampRate() - * This function sets the sampling rate of the decoder. Initialization of the - * decoder WILL NOT overwrite the sampling rate of the encoder. The default - * value is 16 kHz which is set when the instance is created. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - sample_rate_hz : sampling rate in Hertz, valid values are 16000 - * and 32000. - * - * Return value : 0 if successful - * -1 if failed. 
- */ -int16_t WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst, - uint16_t sample_rate_hz) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - enum IsacSamplingRate decoder_operational_rate; - - if (sample_rate_hz == 16000) { - decoder_operational_rate = kIsacWideband; - } else if (sample_rate_hz == 32000) { - decoder_operational_rate = kIsacSuperWideband; - } else { - /* Sampling Frequency is not supported. */ - instISAC->errorCode = ISAC_UNSUPPORTED_SAMPLING_FREQUENCY; - return -1; - } - - if ((instISAC->decoderSamplingRateKHz == kIsacWideband) && - (decoder_operational_rate == kIsacSuperWideband)) { - /* Switching from wideband to super-wideband at the decoder - * we need to reset the filter-bank and initialize upper-band decoder. */ - memset(instISAC->synthesisFBState1, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - memset(instISAC->synthesisFBState2, 0, - FB_STATE_SIZE_WORD32 * sizeof(int32_t)); - - DecoderInitUb(&instISAC->instUB); - } - instISAC->decoderSamplingRateKHz = decoder_operational_rate; - return 0; -} - - -/****************************************************************************** - * WebRtcIsac_EncSampRate() - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling rate in Hertz. The input to encoder - * is expected to be sampled in this rate. - * - */ -uint16_t WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - return instISAC->in_sample_rate_hz; -} - - -/****************************************************************************** - * WebRtcIsac_DecSampRate() - * Return the sampling rate of the decoded audio. - * - * Input: - * - ISAC_main_inst : iSAC instance - * - * Return value : sampling rate in Hertz. Decoder output is - * sampled at this rate. 
- * - */ -uint16_t WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst) { - ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst; - return instISAC->decoderSamplingRateKHz == kIsacWideband ? 16000 : 32000; -} - -void WebRtcIsac_SetEncSampRateInDecoder(ISACStruct* inst, - int sample_rate_hz) { - ISACMainStruct* instISAC = (ISACMainStruct*)inst; - RTC_DCHECK_NE(0, instISAC->initFlag & BIT_MASK_DEC_INIT); - RTC_DCHECK(!(instISAC->initFlag & BIT_MASK_ENC_INIT)); - RTC_DCHECK(sample_rate_hz == 16000 || sample_rate_hz == 32000); - instISAC->encoderSamplingRateKHz = sample_rate_hz / 1000; -} diff --git a/modules/audio_coding/codecs/isac/main/source/isac_float_type.h b/modules/audio_coding/codecs/isac/main/source/isac_float_type.h deleted file mode 100644 index 511bc97ee6..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/isac_float_type.h +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ISAC_FLOAT_TYPE_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ISAC_FLOAT_TYPE_H_ - -#include "modules/audio_coding/codecs/isac/main/include/isac.h" - -namespace webrtc { - -struct IsacFloat { - using instance_type = ISACStruct; - static const bool has_swb = true; - static inline int16_t Control(instance_type* inst, - int32_t rate, - int framesize) { - return WebRtcIsac_Control(inst, rate, framesize); - } - static inline int16_t ControlBwe(instance_type* inst, - int32_t rate_bps, - int frame_size_ms, - int16_t enforce_frame_size) { - return WebRtcIsac_ControlBwe(inst, rate_bps, frame_size_ms, - enforce_frame_size); - } - static inline int16_t Create(instance_type** inst) { - return WebRtcIsac_Create(inst); - } - static inline int DecodeInternal(instance_type* inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speech_type) { - return WebRtcIsac_Decode(inst, encoded, len, decoded, speech_type); - } - static inline size_t DecodePlc(instance_type* inst, - int16_t* decoded, - size_t num_lost_frames) { - return WebRtcIsac_DecodePlc(inst, decoded, num_lost_frames); - } - - static inline void DecoderInit(instance_type* inst) { - WebRtcIsac_DecoderInit(inst); - } - static inline int Encode(instance_type* inst, - const int16_t* speech_in, - uint8_t* encoded) { - return WebRtcIsac_Encode(inst, speech_in, encoded); - } - static inline int16_t EncoderInit(instance_type* inst, int16_t coding_mode) { - return WebRtcIsac_EncoderInit(inst, coding_mode); - } - static inline uint16_t EncSampRate(instance_type* inst) { - return WebRtcIsac_EncSampRate(inst); - } - - static inline int16_t Free(instance_type* inst) { - return WebRtcIsac_Free(inst); - } - static inline int16_t GetErrorCode(instance_type* inst) { - return WebRtcIsac_GetErrorCode(inst); - } - - static inline int16_t GetNewFrameLen(instance_type* inst) { - return WebRtcIsac_GetNewFrameLen(inst); - } - static inline int16_t 
SetDecSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - return WebRtcIsac_SetDecSampRate(inst, sample_rate_hz); - } - static inline int16_t SetEncSampRate(instance_type* inst, - uint16_t sample_rate_hz) { - return WebRtcIsac_SetEncSampRate(inst, sample_rate_hz); - } - static inline void SetEncSampRateInDecoder(instance_type* inst, - uint16_t sample_rate_hz) { - WebRtcIsac_SetEncSampRateInDecoder(inst, sample_rate_hz); - } - static inline void SetInitialBweBottleneck(instance_type* inst, - int bottleneck_bits_per_second) { - WebRtcIsac_SetInitialBweBottleneck(inst, bottleneck_bits_per_second); - } - static inline int16_t SetMaxPayloadSize(instance_type* inst, - int16_t max_payload_size_bytes) { - return WebRtcIsac_SetMaxPayloadSize(inst, max_payload_size_bytes); - } - static inline int16_t SetMaxRate(instance_type* inst, int32_t max_bit_rate) { - return WebRtcIsac_SetMaxRate(inst, max_bit_rate); - } -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ISAC_FLOAT_TYPE_H_ diff --git a/modules/audio_coding/codecs/isac/main/source/isac_unittest.cc b/modules/audio_coding/codecs/isac/main/source/isac_unittest.cc deleted file mode 100644 index c98b21d86f..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/isac_unittest.cc +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/audio_coding/codecs/isac/main/include/isac.h" - -#include - -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -struct WebRtcISACStruct; - -namespace webrtc { - -// Number of samples in a 60 ms, sampled at 32 kHz. 
-const int kIsacNumberOfSamples = 320 * 6; -// Maximum number of bytes in output bitstream. -const size_t kMaxBytes = 1000; - -class IsacTest : public ::testing::Test { - protected: - IsacTest(); - virtual void SetUp(); - - WebRtcISACStruct* isac_codec_; - - int16_t speech_data_[kIsacNumberOfSamples]; - int16_t output_data_[kIsacNumberOfSamples]; - uint8_t bitstream_[kMaxBytes]; - uint8_t bitstream_small_[7]; // Simulate sync packets. -}; - -IsacTest::IsacTest() : isac_codec_(NULL) {} - -void IsacTest::SetUp() { - // Read some samples from a speech file, to be used in the encode test. - FILE* input_file; - const std::string file_name = - webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); - input_file = fopen(file_name.c_str(), "rb"); - ASSERT_TRUE(input_file != NULL); - ASSERT_EQ(kIsacNumberOfSamples, - static_cast(fread(speech_data_, sizeof(int16_t), - kIsacNumberOfSamples, input_file))); - fclose(input_file); - input_file = NULL; -} - -// Test failing Create. -TEST_F(IsacTest, IsacCreateFail) { - // Test to see that an invalid pointer is caught. - EXPECT_EQ(-1, WebRtcIsac_Create(NULL)); -} - -// Test failing Free. -TEST_F(IsacTest, IsacFreeFail) { - // Test to see that free function doesn't crash. - EXPECT_EQ(0, WebRtcIsac_Free(NULL)); -} - -// Test normal Create and Free. -TEST_F(IsacTest, IsacCreateFree) { - EXPECT_EQ(0, WebRtcIsac_Create(&isac_codec_)); - EXPECT_TRUE(isac_codec_ != NULL); - EXPECT_EQ(0, WebRtcIsac_Free(isac_codec_)); -} - -TEST_F(IsacTest, IsacUpdateBWE) { - // Create encoder memory. - EXPECT_EQ(0, WebRtcIsac_Create(&isac_codec_)); - - // Init encoder (adaptive mode) and decoder. - WebRtcIsac_EncoderInit(isac_codec_, 0); - WebRtcIsac_DecoderInit(isac_codec_); - - int encoded_bytes; - - // Test with call with a small packet (sync packet). - EXPECT_EQ(-1, WebRtcIsac_UpdateBwEstimate(isac_codec_, bitstream_small_, 7, 1, - 12345, 56789)); - - // Encode 60 ms of data (needed to create a first packet). 
- encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_EQ(0, encoded_bytes); - encoded_bytes = WebRtcIsac_Encode(isac_codec_, speech_data_, bitstream_); - EXPECT_GT(encoded_bytes, 0); - - // Call to update bandwidth estimator with real data. - EXPECT_EQ(0, WebRtcIsac_UpdateBwEstimate(isac_codec_, bitstream_, - static_cast(encoded_bytes), - 1, 12345, 56789)); - - // Free memory. - EXPECT_EQ(0, WebRtcIsac_Free(isac_codec_)); -} - -} // namespace webrtc diff --git a/modules/audio_coding/codecs/isac/main/source/lattice.c b/modules/audio_coding/codecs/isac/main/source/lattice.c deleted file mode 100644 index d9d2d65665..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lattice.c +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * lattice.c - * - * contains the normalized lattice filter routines (MA and AR) for iSAC codec - * - */ - -#include -#include -#include -#ifdef WEBRTC_ANDROID -#include -#endif - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" - -/* filter the signal using normalized lattice filter */ -/* MA filter */ -void WebRtcIsac_NormLatticeFilterMa(int orderCoef, - float *stateF, - float *stateG, - float *lat_in, - double *filtcoeflo, - double *lat_out) -{ - int n,k,i,u,temp1; - int ord_1 = orderCoef+1; - float sth[MAX_AR_MODEL_ORDER]; - float cth[MAX_AR_MODEL_ORDER]; - float inv_cth[MAX_AR_MODEL_ORDER]; - double a[MAX_AR_MODEL_ORDER+1]; - float f[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN], g[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN]; - float gain1; - - for (u=0;u=0;i--) //get the state of f&g for the first input, for all orders - { - ARf[i][0] = cth[i]*ARf[i+1][0] - sth[i]*stateG[i]; - ARg[i+1][0] = sth[i]*ARf[i+1][0] + cth[i]* stateG[i]; - } - ARg[0][0] = ARf[0][0]; - - for(n=0;n<(HALF_SUBFRAMELEN-1);n++) - { - for(k=orderCoef-1;k>=0;k--) - { - ARf[k][n+1] = cth[k]*ARf[k+1][n+1] - sth[k]*ARg[k][n]; - ARg[k+1][n+1] = sth[k]*ARf[k+1][n+1] + cth[k]* ARg[k][n]; - } - ARg[0][n+1] = ARf[0][n+1]; - } - - memcpy(lat_out+u * HALF_SUBFRAMELEN, &(ARf[0][0]), sizeof(float) * HALF_SUBFRAMELEN); - - /* cannot use memcpy in the following */ - for (i=0;i0; m--) - { - tmp_inv = 1.0f / cth2; - for (k=1; k<=m; k++) - { - tmp[k] = ((float)a[k] - sth[m] * (float)a[m-k+1]) * tmp_inv; - } - - for (k=1; k -#include - -#include "modules/audio_coding/codecs/isac/main/source/lpc_analysis.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/entropy_coding.h" -#include "modules/audio_coding/codecs/isac/main/source/filter_functions.h" -#include 
"modules/audio_coding/codecs/isac/main/source/isac_vad.h" - -/* window */ -/* Matlab generation code: - * t = (1:256)/257; r = 1-(1-t).^.45; w = sin(r*pi).^3; w = w/sum(w); plot((1:256)/8, w); grid; - * for k=1:16, fprintf(1, '%.8f, ', w(k*16 + (-15:0))); fprintf(1, '\n'); end - */ -static const double kLpcCorrWindow[WINLEN] = { - 0.00000000, 0.00000001, 0.00000004, 0.00000010, 0.00000020, - 0.00000035, 0.00000055, 0.00000083, 0.00000118, 0.00000163, - 0.00000218, 0.00000283, 0.00000361, 0.00000453, 0.00000558, 0.00000679, - 0.00000817, 0.00000973, 0.00001147, 0.00001342, 0.00001558, - 0.00001796, 0.00002058, 0.00002344, 0.00002657, 0.00002997, - 0.00003365, 0.00003762, 0.00004190, 0.00004651, 0.00005144, 0.00005673, - 0.00006236, 0.00006837, 0.00007476, 0.00008155, 0.00008875, - 0.00009636, 0.00010441, 0.00011290, 0.00012186, 0.00013128, - 0.00014119, 0.00015160, 0.00016252, 0.00017396, 0.00018594, 0.00019846, - 0.00021155, 0.00022521, 0.00023946, 0.00025432, 0.00026978, - 0.00028587, 0.00030260, 0.00031998, 0.00033802, 0.00035674, - 0.00037615, 0.00039626, 0.00041708, 0.00043863, 0.00046092, 0.00048396, - 0.00050775, 0.00053233, 0.00055768, 0.00058384, 0.00061080, - 0.00063858, 0.00066720, 0.00069665, 0.00072696, 0.00075813, - 0.00079017, 0.00082310, 0.00085692, 0.00089164, 0.00092728, 0.00096384, - 0.00100133, 0.00103976, 0.00107914, 0.00111947, 0.00116077, - 0.00120304, 0.00124630, 0.00129053, 0.00133577, 0.00138200, - 0.00142924, 0.00147749, 0.00152676, 0.00157705, 0.00162836, 0.00168070, - 0.00173408, 0.00178850, 0.00184395, 0.00190045, 0.00195799, - 0.00201658, 0.00207621, 0.00213688, 0.00219860, 0.00226137, - 0.00232518, 0.00239003, 0.00245591, 0.00252284, 0.00259079, 0.00265977, - 0.00272977, 0.00280078, 0.00287280, 0.00294582, 0.00301984, - 0.00309484, 0.00317081, 0.00324774, 0.00332563, 0.00340446, - 0.00348421, 0.00356488, 0.00364644, 0.00372889, 0.00381220, 0.00389636, - 0.00398135, 0.00406715, 0.00415374, 0.00424109, 0.00432920, - 0.00441802, 
0.00450754, 0.00459773, 0.00468857, 0.00478001, - 0.00487205, 0.00496464, 0.00505775, 0.00515136, 0.00524542, 0.00533990, - 0.00543476, 0.00552997, 0.00562548, 0.00572125, 0.00581725, - 0.00591342, 0.00600973, 0.00610612, 0.00620254, 0.00629895, - 0.00639530, 0.00649153, 0.00658758, 0.00668341, 0.00677894, 0.00687413, - 0.00696891, 0.00706322, 0.00715699, 0.00725016, 0.00734266, - 0.00743441, 0.00752535, 0.00761540, 0.00770449, 0.00779254, - 0.00787947, 0.00796519, 0.00804963, 0.00813270, 0.00821431, 0.00829437, - 0.00837280, 0.00844949, 0.00852436, 0.00859730, 0.00866822, - 0.00873701, 0.00880358, 0.00886781, 0.00892960, 0.00898884, - 0.00904542, 0.00909923, 0.00915014, 0.00919805, 0.00924283, 0.00928436, - 0.00932252, 0.00935718, 0.00938821, 0.00941550, 0.00943890, - 0.00945828, 0.00947351, 0.00948446, 0.00949098, 0.00949294, - 0.00949020, 0.00948262, 0.00947005, 0.00945235, 0.00942938, 0.00940099, - 0.00936704, 0.00932738, 0.00928186, 0.00923034, 0.00917268, - 0.00910872, 0.00903832, 0.00896134, 0.00887763, 0.00878706, - 0.00868949, 0.00858478, 0.00847280, 0.00835343, 0.00822653, 0.00809199, - 0.00794970, 0.00779956, 0.00764145, 0.00747530, 0.00730103, - 0.00711857, 0.00692787, 0.00672888, 0.00652158, 0.00630597, - 0.00608208, 0.00584994, 0.00560962, 0.00536124, 0.00510493, 0.00484089, - 0.00456935, 0.00429062, 0.00400505, 0.00371310, 0.00341532, - 0.00311238, 0.00280511, 0.00249452, 0.00218184, 0.00186864, - 0.00155690, 0.00124918, 0.00094895, 0.00066112, 0.00039320, 0.00015881 -}; - -static void WebRtcIsac_GetVars(const double* input, - const int16_t* pitchGains_Q12, - double* oldEnergy, - double* varscale) { - double nrg[4], chng, pg; - int k; - - double pitchGains[4]={0,0,0,0};; - - /* Calculate energies of first and second frame halfs */ - nrg[0] = 0.0001; - for (k = QLOOKAHEAD/2; k < (FRAMESAMPLES_QUARTER + QLOOKAHEAD) / 2; k++) { - nrg[0] += input[k]*input[k]; - } - nrg[1] = 0.0001; - for ( ; k < (FRAMESAMPLES_HALF + QLOOKAHEAD) / 2; k++) { - nrg[1] += 
input[k]*input[k]; - } - nrg[2] = 0.0001; - for ( ; k < (FRAMESAMPLES*3/4 + QLOOKAHEAD) / 2; k++) { - nrg[2] += input[k]*input[k]; - } - nrg[3] = 0.0001; - for ( ; k < (FRAMESAMPLES + QLOOKAHEAD) / 2; k++) { - nrg[3] += input[k]*input[k]; - } - - /* Calculate average level change */ - chng = 0.25 * (fabs(10.0 * log10(nrg[3] / nrg[2])) + - fabs(10.0 * log10(nrg[2] / nrg[1])) + - fabs(10.0 * log10(nrg[1] / nrg[0])) + - fabs(10.0 * log10(nrg[0] / *oldEnergy))); - - - /* Find average pitch gain */ - pg = 0.0; - for (k=0; k<4; k++) - { - pitchGains[k] = ((float)pitchGains_Q12[k])/4096; - pg += pitchGains[k]; - } - pg *= 0.25; - - /* If pitch gain is low and energy constant - increase noise level*/ - /* Matlab code: - pg = 0:.01:.45; plot(pg, 0.0 + 1.0 * exp( -1.0 * exp(-200.0 * pg.*pg.*pg) / (1.0 + 0.4 * 0) )) - */ - *varscale = 0.0 + 1.0 * exp( -1.4 * exp(-200.0 * pg*pg*pg) / (1.0 + 0.4 * chng) ); - - *oldEnergy = nrg[3]; -} - -static void WebRtcIsac_GetVarsUB(const double* input, - double* oldEnergy, - double* varscale) { - double nrg[4], chng; - int k; - - /* Calculate energies of first and second frame halfs */ - nrg[0] = 0.0001; - for (k = 0; k < (FRAMESAMPLES_QUARTER) / 2; k++) { - nrg[0] += input[k]*input[k]; - } - nrg[1] = 0.0001; - for ( ; k < (FRAMESAMPLES_HALF) / 2; k++) { - nrg[1] += input[k]*input[k]; - } - nrg[2] = 0.0001; - for ( ; k < (FRAMESAMPLES*3/4) / 2; k++) { - nrg[2] += input[k]*input[k]; - } - nrg[3] = 0.0001; - for ( ; k < (FRAMESAMPLES) / 2; k++) { - nrg[3] += input[k]*input[k]; - } - - /* Calculate average level change */ - chng = 0.25 * (fabs(10.0 * log10(nrg[3] / nrg[2])) + - fabs(10.0 * log10(nrg[2] / nrg[1])) + - fabs(10.0 * log10(nrg[1] / nrg[0])) + - fabs(10.0 * log10(nrg[0] / *oldEnergy))); - - - /* If pitch gain is low and energy constant - increase noise level*/ - /* Matlab code: - pg = 0:.01:.45; plot(pg, 0.0 + 1.0 * exp( -1.0 * exp(-200.0 * pg.*pg.*pg) / (1.0 + 0.4 * 0) )) - */ - *varscale = exp( -1.4 / (1.0 + 0.4 * chng) ); - - 
*oldEnergy = nrg[3]; -} - -void WebRtcIsac_GetLpcCoefLb(double *inLo, double *inHi, MaskFiltstr *maskdata, - double signal_noise_ratio, const int16_t *pitchGains_Q12, - double *lo_coeff, double *hi_coeff) -{ - int k, n, j, pos1, pos2; - double varscale; - - double DataLo[WINLEN], DataHi[WINLEN]; - double corrlo[ORDERLO+2], corrlo2[ORDERLO+1]; - double corrhi[ORDERHI+1]; - double k_veclo[ORDERLO], k_vechi[ORDERHI]; - - double a_LO[ORDERLO+1], a_HI[ORDERHI+1]; - double tmp, res_nrg; - - double FwdA, FwdB; - - /* hearing threshold level in dB; higher value gives more noise */ - const double HearThresOffset = -28.0; - - /* bandwdith expansion factors for low- and high band */ - const double gammaLo = 0.9; - const double gammaHi = 0.8; - - /* less-noise-at-low-frequencies factor */ - double aa; - - - /* convert from dB to signal level */ - const double H_T_H = pow(10.0, 0.05 * HearThresOffset); - double S_N_R = pow(10.0, 0.05 * signal_noise_ratio) / 3.46; /* divide by sqrt(12) */ - - /* change quallevel depending on pitch gains and level fluctuations */ - WebRtcIsac_GetVars(inLo, pitchGains_Q12, &(maskdata->OldEnergy), &varscale); - - /* less-noise-at-low-frequencies factor */ - aa = 0.35 * (0.5 + 0.5 * varscale); - - /* replace data in buffer by new look-ahead data */ - for (pos1 = 0; pos1 < QLOOKAHEAD; pos1++) - maskdata->DataBufferLo[pos1 + WINLEN - QLOOKAHEAD] = inLo[pos1]; - - for (k = 0; k < SUBFRAMES; k++) { - - /* Update input buffer and multiply signal with window */ - for (pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) { - maskdata->DataBufferLo[pos1] = maskdata->DataBufferLo[pos1 + UPDATE/2]; - maskdata->DataBufferHi[pos1] = maskdata->DataBufferHi[pos1 + UPDATE/2]; - DataLo[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - DataHi[pos1] = maskdata->DataBufferHi[pos1] * kLpcCorrWindow[pos1]; - } - pos2 = k * UPDATE/2; - for (n = 0; n < UPDATE/2; n++, pos1++) { - maskdata->DataBufferLo[pos1] = inLo[QLOOKAHEAD + pos2]; - maskdata->DataBufferHi[pos1] = 
inHi[pos2++]; - DataLo[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - DataHi[pos1] = maskdata->DataBufferHi[pos1] * kLpcCorrWindow[pos1]; - } - - /* Get correlation coefficients */ - WebRtcIsac_AutoCorr(corrlo, DataLo, WINLEN, ORDERLO+1); /* computing autocorrelation */ - WebRtcIsac_AutoCorr(corrhi, DataHi, WINLEN, ORDERHI); - - - /* less noise for lower frequencies, by filtering/scaling autocorrelation sequences */ - corrlo2[0] = (1.0+aa*aa) * corrlo[0] - 2.0*aa * corrlo[1]; - tmp = (1.0 + aa*aa); - for (n = 1; n <= ORDERLO; n++) { - corrlo2[n] = tmp * corrlo[n] - aa * (corrlo[n-1] + corrlo[n+1]); - } - tmp = (1.0+aa) * (1.0+aa); - for (n = 0; n <= ORDERHI; n++) { - corrhi[n] = tmp * corrhi[n]; - } - - /* add white noise floor */ - corrlo2[0] += 1e-6; - corrhi[0] += 1e-6; - - - FwdA = 0.01; - FwdB = 0.01; - - /* recursive filtering of correlation over subframes */ - for (n = 0; n <= ORDERLO; n++) { - maskdata->CorrBufLo[n] = FwdA * maskdata->CorrBufLo[n] + corrlo2[n]; - corrlo2[n] = ((1.0-FwdA)*FwdB) * maskdata->CorrBufLo[n] + (1.0-FwdB) * corrlo2[n]; - } - for (n = 0; n <= ORDERHI; n++) { - maskdata->CorrBufHi[n] = FwdA * maskdata->CorrBufHi[n] + corrhi[n]; - corrhi[n] = ((1.0-FwdA)*FwdB) * maskdata->CorrBufHi[n] + (1.0-FwdB) * corrhi[n]; - } - - /* compute prediction coefficients */ - WebRtcIsac_LevDurb(a_LO, k_veclo, corrlo2, ORDERLO); - WebRtcIsac_LevDurb(a_HI, k_vechi, corrhi, ORDERHI); - - /* bandwidth expansion */ - tmp = gammaLo; - for (n = 1; n <= ORDERLO; n++) { - a_LO[n] *= tmp; - tmp *= gammaLo; - } - - /* residual energy */ - res_nrg = 0.0; - for (j = 0; j <= ORDERLO; j++) { - for (n = 0; n <= j; n++) { - res_nrg += a_LO[j] * corrlo2[j-n] * a_LO[n]; - } - for (n = j+1; n <= ORDERLO; n++) { - res_nrg += a_LO[j] * corrlo2[n-j] * a_LO[n]; - } - } - - /* add hearing threshold and compute the gain */ - *lo_coeff++ = S_N_R / (sqrt(res_nrg) / varscale + H_T_H); - - /* copy coefficients to output array */ - for (n = 1; n <= ORDERLO; n++) { - 
*lo_coeff++ = a_LO[n]; - } - - - /* bandwidth expansion */ - tmp = gammaHi; - for (n = 1; n <= ORDERHI; n++) { - a_HI[n] *= tmp; - tmp *= gammaHi; - } - - /* residual energy */ - res_nrg = 0.0; - for (j = 0; j <= ORDERHI; j++) { - for (n = 0; n <= j; n++) { - res_nrg += a_HI[j] * corrhi[j-n] * a_HI[n]; - } - for (n = j+1; n <= ORDERHI; n++) { - res_nrg += a_HI[j] * corrhi[n-j] * a_HI[n]; - } - } - - /* add hearing threshold and compute of the gain */ - *hi_coeff++ = S_N_R / (sqrt(res_nrg) / varscale + H_T_H); - - /* copy coefficients to output array */ - for (n = 1; n <= ORDERHI; n++) { - *hi_coeff++ = a_HI[n]; - } - } -} - - - -/****************************************************************************** - * WebRtcIsac_GetLpcCoefUb() - * - * Compute LP coefficients and correlation coefficients. At 12 kHz LP - * coefficients of the first and the last sub-frame is computed. At 16 kHz - * LP coefficients of 4th, 8th and 12th sub-frames are computed. We always - * compute correlation coefficients of all sub-frames. - * - * Inputs: - * -inSignal : Input signal - * -maskdata : a structure keeping signal from previous frame. - * -bandwidth : specifies if the codec is in 0-16 kHz mode or - * 0-12 kHz mode. - * - * Outputs: - * -lpCoeff : pointer to a buffer where A-polynomials are - * written to (first coeff is 1 and it is not - * written) - * -corrMat : a matrix where correlation coefficients of each - * sub-frame are written to one row. - * -varscale : a scale used to compute LPC gains. 
- */ -void -WebRtcIsac_GetLpcCoefUb( - double* inSignal, - MaskFiltstr* maskdata, - double* lpCoeff, - double corrMat[][UB_LPC_ORDER + 1], - double* varscale, - int16_t bandwidth) -{ - int frameCntr, activeFrameCntr, n, pos1, pos2; - int16_t criterion1; - int16_t criterion2; - int16_t numSubFrames = SUBFRAMES * (1 + (bandwidth == isac16kHz)); - double data[WINLEN]; - double corrSubFrame[UB_LPC_ORDER+2]; - double reflecCoeff[UB_LPC_ORDER]; - - double aPolynom[UB_LPC_ORDER+1]; - double tmp; - - /* bandwdith expansion factors */ - const double gamma = 0.9; - - /* change quallevel depending on pitch gains and level fluctuations */ - WebRtcIsac_GetVarsUB(inSignal, &(maskdata->OldEnergy), varscale); - - /* replace data in buffer by new look-ahead data */ - for(frameCntr = 0, activeFrameCntr = 0; frameCntr < numSubFrames; - frameCntr++) - { - if(frameCntr == SUBFRAMES) - { - // we are in 16 kHz - varscale++; - WebRtcIsac_GetVarsUB(&inSignal[FRAMESAMPLES_HALF], - &(maskdata->OldEnergy), varscale); - } - /* Update input buffer and multiply signal with window */ - for(pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) - { - maskdata->DataBufferLo[pos1] = maskdata->DataBufferLo[pos1 + - UPDATE/2]; - data[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - } - pos2 = frameCntr * UPDATE/2; - for(n = 0; n < UPDATE/2; n++, pos1++, pos2++) - { - maskdata->DataBufferLo[pos1] = inSignal[pos2]; - data[pos1] = maskdata->DataBufferLo[pos1] * kLpcCorrWindow[pos1]; - } - - /* Get correlation coefficients */ - /* computing autocorrelation */ - WebRtcIsac_AutoCorr(corrSubFrame, data, WINLEN, UB_LPC_ORDER+1); - memcpy(corrMat[frameCntr], corrSubFrame, - (UB_LPC_ORDER+1)*sizeof(double)); - - criterion1 = ((frameCntr == 0) || (frameCntr == (SUBFRAMES - 1))) && - (bandwidth == isac12kHz); - criterion2 = (((frameCntr+1) % 4) == 0) && - (bandwidth == isac16kHz); - if(criterion1 || criterion2) - { - /* add noise */ - corrSubFrame[0] += 1e-6; - /* compute prediction coefficients */ - 
WebRtcIsac_LevDurb(aPolynom, reflecCoeff, corrSubFrame, - UB_LPC_ORDER); - - /* bandwidth expansion */ - tmp = gamma; - for (n = 1; n <= UB_LPC_ORDER; n++) - { - *lpCoeff++ = aPolynom[n] * tmp; - tmp *= gamma; - } - activeFrameCntr++; - } - } -} - - - -/****************************************************************************** - * WebRtcIsac_GetLpcGain() - * - * Compute the LPC gains for each sub-frame, given the LPC of each sub-frame - * and the corresponding correlation coefficients. - * - * Inputs: - * -signal_noise_ratio : the desired SNR in dB. - * -numVecs : number of sub-frames - * -corrMat : a matrix of correlation coefficients where - * each row is a set of correlation coefficients of - * one sub-frame. - * -varscale : a scale computed when WebRtcIsac_GetLpcCoefUb() - * is called. - * - * Outputs: - * -gain : pointer to a buffer where LP gains are written. - * - */ -void -WebRtcIsac_GetLpcGain( - double signal_noise_ratio, - const double* filtCoeffVecs, - int numVecs, - double* gain, - double corrMat[][UB_LPC_ORDER + 1], - const double* varscale) -{ - int16_t j, n; - int16_t subFrameCntr; - double aPolynom[ORDERLO + 1]; - double res_nrg; - - const double HearThresOffset = -28.0; - const double H_T_H = pow(10.0, 0.05 * HearThresOffset); - /* divide by sqrt(12) = 3.46 */ - const double S_N_R = pow(10.0, 0.05 * signal_noise_ratio) / 3.46; - - aPolynom[0] = 1; - for(subFrameCntr = 0; subFrameCntr < numVecs; subFrameCntr++) - { - if(subFrameCntr == SUBFRAMES) - { - // we are in second half of a SWB frame. 
use new varscale - varscale++; - } - memcpy(&aPolynom[1], &filtCoeffVecs[(subFrameCntr * (UB_LPC_ORDER + 1)) + - 1], sizeof(double) * UB_LPC_ORDER); - - /* residual energy */ - res_nrg = 0.0; - for(j = 0; j <= UB_LPC_ORDER; j++) - { - for(n = 0; n <= j; n++) - { - res_nrg += aPolynom[j] * corrMat[subFrameCntr][j-n] * - aPolynom[n]; - } - for(n = j+1; n <= UB_LPC_ORDER; n++) - { - res_nrg += aPolynom[j] * corrMat[subFrameCntr][n-j] * - aPolynom[n]; - } - } - - /* add hearing threshold and compute the gain */ - gain[subFrameCntr] = S_N_R / (sqrt(res_nrg) / *varscale + H_T_H); - } -} diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h b/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h deleted file mode 100644 index 5503e2d49b..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * lpc_analysis.h - * - * LPC functions - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYSIS_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYSIS_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -void WebRtcIsac_GetLpcCoefLb(double* inLo, - double* inHi, - MaskFiltstr* maskdata, - double signal_noise_ratio, - const int16_t* pitchGains_Q12, - double* lo_coeff, - double* hi_coeff); - -void WebRtcIsac_GetLpcGain(double signal_noise_ratio, - const double* filtCoeffVecs, - int numVecs, - double* gain, - double corrLo[][UB_LPC_ORDER + 1], - const double* varscale); - -void WebRtcIsac_GetLpcCoefUb(double* inSignal, - MaskFiltstr* maskdata, - double* lpCoeff, - double corr[][UB_LPC_ORDER + 1], - double* varscale, - int16_t bandwidth); - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYIS_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c b/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c deleted file mode 100644 index 670754065f..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB_KLT_Tables_LPCGain.c - * - * This file defines tables used for entropy coding of LPC Gain - * of upper-band. 
- * - */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -const double WebRtcIsac_kQSizeLpcGain = 0.100000; - -const double WebRtcIsac_kMeanLpcGain = -3.3822; - -/* -* The smallest reconstruction points for quantiztion of -* LPC gains. -*/ -const double WebRtcIsac_kLeftRecPointLpcGain[SUBFRAMES] = -{ - -0.800000, -1.000000, -1.200000, -2.200000, -3.000000, -12.700000 -}; - -/* -* Number of reconstruction points of quantizers for LPC Gains. -*/ -const int16_t WebRtcIsac_kNumQCellLpcGain[SUBFRAMES] = -{ - 17, 20, 25, 45, 77, 170 -}; -/* -* Starting index for entropy decoder to search for the right interval, -* one entry per LAR coefficient -*/ -const uint16_t WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES] = -{ - 8, 10, 12, 22, 38, 85 -}; - -/* -* The following 6 vectors define CDF of 6 decorrelated LPC -* gains. -*/ -const uint16_t WebRtcIsac_kLpcGainCdfVec0[18] = -{ - 0, 10, 27, 83, 234, 568, 1601, 4683, 16830, 57534, 63437, - 64767, 65229, 65408, 65483, 65514, 65527, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec1[21] = -{ - 0, 15, 33, 84, 185, 385, 807, 1619, 3529, 7850, 19488, - 51365, 62437, 64548, 65088, 65304, 65409, 65484, 65507, 65522, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec2[26] = -{ - 0, 15, 29, 54, 89, 145, 228, 380, 652, 1493, 4260, - 12359, 34133, 50749, 57224, 60814, 62927, 64078, 64742, 65103, 65311, 65418, - 65473, 65509, 65521, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec3[46] = -{ - 0, 8, 12, 16, 26, 42, 56, 76, 111, 164, 247, - 366, 508, 693, 1000, 1442, 2155, 3188, 4854, 7387, 11249, 17617, - 30079, 46711, 56291, 60127, 62140, 63258, 63954, 64384, 64690, 64891, 65031, - 65139, 65227, 65293, 65351, 65399, 65438, 65467, 65492, 65504, 65510, 65518, - 65523, 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec4[78] = -{ - 0, 17, 29, 39, 51, 70, 104, 154, 234, 324, 443, - 590, 760, 971, 1202, 1494, 1845, 2274, 2797, 3366, 
4088, 4905, - 5899, 7142, 8683, 10625, 12983, 16095, 20637, 28216, 38859, 47237, 51537, - 54150, 56066, 57583, 58756, 59685, 60458, 61103, 61659, 62144, 62550, 62886, - 63186, 63480, 63743, 63954, 64148, 64320, 64467, 64600, 64719, 64837, 64939, - 65014, 65098, 65160, 65211, 65250, 65290, 65325, 65344, 65366, 65391, 65410, - 65430, 65447, 65460, 65474, 65487, 65494, 65501, 65509, 65513, 65518, 65520, - 65535 -}; - -const uint16_t WebRtcIsac_kLpcGainCdfVec5[171] = -{ - 0, 10, 12, 14, 16, 18, 23, 29, 35, 42, 51, - 58, 65, 72, 78, 87, 96, 103, 111, 122, 134, 150, - 167, 184, 202, 223, 244, 265, 289, 315, 346, 379, 414, - 450, 491, 532, 572, 613, 656, 700, 751, 802, 853, 905, - 957, 1021, 1098, 1174, 1250, 1331, 1413, 1490, 1565, 1647, 1730, - 1821, 1913, 2004, 2100, 2207, 2314, 2420, 2532, 2652, 2783, 2921, - 3056, 3189, 3327, 3468, 3640, 3817, 3993, 4171, 4362, 4554, 4751, - 4948, 5142, 5346, 5566, 5799, 6044, 6301, 6565, 6852, 7150, 7470, - 7797, 8143, 8492, 8835, 9181, 9547, 9919, 10315, 10718, 11136, 11566, - 12015, 12482, 12967, 13458, 13953, 14432, 14903, 15416, 15936, 16452, 16967, - 17492, 18024, 18600, 19173, 19736, 20311, 20911, 21490, 22041, 22597, 23157, - 23768, 24405, 25034, 25660, 26280, 26899, 27614, 28331, 29015, 29702, 30403, - 31107, 31817, 32566, 33381, 34224, 35099, 36112, 37222, 38375, 39549, 40801, - 42074, 43350, 44626, 45982, 47354, 48860, 50361, 51845, 53312, 54739, 56026, - 57116, 58104, 58996, 59842, 60658, 61488, 62324, 63057, 63769, 64285, 64779, - 65076, 65344, 65430, 65500, 65517, 65535 -}; - -/* -* An array of pointers to CDFs of decorrelated LPC Gains -*/ -const uint16_t* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES] = -{ - WebRtcIsac_kLpcGainCdfVec0, WebRtcIsac_kLpcGainCdfVec1, - WebRtcIsac_kLpcGainCdfVec2, WebRtcIsac_kLpcGainCdfVec3, - WebRtcIsac_kLpcGainCdfVec4, WebRtcIsac_kLpcGainCdfVec5 -}; - -/* -* A matrix to decorrellate LPC gains of subframes. 
-*/ -const double WebRtcIsac_kLpcGainDecorrMat[SUBFRAMES][SUBFRAMES] = -{ - {-0.150860, 0.327872, 0.367220, 0.504613, 0.559270, 0.409234}, - { 0.457128, -0.613591, -0.289283, -0.029734, 0.393760, 0.418240}, - {-0.626043, 0.136489, -0.439118, -0.448323, 0.135987, 0.420869}, - { 0.526617, 0.480187, 0.242552, -0.488754, -0.158713, 0.411331}, - {-0.302587, -0.494953, 0.588112, -0.063035, -0.404290, 0.387510}, - { 0.086378, 0.147714, -0.428875, 0.548300, -0.570121, 0.401391} -}; diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h b/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h deleted file mode 100644 index 39c4a24ef4..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB_KLT_Tables_LPCGain.h - * - * This file declares tables used for entropy coding of LPC Gain - * of upper-band. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -extern const double WebRtcIsac_kQSizeLpcGain; - -extern const double WebRtcIsac_kLeftRecPointLpcGain[SUBFRAMES]; - -extern const int16_t WebRtcIsac_kNumQCellLpcGain[SUBFRAMES]; - -extern const uint16_t WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec0[18]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec1[21]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec2[26]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec3[46]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec4[78]; - -extern const uint16_t WebRtcIsac_kLpcGainCdfVec5[171]; - -extern const uint16_t* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES]; - -extern const double WebRtcIsac_kLpcGainDecorrMat[SUBFRAMES][SUBFRAMES]; - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_GAIN_SWB_TABLES_H_ diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c b/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c deleted file mode 100644 index e3600a7fab..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB_KLT_Tables.c - * - * This file defines tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 12 kHz. 
- * - */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* -* Mean value of LAR -*/ -const double WebRtcIsac_kMeanLarUb12[UB_LPC_ORDER] = -{ - 0.03748928306641, 0.09453441192543, -0.01112522344398, 0.03800237516842 -}; - -/* -* A rotation matrix to decorrelate intra-vector correlation, -* i.e. correlation among components of LAR vector. -*/ -const double WebRtcIsac_kIntraVecDecorrMatUb12[UB_LPC_ORDER][UB_LPC_ORDER] = -{ - {-0.00075365493856, -0.05809964887743, -0.23397966154116, 0.97050367376411}, - { 0.00625021257734, -0.17299965610679, 0.95977735920651, 0.22104179375008}, - { 0.20543384258374, -0.96202143495696, -0.15301870801552, -0.09432375099565}, - {-0.97865075648479, -0.20300322280841, -0.02581111653779, -0.01913568980258} -}; - -/* -* A rotation matrix to remove correlation among LAR coefficients -* of different LAR vectors. One might guess that decorrelation matrix -* for the first component should differ from the second component -* but we haven't observed a significant benefit of having different -* decorrelation matrices for different components. -*/ -const double WebRtcIsac_kInterVecDecorrMatUb12 -[UB_LPC_VEC_PER_FRAME][UB_LPC_VEC_PER_FRAME] = -{ - { 0.70650597970460, -0.70770707262373}, - {-0.70770707262373, -0.70650597970460} -}; - -/* -* LAR quantization step-size. -*/ -const double WebRtcIsac_kLpcShapeQStepSizeUb12 = 0.150000; - -/* -* The smallest reconstruction points for quantiztion of LAR coefficients. -*/ -const double WebRtcIsac_kLpcShapeLeftRecPointUb12 -[UB_LPC_ORDER*UB_LPC_VEC_PER_FRAME] = -{ - -0.900000, -1.050000, -1.350000, -1.800000, -1.350000, -1.650000, - -2.250000, -3.450000 -}; - -/* -* Number of reconstruction points of quantizers for LAR coefficients. 
-*/ -const int16_t WebRtcIsac_kLpcShapeNumRecPointUb12 -[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] = -{ - 13, 15, 19, 27, 19, 24, 32, 48 -}; - -/* -* Starting index for entropy decoder to search for the right interval, -* one entry per LAR coefficient -*/ -const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb12 -[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] = -{ - 6, 7, 9, 13, 9, 12, 16, 24 -}; - -/* -* The following 8 vectors define CDF of 8 decorrelated LAR -* coefficients. -*/ -const uint16_t WebRtcIsac_kLpcShapeCdfVec0Ub12[14] = -{ - 0, 13, 95, 418, 1687, 6498, 21317, 44200, 59029, 63849, 65147, - 65449, 65525, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub12[16] = -{ - 0, 10, 59, 255, 858, 2667, 8200, 22609, 42988, 57202, 62947, - 64743, 65308, 65476, 65522, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub12[20] = -{ - 0, 18, 40, 118, 332, 857, 2017, 4822, 11321, 24330, 41279, - 54342, 60637, 63394, 64659, 65184, 65398, 65482, 65518, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub12[28] = -{ - 0, 21, 38, 90, 196, 398, 770, 1400, 2589, 4650, 8211, - 14933, 26044, 39592, 50814, 57452, 60971, 62884, 63995, 64621, 65019, 65273, - 65410, 65480, 65514, 65522, 65531, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub12[20] = -{ - 0, 7, 46, 141, 403, 969, 2132, 4649, 10633, 24902, 43254, - 54665, 59928, 62674, 64173, 64938, 65293, 65464, 65523, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub12[25] = -{ - 0, 7, 22, 72, 174, 411, 854, 1737, 3545, 6774, 13165, - 25221, 40980, 52821, 58714, 61706, 63472, 64437, 64989, 65287, 65430, 65503, - 65525, 65529, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub12[33] = -{ - 0, 11, 21, 36, 65, 128, 228, 401, 707, 1241, 2126, - 3589, 6060, 10517, 18853, 31114, 42477, 49770, 54271, 57467, 59838, 61569, - 62831, 63772, 64433, 64833, 65123, 65306, 65419, 65466, 65499, 65519, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub12[49] = -{ - 0, 14, 34, 67, 107, 167, 245, 326, 449, 645, 861, - 
1155, 1508, 2003, 2669, 3544, 4592, 5961, 7583, 9887, 13256, 18765, - 26519, 34077, 40034, 44349, 47795, 50663, 53262, 55473, 57458, 59122, 60592, - 61742, 62690, 63391, 63997, 64463, 64794, 65045, 65207, 65309, 65394, 65443, - 65478, 65504, 65514, 65523, 65535 -}; - -/* -* An array of pointers to CDFs of decorrelated LARs -*/ -const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb12 -[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] = -{ - WebRtcIsac_kLpcShapeCdfVec0Ub12, WebRtcIsac_kLpcShapeCdfVec1Ub12, - WebRtcIsac_kLpcShapeCdfVec2Ub12, WebRtcIsac_kLpcShapeCdfVec3Ub12, - WebRtcIsac_kLpcShapeCdfVec4Ub12, WebRtcIsac_kLpcShapeCdfVec5Ub12, - WebRtcIsac_kLpcShapeCdfVec6Ub12, WebRtcIsac_kLpcShapeCdfVec7Ub12 -}; diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h b/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h deleted file mode 100644 index 7448a1e76b..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_shape_swb12_tables.h - * - * This file declares tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 12 kHz. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -extern const double WebRtcIsac_kMeanLarUb12[UB_LPC_ORDER]; - -extern const double WebRtcIsac_kMeanLpcGain; - -extern const double WebRtcIsac_kIntraVecDecorrMatUb12[UB_LPC_ORDER] - [UB_LPC_ORDER]; - -extern const double WebRtcIsac_kInterVecDecorrMatUb12[UB_LPC_VEC_PER_FRAME] - [UB_LPC_VEC_PER_FRAME]; - -extern const double WebRtcIsac_kLpcShapeQStepSizeUb12; - -extern const double - WebRtcIsac_kLpcShapeLeftRecPointUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -extern const int16_t - WebRtcIsac_kLpcShapeNumRecPointUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -extern const uint16_t - WebRtcIsac_kLpcShapeEntropySearchUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec0Ub12[14]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub12[16]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub12[20]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub12[28]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub12[20]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub12[25]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub12[33]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub12[49]; - -extern const uint16_t* - WebRtcIsac_kLpcShapeCdfMatUb12[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME]; - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_ diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c b/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c deleted file mode 100644 index 59617fd274..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * SWB16_KLT_Tables.c - * - * This file defines tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 16 kHz. - * - */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* -* Mean value of LAR -*/ -const double WebRtcIsac_kMeanLarUb16[UB_LPC_ORDER] = -{ -0.454978, 0.364747, 0.102999, 0.104523 -}; - -/* -* A rotation matrix to decorrelate intra-vector correlation, -* i.e. correlation among components of LAR vector. -*/ -const double WebRtcIsac_kIintraVecDecorrMatUb16[UB_LPC_ORDER][UB_LPC_ORDER] = -{ - {-0.020528, -0.085858, -0.002431, 0.996093}, - {-0.033155, 0.036102, 0.998786, 0.004866}, - { 0.202627, 0.974853, -0.028940, 0.088132}, - {-0.978479, 0.202454, -0.039785, -0.002811} -}; - -/* -* A rotation matrix to remove correlation among LAR coefficients -* of different LAR vectors. One might guess that decorrelation matrix -* for the first component should differ from the second component -* but we haven't observed a significant benefit of having different -* decorrelation matrices for different components. -*/ -const double WebRtcIsac_kInterVecDecorrMatUb16 -[UB16_LPC_VEC_PER_FRAME][UB16_LPC_VEC_PER_FRAME] = -{ - { 0.291675, -0.515786, 0.644927, 0.482658}, - {-0.647220, 0.479712, 0.289556, 0.516856}, - { 0.643084, 0.485489, -0.289307, 0.516763}, - {-0.287185, -0.517823, -0.645389, 0.482553} -}; - -/* -* The following 16 vectors define CDF of 16 decorrelated LAR -* coefficients. 
-*/ -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub16[14] = -{ - 0, 2, 20, 159, 1034, 5688, 20892, 44653, - 59849, 64485, 65383, 65518, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub16[16] = -{ - 0, 1, 7, 43, 276, 1496, 6681, 21653, - 43891, 58859, 64022, 65248, 65489, 65529, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub16[18] = -{ - 0, 1, 9, 54, 238, 933, 3192, 9461, - 23226, 42146, 56138, 62413, 64623, 65300, 65473, 65521, - 65533, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub16[30] = -{ - 0, 2, 4, 8, 17, 36, 75, 155, - 329, 683, 1376, 2662, 5047, 9508, 17526, 29027, - 40363, 48997, 55096, 59180, 61789, 63407, 64400, 64967, - 65273, 65429, 65497, 65526, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub16[16] = -{ - 0, 1, 10, 63, 361, 1785, 7407, 22242, - 43337, 58125, 63729, 65181, 65472, 65527, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub16[17] = -{ - 0, 1, 7, 29, 134, 599, 2443, 8590, - 22962, 42635, 56911, 63060, 64940, 65408, 65513, 65531, - 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub16[21] = -{ - 0, 1, 5, 16, 57, 191, 611, 1808, - 4847, 11755, 24612, 40910, 53789, 60698, 63729, 64924, - 65346, 65486, 65523, 65532, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub16[36] = -{ - 0, 1, 4, 12, 25, 55, 104, 184, - 314, 539, 926, 1550, 2479, 3861, 5892, 8845, - 13281, 20018, 29019, 38029, 45581, 51557, 56057, 59284, - 61517, 63047, 64030, 64648, 65031, 65261, 65402, 65480, - 65518, 65530, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec8Ub16[21] = -{ - 0, 1, 2, 7, 26, 103, 351, 1149, - 3583, 10204, 23846, 41711, 55361, 61917, 64382, 65186, - 65433, 65506, 65528, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub160[21] = -{ - 0, 6, 19, 63, 205, 638, 1799, 4784, - 11721, 24494, 40803, 53805, 60886, 63822, 64931, 65333, - 65472, 65517, 65530, 65533, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub161[28] = -{ - 0, 1, 3, 11, 
31, 86, 221, 506, - 1101, 2296, 4486, 8477, 15356, 26079, 38941, 49952, - 57165, 61257, 63426, 64549, 65097, 65351, 65463, 65510, - 65526, 65532, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub162[55] = -{ - 0, 3, 12, 23, 42, 65, 89, 115, - 150, 195, 248, 327, 430, 580, 784, 1099, - 1586, 2358, 3651, 5899, 9568, 14312, 19158, 23776, - 28267, 32663, 36991, 41153, 45098, 48680, 51870, 54729, - 57141, 59158, 60772, 62029, 63000, 63761, 64322, 64728, - 65000, 65192, 65321, 65411, 65463, 65496, 65514, 65523, - 65527, 65529, 65531, 65532, 65533, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub163[26] = -{ - 0, 2, 4, 10, 21, 48, 114, 280, - 701, 1765, 4555, 11270, 24267, 41213, 54285, 61003, - 63767, 64840, 65254, 65421, 65489, 65514, 65526, 65532, - 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub164[28] = -{ - 0, 1, 3, 6, 15, 36, 82, 196, - 453, 1087, 2557, 5923, 13016, 25366, 40449, 52582, - 59539, 62896, 64389, 65033, 65316, 65442, 65494, 65519, - 65529, 65533, 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub165[34] = -{ - 0, 2, 4, 8, 18, 35, 73, 146, - 279, 524, 980, 1789, 3235, 5784, 10040, 16998, - 27070, 38543, 48499, 55421, 59712, 62257, 63748, 64591, - 65041, 65278, 65410, 65474, 65508, 65522, 65530, 65533, - 65534, 65535 -}; - -const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub166[71] = -{ - 0, 1, 2, 6, 13, 26, 55, 92, - 141, 191, 242, 296, 355, 429, 522, 636, - 777, 947, 1162, 1428, 1753, 2137, 2605, 3140, - 3743, 4409, 5164, 6016, 6982, 8118, 9451, 10993, - 12754, 14810, 17130, 19780, 22864, 26424, 30547, 35222, - 40140, 44716, 48698, 52056, 54850, 57162, 59068, 60643, - 61877, 62827, 63561, 64113, 64519, 64807, 65019, 65167, - 65272, 65343, 65399, 65440, 65471, 65487, 65500, 65509, - 65518, 65524, 65527, 65531, 65533, 65534, 65535 -}; - -/* -* An array of pointers to CDFs of decorrelated LARs -*/ -const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = { - 
WebRtcIsac_kLpcShapeCdfVec01Ub16, - WebRtcIsac_kLpcShapeCdfVec1Ub16, - WebRtcIsac_kLpcShapeCdfVec2Ub16, - WebRtcIsac_kLpcShapeCdfVec3Ub16, - WebRtcIsac_kLpcShapeCdfVec4Ub16, - WebRtcIsac_kLpcShapeCdfVec5Ub16, - WebRtcIsac_kLpcShapeCdfVec6Ub16, - WebRtcIsac_kLpcShapeCdfVec7Ub16, - WebRtcIsac_kLpcShapeCdfVec8Ub16, - WebRtcIsac_kLpcShapeCdfVec01Ub160, - WebRtcIsac_kLpcShapeCdfVec01Ub161, - WebRtcIsac_kLpcShapeCdfVec01Ub162, - WebRtcIsac_kLpcShapeCdfVec01Ub163, - WebRtcIsac_kLpcShapeCdfVec01Ub164, - WebRtcIsac_kLpcShapeCdfVec01Ub165, - WebRtcIsac_kLpcShapeCdfVec01Ub166 -}; - -/* -* The smallest reconstruction points for quantiztion of LAR coefficients. -*/ -const double WebRtcIsac_kLpcShapeLeftRecPointUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = -{ - -0.8250, -0.9750, -1.1250, -2.1750, -0.9750, -1.1250, -1.4250, - -2.6250, -1.4250, -1.2750, -1.8750, -3.6750, -1.7250, -1.8750, - -2.3250, -5.4750 -}; - -/* -* Number of reconstruction points of quantizers for LAR coefficients. -*/ -const int16_t WebRtcIsac_kLpcShapeNumRecPointUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = -{ - 13, 15, 17, 29, 15, 16, 20, 35, 20, - 20, 27, 54, 25, 27, 33, 70 -}; - -/* -* Starting index for entropy decoder to search for the right interval, -* one entry per LAR coefficient -*/ -const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb16 -[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = -{ - 6, 7, 8, 14, 7, 8, 10, 17, 10, - 10, 13, 27, 12, 13, 16, 35 -}; - -/* -* LAR quantization step-size. -*/ -const double WebRtcIsac_kLpcShapeQStepSizeUb16 = 0.150000; diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h b/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h deleted file mode 100644 index 51101db936..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_shape_swb16_tables.h - * - * This file declares tables used for entropy coding of LPC shape of - * upper-band signal if the bandwidth is 16 kHz. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -extern const double WebRtcIsac_kMeanLarUb16[UB_LPC_ORDER]; - -extern const double WebRtcIsac_kIintraVecDecorrMatUb16[UB_LPC_ORDER] - [UB_LPC_ORDER]; - -extern const double WebRtcIsac_kInterVecDecorrMatUb16[UB16_LPC_VEC_PER_FRAME] - [UB16_LPC_VEC_PER_FRAME]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub16[14]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub16[16]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub16[18]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub16[30]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub16[16]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub16[17]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub16[21]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub16[36]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec8Ub16[21]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub160[21]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub161[28]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub162[55]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub163[26]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub164[28]; - -extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub165[34]; - -extern const uint16_t 
WebRtcIsac_kLpcShapeCdfVec01Ub166[71]; - -extern const uint16_t* - WebRtcIsac_kLpcShapeCdfMatUb16[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - -extern const double - WebRtcIsac_kLpcShapeLeftRecPointUb16[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - -extern const int16_t - WebRtcIsac_kLpcShapeNumRecPointUb16[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME]; - -extern const uint16_t - WebRtcIsac_kLpcShapeEntropySearchUb16[UB_LPC_ORDER * - UB16_LPC_VEC_PER_FRAME]; - -extern const double WebRtcIsac_kLpcShapeQStepSizeUb16; - -#endif // MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB16_TABLES_H_ diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_tables.c b/modules/audio_coding/codecs/isac/main/source/lpc_tables.c deleted file mode 100644 index 461b92eb8a..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_tables.c +++ /dev/null @@ -1,601 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* coding tables for the KLT coefficients */ - -#include "modules/audio_coding/codecs/isac/main/source/lpc_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* cdf array for model indicator */ -const uint16_t WebRtcIsac_kQKltModelCdf[4] = { - 0, 15434, 37548, 65535 }; - -/* pointer to cdf array for model indicator */ -const uint16_t *WebRtcIsac_kQKltModelCdfPtr[1] = { - WebRtcIsac_kQKltModelCdf }; - -/* initial cdf index for decoder of model indicator */ -const uint16_t WebRtcIsac_kQKltModelInitIndex[1] = { 1 }; - -/* offset to go from rounded value to quantization index */ -const short WebRtcIsac_kQKltQuantMinGain[12] = { - 3, 6, 4, 6, 6, 9, 5, 16, 11, 34, 32, 47 }; - - -const short WebRtcIsac_kQKltQuantMinShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 2, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 1, 2, 2, 3, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, - 2, 4, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 3, 4, - 4, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 1, 2, 3, 2, 3, 4, 4, 5, 7, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 6, 7, 11, 9, 13, 12, 26 }; - -/* maximum quantization index */ -const uint16_t WebRtcIsac_kQKltMaxIndGain[12] = { - 6, 12, 8, 14, 10, 19, 12, 31, 22, 56, 52, 138 }; - -const uint16_t WebRtcIsac_kQKltMaxIndShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 2, 2, 2, 2, 4, 4, 5, 6, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 2, 2, - 2, 2, 3, 4, 5, 7, 0, 0, 0, 0, - 2, 0, 2, 2, 2, 2, 3, 2, 2, 4, - 4, 6, 6, 9, 0, 0, 0, 0, 2, 2, - 2, 2, 2, 2, 3, 2, 4, 4, 7, 7, - 9, 13, 0, 0, 2, 2, 2, 2, 2, 2, - 3, 4, 5, 4, 6, 8, 8, 10, 16, 25, - 0, 2, 2, 4, 5, 4, 4, 4, 7, 8, - 9, 10, 13, 19, 17, 23, 25, 49 }; - -/* index offset */ -const uint16_t WebRtcIsac_kQKltOffsetGain[12] = { - 0, 7, 20, 29, 44, 55, 75, 88, 120, 143, 200, 253 }; - -const uint16_t WebRtcIsac_kQKltOffsetShape[108] = { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - 11, 14, 17, 20, 23, 28, 33, 39, 46, 47, - 48, 49, 50, 52, 53, 54, 55, 56, 58, 61, - 64, 67, 70, 74, 79, 85, 
93, 94, 95, 96, - 97, 100, 101, 104, 107, 110, 113, 117, 120, 123, - 128, 133, 140, 147, 157, 158, 159, 160, 161, 164, - 167, 170, 173, 176, 179, 183, 186, 191, 196, 204, - 212, 222, 236, 237, 238, 241, 244, 247, 250, 253, - 256, 260, 265, 271, 276, 283, 292, 301, 312, 329, - 355, 356, 359, 362, 367, 373, 378, 383, 388, 396, - 405, 415, 426, 440, 460, 478, 502, 528 }; - -/* initial cdf index for KLT coefficients */ -const uint16_t WebRtcIsac_kQKltInitIndexGain[12] = { - 3, 6, 4, 7, 5, 10, 6, 16, 11, 28, 26, 69}; - -const uint16_t WebRtcIsac_kQKltInitIndexShape[108] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 1, 1, 1, 1, 2, 2, 3, 3, 0, 0, - 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, - 1, 1, 2, 2, 3, 4, 0, 0, 0, 0, - 1, 0, 1, 1, 1, 1, 2, 1, 1, 2, - 2, 3, 3, 5, 0, 0, 0, 0, 1, 1, - 1, 1, 1, 1, 2, 1, 2, 2, 4, 4, - 5, 7, 0, 0, 1, 1, 1, 1, 1, 1, - 2, 2, 3, 2, 3, 4, 4, 5, 8, 13, - 0, 1, 1, 2, 3, 2, 2, 2, 4, 4, - 5, 5, 7, 10, 9, 12, 13, 25 }; - - -/* quantizer representation levels */ -const double WebRtcIsac_kQKltLevelsGain[392] = { - -2.78127126, -1.76745590, -0.77913790, -0.00437329, 0.79961206, - 1.81775776, 2.81389782, -5.78753143, -4.88384084, -3.89320940, - -2.88133610, -1.92859977, -0.86347396, 0.02003888, 0.86140400, - 1.89667156, 2.97134967, 3.98781964, 4.91727277, 5.82865898, - -4.11195874, -2.80898424, -1.87547977, -0.80943825, -0.00679084, - 0.79573851, 1.83953397, 2.67586037, 3.76274082, -6.10933968, - -4.93034581, -3.89281296, -2.91530625, -1.89684163, -0.85319130, - -0.02275767, 0.86862017, 1.91578276, 2.96107339, 3.96543056, - 4.91369908, 5.91058154, 6.83848343, 8.07136925, -5.87470395, - -4.84703049, -3.84284597, -2.86168446, -1.89290192, -0.82798145, - -0.00080013, 0.82594974, 1.85754329, 2.88351798, 3.96172628, - -8.85684885, -7.87387461, -6.97811862, -5.93256270, -4.94301439, - -3.95513701, -2.96041544, -1.94031192, -0.87961478, -0.00456201, - 0.89911505, 1.91723376, 2.94011511, 3.93302540, 4.97990967, - 5.93133404, 7.02181199, 7.92407762, 8.80155440, 10.04665814, - 
-4.82396678, -3.85612158, -2.89482244, -1.89558408, -0.90036978, - -0.00677823, 0.90607989, 1.90937981, 2.91175777, 3.91637730, - 4.97565723, 5.84771228, 7.11145863, -16.07879840, -15.03776309, - -13.93905670, -12.95671800, -11.89171202, -10.95820934, -9.95923714, - -8.94357334, -7.99068299, -6.97481009, -5.94826231, -4.96673988, - -3.97490466, -2.97846970, -1.95130435, -0.94215262, -0.01444043, - 0.96770704, 1.95848598, 2.94107862, 3.95666119, 4.97253085, - 5.97191122, 6.93277360, 7.96608727, 8.87958779, 10.00264269, - 10.86560820, 12.07449071, 13.04491775, 13.97507061, 14.91845261, - -10.85696295, -9.83365357, -9.01245635, -7.95915145, -6.95625003, - -5.95362618, -4.93468444, -3.98760978, -2.95044407, -1.97041277, - -0.97701799, -0.00840234, 0.97834289, 1.98361415, 2.97802439, - 3.96415871, 4.95369042, 5.94101770, 6.92756798, 7.94063998, - 8.85951828, 9.97077022, 11.00068503, -33.92030406, -32.81426422, - -32.00000000, -31.13243639, -30.11886909, -29.06017570, -28.12598824, - -27.22045482, -25.81215858, -25.07849962, -23.93018013, -23.02097643, - -21.89529725, -20.99091085, -19.98889048, -18.94327044, -17.96562071, - -16.96126218, -15.95054062, -14.98516200, -13.97101012, -13.02106500, - -11.98438006, -11.03216748, -9.95930286, -8.97043946, -7.98085082, - -6.98360995, -5.98998802, -4.98668173, -4.00032906, -3.00420619, - -1.98701132, -0.99324682, -0.00609324, 0.98297834, 1.99483076, - 3.00305044, 3.97142097, 4.97525759, 5.98612258, 6.97448236, - 7.97575900, 9.01086211, 9.98665542, 11.00541438, 11.98078628, - 12.92352471, 14.06849675, 14.99949430, 15.94904834, 16.97440321, - 18.04040916, 18.88987609, 20.05312391, 21.00000000, 21.79443341, - -31.98578825, -31.00000000, -29.89060567, -28.98555686, -27.97114102, - -26.84935410, -26.02402230, -24.94195278, -23.92336849, -22.95552382, - -21.97932836, -20.96055470, -19.99649553, -19.03436122, -17.96706525, - -17.01139515, -16.01363516, -14.99154248, -14.00298333, -12.99630613, - -11.99955519, -10.99000421, -10.00819092, 
-8.99763648, -7.98431793, - -7.01769025, -5.99604690, -4.99980697, -3.99334671, -3.01748192, - -2.02051217, -1.00848371, -0.01942358, 1.00477757, 1.95477872, - 2.98593031, 3.98779079, 4.96862849, 6.02694771, 6.93983733, - 7.89874717, 8.99615862, 10.02367921, 10.96293452, 11.84351528, - 12.92207187, 13.85122329, 15.05146877, 15.99371264, 17.00000000, - 18.00000000, 19.00000000, 19.82763573, -47.00000000, -46.00000000, - -44.87138498, -44.00000000, -43.00000000, -42.00000000, -41.00000000, - -39.88966612, -38.98913239, -37.80306486, -37.23584325, -35.94200288, - -34.99881301, -34.11361858, -33.06507360, -32.13129135, -30.90891364, - -29.81511907, -28.99250380, -28.04535391, -26.99767800, -26.04418164, - -24.95687851, -24.04865595, -23.03392645, -21.89366707, -20.93517364, - -19.99388660, -18.91620943, -18.03749683, -16.99532379, -15.98683813, - -15.06421479, -13.99359211, -12.99714098, -11.97022520, -10.98500279, - -9.98834422, -8.95729330, -8.01232284, -7.00253661, -5.99681626, - -5.01207817, -3.95914904, -3.01232178, -1.96615919, -0.97687670, - 0.01228030, 0.98412288, 2.01753544, 3.00580570, 3.97783510, - 4.98846894, 6.01321400, 7.00867732, 8.00416375, 9.01771966, - 9.98637729, 10.98255180, 11.99194163, 13.01807333, 14.00999545, - 15.00118556, 16.00089224, 17.00584148, 17.98251763, 18.99942091, - 19.96917690, 20.97839265, 21.98207297, 23.00171271, 23.99930737, - 24.99746061, 26.00936304, 26.98240132, 28.01126868, 29.01395915, - 29.98153507, 31.01376711, 31.99876818, 33.00475317, 33.99753994, - 34.99493913, 35.98933585, 36.95620160, 37.98428461, 38.99317544, - 40.01832073, 40.98048133, 41.95999283, 42.98232091, 43.96523612, - 44.99574268, 45.99524194, 47.05464025, 48.03821548, 48.99354366, - 49.96400411, 50.98017973, 51.95184408, 52.96291806, 54.00194392, - 54.96603783, 55.95623778, 57.03076595, 58.05889901, 58.99081551, - 59.97928121, 61.05071612, 62.03971580, 63.01286038, 64.01290338, - 65.02074503, 65.99454594, 67.00399425, 67.96571257, 68.95305727, - 
69.92030664, 70.95594862, 71.98088567, 73.04764124, 74.00285480, - 75.02696330, 75.89837673, 76.93459997, 78.16266309, 78.83317543, - 80.00000000, 80.87251574, 82.09803524, 83.10671664, 84.00000000, - 84.77023523, 86.00000000, 87.00000000, 87.92946897, 88.69159118, - 90.00000000, 90.90535270 }; - -const double WebRtcIsac_kQKltLevelsShape[578] = { - 0.00032397, 0.00008053, -0.00061202, -0.00012620, 0.00030437, - 0.00054764, -0.00027902, 0.00069360, 0.00029449, -0.80219239, - 0.00091089, -0.74514927, -0.00094283, 0.64030631, -0.60509119, - 0.00035575, 0.61851665, -0.62129957, 0.00375219, 0.60054900, - -0.61554359, 0.00054977, 0.63362016, -1.73118727, -0.65422341, - 0.00524568, 0.66165298, 1.76785515, -1.83182018, -0.65997434, - -0.00011887, 0.67524299, 1.79933938, -1.76344480, -0.72547708, - -0.00133017, 0.73104704, 1.75305377, 2.85164534, -2.80423916, - -1.71959639, -0.75419722, -0.00329945, 0.77196760, 1.72211069, - 2.87339653, 0.00031089, -0.00015311, 0.00018201, -0.00035035, - -0.77357251, 0.00154647, -0.00047625, -0.00045299, 0.00086590, - 0.00044762, -0.83383829, 0.00024787, -0.68526258, -0.00122472, - 0.64643255, -0.60904942, -0.00448987, 0.62309184, -0.59626442, - -0.00574132, 0.62296546, -0.63222115, 0.00013441, 0.63609545, - -0.66911055, -0.00369971, 0.66346095, 2.07281301, -1.77184694, - -0.67640425, -0.00010145, 0.64818392, 1.74948973, -1.69420224, - -0.71943894, -0.00004680, 0.75303493, 1.81075983, 2.80610041, - -2.80005755, -1.79866753, -0.77409777, -0.00084220, 0.80141293, - 1.78291081, 2.73954236, 3.82994169, 0.00015140, -0.00012766, - -0.00034241, -0.00119125, -0.76113497, 0.00069246, 0.76722027, - 0.00132862, -0.69107530, 0.00010656, 0.77061578, -0.78012970, - 0.00095947, 0.77828502, -0.64787758, 0.00217168, 0.63050167, - -0.58601125, 0.00306596, 0.59466308, -0.58603410, 0.00059779, - 0.64257970, 1.76512766, -0.61193600, -0.00259517, 0.59767574, - -0.61026273, 0.00315811, 0.61725479, -1.69169719, -0.65816029, - 0.00067575, 0.65576890, 2.00000000, 
-1.72689193, -0.69780808, - -0.00040990, 0.70668487, 1.74198458, -3.79028154, -3.00000000, - -1.73194459, -0.70179341, -0.00106695, 0.71302629, 1.76849782, - -2.89332364, -1.78585007, -0.78731491, -0.00132610, 0.79692976, - 1.75247009, 2.97828682, -5.26238694, -3.69559829, -2.87286122, - -1.84908818, -0.84434577, -0.01167975, 0.84641753, 1.84087672, - 2.87628156, 3.83556679, -0.00190204, 0.00092642, 0.00354385, - -0.00012982, -0.67742785, 0.00229509, 0.64935672, -0.58444751, - 0.00470733, 0.57299534, -0.58456202, -0.00097715, 0.64593607, - -0.64060330, -0.00638534, 0.59680157, -0.59287537, 0.00490772, - 0.58919707, -0.60306173, -0.00417464, 0.60562100, -1.75218757, - -0.63018569, -0.00225922, 0.63863300, -0.63949939, -0.00126421, - 0.64268914, -1.75851182, -0.68318060, 0.00510418, 0.69049211, - 1.88178506, -1.71136148, -0.72710534, -0.00815559, 0.73412917, - 1.79996711, -2.77111145, -1.73940498, -0.78212945, 0.01074476, - 0.77688916, 1.76873972, 2.87281379, 3.77554698, -3.75832725, - -2.95463235, -1.80451491, -0.80017226, 0.00149902, 0.80729206, - 1.78265046, 2.89391793, -3.78236148, -2.83640598, -1.82532067, - -0.88844327, -0.00620952, 0.88208030, 1.85757631, 2.81712391, - 3.88430176, 5.16179367, -7.00000000, -5.93805408, -4.87172597, - -3.87524433, -2.89399744, -1.92359563, -0.92136341, -0.00172725, - 0.93087018, 1.90528280, 2.89809686, 3.88085708, 4.89147740, - 5.89078692, -0.00239502, 0.00312564, -1.00000000, 0.00178325, - 1.00000000, -0.62198029, 0.00143254, 0.65344051, -0.59851220, - -0.00676987, 0.61510140, -0.58894151, 0.00385055, 0.59794203, - -0.59808568, -0.00038214, 0.57625703, -0.63009713, -0.01107985, - 0.61278758, -0.64206758, -0.00154369, 0.65480598, 1.80604162, - -1.80909286, -0.67810514, 0.00205762, 0.68571097, 1.79453891, - -3.22682422, -1.73808453, -0.71870305, -0.00738594, 0.71486172, - 1.73005326, -1.66891897, -0.73689615, -0.00616203, 0.74262409, - 1.73807899, -2.92417482, -1.73866741, -0.78133871, 0.00764425, - 0.80027264, 1.78668732, 
2.74992588, -4.00000000, -2.75578740, - -1.83697516, -0.83117035, -0.00355191, 0.83527172, 1.82814700, - 2.77377675, 3.80718693, -3.81667698, -2.83575471, -1.83372350, - -0.86579471, 0.00547578, 0.87582281, 1.82858793, 2.87265007, - 3.91405377, -4.87521600, -3.78999094, -2.86437014, -1.86964365, - -0.90618018, 0.00128243, 0.91497811, 1.87374952, 2.83199819, - 3.91519130, 4.76632822, -6.68713448, -6.01252467, -4.94587936, - -3.88795368, -2.91299088, -1.92592211, -0.95504570, -0.00089980, - 0.94565200, 1.93239633, 2.91832808, 3.91363475, 4.88920034, - 5.96471415, 6.83905252, 7.86195009, 8.81571018,-12.96141759, - -11.73039516,-10.96459719, -9.97382433, -9.04414433, -7.89460619, - -6.96628608, -5.93236595, -4.93337924, -3.95479990, -2.96451499, - -1.96635876, -0.97271229, -0.00402238, 0.98343930, 1.98348291, - 2.96641164, 3.95456471, 4.95517089, 5.98975714, 6.90322073, - 7.90468849, 8.85639467, 9.97255498, 10.79006309, 11.81988596, - 0.04950500, -1.00000000, -0.01226628, 1.00000000, -0.59479469, - -0.10438305, 0.59822144, -2.00000000, -0.67109149, -0.09256692, - 0.65171621, 2.00000000, -3.00000000, -1.68391999, -0.76681039, - -0.03354151, 0.71509146, 1.77615472, -2.00000000, -0.68661511, - -0.02497881, 0.66478398, 2.00000000, -2.00000000, -0.67032784, - -0.00920582, 0.64892756, 2.00000000, -2.00000000, -0.68561894, - 0.03641869, 0.73021611, 1.68293863, -4.00000000, -2.72024184, - -1.80096059, -0.81696185, 0.03604685, 0.79232033, 1.70070730, - 3.00000000, -4.00000000, -2.71795670, -1.80482986, -0.86001162, - 0.03764903, 0.87723968, 1.79970771, 2.72685932, 3.67589143, - -5.00000000, -4.00000000, -2.85492548, -1.78996365, -0.83250358, - -0.01376828, 0.84195506, 1.78161105, 2.76754458, 4.00000000, - -6.00000000, -5.00000000, -3.82268811, -2.77563624, -1.82608163, - -0.86486114, -0.02671886, 0.86693165, 1.88422879, 2.86248347, - 3.95632216, -7.00000000, -6.00000000, -5.00000000, -3.77533988, - -2.86391432, -1.87052039, -0.90513658, 0.06271236, 0.91083620, - 1.85734756, 
2.86031688, 3.82019418, 4.94420394, 6.00000000, - -11.00000000,-10.00000000, -9.00000000, -8.00000000, -6.91952415, - -6.00000000, -4.92044374, -3.87845165, -2.87392362, -1.88413020, - -0.91915740, 0.00318517, 0.91602800, 1.89664838, 2.88925058, - 3.84123856, 4.78988651, 5.94526812, 6.81953917, 8.00000000, - -9.00000000, -8.00000000, -7.03319143, -5.94530963, -4.86669720, - -3.92438007, -2.88620396, -1.92848070, -0.94365985, 0.01671855, - 0.97349410, 1.93419878, 2.89740109, 3.89662823, 4.83235583, - 5.88106535, 6.80328232, 8.00000000,-13.00000000,-12.00000000, - -11.00000000,-10.00000000, -9.00000000, -7.86033489, -6.83344055, - -5.89844215, -4.90811454, -3.94841298, -2.95820490, -1.98627966, - -0.99161468, -0.02286136, 0.96055651, 1.95052433, 2.93969396, - 3.94304346, 4.88522624, 5.87434241, 6.78309433, 7.87244101, - 9.00000000, 10.00000000,-12.09117356,-11.00000000,-10.00000000, - -8.84766108, -7.86934236, -6.98544896, -5.94233429, -4.95583292, - -3.95575986, -2.97085529, -1.98955811, -0.99359873, -0.00485413, - 0.98298870, 1.98093258, 2.96430203, 3.95540216, 4.96915010, - 5.96775124, 6.99236918, 7.96503302, 8.99864542, 9.85857723, - 10.96541926, 11.91647197, 12.71060069,-26.00000000,-25.00000000, - -24.00585596,-23.11642573,-22.14271284,-20.89800711,-19.87815799, - -19.05036354,-17.88555651,-16.86471209,-15.97711073,-14.94012359, - -14.02661226,-12.98243228,-11.97489256,-10.97402777, -9.96425624, - -9.01085220, -7.97372506, -6.98795002, -5.97271328, -5.00191694, - -3.98055849, -2.98458048, -1.99470442, -0.99656768, -0.00825666, - 1.00272004, 1.99922218, 2.99357669, 4.01407905, 5.01003897, - 5.98115528, 7.00018958, 8.00338125, 8.98981046, 9.98990318, - 10.96341479, 11.96866930, 12.99175139, 13.94580443, 14.95745083, - 15.98992869, 16.97484646, 17.99630043, 18.93396897, 19.88347741, - 20.96532482, 21.92191032, 23.22314702 }; - - -/* cdf tables for quantizer indices */ -const uint16_t WebRtcIsac_kQKltCdfGain[404] = { - 0, 13, 301, 3730, 61784, 65167, 65489, 65535, 
0, 17, - 142, 314, 929, 2466, 7678, 56450, 63463, 64740, 65204, 65426, - 65527, 65535, 0, 8, 100, 724, 6301, 60105, 65125, 65510, - 65531, 65535, 0, 13, 117, 368, 1068, 3010, 11928, 53603, - 61177, 63404, 64505, 65108, 65422, 65502, 65531, 65535, 0, 4, - 17, 96, 410, 1859, 12125, 54361, 64103, 65305, 65497, 65535, - 0, 4, 88, 230, 469, 950, 1746, 3228, 6092, 16592, - 44756, 56848, 61256, 63308, 64325, 64920, 65309, 65460, 65502, - 65522, 65535, 0, 88, 352, 1675, 6339, 20749, 46686, 59284, 63525, - 64949, 65359, 65502, 65527, 65535, 0, 13, 38, 63, 117, - 234, 381, 641, 929, 1407, 2043, 2809, 4032, 5753, 8792, - 14407, 24308, 38941, 48947, 55403, 59293, 61411, 62688, 63630, - 64329, 64840, 65188, 65376, 65472, 65506, 65527, 65531, 65535, - 0, 8, 29, 75, 222, 615, 1327, 2801, 5623, 9931, 16094, 24966, - 34419, 43458, 50676, 56186, 60055, 62500, 63936, 64765, 65225, - 65435, 65514, 65535, 0, 8, 13, 15, 17, 21, 33, 59, - 71, 92, 151, 243, 360, 456, 674, 934, 1223, 1583, - 1989, 2504, 3031, 3617, 4354, 5154, 6163, 7411, 8780, 10747, - 12874, 15591, 18974, 23027, 27436, 32020, 36948, 41830, 46205, - 49797, 53042, 56094, 58418, 60360, 61763, 62818, 63559, 64103, - 64509, 64798, 65045, 65162, 65288, 65363, 65447, 65506, 65522, - 65531, 65533, 65535, 0, 4, 6, 25, 38, 71, 138, 264, 519, 808, - 1227, 1825, 2516, 3408, 4279, 5560, 7092, 9197, 11420, 14108, - 16947, 20300, 23926, 27459, 31164, 34827, 38575, 42178, 45540, - 48747, 51444, 54090, 56426, 58460, 60080, 61595, 62734, 63668, - 64275, 64673, 64936, 65112, 65217, 65334, 65426, 65464, 65477, - 65489, 65518, 65527, 65529, 65531, 65533, 65535, 0, 2, 4, 8, 10, - 12, 14, 16, 21, 33, 50, 71, 84, 92, 105, 138, 180, 255, 318, - 377, 435, 473, 511, 590, 682, 758, 913, 1097, 1256, 1449, 1671, - 1884, 2169, 2445, 2772, 3157, 3563, 3944, 4375, 4848, 5334, 5820, - 6448, 7101, 7716, 8378, 9102, 9956, 10752, 11648, 12707, 13670, - 14758, 15910, 17187, 18472, 19627, 20649, 21951, 23169, 24283, - 25552, 26862, 28227, 29391, 30764, 31882, 
33213, 34432, 35600, - 36910, 38116, 39464, 40729, 41872, 43144, 44371, 45514, 46762, - 47813, 48968, 50069, 51032, 51974, 52908, 53737, 54603, 55445, - 56282, 56990, 57572, 58191, 58840, 59410, 59887, 60264, 60607, - 60946, 61269, 61516, 61771, 61960, 62198, 62408, 62558, 62776, - 62985, 63207, 63408, 63546, 63739, 63906, 64070, 64237, 64371, - 64551, 64677, 64836, 64999, 65095, 65213, 65284, 65338, 65380, - 65426, 65447, 65472, 65485, 65487, 65489, 65502, 65510, 65512, - 65514, 65516, 65518, 65522, 65531, 65533, 65535 }; - - -const uint16_t WebRtcIsac_kQKltCdfShape[686] = { - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, - 65535, 0, 8, 65514, 65535, 0, 29, 65481, 65535, 0, - 121, 65439, 65535, 0, 239, 65284, 65535, 0, 8, 779, - 64999, 65527, 65535, 0, 8, 888, 64693, 65522, 65535, 0, - 29, 2604, 62843, 65497, 65531, 65535, 0, 25, 176, 4576, - 61164, 65275, 65527, 65535, 0, 65535, 0, 65535, 0, 65535, - 0, 65535, 0, 4, 65535, 0, 65535, 0, 65535, 0, - 65535, 0, 65535, 0, 4, 65535, 0, 33, 65502, 65535, - 0, 54, 65481, 65535, 0, 251, 65309, 65535, 0, 611, - 65074, 65535, 0, 1273, 64292, 65527, 65535, 0, 4, 1809, - 63940, 65518, 65535, 0, 88, 4392, 60603, 65426, 65531, 65535, - 0, 25, 419, 7046, 57756, 64961, 65514, 65531, 65535, 0, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4, 65531, - 65535, 0, 65535, 0, 8, 65531, 65535, 0, 4, 65527, - 65535, 0, 17, 65510, 65535, 0, 42, 65481, 65535, 0, - 197, 65342, 65531, 65535, 0, 385, 65154, 65535, 0, 1005, - 64522, 65535, 0, 8, 1985, 63469, 65533, 65535, 0, 38, - 3119, 61884, 65514, 65535, 0, 4, 6, 67, 4961, 60804, - 65472, 65535, 0, 17, 565, 9182, 56538, 65087, 65514, 65535, - 0, 8, 63, 327, 2118, 14490, 52774, 63839, 65376, 65522, - 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, - 17, 65522, 65535, 0, 59, 65489, 65535, 0, 50, 65522, - 65535, 0, 54, 65489, 65535, 0, 310, 65179, 65535, 0, - 615, 64836, 65535, 0, 4, 1503, 63965, 65535, 0, 2780, - 63383, 65535, 0, 21, 3919, 
61051, 65527, 65535, 0, 84, - 6674, 59929, 65435, 65535, 0, 4, 255, 7976, 55784, 65150, - 65518, 65531, 65535, 0, 4, 8, 582, 10726, 53465, 64949, - 65518, 65535, 0, 29, 339, 3006, 17555, 49517, 62956, 65200, - 65497, 65531, 65535, 0, 2, 33, 138, 565, 2324, 7670, - 22089, 45966, 58949, 63479, 64966, 65380, 65518, 65535, 0, 65535, - 0, 65535, 0, 2, 65533, 65535, 0, 46, 65514, 65535, - 0, 414, 65091, 65535, 0, 540, 64911, 65535, 0, 419, - 65162, 65535, 0, 976, 64790, 65535, 0, 2977, 62495, 65531, - 65535, 0, 4, 3852, 61034, 65527, 65535, 0, 4, 29, - 6021, 60243, 65468, 65535, 0, 84, 6711, 58066, 65418, 65535, - 0, 13, 281, 9550, 54917, 65125, 65506, 65535, 0, 2, - 63, 984, 12108, 52644, 64342, 65435, 65527, 65535, 0, 29, - 251, 2014, 14871, 47553, 62881, 65229, 65518, 65535, 0, 13, - 142, 749, 4220, 18497, 45200, 60913, 64823, 65426, 65527, 65535, - 0, 13, 71, 264, 1176, 3789, 10500, 24480, 43488, 56324, - 62315, 64493, 65242, 65464, 65514, 65522, 65531, 65535, 0, 4, - 13, 38, 109, 205, 448, 850, 1708, 3429, 6276, 11371, - 19221, 29734, 40955, 49391, 55411, 59460, 62102, 63793, 64656, - 65150, 65401, 65485, 65522, 65531, 65535, 0, 65535, 0, 2, 65533, - 65535, 0, 1160, 65476, 65535, 0, 2, 6640, 64763, 65533, - 65535, 0, 2, 38, 9923, 61009, 65527, 65535, 0, 2, - 4949, 63092, 65533, 65535, 0, 2, 3090, 63398, 65533, 65535, - 0, 2, 2520, 58744, 65510, 65535, 0, 2, 13, 544, - 8784, 51403, 65148, 65533, 65535, 0, 2, 25, 1017, 10412, - 43550, 63651, 65489, 65527, 65535, 0, 2, 4, 29, 783, - 13377, 52462, 64524, 65495, 65533, 65535, 0, 2, 4, 6, - 100, 1817, 18451, 52590, 63559, 65376, 65531, 65535, 0, 2, - 4, 6, 46, 385, 2562, 11225, 37416, 60488, 65026, 65487, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 12, - 42, 222, 971, 5221, 19811, 45048, 60312, 64486, 65294, 65474, - 65525, 65529, 65533, 65535, 0, 2, 4, 8, 71, 167, - 666, 2533, 7875, 19622, 38082, 54359, 62108, 64633, 65290, 65495, - 65529, 65533, 65535, 0, 2, 4, 6, 8, 10, 13, - 109, 586, 1930, 4949, 11600, 22641, 36125, 
48312, 56899, 61495, - 63927, 64932, 65389, 65489, 65518, 65531, 65533, 65535, 0, 4, - 6, 8, 67, 209, 712, 1838, 4195, 8432, 14432, 22834, - 31723, 40523, 48139, 53929, 57865, 60657, 62403, 63584, 64363, - 64907, 65167, 65372, 65472, 65514, 65535, 0, 2, 4, 13, 25, - 42, 46, 50, 75, 113, 147, 281, 448, 657, 909, - 1185, 1591, 1976, 2600, 3676, 5317, 7398, 9914, 12941, 16169, - 19477, 22885, 26464, 29851, 33360, 37228, 41139, 44802, 48654, - 52058, 55181, 57676, 59581, 61022, 62190, 63107, 63676, 64199, - 64547, 64924, 65158, 65313, 65430, 65481, 65518, 65535 }; - - -/* pointers to cdf tables for quantizer indices */ -const uint16_t *WebRtcIsac_kQKltCdfPtrGain[12] = { - WebRtcIsac_kQKltCdfGain +0 +0, WebRtcIsac_kQKltCdfGain +0 +8, - WebRtcIsac_kQKltCdfGain +0 +22, WebRtcIsac_kQKltCdfGain +0 +32, - WebRtcIsac_kQKltCdfGain +0 +48, WebRtcIsac_kQKltCdfGain +0 +60, - WebRtcIsac_kQKltCdfGain +0 +81, WebRtcIsac_kQKltCdfGain +0 +95, - WebRtcIsac_kQKltCdfGain +0 +128, WebRtcIsac_kQKltCdfGain +0 +152, - WebRtcIsac_kQKltCdfGain +0 +210, WebRtcIsac_kQKltCdfGain +0 +264 }; - -const uint16_t *WebRtcIsac_kQKltCdfPtrShape[108] = { - WebRtcIsac_kQKltCdfShape +0 +0, WebRtcIsac_kQKltCdfShape +0 +2, - WebRtcIsac_kQKltCdfShape +0 +4, WebRtcIsac_kQKltCdfShape +0 +6, - WebRtcIsac_kQKltCdfShape +0 +8, WebRtcIsac_kQKltCdfShape +0 +10, - WebRtcIsac_kQKltCdfShape +0 +12, WebRtcIsac_kQKltCdfShape +0 +14, - WebRtcIsac_kQKltCdfShape +0 +16, WebRtcIsac_kQKltCdfShape +0 +18, - WebRtcIsac_kQKltCdfShape +0 +21, WebRtcIsac_kQKltCdfShape +0 +25, - WebRtcIsac_kQKltCdfShape +0 +29, WebRtcIsac_kQKltCdfShape +0 +33, - WebRtcIsac_kQKltCdfShape +0 +37, WebRtcIsac_kQKltCdfShape +0 +43, - WebRtcIsac_kQKltCdfShape +0 +49, WebRtcIsac_kQKltCdfShape +0 +56, - WebRtcIsac_kQKltCdfShape +0 +64, WebRtcIsac_kQKltCdfShape +0 +66, - WebRtcIsac_kQKltCdfShape +0 +68, WebRtcIsac_kQKltCdfShape +0 +70, - WebRtcIsac_kQKltCdfShape +0 +72, WebRtcIsac_kQKltCdfShape +0 +75, - WebRtcIsac_kQKltCdfShape +0 +77, 
WebRtcIsac_kQKltCdfShape +0 +79, - WebRtcIsac_kQKltCdfShape +0 +81, WebRtcIsac_kQKltCdfShape +0 +83, - WebRtcIsac_kQKltCdfShape +0 +86, WebRtcIsac_kQKltCdfShape +0 +90, - WebRtcIsac_kQKltCdfShape +0 +94, WebRtcIsac_kQKltCdfShape +0 +98, - WebRtcIsac_kQKltCdfShape +0 +102, WebRtcIsac_kQKltCdfShape +0 +107, - WebRtcIsac_kQKltCdfShape +0 +113, WebRtcIsac_kQKltCdfShape +0 +120, - WebRtcIsac_kQKltCdfShape +0 +129, WebRtcIsac_kQKltCdfShape +0 +131, - WebRtcIsac_kQKltCdfShape +0 +133, WebRtcIsac_kQKltCdfShape +0 +135, - WebRtcIsac_kQKltCdfShape +0 +137, WebRtcIsac_kQKltCdfShape +0 +141, - WebRtcIsac_kQKltCdfShape +0 +143, WebRtcIsac_kQKltCdfShape +0 +147, - WebRtcIsac_kQKltCdfShape +0 +151, WebRtcIsac_kQKltCdfShape +0 +155, - WebRtcIsac_kQKltCdfShape +0 +159, WebRtcIsac_kQKltCdfShape +0 +164, - WebRtcIsac_kQKltCdfShape +0 +168, WebRtcIsac_kQKltCdfShape +0 +172, - WebRtcIsac_kQKltCdfShape +0 +178, WebRtcIsac_kQKltCdfShape +0 +184, - WebRtcIsac_kQKltCdfShape +0 +192, WebRtcIsac_kQKltCdfShape +0 +200, - WebRtcIsac_kQKltCdfShape +0 +211, WebRtcIsac_kQKltCdfShape +0 +213, - WebRtcIsac_kQKltCdfShape +0 +215, WebRtcIsac_kQKltCdfShape +0 +217, - WebRtcIsac_kQKltCdfShape +0 +219, WebRtcIsac_kQKltCdfShape +0 +223, - WebRtcIsac_kQKltCdfShape +0 +227, WebRtcIsac_kQKltCdfShape +0 +231, - WebRtcIsac_kQKltCdfShape +0 +235, WebRtcIsac_kQKltCdfShape +0 +239, - WebRtcIsac_kQKltCdfShape +0 +243, WebRtcIsac_kQKltCdfShape +0 +248, - WebRtcIsac_kQKltCdfShape +0 +252, WebRtcIsac_kQKltCdfShape +0 +258, - WebRtcIsac_kQKltCdfShape +0 +264, WebRtcIsac_kQKltCdfShape +0 +273, - WebRtcIsac_kQKltCdfShape +0 +282, WebRtcIsac_kQKltCdfShape +0 +293, - WebRtcIsac_kQKltCdfShape +0 +308, WebRtcIsac_kQKltCdfShape +0 +310, - WebRtcIsac_kQKltCdfShape +0 +312, WebRtcIsac_kQKltCdfShape +0 +316, - WebRtcIsac_kQKltCdfShape +0 +320, WebRtcIsac_kQKltCdfShape +0 +324, - WebRtcIsac_kQKltCdfShape +0 +328, WebRtcIsac_kQKltCdfShape +0 +332, - WebRtcIsac_kQKltCdfShape +0 +336, WebRtcIsac_kQKltCdfShape +0 +341, - 
WebRtcIsac_kQKltCdfShape +0 +347, WebRtcIsac_kQKltCdfShape +0 +354, - WebRtcIsac_kQKltCdfShape +0 +360, WebRtcIsac_kQKltCdfShape +0 +368, - WebRtcIsac_kQKltCdfShape +0 +378, WebRtcIsac_kQKltCdfShape +0 +388, - WebRtcIsac_kQKltCdfShape +0 +400, WebRtcIsac_kQKltCdfShape +0 +418, - WebRtcIsac_kQKltCdfShape +0 +445, WebRtcIsac_kQKltCdfShape +0 +447, - WebRtcIsac_kQKltCdfShape +0 +451, WebRtcIsac_kQKltCdfShape +0 +455, - WebRtcIsac_kQKltCdfShape +0 +461, WebRtcIsac_kQKltCdfShape +0 +468, - WebRtcIsac_kQKltCdfShape +0 +474, WebRtcIsac_kQKltCdfShape +0 +480, - WebRtcIsac_kQKltCdfShape +0 +486, WebRtcIsac_kQKltCdfShape +0 +495, - WebRtcIsac_kQKltCdfShape +0 +505, WebRtcIsac_kQKltCdfShape +0 +516, - WebRtcIsac_kQKltCdfShape +0 +528, WebRtcIsac_kQKltCdfShape +0 +543, - WebRtcIsac_kQKltCdfShape +0 +564, WebRtcIsac_kQKltCdfShape +0 +583, - WebRtcIsac_kQKltCdfShape +0 +608, WebRtcIsac_kQKltCdfShape +0 +635 }; - - -/* left KLT transforms */ -const double WebRtcIsac_kKltT1Gain[4] = { - -0.79742827, 0.60341375, 0.60341375, 0.79742827 }; - -const double WebRtcIsac_kKltT1Shape[324] = { - 0.00159597, 0.00049320, 0.00513821, 0.00021066, 0.01338581, - -0.00422367, -0.00272072, 0.00935107, 0.02047622, 0.02691189, - 0.00478236, 0.03969702, 0.00886698, 0.04877604, -0.10898362, - -0.05930891, -0.03415047, 0.98889721, 0.00293558, -0.00035282, - 0.01156321, -0.00195341, -0.00937631, 0.01052213, -0.02551163, - 0.01644059, 0.03189927, 0.07754773, -0.08742313, -0.03026338, - 0.05136248, -0.14395974, 0.17725040, 0.22664856, 0.93380230, - 0.07076411, 0.00557890, -0.00222834, 0.01377569, 0.01466808, - 0.02847361, -0.00603178, 0.02382480, -0.01210452, 0.03797267, - -0.02371480, 0.11260335, -0.07366682, 0.00453436, -0.04136941, - -0.07912843, -0.95031418, 0.25295337, -0.05302216, -0.00617554, - -0.00044040, -0.00653778, 0.01097838, 0.01529174, 0.01374431, - -0.00748512, -0.00020034, 0.02432713, 0.11101570, -0.08556891, - 0.09282249, -0.01029446, 0.67556443, -0.67454300, 0.06910063, - 0.20866865, 
-0.10318050, 0.00932175, 0.00524058, 0.00803610, - -0.00594676, -0.01082578, 0.01069906, 0.00546768, 0.01565291, - 0.06816200, 0.10201227, 0.16812734, 0.22984074, 0.58213170, - -0.54138651, -0.51379962, 0.06847390, -0.01920037, -0.04592324, - -0.00467394, 0.00328858, 0.00377424, -0.00987448, 0.08222096, - -0.00377301, 0.04551941, -0.02592517, 0.16317082, 0.13077530, - 0.22702921, -0.31215289, -0.69645962, -0.38047101, -0.39339411, - 0.11124777, 0.02508035, -0.00708074, 0.00400344, 0.00040331, - 0.01142402, 0.01725406, 0.01635170, 0.14285366, 0.03949233, - -0.05905676, 0.05877154, -0.17497577, -0.32479440, 0.80754464, - -0.38085603, -0.17055430, -0.03168622, -0.07531451, 0.02942002, - -0.02148095, -0.00754114, -0.00322372, 0.00567812, -0.01701521, - -0.12358320, 0.11473564, 0.09070136, 0.06533068, -0.22560802, - 0.19209022, 0.81605094, 0.36592275, -0.09919829, 0.16667122, - 0.16300725, 0.04803807, 0.06739263, -0.00156752, -0.01685302, - -0.00905240, -0.02297836, -0.00469939, 0.06310613, -0.16391930, - 0.10919511, 0.12529293, 0.85581322, -0.32145522, 0.24539076, - 0.07181839, 0.07289591, 0.14066759, 0.10406711, 0.05815518, - 0.01072680, -0.00759339, 0.00053486, -0.00044865, 0.03407361, - 0.01645348, 0.08758579, 0.27722240, 0.53665485, -0.74853376, - -0.01118192, -0.19805430, 0.06130619, -0.09675299, 0.08978480, - 0.03405255, -0.00706867, 0.05102045, 0.03250746, 0.01849966, - -0.01216314, -0.01184187, -0.01579288, 0.00114807, 0.11376166, - 0.88342114, -0.36425379, 0.13863190, 0.12524180, -0.13553892, - 0.04715856, -0.12341103, 0.04531568, 0.01899360, -0.00206897, - 0.00567768, -0.01444163, 0.00411946, -0.00855896, 0.00381663, - -0.01664861, -0.05534280, 0.21328278, 0.20161162, 0.72360394, - 0.59130708, -0.08043791, 0.08757349, -0.13893918, -0.05147377, - 0.02680690, -0.01144070, 0.00625162, -0.00634215, -0.01248947, - -0.00329455, -0.00609625, -0.00136305, -0.05097048, -0.01029851, - 0.25065384, -0.16856837, -0.07123372, 0.15992623, -0.39487617, - -0.79972301, 
0.18118185, -0.04826639, -0.01805578, -0.02927253, - -0.16400618, 0.07472763, 0.10376449, 0.01705406, 0.01065801, - -0.01500498, 0.02039914, 0.37776349, -0.84484186, 0.10434286, - 0.15616990, 0.13474456, -0.00906238, -0.25238368, -0.03820885, - -0.10650905, -0.03880833, -0.03660028, -0.09640894, 0.00583314, - 0.01922097, 0.01489911, -0.02431117, -0.09372217, 0.39404721, - -0.84786223, -0.31277121, 0.03193850, 0.01974060, 0.01887901, - 0.00337911, -0.11359599, -0.02792521, -0.03220184, -0.01533311, - 0.00015962, -0.04225043, -0.00933965, 0.00675311, 0.00206060, - 0.15926771, 0.40199829, -0.80792558, -0.35591604, -0.17169764, - 0.02830436, 0.02459982, -0.03438589, 0.00718705, -0.01798329, - -0.01594508, -0.00702430, -0.00952419, -0.00962701, -0.01307212, - -0.01749740, 0.01299602, 0.00587270, -0.36103108, -0.82039266, - -0.43092844, -0.08500097, -0.04361674, -0.00333482, 0.01250434, - -0.02538295, -0.00921797, 0.01645071, -0.01400872, 0.00317607, - 0.00003277, -0.01617646, -0.00616863, -0.00882661, 0.00466157, - 0.00353237, 0.91803104, -0.39503305, -0.02048964, 0.00060125, - 0.01980634, 0.00300109, 0.00313880, 0.00657337, 0.00715163, - 0.00000261, 0.00854276, -0.00154825, -0.00516128, 0.00909527, - 0.00095609, 0.00701196, -0.00221867, -0.00156741 }; - -/* right KLT transforms */ -const double WebRtcIsac_kKltT2Gain[36] = { - 0.14572837, -0.45446306, 0.61990621, -0.52197033, 0.32145074, - -0.11026900, -0.20698282, 0.48962182, -0.27127933, -0.33627476, - 0.65094037, -0.32715751, 0.40262573, -0.47844405, -0.33876075, - 0.44130653, 0.37383966, -0.39964662, -0.51730480, 0.06611973, - 0.49030187, 0.47512886, -0.02141226, -0.51129451, -0.58578569, - -0.39132064, -0.13187771, 0.15649421, 0.40735596, 0.54396897, - 0.40381276, 0.40904942, 0.41179766, 0.41167576, 0.40840251, - 0.40468132 }; - -const double WebRtcIsac_kKltT2Shape[36] = { - 0.13427386, -0.35132558, 0.52506528, -0.59419077, 0.45075085, - -0.16312057, 0.29857439, -0.58660147, 0.34265431, 0.20879510, - -0.56063262, 
0.30238345, 0.43308283, -0.41186999, -0.35288681, - 0.42768996, 0.36094634, -0.45284910, -0.47116680, 0.02893449, - 0.54326135, 0.45249040, -0.06264420, -0.52283830, 0.57137758, - 0.44298139, 0.12617554, -0.20819946, -0.42324603, -0.48876443, - 0.39597050, 0.40713935, 0.41389880, 0.41512486, 0.41130400, - 0.40575001 }; - -/* means of log gains and LAR coefficients*/ -const double WebRtcIsac_kLpcMeansGain[12] = { - -6.86881911, -5.35075273, -6.86792680, -5.36200897, -6.86401538, - -5.36921533, -6.86802969, -5.36893966, -6.86538097, -5.36315063, - -6.85535304, -5.35155315 }; - -const double WebRtcIsac_kLpcMeansShape[108] = { - -0.91232981, 0.26258634, -0.33716701, 0.08477430, -0.03378426, - 0.14423909, 0.07036185, 0.06155019, 0.01490385, 0.04138740, - 0.01427317, 0.01288970, 0.83872106, 0.25750199, 0.07988929, - -0.01957923, 0.00831390, 0.01770300, -0.90957164, 0.25732216, - -0.33385344, 0.08735740, -0.03715332, 0.14584917, 0.06998990, - 0.06131968, 0.01504379, 0.04067339, 0.01428039, 0.01406460, - 0.83846243, 0.26169862, 0.08109025, -0.01767055, 0.00970539, - 0.01954310, -0.90490803, 0.24656405, -0.33578607, 0.08843286, - -0.03749139, 0.14443959, 0.07214669, 0.06170993, 0.01449947, - 0.04134309, 0.01314762, 0.01413471, 0.83895203, 0.26748062, - 0.08197507, -0.01781298, 0.00885967, 0.01922394, -0.90922472, - 0.24495889, -0.33921540, 0.08877169, -0.03581332, 0.14199172, - 0.07444032, 0.06185940, 0.01502054, 0.04185113, 0.01276579, - 0.01355457, 0.83645358, 0.26631720, 0.08119697, -0.01835449, - 0.00788512, 0.01846446, -0.90482253, 0.24658310, -0.34019734, - 0.08281090, -0.03486038, 0.14359248, 0.07401336, 0.06001471, - 0.01528421, 0.04254560, 0.01321472, 0.01240799, 0.83857127, - 0.26281654, 0.08174380, -0.02099842, 0.00755176, 0.01699448, - -0.90132307, 0.25174308, -0.33838268, 0.07883863, -0.02877906, - 0.14105407, 0.07220290, 0.06000352, 0.01684879, 0.04226844, - 0.01331331, 0.01269244, 0.83832138, 0.25467485, 0.08118028, - -0.02120528, 0.00747832, 0.01567212 }; 
diff --git a/modules/audio_coding/codecs/isac/main/source/lpc_tables.h b/modules/audio_coding/codecs/isac/main/source/lpc_tables.h deleted file mode 100644 index 56ff22c06c..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/lpc_tables.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * lpc_tables.h - * - * header file for coding tables for the LPC coefficients - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -#define KLT_STEPSIZE 1.00000000 -#define KLT_NUM_AVG_GAIN 0 -#define KLT_NUM_AVG_SHAPE 0 -#define KLT_NUM_MODELS 3 -#define LPC_GAIN_SCALE 4.000f -#define LPC_LOBAND_SCALE 2.100f -#define LPC_LOBAND_ORDER ORDERLO -#define LPC_HIBAND_SCALE 0.450f -#define LPC_HIBAND_ORDER ORDERHI -#define LPC_GAIN_ORDER 2 - -#define LPC_SHAPE_ORDER (LPC_LOBAND_ORDER + LPC_HIBAND_ORDER) - -#define KLT_ORDER_GAIN (LPC_GAIN_ORDER * SUBFRAMES) -#define KLT_ORDER_SHAPE (LPC_SHAPE_ORDER * SUBFRAMES) - -/* cdf array for model indicator */ -extern const uint16_t WebRtcIsac_kQKltModelCdf[KLT_NUM_MODELS + 1]; - -/* pointer to cdf array for model indicator */ -extern const uint16_t* WebRtcIsac_kQKltModelCdfPtr[1]; - -/* initial cdf index for decoder of model indicator */ -extern const uint16_t WebRtcIsac_kQKltModelInitIndex[1]; - -/* offset to go from rounded value to quantization index */ -extern const short WebRtcIsac_kQKltQuantMinGain[12]; - 
-extern const short WebRtcIsac_kQKltQuantMinShape[108]; - -/* maximum quantization index */ -extern const uint16_t WebRtcIsac_kQKltMaxIndGain[12]; - -extern const uint16_t WebRtcIsac_kQKltMaxIndShape[108]; - -/* index offset */ -extern const uint16_t WebRtcIsac_kQKltOffsetGain[12]; - -extern const uint16_t WebRtcIsac_kQKltOffsetShape[108]; - -/* initial cdf index for KLT coefficients */ -extern const uint16_t WebRtcIsac_kQKltInitIndexGain[12]; - -extern const uint16_t WebRtcIsac_kQKltInitIndexShape[108]; - -/* quantizer representation levels */ -extern const double WebRtcIsac_kQKltLevelsGain[392]; - -extern const double WebRtcIsac_kQKltLevelsShape[578]; - -/* cdf tables for quantizer indices */ -extern const uint16_t WebRtcIsac_kQKltCdfGain[404]; - -extern const uint16_t WebRtcIsac_kQKltCdfShape[686]; - -/* pointers to cdf tables for quantizer indices */ -extern const uint16_t* WebRtcIsac_kQKltCdfPtrGain[12]; - -extern const uint16_t* WebRtcIsac_kQKltCdfPtrShape[108]; - -/* left KLT transforms */ -extern const double WebRtcIsac_kKltT1Gain[4]; - -extern const double WebRtcIsac_kKltT1Shape[324]; - -/* right KLT transforms */ -extern const double WebRtcIsac_kKltT2Gain[36]; - -extern const double WebRtcIsac_kKltT2Shape[36]; - -/* means of log gains and LAR coefficients */ -extern const double WebRtcIsac_kLpcMeansGain[12]; - -extern const double WebRtcIsac_kLpcMeansShape[108]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_TABLES_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c b/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c deleted file mode 100644 index 080432c3a5..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* header file for coding tables for the pitch filter side-info in the entropy coder */ -/********************* Pitch Filter Gain Coefficient Tables ************************/ -/* cdf for quantized pitch filter gains */ -const uint16_t WebRtcIsac_kQPitchGainCdf[255] = { - 0, 2, 4, 6, 64, 901, 903, 905, 16954, 16956, - 16961, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17411, - 17514, 17516, 17583, 18790, 18796, 18802, 20760, 20777, 20782, 21722, - 21724, 21728, 21738, 21740, 21742, 21744, 21746, 21748, 22224, 22227, - 22230, 23214, 23229, 23239, 25086, 25108, 25120, 26088, 26094, 26098, - 26175, 26177, 26179, 26181, 26183, 26185, 26484, 26507, 26522, 27705, - 27731, 27750, 29767, 29799, 29817, 30866, 30883, 30885, 31025, 31029, - 31031, 31033, 31035, 31037, 31114, 31126, 31134, 32687, 32722, 32767, - 35718, 35742, 35757, 36943, 36952, 36954, 37115, 37128, 37130, 37132, - 37134, 37136, 37143, 37145, 37152, 38843, 38863, 38897, 47458, 47467, - 47474, 49040, 49061, 49063, 49145, 49157, 49159, 49161, 49163, 49165, - 49167, 49169, 49171, 49757, 49770, 49782, 61333, 61344, 61346, 62860, - 62883, 62885, 62887, 62889, 62891, 62893, 62895, 62897, 62899, 62901, - 62903, 62905, 62907, 62909, 65496, 65498, 65500, 65521, 65523, 65525, - 65527, 65529, 65531, 65533, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 
65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kIndexLowerLimitGain[3] = { - -7, -2, -1}; - -const int16_t WebRtcIsac_kIndexUpperLimitGain[3] = { - 0, 3, 1}; - -const uint16_t WebRtcIsac_kIndexMultsGain[2] = { - 18, 3}; - -/* size of cdf table */ -const uint16_t WebRtcIsac_kQCdfTableSizeGain[1] = { - 256}; - -///////////////////////////FIXED POINT -/* mean values of pitch filter gains in FIXED point */ -const int16_t WebRtcIsac_kQMeanGain1Q12[144] = { - 843, 1092, 1336, 1222, 1405, 1656, 1500, 1815, 1843, 1838, 1839, 1843, 1843, 1843, 1843, 1843, - 1843, 1843, 814, 846, 1092, 1013, 1174, 1383, 1391, 1511, 1584, 1734, 1753, 1843, 1843, 1843, - 1843, 1843, 1843, 1843, 524, 689, 777, 845, 947, 1069, 1090, 1263, 1380, 1447, 1559, 1676, - 1645, 1749, 1843, 1843, 1843, 1843, 81, 477, 563, 611, 706, 806, 849, 1012, 1192, 1128, - 1330, 1489, 1425, 1576, 1826, 1741, 1843, 1843, 0, 290, 305, 356, 488, 575, 602, 741, - 890, 835, 1079, 1196, 1182, 1376, 1519, 1506, 1680, 1843, 0, 47, 97, 69, 289, 381, - 385, 474, 617, 664, 803, 1079, 935, 1160, 1269, 1265, 1506, 1741, 0, 0, 0, 0, - 112, 120, 190, 283, 442, 343, 526, 809, 684, 935, 1134, 1020, 1265, 1506, 0, 0, - 0, 0, 0, 0, 0, 111, 256, 87, 373, 597, 430, 684, 935, 770, 1020, 1265}; - -const int16_t WebRtcIsac_kQMeanGain2Q12[144] = { - 1760, 1525, 1285, 1747, 1671, 1393, 1843, 1826, 1555, 1843, 1784, 1606, 1843, 1843, 1711, 1843, - 1843, 1814, 1389, 1275, 1040, 1564, 1414, 1252, 1610, 1495, 1343, 1753, 1592, 1405, 1804, 1720, - 1475, 1843, 1814, 1581, 1208, 1061, 856, 1349, 1148, 994, 1390, 
1253, 1111, 1495, 1343, 1178, - 1770, 1465, 1234, 1814, 1581, 1342, 1040, 793, 713, 1053, 895, 737, 1128, 1003, 861, 1277, - 1094, 981, 1475, 1192, 1019, 1581, 1342, 1098, 855, 570, 483, 833, 648, 540, 948, 744, - 572, 1009, 844, 636, 1234, 934, 685, 1342, 1217, 984, 537, 318, 124, 603, 423, 350, - 687, 479, 322, 791, 581, 430, 987, 671, 488, 1098, 849, 597, 283, 27, 0, 397, - 222, 38, 513, 271, 124, 624, 325, 157, 737, 484, 233, 849, 597, 343, 27, 0, - 0, 141, 0, 0, 256, 69, 0, 370, 87, 0, 484, 229, 0, 597, 343, 87}; - -const int16_t WebRtcIsac_kQMeanGain3Q12[144] = { - 1843, 1843, 1711, 1843, 1818, 1606, 1843, 1827, 1511, 1814, 1639, 1393, 1760, 1525, 1285, 1656, - 1419, 1176, 1835, 1718, 1475, 1841, 1650, 1387, 1648, 1498, 1287, 1600, 1411, 1176, 1522, 1299, - 1040, 1419, 1176, 928, 1773, 1461, 1128, 1532, 1355, 1202, 1429, 1260, 1115, 1398, 1151, 1025, - 1172, 1080, 790, 1176, 928, 677, 1475, 1147, 1019, 1276, 1096, 922, 1214, 1010, 901, 1057, - 893, 800, 1040, 796, 734, 928, 677, 424, 1137, 897, 753, 1120, 830, 710, 875, 751, - 601, 795, 642, 583, 790, 544, 475, 677, 474, 140, 987, 750, 482, 697, 573, 450, - 691, 487, 303, 661, 394, 332, 537, 303, 220, 424, 168, 0, 737, 484, 229, 624, - 348, 153, 441, 261, 136, 397, 166, 51, 283, 27, 0, 168, 0, 0, 484, 229, - 0, 370, 57, 0, 256, 43, 0, 141, 0, 0, 27, 0, 0, 0, 0, 0}; - - -const int16_t WebRtcIsac_kQMeanGain4Q12[144] = { - 1843, 1843, 1843, 1843, 1841, 1843, 1500, 1821, 1843, 1222, 1434, 1656, 843, 1092, 1336, 504, - 757, 1007, 1843, 1843, 1843, 1838, 1791, 1843, 1265, 1505, 1599, 965, 1219, 1425, 730, 821, - 1092, 249, 504, 757, 1783, 1819, 1843, 1351, 1567, 1727, 1096, 1268, 1409, 805, 961, 1131, - 444, 670, 843, 0, 249, 504, 1425, 1655, 1743, 1096, 1324, 1448, 822, 1019, 1199, 490, - 704, 867, 81, 450, 555, 0, 0, 249, 1247, 1428, 1530, 881, 1073, 1283, 610, 759, - 939, 278, 464, 645, 0, 200, 270, 0, 0, 0, 935, 1163, 1410, 528, 790, 1068, - 377, 499, 717, 173, 240, 274, 0, 43, 62, 0, 0, 0, 684, 935, 1182, 343, 
- 551, 735, 161, 262, 423, 0, 55, 27, 0, 0, 0, 0, 0, 0, 430, 684, - 935, 87, 377, 597, 0, 46, 256, 0, 0, 0, 0, 0, 0, 0, 0, 0}; diff --git a/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h b/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h deleted file mode 100644 index 145fd4e6aa..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_gain_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ - -#include - -/* header file for coding tables for the pitch filter side-info in the entropy - * coder */ -/********************* Pitch Filter Gain Coefficient Tables - * ************************/ -/* cdf for quantized pitch filter gains */ -extern const uint16_t WebRtcIsac_kQPitchGainCdf[255]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kIndexLowerLimitGain[3]; - -extern const int16_t WebRtcIsac_kIndexUpperLimitGain[3]; -extern const uint16_t WebRtcIsac_kIndexMultsGain[2]; - -/* mean values of pitch filter gains */ -//(Y) -extern const int16_t WebRtcIsac_kQMeanGain1Q12[144]; -extern const int16_t WebRtcIsac_kQMeanGain2Q12[144]; -extern const int16_t WebRtcIsac_kQMeanGain3Q12[144]; -extern const int16_t WebRtcIsac_kQMeanGain4Q12[144]; -//(Y) - -/* size of cdf table */ -extern const uint16_t WebRtcIsac_kQCdfTableSizeGain[1]; - -#endif /* 
MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c b/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c deleted file mode 100644 index 57d12021ac..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/* header file for coding tables for the pitch filter side-info in the entropy coder */ -/********************* Pitch Filter Gain Coefficient Tables ************************/ - -/* tables for use with small pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsac_kQPitchLagCdf1Lo[127] = { - 0, 134, 336, 549, 778, 998, 1264, 1512, 1777, 2070, - 2423, 2794, 3051, 3361, 3708, 3979, 4315, 4610, 4933, 5269, - 5575, 5896, 6155, 6480, 6816, 7129, 7477, 7764, 8061, 8358, - 8718, 9020, 9390, 9783, 10177, 10543, 10885, 11342, 11795, 12213, - 12680, 13096, 13524, 13919, 14436, 14903, 15349, 15795, 16267, 16734, - 17266, 17697, 18130, 18632, 19080, 19447, 19884, 20315, 20735, 21288, - 21764, 22264, 22723, 23193, 23680, 24111, 24557, 25022, 25537, 26082, - 26543, 27090, 27620, 28139, 28652, 29149, 29634, 30175, 30692, 31273, - 31866, 32506, 33059, 33650, 34296, 34955, 35629, 36295, 36967, 37726, - 38559, 39458, 40364, 41293, 42256, 43215, 44231, 45253, 46274, 47359, - 48482, 49678, 50810, 51853, 53016, 54148, 55235, 56263, 57282, 
58363, - 59288, 60179, 61076, 61806, 62474, 63129, 63656, 64160, 64533, 64856, - 65152, 65535, 65535, 65535, 65535, 65535, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf2Lo[20] = { - 0, 429, 3558, 5861, 8558, 11639, 15210, 19502, 24773, 31983, - 42602, 48567, 52601, 55676, 58160, 60172, 61889, 63235, 65383, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf3Lo[2] = { - 0, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf4Lo[10] = { - 0, 2966, 6368, 11182, 19431, 37793, 48532, 55353, 60626, 65535}; - -const uint16_t *WebRtcIsac_kQPitchLagCdfPtrLo[4] = {WebRtcIsac_kQPitchLagCdf1Lo, WebRtcIsac_kQPitchLagCdf2Lo, WebRtcIsac_kQPitchLagCdf3Lo, WebRtcIsac_kQPitchLagCdf4Lo}; - -/* size of first cdf table */ -const uint16_t WebRtcIsac_kQPitchLagCdfSizeLo[1] = {128}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kQIndexLowerLimitLagLo[4] = { --140, -9, 0, -4}; - -const int16_t WebRtcIsac_kQIndexUpperLimitLagLo[4] = { --20, 9, 0, 4}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsac_kQInitIndexLagLo[3] = { - 10, 1, 5}; - -/* mean values of pitch filter lags */ -const double WebRtcIsac_kQMeanLag2Lo[19] = { --17.21385070, -15.82678944, -14.07123081, -12.03003877, -10.01311864, -8.00794627, -5.91162987, -3.89231876, -1.90220980, -0.01879275, - 1.89144232, 3.88123171, 5.92146992, 7.96435361, 9.98923648, 11.98266347, 13.96101002, 15.74855713, 17.10976611}; - -const double WebRtcIsac_kQMeanLag3Lo[1] = { - 0.00000000}; - -const double WebRtcIsac_kQMeanLag4Lo[9] = { --7.76246496, -5.92083980, -3.94095226, -1.89502305, 0.03724681, 1.93054221, 3.96443467, 5.91726366, 7.78434291}; - -const double WebRtcIsac_kQPitchLagStepsizeLo = 2.000000; - - -/* tables for use with medium pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsac_kQPitchLagCdf1Mid[255] = { - 0, 28, 61, 88, 121, 149, 233, 331, 475, 559, - 624, 661, 689, 712, 745, 791, 815, 843, 866, 922, - 959, 1024, 1061, 1117, 1178, 1238, 1280, 1350, 1453, 1513, - 
1564, 1625, 1671, 1741, 1788, 1904, 2072, 2421, 2626, 2770, - 2840, 2900, 2942, 3012, 3068, 3115, 3147, 3194, 3254, 3319, - 3366, 3520, 3678, 3780, 3850, 3911, 3957, 4032, 4106, 4185, - 4292, 4474, 4683, 4842, 5019, 5191, 5321, 5428, 5540, 5675, - 5763, 5847, 5959, 6127, 6304, 6564, 6839, 7090, 7263, 7421, - 7556, 7728, 7872, 7984, 8142, 8361, 8580, 8743, 8938, 9227, - 9409, 9539, 9674, 9795, 9930, 10060, 10177, 10382, 10614, 10861, - 11038, 11271, 11415, 11629, 11792, 12044, 12193, 12416, 12574, 12821, - 13007, 13235, 13445, 13654, 13901, 14134, 14488, 15000, 15703, 16285, - 16504, 16797, 17086, 17328, 17579, 17807, 17998, 18268, 18538, 18836, - 19087, 19274, 19474, 19716, 19935, 20270, 20833, 21303, 21532, 21741, - 21978, 22207, 22523, 22770, 23054, 23613, 23943, 24204, 24399, 24651, - 24832, 25074, 25270, 25549, 25759, 26015, 26150, 26424, 26713, 27048, - 27342, 27504, 27681, 27854, 28021, 28207, 28412, 28664, 28859, 29064, - 29278, 29548, 29748, 30107, 30377, 30656, 30856, 31164, 31452, 31755, - 32011, 32328, 32626, 32919, 33319, 33789, 34329, 34925, 35396, 35973, - 36443, 36964, 37551, 38156, 38724, 39357, 40023, 40908, 41587, 42602, - 43924, 45037, 45810, 46597, 47421, 48291, 49092, 50051, 51448, 52719, - 53440, 54241, 54944, 55977, 56676, 57299, 57872, 58389, 59059, 59688, - 60237, 60782, 61094, 61573, 61890, 62290, 62658, 63030, 63217, 63454, - 63622, 63882, 64003, 64273, 64427, 64529, 64581, 64697, 64758, 64902, - 65414, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf2Mid[36] = { - 0, 71, 335, 581, 836, 1039, 1323, 1795, 2258, 2608, - 3005, 3591, 4243, 5344, 7163, 10583, 16848, 28078, 49448, 57007, - 60357, 61850, 62837, 63437, 63872, 64188, 64377, 64614, 64774, 64949, - 65039, 65115, 65223, 65360, 65474, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf3Mid[2] = { - 0, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf4Mid[20] = { - 0, 28, 246, 459, 667, 1045, 
1523, 2337, 4337, 11347, - 44231, 56709, 60781, 62243, 63161, 63969, 64608, 65062, 65502, 65535}; - -const uint16_t *WebRtcIsac_kQPitchLagCdfPtrMid[4] = {WebRtcIsac_kQPitchLagCdf1Mid, WebRtcIsac_kQPitchLagCdf2Mid, WebRtcIsac_kQPitchLagCdf3Mid, WebRtcIsac_kQPitchLagCdf4Mid}; - -/* size of first cdf table */ -const uint16_t WebRtcIsac_kQPitchLagCdfSizeMid[1] = {256}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kQIndexLowerLimitLagMid[4] = { --280, -17, 0, -9}; - -const int16_t WebRtcIsac_kQIndexUpperLimitLagMid[4] = { --40, 17, 0, 9}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsac_kQInitIndexLagMid[3] = { - 18, 1, 10}; - -/* mean values of pitch filter lags */ -const double WebRtcIsac_kQMeanLag2Mid[35] = { --16.89183900, -15.86949778, -15.05476653, -14.00664348, -13.02793036, -12.07324237, -11.00542532, -10.11250602, -8.90792971, -8.02474753, --7.00426767, -5.94055287, -4.98251338, -3.91053158, -2.98820425, -1.93524245, -0.92978085, -0.01722509, 0.91317387, 1.92973955, - 2.96908851, 3.93728974, 4.96308471, 5.92244151, 7.08673497, 8.00993708, 9.04656316, 9.98538742, 10.97851694, 11.94772884, - 13.02426166, 14.00039951, 15.01347042, 15.80758023, 16.94086895}; - -const double WebRtcIsac_kQMeanLag3Mid[1] = { - 0.00000000}; - -const double WebRtcIsac_kQMeanLag4Mid[19] = { --8.60409403, -7.89198395, -7.03450280, -5.86260421, -4.93822322, -3.93078706, -2.91302322, -1.91824007, -0.87003282, 0.02822649, - 0.89951758, 1.87495484, 2.91802604, 3.96874074, 5.06571703, 5.93618227, 7.00520185, 7.88497726, 8.64160364}; - -const double WebRtcIsac_kQPitchLagStepsizeMid = 1.000000; - - -/* tables for use with large pitch gain */ - -/* cdf for quantized pitch filter lags */ -const uint16_t WebRtcIsac_kQPitchLagCdf1Hi[511] = { - 0, 7, 18, 33, 69, 105, 156, 228, 315, 612, - 680, 691, 709, 724, 735, 738, 742, 746, 749, 753, - 756, 760, 764, 774, 782, 785, 789, 796, 800, 803, - 807, 814, 818, 822, 829, 832, 847, 854, 858, 869, - 876, 883, 898, 908, 
934, 977, 1010, 1050, 1060, 1064, - 1075, 1078, 1086, 1089, 1093, 1104, 1111, 1122, 1133, 1136, - 1151, 1162, 1183, 1209, 1252, 1281, 1339, 1364, 1386, 1401, - 1411, 1415, 1426, 1430, 1433, 1440, 1448, 1455, 1462, 1477, - 1487, 1495, 1502, 1506, 1509, 1516, 1524, 1531, 1535, 1542, - 1553, 1556, 1578, 1589, 1611, 1625, 1639, 1643, 1654, 1665, - 1672, 1687, 1694, 1705, 1708, 1719, 1730, 1744, 1752, 1759, - 1791, 1795, 1820, 1867, 1886, 1915, 1936, 1943, 1965, 1987, - 2041, 2099, 2161, 2175, 2200, 2211, 2226, 2233, 2244, 2251, - 2266, 2280, 2287, 2298, 2309, 2316, 2331, 2342, 2356, 2378, - 2403, 2418, 2447, 2497, 2544, 2602, 2863, 2895, 2903, 2935, - 2950, 2971, 3004, 3011, 3018, 3029, 3040, 3062, 3087, 3127, - 3152, 3170, 3199, 3243, 3293, 3322, 3340, 3377, 3402, 3427, - 3474, 3518, 3543, 3579, 3601, 3637, 3659, 3706, 3731, 3760, - 3818, 3847, 3869, 3901, 3920, 3952, 4068, 4169, 4220, 4271, - 4524, 4571, 4604, 4632, 4672, 4730, 4777, 4806, 4857, 4904, - 4951, 5002, 5031, 5060, 5107, 5150, 5212, 5266, 5331, 5382, - 5432, 5490, 5544, 5610, 5700, 5762, 5812, 5874, 5972, 6022, - 6091, 6163, 6232, 6305, 6402, 6540, 6685, 6880, 7090, 7271, - 7379, 7452, 7542, 7625, 7687, 7770, 7843, 7911, 7966, 8024, - 8096, 8190, 8252, 8320, 8411, 8501, 8585, 8639, 8751, 8842, - 8918, 8986, 9066, 9127, 9203, 9269, 9345, 9406, 9464, 9536, - 9612, 9667, 9735, 9844, 9931, 10036, 10119, 10199, 10260, 10358, - 10441, 10514, 10666, 10734, 10872, 10951, 11053, 11125, 11223, 11324, - 11516, 11664, 11737, 11816, 11892, 12008, 12120, 12200, 12280, 12392, - 12490, 12576, 12685, 12812, 12917, 13003, 13108, 13210, 13300, 13384, - 13470, 13579, 13673, 13771, 13879, 13999, 14136, 14201, 14368, 14614, - 14759, 14867, 14958, 15030, 15121, 15189, 15280, 15385, 15461, 15555, - 15653, 15768, 15884, 15971, 16069, 16145, 16210, 16279, 16380, 16463, - 16539, 16615, 16688, 16818, 16919, 17017, 18041, 18338, 18523, 18649, - 18790, 18917, 19047, 19167, 19315, 19460, 19601, 19731, 19858, 20068, - 20173, 20318, 
20466, 20625, 20741, 20911, 21045, 21201, 21396, 21588, - 21816, 22022, 22305, 22547, 22786, 23072, 23322, 23600, 23879, 24168, - 24433, 24769, 25120, 25511, 25895, 26289, 26792, 27219, 27683, 28077, - 28566, 29094, 29546, 29977, 30491, 30991, 31573, 32105, 32594, 33173, - 33788, 34497, 35181, 35833, 36488, 37255, 37921, 38645, 39275, 39894, - 40505, 41167, 41790, 42431, 43096, 43723, 44385, 45134, 45858, 46607, - 47349, 48091, 48768, 49405, 49955, 50555, 51167, 51985, 52611, 53078, - 53494, 53965, 54435, 54996, 55601, 56125, 56563, 56838, 57244, 57566, - 57967, 58297, 58771, 59093, 59419, 59647, 59886, 60143, 60461, 60693, - 60917, 61170, 61416, 61634, 61891, 62122, 62310, 62455, 62632, 62839, - 63103, 63436, 63639, 63805, 63906, 64015, 64192, 64355, 64475, 64558, - 64663, 64742, 64811, 64865, 64916, 64956, 64981, 65025, 65068, 65115, - 65195, 65314, 65419, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, - 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf2Hi[68] = { - 0, 7, 11, 22, 37, 52, 56, 59, 81, 85, - 89, 96, 115, 130, 137, 152, 170, 181, 193, 200, - 207, 233, 237, 259, 289, 318, 363, 433, 592, 992, - 1607, 3062, 6149, 12206, 25522, 48368, 58223, 61918, 63640, 64584, - 64943, 65098, 65206, 65268, 65294, 65335, 65350, 65372, 65387, 65402, - 65413, 65420, 65428, 65435, 65439, 65450, 65454, 65468, 65472, 65476, - 65483, 65491, 65498, 65505, 65516, 65520, 65528, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf3Hi[2] = { - 0, 65535}; - -const uint16_t WebRtcIsac_kQPitchLagCdf4Hi[35] = { - 0, 7, 19, 30, 41, 48, 63, 74, 82, 96, - 122, 152, 215, 330, 701, 2611, 10931, 48106, 61177, 64341, - 65112, 65238, 65309, 65338, 65364, 65379, 65401, 65427, 65453, 65465, - 65476, 65490, 65509, 65528, 65535}; - -const uint16_t *WebRtcIsac_kQPitchLagCdfPtrHi[4] = 
{WebRtcIsac_kQPitchLagCdf1Hi, WebRtcIsac_kQPitchLagCdf2Hi, WebRtcIsac_kQPitchLagCdf3Hi, WebRtcIsac_kQPitchLagCdf4Hi}; - -/* size of first cdf table */ -const uint16_t WebRtcIsac_kQPitchLagCdfSizeHi[1] = {512}; - -/* index limits and ranges */ -const int16_t WebRtcIsac_kQindexLowerLimitLagHi[4] = { --552, -34, 0, -16}; - -const int16_t WebRtcIsac_kQindexUpperLimitLagHi[4] = { --80, 32, 0, 17}; - -/* initial index for arithmetic decoder */ -const uint16_t WebRtcIsac_kQInitIndexLagHi[3] = { - 34, 1, 18}; - -/* mean values of pitch filter lags */ -const double WebRtcIsac_kQMeanLag2Hi[67] = { --17.07263295, -16.50000000, -15.83966081, -15.55613708, -14.96948007, -14.50000000, -14.00000000, -13.48377986, -13.00000000, -12.50000000, --11.93199636, -11.44530414, -11.04197641, -10.39910301, -10.15202337, -9.51322461, -8.93357741, -8.46456632, -8.10270672, -7.53751847, --6.98686404, -6.50000000, -6.08463150, -5.46872991, -5.00864717, -4.50163760, -4.01382410, -3.43856708, -2.96898001, -2.46554810, --1.96861004, -1.47106701, -0.97197237, -0.46561654, -0.00531409, 0.45767857, 0.96777907, 1.47507903, 1.97740425, 2.46695420, - 3.00695774, 3.47167185, 4.02712538, 4.49280007, 5.01087640, 5.48191963, 6.04916550, 6.51511058, 6.97297819, 7.46565499, - 8.01489405, 8.39912001, 8.91819757, 9.50000000, 10.11654065, 10.50000000, 11.03712583, 11.50000000, 12.00000000, 12.38964346, - 12.89466127, 13.43657881, 13.96013840, 14.46279912, 15.00000000, 15.39412269, 15.96662441}; - -const double WebRtcIsac_kQMeanLag3Hi[1] = { - 0.00000000}; - -const double WebRtcIsac_kQMeanLag4Hi[34] = { --7.98331221, -7.47988769, -7.03626557, -6.52708003, -6.06982173, -5.51856292, -5.05827033, -4.45909878, -3.99125864, -3.45308135, --3.02328139, -2.47297273, -1.94341995, -1.44699056, -0.93612243, -0.43012406, 0.01120357, 0.44054812, 0.93199883, 1.45669587, - 1.97218322, 2.50187419, 2.98748690, 3.49343202, 4.01660147, 4.50984306, 5.01402683, 5.58936797, 5.91787793, 6.59998900, - 6.85034315, 7.53503316, 
7.87711194, 8.53631648}; - -const double WebRtcIsac_kQPitchLagStepsizeHi = 0.500000; - -/* transform matrix */ -const double WebRtcIsac_kTransform[4][4] = { -{-0.50000000, -0.50000000, -0.50000000, -0.50000000}, -{ 0.67082039, 0.22360680, -0.22360680, -0.67082039}, -{ 0.50000000, -0.50000000, -0.50000000, 0.50000000}, -{ 0.22360680, -0.67082039, 0.67082039, -0.22360680}}; - -/* transpose transform matrix */ -const double WebRtcIsac_kTransformTranspose[4][4] = { -{-0.50000000, 0.67082039, 0.50000000, 0.22360680}, -{-0.50000000, 0.22360680, -0.50000000, -0.67082039}, -{-0.50000000, -0.22360680, -0.50000000, 0.67082039}, -{-0.50000000, -0.67082039, 0.50000000, -0.22360680}}; - diff --git a/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h b/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h deleted file mode 100644 index b48e358a5a..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * pitch_lag_tables.h - * - * This file contains tables for the pitch filter side-info in the entropy - * coder. 
- * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_ - -#include - -/* header file for coding tables for the pitch filter side-info in the entropy - * coder */ -/********************* Pitch Filter Lag Coefficient Tables - * ************************/ - -/* tables for use with small pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsac_kQPitchLagCdf1Lo[127]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf2Lo[20]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf3Lo[2]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf4Lo[10]; - -extern const uint16_t* WebRtcIsac_kQPitchLagCdfPtrLo[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeLo[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kQIndexLowerLimitLagLo[4]; -extern const int16_t WebRtcIsac_kQIndexUpperLimitLagLo[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsac_kQInitIndexLagLo[3]; - -/* mean values of pitch filter lags */ -extern const double WebRtcIsac_kQMeanLag2Lo[19]; -extern const double WebRtcIsac_kQMeanLag3Lo[1]; -extern const double WebRtcIsac_kQMeanLag4Lo[9]; - -extern const double WebRtcIsac_kQPitchLagStepsizeLo; - -/* tables for use with medium pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsac_kQPitchLagCdf1Mid[255]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf2Mid[36]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf3Mid[2]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf4Mid[20]; - -extern const uint16_t* WebRtcIsac_kQPitchLagCdfPtrMid[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeMid[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kQIndexLowerLimitLagMid[4]; -extern const int16_t WebRtcIsac_kQIndexUpperLimitLagMid[4]; - -/* initial index for arithmetic decoder */ -extern const 
uint16_t WebRtcIsac_kQInitIndexLagMid[3]; - -/* mean values of pitch filter lags */ -extern const double WebRtcIsac_kQMeanLag2Mid[35]; -extern const double WebRtcIsac_kQMeanLag3Mid[1]; -extern const double WebRtcIsac_kQMeanLag4Mid[19]; - -extern const double WebRtcIsac_kQPitchLagStepsizeMid; - -/* tables for use with large pitch gain */ - -/* cdfs for quantized pitch lags */ -extern const uint16_t WebRtcIsac_kQPitchLagCdf1Hi[511]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf2Hi[68]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf3Hi[2]; -extern const uint16_t WebRtcIsac_kQPitchLagCdf4Hi[35]; - -extern const uint16_t* WebRtcIsac_kQPitchLagCdfPtrHi[4]; - -/* size of first cdf table */ -extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeHi[1]; - -/* index limits and ranges */ -extern const int16_t WebRtcIsac_kQindexLowerLimitLagHi[4]; -extern const int16_t WebRtcIsac_kQindexUpperLimitLagHi[4]; - -/* initial index for arithmetic decoder */ -extern const uint16_t WebRtcIsac_kQInitIndexLagHi[3]; - -/* mean values of pitch filter lags */ -extern const double WebRtcIsac_kQMeanLag2Hi[67]; -extern const double WebRtcIsac_kQMeanLag3Hi[1]; -extern const double WebRtcIsac_kQMeanLag4Hi[34]; - -extern const double WebRtcIsac_kQPitchLagStepsizeHi; - -/* transform matrix */ -extern const double WebRtcIsac_kTransform[4][4]; - -/* transpose transform matrix */ -extern const double WebRtcIsac_kTransformTranspose[4][4]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_LAG_TABLES_H_ */ diff --git a/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c b/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c deleted file mode 100644 index 839d5d4586..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h" -#include "modules/audio_coding/codecs/isac/main/source/settings.h" - -/********************* AR Coefficient Tables ************************/ -/* cdf for quantized reflection coefficient 1 */ -const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 2 */ -const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 3 */ -const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 4 */ -const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 5 */ -const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531, - 65533, 65535}; - -/* cdf for quantized reflection coefficient 6 */ -const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY] = { - 0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531, - 65533, 65535}; - -/* representation levels for quantized reflection coefficient 1 */ -const int16_t WebRtcIsac_kQArRc1Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { - -32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection 
coefficient 2 */ -const int16_t WebRtcIsac_kQArRc2Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { - -32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 3 */ -const int16_t WebRtcIsac_kQArRc3Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 4 */ -const int16_t WebRtcIsac_kQArRc4Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 5 */ -const int16_t WebRtcIsac_kQArRc5Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104 -}; - -/* representation levels for quantized reflection coefficient 6 */ -const int16_t WebRtcIsac_kQArRc6Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = { --32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104 -}; - -/* quantization boundary levels for reflection coefficients */ -const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY] = { --32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, -32767 -}; - -/* initial index for AR reflection coefficient quantizer and cdf table search */ -const uint16_t WebRtcIsac_kQArRcInitIndex[6] = { - 5, 5, 5, 5, 5, 5}; - -/* pointers to AR cdf tables */ -const uint16_t *WebRtcIsac_kQArRcCdfPtr[AR_ORDER] = { - WebRtcIsac_kQArRc1Cdf, WebRtcIsac_kQArRc2Cdf, WebRtcIsac_kQArRc3Cdf, - WebRtcIsac_kQArRc4Cdf, WebRtcIsac_kQArRc5Cdf, WebRtcIsac_kQArRc6Cdf -}; - -/* pointers to AR representation levels tables */ -const int16_t *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER] = { - WebRtcIsac_kQArRc1Levels, WebRtcIsac_kQArRc2Levels, WebRtcIsac_kQArRc3Levels, - WebRtcIsac_kQArRc4Levels, WebRtcIsac_kQArRc5Levels, WebRtcIsac_kQArRc6Levels -}; - - 
-/******************** GAIN Coefficient Tables ***********************/ -/* cdf for Gain coefficient */ -const uint16_t WebRtcIsac_kQGainCdf[19] = { - 0, 2, 4, 6, 8, 10, 12, 14, 16, 1172, - 11119, 29411, 51699, 64445, 65527, 65529, 65531, 65533, 65535}; - -/* representation levels for quantized squared Gain coefficient */ -const int32_t WebRtcIsac_kQGain2Levels[18] = { -// 17, 28, 46, 76, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000}; - 128, 128, 128, 128, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000}; -/* quantization boundary levels for squared Gain coefficient */ -const int32_t WebRtcIsac_kQGain2BoundaryLevels[19] = { -0, 21, 35, 59, 99, 166, 280, 475, 815, 1414, 2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF}; - -/* pointers to Gain cdf table */ -const uint16_t *WebRtcIsac_kQGainCdf_ptr[1] = {WebRtcIsac_kQGainCdf}; - -/* Gain initial index for gain quantizer and cdf table search */ -const uint16_t WebRtcIsac_kQGainInitIndex[1] = {11}; - -/************************* Cosine Tables ****************************/ -/* Cosine table */ -const int16_t WebRtcIsac_kCos[6][60] = { -{512, 512, 511, 510, 508, 507, 505, 502, 499, 496, 493, 489, 485, 480, 476, 470, 465, 459, 453, 447, -440, 433, 426, 418, 410, 402, 394, 385, 376, 367, 357, 348, 338, 327, 317, 306, 295, 284, 273, 262, -250, 238, 226, 214, 202, 190, 177, 165, 152, 139, 126, 113, 100, 87, 73, 60, 47, 33, 20, 7}, -{512, 510, 508, 503, 498, 491, 483, 473, 462, 450, 437, 422, 406, 389, 371, 352, 333, 312, 290, 268, -244, 220, 196, 171, 145, 120, 93, 67, 40, 13, -13, -40, -67, -93, -120, -145, -171, -196, -220, -244, --268, -290, -312, -333, -352, -371, -389, -406, -422, -437, -450, -462, -473, -483, -491, -498, -503, -508, -510, -512}, -{512, 508, 502, 493, 480, 465, 447, 426, 402, 376, 348, 317, 284, 250, 214, 177, 139, 100, 60, 20, --20, -60, -100, -139, -177, -214, -250, -284, -317, -348, -376, -402, -426, 
-447, -465, -480, -493, -502, -508, -512, --512, -508, -502, -493, -480, -465, -447, -426, -402, -376, -348, -317, -284, -250, -214, -177, -139, -100, -60, -20}, -{511, 506, 495, 478, 456, 429, 398, 362, 322, 279, 232, 183, 133, 80, 27, -27, -80, -133, -183, -232, --279, -322, -362, -398, -429, -456, -478, -495, -506, -511, -511, -506, -495, -478, -456, -429, -398, -362, -322, -279, --232, -183, -133, -80, -27, 27, 80, 133, 183, 232, 279, 322, 362, 398, 429, 456, 478, 495, 506, 511}, -{511, 502, 485, 459, 426, 385, 338, 284, 226, 165, 100, 33, -33, -100, -165, -226, -284, -338, -385, -426, --459, -485, -502, -511, -511, -502, -485, -459, -426, -385, -338, -284, -226, -165, -100, -33, 33, 100, 165, 226, -284, 338, 385, 426, 459, 485, 502, 511, 511, 502, 485, 459, 426, 385, 338, 284, 226, 165, 100, 33}, -{510, 498, 473, 437, 389, 333, 268, 196, 120, 40, -40, -120, -196, -268, -333, -389, -437, -473, -498, -510, --510, -498, -473, -437, -389, -333, -268, -196, -120, -40, 40, 120, 196, 268, 333, 389, 437, 473, 498, 510, -510, 498, 473, 437, 389, 333, 268, 196, 120, 40, -40, -120, -196, -268, -333, -389, -437, -473, -498, -510} -}; diff --git a/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h b/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h deleted file mode 100644 index d272be0dc3..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/* - * spectrum_ar_model_tables.h - * - * This file contains definitions of tables with AR coefficients, - * Gain coefficients and cosine tables. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ - -#include "modules/audio_coding/codecs/isac/main/source/structs.h" - -#define NUM_AR_RC_QUANT_BAUNDARY 12 - -/********************* AR Coefficient Tables ************************/ -/* cdf for quantized reflection coefficient 1 */ -extern const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 2 */ -extern const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 3 */ -extern const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 4 */ -extern const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 5 */ -extern const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* cdf for quantized reflection coefficient 6 */ -extern const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY]; - -/* quantization boundary levels for reflection coefficients */ -extern const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY]; - -/* initial indices for AR reflection coefficient quantizer and cdf table search - */ -extern const uint16_t WebRtcIsac_kQArRcInitIndex[AR_ORDER]; - -/* pointers to AR cdf tables */ -extern const uint16_t* WebRtcIsac_kQArRcCdfPtr[AR_ORDER]; - -/* pointers to AR representation levels tables */ -extern const int16_t* WebRtcIsac_kQArRcLevelsPtr[AR_ORDER]; - -/******************** GAIN Coefficient Tables ***********************/ -/* cdf for Gain coefficient */ -extern const uint16_t WebRtcIsac_kQGainCdf[19]; - -/* representation levels for quantized Gain coefficient */ -extern const int32_t 
WebRtcIsac_kQGain2Levels[18]; - -/* squared quantization boundary levels for Gain coefficient */ -extern const int32_t WebRtcIsac_kQGain2BoundaryLevels[19]; - -/* pointer to Gain cdf table */ -extern const uint16_t* WebRtcIsac_kQGainCdf_ptr[1]; - -/* Gain initial index for gain quantizer and cdf table search */ -extern const uint16_t WebRtcIsac_kQGainInitIndex[1]; - -/************************* Cosine Tables ****************************/ -/* Cosine table */ -extern const int16_t WebRtcIsac_kCos[6][60]; - -#endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ \ - */ diff --git a/modules/audio_coding/codecs/isac/main/source/transform.c b/modules/audio_coding/codecs/isac/main/source/transform.c deleted file mode 100644 index 082ad941c4..0000000000 --- a/modules/audio_coding/codecs/isac/main/source/transform.c +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include "modules/audio_coding/codecs/isac/main/source/settings.h" -#include "modules/audio_coding/codecs/isac/main/source/codec.h" -#include "modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" -#include "modules/third_party/fft/fft.h" - -void WebRtcIsac_InitTransform(TransformTables* tables) { - int k; - double fact, phase; - - fact = PI / (FRAMESAMPLES_HALF); - phase = 0.0; - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - tables->costab1[k] = cos(phase); - tables->sintab1[k] = sin(phase); - phase += fact; - } - - fact = PI * ((double) (FRAMESAMPLES_HALF - 1)) / ((double) FRAMESAMPLES_HALF); - phase = 0.5 * fact; - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - tables->costab2[k] = cos(phase); - tables->sintab2[k] = sin(phase); - phase += fact; - } -} - -void WebRtcIsac_Time2Spec(const TransformTables* tables, - double* inre1, - double* inre2, - int16_t* outreQ7, - int16_t* outimQ7, - FFTstr* fftstr_obj) { - int k; - int dims[1]; - double tmp1r, tmp1i, xr, xi, yr, yi, fact; - double tmpre[FRAMESAMPLES_HALF], tmpim[FRAMESAMPLES_HALF]; - - - dims[0] = FRAMESAMPLES_HALF; - - - /* Multiply with complex exponentials and combine into one complex vector */ - fact = 0.5 / sqrt(FRAMESAMPLES_HALF); - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - tmp1r = tables->costab1[k]; - tmp1i = tables->sintab1[k]; - tmpre[k] = (inre1[k] * tmp1r + inre2[k] * tmp1i) * fact; - tmpim[k] = (inre2[k] * tmp1r - inre1[k] * tmp1i) * fact; - } - - - /* Get DFT */ - WebRtcIsac_Fftns(1, dims, tmpre, tmpim, -1, 1.0, fftstr_obj); - - /* Use symmetry to separate into two complex vectors and center frames in time around zero */ - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - xr = tmpre[k] + tmpre[FRAMESAMPLES_HALF - 1 - k]; - yi = -tmpre[k] + tmpre[FRAMESAMPLES_HALF - 1 - k]; - xi = tmpim[k] - tmpim[FRAMESAMPLES_HALF - 1 - k]; - yr = tmpim[k] + tmpim[FRAMESAMPLES_HALF - 1 - k]; - - tmp1r = tables->costab2[k]; - tmp1i = tables->sintab2[k]; - outreQ7[k] = 
(int16_t)WebRtcIsac_lrint((xr * tmp1r - xi * tmp1i) * 128.0); - outimQ7[k] = (int16_t)WebRtcIsac_lrint((xr * tmp1i + xi * tmp1r) * 128.0); - outreQ7[FRAMESAMPLES_HALF - 1 - k] = (int16_t)WebRtcIsac_lrint((-yr * tmp1i - yi * tmp1r) * 128.0); - outimQ7[FRAMESAMPLES_HALF - 1 - k] = (int16_t)WebRtcIsac_lrint((-yr * tmp1r + yi * tmp1i) * 128.0); - } -} - -void WebRtcIsac_Spec2time(const TransformTables* tables, - double* inre, - double* inim, - double* outre1, - double* outre2, - FFTstr* fftstr_obj) { - int k; - double tmp1r, tmp1i, xr, xi, yr, yi, fact; - - int dims; - - dims = FRAMESAMPLES_HALF; - - for (k = 0; k < FRAMESAMPLES_QUARTER; k++) { - /* Move zero in time to beginning of frames */ - tmp1r = tables->costab2[k]; - tmp1i = tables->sintab2[k]; - xr = inre[k] * tmp1r + inim[k] * tmp1i; - xi = inim[k] * tmp1r - inre[k] * tmp1i; - yr = -inim[FRAMESAMPLES_HALF - 1 - k] * tmp1r - inre[FRAMESAMPLES_HALF - 1 - k] * tmp1i; - yi = -inre[FRAMESAMPLES_HALF - 1 - k] * tmp1r + inim[FRAMESAMPLES_HALF - 1 - k] * tmp1i; - - /* Combine into one vector, z = x + j * y */ - outre1[k] = xr - yi; - outre1[FRAMESAMPLES_HALF - 1 - k] = xr + yi; - outre2[k] = xi + yr; - outre2[FRAMESAMPLES_HALF - 1 - k] = -xi + yr; - } - - - /* Get IDFT */ - WebRtcIsac_Fftns(1, &dims, outre1, outre2, 1, FRAMESAMPLES_HALF, fftstr_obj); - - - /* Demodulate and separate */ - fact = sqrt(FRAMESAMPLES_HALF); - for (k = 0; k < FRAMESAMPLES_HALF; k++) { - tmp1r = tables->costab1[k]; - tmp1i = tables->sintab1[k]; - xr = (outre1[k] * tmp1r - outre2[k] * tmp1i) * fact; - outre2[k] = (outre2[k] * tmp1r + outre1[k] * tmp1i) * fact; - outre1[k] = xr; - } -} diff --git a/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc b/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc deleted file mode 100644 index ee72b07dc3..0000000000 --- a/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc +++ /dev/null @@ -1,942 +0,0 @@ -/* - * Copyright (c) 
2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// ReleaseTest-API.cpp : Defines the entry point for the console application. -// - -#include -#include -#include -#include -#include - -#include - -/* include API */ -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -/* Defines */ -#define SEED_FILE \ - "randseed.txt" /* Used when running decoder on garbage data \ - */ -#define MAX_FRAMESAMPLES \ - 960 /* max number of samples per frame \ - (= 60 ms frame & 16 kHz) or \ - (= 30 ms frame & 32 kHz) */ -#define FRAMESAMPLES_10ms 160 /* number of samples per 10ms frame */ -#define SWBFRAMESAMPLES_10ms 320 -//#define FS 16000 /* sampling frequency (Hz) */ - -#ifdef WIN32 -#ifndef CLOCKS_PER_SEC -#define CLOCKS_PER_SEC 1000 /* Runtime statistics */ -#endif -#endif - -int main(int argc, char* argv[]) { - char inname[100], outname[100], bottleneck_file[100], vadfile[100]; - FILE *inp, *outp, *f_bn = NULL, *vadp = NULL, *bandwidthp; - int framecnt, endfile; - - size_t i; - int errtype, VADusage = 0, packetLossPercent = 0; - int16_t CodingMode; - int32_t bottleneck = 0; - int framesize = 30; /* ms */ - int cur_framesmpls, err; - - /* Runtime statistics */ - double starttime, runtime, length_file; - - size_t stream_len = 0; - int declen = 0, declenTC = 0; - bool lostFrame = false; - - int16_t shortdata[SWBFRAMESAMPLES_10ms]; - int16_t vaddata[SWBFRAMESAMPLES_10ms * 3]; - int16_t decoded[MAX_FRAMESAMPLES << 1]; - int16_t decodedTC[MAX_FRAMESAMPLES << 1]; - uint16_t streamdata[500]; - int16_t speechType[1]; - int16_t rateBPS = 0; - int16_t fixedFL = 0; - 
int16_t payloadSize = 0; - int32_t payloadRate = 0; - int setControlBWE = 0; - short FL, testNum; - char version_number[20]; - FILE* plFile; - int32_t sendBN; - -#if !defined(NDEBUG) - FILE* fy; - double kbps; -#endif - size_t totalbits = 0; - int totalsmpls = 0; - - /* If use GNS file */ - FILE* fp_gns = NULL; - char gns_file[100]; - size_t maxStreamLen30 = 0; - size_t maxStreamLen60 = 0; - short sampFreqKHz = 32; - short samplesIn10Ms; - // FILE logFile; - bool doTransCoding = false; - int32_t rateTransCoding = 0; - uint8_t streamDataTransCoding[1200]; - size_t streamLenTransCoding = 0; - FILE* transCodingFile = NULL; - FILE* transcodingBitstream = NULL; - size_t numTransCodingBytes = 0; - - /* only one structure used for ISAC encoder */ - ISACStruct* ISAC_main_inst = NULL; - ISACStruct* decoderTransCoding = NULL; - - BottleNeckModel BN_data; - -#if !defined(NDEBUG) - fy = fopen("bit_rate.dat", "w"); - fclose(fy); - fy = fopen("bytes_frames.dat", "w"); - fclose(fy); -#endif - - /* Handling wrong input arguments in the command line */ - if ((argc < 3) || (argc > 17)) { - printf("\n\nWrong number of arguments or flag values.\n\n"); - - printf("\n"); - WebRtcIsac_version(version_number); - printf("iSAC-swb version %s \n\n", version_number); - - printf("Usage:\n\n"); - printf("%s [-I] bottleneck_value infile outfile \n\n", argv[0]); - printf("with:\n"); - printf("[-FS num] : sampling frequency in kHz, valid values are\n"); - printf(" 16 & 32, with 16 as default.\n"); - printf("[-I] : if -I option is specified, the coder will use\n"); - printf(" an instantaneous Bottleneck value. If not, it\n"); - printf(" will be an adaptive Bottleneck value.\n"); - printf("[-assign] : Use Assign API.\n"); - printf("[-B num] : the value of the bottleneck provided either\n"); - printf(" as a fixed value in bits/sec (e.g. 25000) or\n"); - printf(" read from a file (e.g. bottleneck.txt)\n"); - printf("[-INITRATE num] : Set a new value for initial rate. Note! 
Only\n"); - printf(" used in adaptive mode.\n"); - printf("[-FL num] : Set (initial) frame length in msec. Valid\n"); - printf(" lengths are 30 and 60 msec.\n"); - printf("[-FIXED_FL] : Frame length will be fixed to initial value.\n"); - printf("[-MAX num] : Set the limit for the payload size of iSAC\n"); - printf(" in bytes. Minimum 100 maximum 400.\n"); - printf("[-MAXRATE num] : Set the maxrate for iSAC in bits per second.\n"); - printf(" Minimum 32000, maximum 53400.\n"); - printf("[-F num] : if -F option is specified, the test function\n"); - printf(" will run the iSAC API fault scenario\n"); - printf(" specified by the supplied number.\n"); - printf(" F 1 - Call encoder prior to init encoder call\n"); - printf(" F 2 - Call decoder prior to init decoder call\n"); - printf(" F 3 - Call decoder prior to encoder call\n"); - printf(" F 4 - Call decoder with a too short coded\n"); - printf(" sequence\n"); - printf(" F 5 - Call decoder with a too long coded\n"); - printf(" sequence\n"); - printf(" F 6 - Call decoder with random bit stream\n"); - printf(" F 7 - Call init encoder/decoder at random\n"); - printf(" during a call\n"); - printf(" F 8 - Call encoder/decoder without having\n"); - printf(" allocated memory for encoder/decoder\n"); - printf(" instance\n"); - printf(" F 9 - Call decodeB without calling decodeA\n"); - printf(" F 10 - Call decodeB with garbage data\n"); - printf("[-PL num] : if -PL option is specified \n"); - printf("[-T rate file] : test trans-coding with target bottleneck\n"); - printf(" 'rate' bits/sec\n"); - printf(" the output file is written to 'file'\n"); - printf("[-LOOP num] : number of times to repeat coding the input\n"); - printf(" file for stress testing\n"); - // printf("[-CE num] : Test of APIs used by Conference Engine.\n"); - // printf(" CE 1 - getNewBitstream, getBWE \n"); - // printf(" (CE 2 - RESERVED for transcoding)\n"); - // printf(" CE 3 - getSendBWE, setSendBWE. 
\n"); - // printf("-L filename : write the logging info into file - // (appending)\n"); - printf("infile : Normal speech input file\n"); - printf("outfile : Speech output file\n"); - exit(0); - } - - /* Print version number */ - printf("-------------------------------------------------\n"); - WebRtcIsac_version(version_number); - printf("iSAC version %s \n\n", version_number); - - /* Loop over all command line arguments */ - CodingMode = 0; - testNum = 0; - // logFile = NULL; - char transCodingFileName[500]; - int16_t totFileLoop = 0; - int16_t numFileLoop = 0; - for (i = 1; i + 2 < static_cast(argc); i++) { - if (!strcmp("-LOOP", argv[i])) { - i++; - totFileLoop = (int16_t)atol(argv[i]); - if (totFileLoop <= 0) { - fprintf(stderr, "Invalid number of runs for the given input file, %d.", - totFileLoop); - exit(0); - } - } - - if (!strcmp("-T", argv[i])) { - doTransCoding = true; - i++; - rateTransCoding = atoi(argv[i]); - i++; - strcpy(transCodingFileName, argv[i]); - } - - /* Set Sampling Rate */ - if (!strcmp("-FS", argv[i])) { - i++; - sampFreqKHz = atoi(argv[i]); - } - - /* Instantaneous mode */ - if (!strcmp("-I", argv[i])) { - printf("Instantaneous BottleNeck\n"); - CodingMode = 1; - } - - /* Set (initial) bottleneck value */ - if (!strcmp("-INITRATE", argv[i])) { - rateBPS = atoi(argv[i + 1]); - setControlBWE = 1; - if ((rateBPS < 10000) || (rateBPS > 32000)) { - printf( - "\n%d is not a initial rate. Valid values are in the range " - "10000 to 32000.\n", - rateBPS); - exit(0); - } - printf("New initial rate: %d\n", rateBPS); - i++; - } - - /* Set (initial) framelength */ - if (!strcmp("-FL", argv[i])) { - framesize = atoi(argv[i + 1]); - if ((framesize != 30) && (framesize != 60)) { - printf( - "\n%d is not a valid frame length. 
Valid length are 30 and 60 " - "msec.\n", - framesize); - exit(0); - } - setControlBWE = 1; - printf("Frame Length: %d\n", framesize); - i++; - } - - /* Fixed frame length */ - if (!strcmp("-FIXED_FL", argv[i])) { - fixedFL = 1; - setControlBWE = 1; - printf("Fixed Frame Length\n"); - } - - /* Set maximum allowed payload size in bytes */ - if (!strcmp("-MAX", argv[i])) { - payloadSize = atoi(argv[i + 1]); - printf("Maximum Payload Size: %d\n", payloadSize); - i++; - } - - /* Set maximum rate in bytes */ - if (!strcmp("-MAXRATE", argv[i])) { - payloadRate = atoi(argv[i + 1]); - printf("Maximum Rate in kbps: %d\n", payloadRate); - i++; - } - - /* Test of fault scenarious */ - if (!strcmp("-F", argv[i])) { - testNum = atoi(argv[i + 1]); - printf("Fault test: %d\n", testNum); - if (testNum < 1 || testNum > 10) { - printf( - "\n%d is not a valid Fault Scenario number. Valid Fault " - "Scenarios are numbered 1-10.\n", - testNum); - exit(0); - } - i++; - } - - /* Packet loss test */ - if (!strcmp("-PL", argv[i])) { - if (isdigit(static_cast(*argv[i + 1]))) { - packetLossPercent = atoi(argv[i + 1]); - if ((packetLossPercent < 0) | (packetLossPercent > 100)) { - printf("\nInvalid packet loss perentage \n"); - exit(0); - } - if (packetLossPercent > 0) { - printf("Simulating %d %% of independent packet loss\n", - packetLossPercent); - } else { - printf("\nNo Packet Loss Is Simulated \n"); - } - } else { - plFile = fopen(argv[i + 1], "rb"); - if (plFile == NULL) { - printf("\n couldn't open the frameloss file: %s\n", argv[i + 1]); - exit(0); - } - printf("Simulating packet loss through the given channel file: %s\n", - argv[i + 1]); - } - i++; - } - - /* Random packetlosses */ - if (!strcmp("-rnd", argv[i])) { - srand((unsigned int)time(NULL)); - printf("Random pattern in lossed packets \n"); - } - - /* Use gns file */ - if (!strcmp("-G", argv[i])) { - sscanf(argv[i + 1], "%s", gns_file); - fp_gns = fopen(gns_file, "rb"); - if (fp_gns == NULL) { - printf("Cannot read file 
%s.\n", gns_file); - exit(0); - } - i++; - } - - // make it with '-B' - /* Get Bottleneck value */ - if (!strcmp("-B", argv[i])) { - i++; - bottleneck = atoi(argv[i]); - if (bottleneck == 0) { - sscanf(argv[i], "%s", bottleneck_file); - f_bn = fopen(bottleneck_file, "rb"); - if (f_bn == NULL) { - printf( - "Error No value provided for BottleNeck and cannot read file " - "%s.\n", - bottleneck_file); - exit(0); - } else { - printf("reading bottleneck rates from file %s\n\n", bottleneck_file); - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - /* Set pointer to beginning of file */ - fseek(f_bn, 0L, SEEK_SET); - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - exit(0); - } - } - - /* Bottleneck is a cosine function - * Matlab code for writing the bottleneck file: - * BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi); - * fid = fopen('bottleneck.txt', 'wb'); - * fprintf(fid, '%d\n', BottleNeck_10ms); fclose(fid); - */ - } - } else { - printf("\nfixed bottleneck rate of %d bits/s\n\n", bottleneck); - } - } - /* Run Conference Engine APIs */ - // Do not test it in the first release - // - // if(!strcmp ("-CE", argv[i])) - // { - // testCE = atoi(argv[i + 1]); - // if(testCE==1) - // { - // i++; - // scale = (float)atof( argv[i+1] ); - // } - // else if(testCE == 2) - // { - // printf("\nCE-test 2 (transcoding) not implemented.\n"); - // exit(0); - // } - // else if(testCE < 1 || testCE > 3) - // { - // printf("\n%d is not a valid CE-test number. 
Valid CE tests - // are 1-3.\n", testCE); - // exit(0); - // } - // printf("CE-test number: %d\n", testCE); - // i++; - // } - } - - if (CodingMode == 0) { - printf("\nAdaptive BottleNeck\n"); - } - - switch (sampFreqKHz) { - case 16: { - printf("iSAC Wideband.\n"); - samplesIn10Ms = FRAMESAMPLES_10ms; - break; - } - case 32: { - printf("iSAC Supper-Wideband.\n"); - samplesIn10Ms = SWBFRAMESAMPLES_10ms; - break; - } - default: - printf("Unsupported sampling frequency %d kHz", sampFreqKHz); - exit(0); - } - - /* Get Input and Output files */ - sscanf(argv[argc - 2], "%s", inname); - sscanf(argv[argc - 1], "%s", outname); - printf("\nInput file: %s\n", inname); - printf("Output file: %s\n\n", outname); - if ((inp = fopen(inname, "rb")) == NULL) { - printf(" Error iSAC Cannot read file %s.\n", inname); - std::cout << std::flush; - exit(1); - } - - if ((outp = fopen(outname, "wb")) == NULL) { - printf(" Error iSAC Cannot write file %s.\n", outname); - std::cout << std::flush; - getc(stdin); - exit(1); - } - if (VADusage) { - if ((vadp = fopen(vadfile, "rb")) == NULL) { - printf(" Error iSAC Cannot read file %s.\n", vadfile); - std::cout << std::flush; - exit(1); - } - } - - if ((bandwidthp = fopen("bwe.pcm", "wb")) == NULL) { - printf(" Error iSAC Cannot read file %s.\n", "bwe.pcm"); - std::cout << std::flush; - exit(1); - } - - starttime = clock() / (double)CLOCKS_PER_SEC; /* Runtime statistics */ - - /* Initialize the ISAC and BN structs */ - if (testNum != 8) { - err = WebRtcIsac_Create(&ISAC_main_inst); - WebRtcIsac_SetEncSampRate(ISAC_main_inst, sampFreqKHz * 1000); - WebRtcIsac_SetDecSampRate(ISAC_main_inst, - sampFreqKHz >= 32 ? 
32000 : 16000); - /* Error check */ - if (err < 0) { - printf("\n\n Error in create.\n\n"); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - BN_data.arrival_time = 0; - BN_data.sample_count = 0; - BN_data.rtp_number = 0; - - /* Initialize encoder and decoder */ - framecnt = 0; - endfile = 0; - - if (doTransCoding) { - WebRtcIsac_Create(&decoderTransCoding); - WebRtcIsac_SetEncSampRate(decoderTransCoding, sampFreqKHz * 1000); - WebRtcIsac_SetDecSampRate(decoderTransCoding, - sampFreqKHz >= 32 ? 32000 : 16000); - WebRtcIsac_DecoderInit(decoderTransCoding); - transCodingFile = fopen(transCodingFileName, "wb"); - if (transCodingFile == NULL) { - printf("Could not open %s to output trans-coding.\n", - transCodingFileName); - exit(0); - } - strcat(transCodingFileName, ".bit"); - transcodingBitstream = fopen(transCodingFileName, "wb"); - if (transcodingBitstream == NULL) { - printf("Could not open %s to write the bit-stream of transcoder.\n", - transCodingFileName); - exit(0); - } - } - - if (testNum != 1) { - if (WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode) < 0) { - printf("Error could not initialize the encoder \n"); - std::cout << std::flush; - return 0; - } - } - if (testNum != 2) - WebRtcIsac_DecoderInit(ISAC_main_inst); - if (CodingMode == 1) { - err = WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in initialization (control): %d.\n\n", errtype); - std::cout << std::flush; - if (testNum == 0) { - exit(EXIT_FAILURE); - } - } - } - - if ((setControlBWE) && (CodingMode == 0)) { - err = WebRtcIsac_ControlBwe(ISAC_main_inst, rateBPS, framesize, fixedFL); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - - printf("\n\n Error in Control BWE: %d.\n\n", errtype); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - - if (payloadSize != 0) { - err = 
WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadSize); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in SetMaxPayloadSize: %d.\n\n", errtype); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - if (payloadRate != 0) { - err = WebRtcIsac_SetMaxRate(ISAC_main_inst, payloadRate); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in SetMaxRateInBytes: %d.\n\n", errtype); - std::cout << std::flush; - exit(EXIT_FAILURE); - } - } - - *speechType = 1; - - std::cout << "\n" << std::flush; - - length_file = 0; - int16_t bnIdxTC = 0; - int16_t jitterInfoTC = 0; - while (endfile == 0) { - /* Call init functions at random, fault test number 7 */ - if (testNum == 7 && (rand() % 2 == 0)) { - err = WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode); - /* Error check */ - if (err < 0) { - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - printf("\n\n Error in encoderinit: %d.\n\n", errtype); - std::cout << std::flush; - } - - WebRtcIsac_DecoderInit(ISAC_main_inst); - } - - cur_framesmpls = 0; - while (1) { - int stream_len_int = 0; - - /* Read 10 ms speech block */ - endfile = readframe(shortdata, inp, samplesIn10Ms); - - if (endfile) { - numFileLoop++; - if (numFileLoop < totFileLoop) { - rewind(inp); - framecnt = 0; - fprintf(stderr, "\n"); - endfile = readframe(shortdata, inp, samplesIn10Ms); - } - } - - if (testNum == 7) { - srand((unsigned int)time(NULL)); - } - - /* iSAC encoding */ - if (!(testNum == 3 && framecnt == 0)) { - stream_len_int = - WebRtcIsac_Encode(ISAC_main_inst, shortdata, (uint8_t*)streamdata); - if ((payloadSize != 0) && (stream_len_int > payloadSize)) { - if (testNum == 0) { - printf("\n\n"); - } - - printf("\nError: Streamsize out of range %d\n", - stream_len_int - payloadSize); - std::cout << std::flush; - } - - WebRtcIsac_GetUplinkBw(ISAC_main_inst, &sendBN); - - if (stream_len_int > 
0) { - if (doTransCoding) { - int16_t indexStream; - uint8_t auxUW8; - - /******************** Main Transcoding stream ********************/ - WebRtcIsac_GetDownLinkBwIndex(ISAC_main_inst, &bnIdxTC, - &jitterInfoTC); - int streamLenTransCoding_int = WebRtcIsac_GetNewBitStream( - ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding, - streamDataTransCoding, false); - if (streamLenTransCoding_int < 0) { - fprintf(stderr, "Error in trans-coding\n"); - exit(0); - } - streamLenTransCoding = - static_cast(streamLenTransCoding_int); - auxUW8 = (uint8_t)(((streamLenTransCoding & 0xFF00) >> 8) & 0x00FF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, transcodingBitstream) != - 1) { - return -1; - } - - auxUW8 = (uint8_t)(streamLenTransCoding & 0x00FF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, transcodingBitstream) != - 1) { - return -1; - } - - if (fwrite(streamDataTransCoding, sizeof(uint8_t), - streamLenTransCoding, - transcodingBitstream) != streamLenTransCoding) { - return -1; - } - - WebRtcIsac_ReadBwIndex(streamDataTransCoding, &indexStream); - if (indexStream != bnIdxTC) { - fprintf(stderr, - "Error in inserting Bandwidth index into transcoding " - "stream.\n"); - exit(0); - } - numTransCodingBytes += streamLenTransCoding; - } - } - } else { - break; - } - - if (stream_len_int < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - fprintf(stderr, "Error in encoder: %d.\n", errtype); - std::cout << std::flush; - exit(0); - } - stream_len = static_cast(stream_len_int); - - cur_framesmpls += samplesIn10Ms; - /* exit encoder loop if the encoder returned a bitstream */ - if (stream_len != 0) - break; - } - - /* read next bottleneck rate */ - if (f_bn != NULL) { - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - /* Set pointer to beginning of file */ - fseek(f_bn, 0L, SEEK_SET); - if (fscanf(f_bn, "%d", &bottleneck) == EOF) { - exit(0); - } - } - if (CodingMode == 1) { - WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize); - } 
- } - - length_file += cur_framesmpls; - if (cur_framesmpls == (3 * samplesIn10Ms)) { - maxStreamLen30 = - (stream_len > maxStreamLen30) ? stream_len : maxStreamLen30; - } else { - maxStreamLen60 = - (stream_len > maxStreamLen60) ? stream_len : maxStreamLen60; - } - - if (!lostFrame) { - lostFrame = ((rand() % 100) < packetLossPercent); - } else { - lostFrame = false; - } - - // RED. - if (lostFrame) { - int stream_len_int = WebRtcIsac_GetRedPayload( - ISAC_main_inst, reinterpret_cast(streamdata)); - if (stream_len_int < 0) { - fprintf(stderr, "Error getting RED payload\n"); - exit(0); - } - stream_len = static_cast(stream_len_int); - - if (doTransCoding) { - int streamLenTransCoding_int = WebRtcIsac_GetNewBitStream( - ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding, - streamDataTransCoding, true); - if (streamLenTransCoding_int < 0) { - fprintf(stderr, "Error in RED trans-coding\n"); - exit(0); - } - streamLenTransCoding = static_cast(streamLenTransCoding_int); - } - } - - /* make coded sequence to short be inreasing */ - /* the length the decoder expects */ - if (testNum == 4) { - stream_len += 10; - } - - /* make coded sequence to long be decreasing */ - /* the length the decoder expects */ - if (testNum == 5) { - stream_len -= 10; - } - - if (testNum == 6) { - srand((unsigned int)time(NULL)); - for (i = 0; i < stream_len; i++) { - streamdata[i] = rand(); - } - } - - if (VADusage) { - readframe(vaddata, vadp, samplesIn10Ms * 3); - } - - /* simulate packet handling through NetEq and the modem */ - if (!(testNum == 3 && framecnt == 0)) { - get_arrival_time(cur_framesmpls, stream_len, bottleneck, &BN_data, - sampFreqKHz * 1000, sampFreqKHz * 1000); - } - - if (VADusage && (framecnt > 10 && vaddata[0] == 0)) { - BN_data.rtp_number--; - } else { - /* Error test number 10, garbage data */ - if (testNum == 10) { - /* Test to run decoder with garbage data */ - for (i = 0; i < stream_len; i++) { - streamdata[i] = (short)(streamdata[i]) + (short)rand(); - } - } - - 
if (testNum != 9) { - err = WebRtcIsac_UpdateBwEstimate( - ISAC_main_inst, reinterpret_cast(streamdata), - stream_len, BN_data.rtp_number, BN_data.sample_count, - BN_data.arrival_time); - - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - if (testNum == 0) { - printf("\n\n"); - } - - printf("Error: in decoder: %d.", errtype); - std::cout << std::flush; - if (testNum == 0) { - printf("\n\n"); - } - } - } - - /* Call getFramelen, only used here for function test */ - err = WebRtcIsac_ReadFrameLen( - ISAC_main_inst, reinterpret_cast(streamdata), &FL); - if (err < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - if (testNum == 0) { - printf("\n\n"); - } - printf(" Error: in getFrameLen %d.", errtype); - std::cout << std::flush; - if (testNum == 0) { - printf("\n\n"); - } - } - - // iSAC decoding - - if (lostFrame) { - declen = WebRtcIsac_DecodeRcu( - ISAC_main_inst, reinterpret_cast(streamdata), - stream_len, decoded, speechType); - - if (doTransCoding) { - declenTC = - WebRtcIsac_DecodeRcu(decoderTransCoding, streamDataTransCoding, - streamLenTransCoding, decodedTC, speechType); - } - } else { - declen = WebRtcIsac_Decode(ISAC_main_inst, - reinterpret_cast(streamdata), - stream_len, decoded, speechType); - if (doTransCoding) { - declenTC = - WebRtcIsac_Decode(decoderTransCoding, streamDataTransCoding, - streamLenTransCoding, decodedTC, speechType); - } - } - - if (declen < 0) { - /* exit if returned with error */ - errtype = WebRtcIsac_GetErrorCode(ISAC_main_inst); - if (testNum == 0) { - printf("\n\n"); - } - printf(" Error: in decoder %d.", errtype); - std::cout << std::flush; - if (testNum == 0) { - printf("\n\n"); - } - } - - if (declenTC < 0) { - if (testNum == 0) { - printf("\n\n"); - } - printf(" Error: in decoding the transcoded stream"); - std::cout << std::flush; - if (testNum == 0) { - printf("\n\n"); - } - } - } - /* Write decoded speech frame to file */ 
- if ((declen > 0) && (numFileLoop == 0)) { - if (fwrite(decoded, sizeof(int16_t), declen, outp) != - static_cast(declen)) { - return -1; - } - } - - if ((declenTC > 0) && (numFileLoop == 0)) { - if (fwrite(decodedTC, sizeof(int16_t), declen, transCodingFile) != - static_cast(declen)) { - return -1; - } - } - - fprintf(stderr, "\rframe = %5d ", framecnt); - fflush(stderr); - framecnt++; - - /* Error test number 10, garbage data */ - // if (testNum == 10) - // { - // /* Test to run decoder with garbage data */ - // if ((seedfile = fopen(SEED_FILE, "a+t")) == NULL) { - // fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE); - // } else { - // fprintf(seedfile, "ok\n\n"); - // fclose(seedfile); - // } - // } - /* Error test number 10, garbage data */ - // if (testNum == 10) { - // /* Test to run decoder with garbage data */ - // for (i = 0; i < stream_len; i++) { - // streamdata[i] = (short) (streamdata[i] + (short) rand()); - // } - // } - - totalsmpls += declen; - totalbits += 8 * stream_len; -#if !defined(NDEBUG) - kbps = ((double)sampFreqKHz * 1000.) 
/ ((double)cur_framesmpls) * 8.0 * - stream_len / 1000.0; // kbits/s - fy = fopen("bit_rate.dat", "a"); - fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps); - fclose(fy); - -#endif - } - printf("\n"); - printf("total bits = %zu bits\n", totalbits); - printf("measured average bitrate = %0.3f kbits/s\n", - (double)totalbits * (sampFreqKHz) / totalsmpls); - if (doTransCoding) { - printf("Transcoding average bit-rate = %0.3f kbps\n", - (double)numTransCodingBytes * 8.0 * (sampFreqKHz) / totalsmpls); - fclose(transCodingFile); - } - printf("\n"); - - /* Runtime statistics */ - runtime = (double)(clock() / (double)CLOCKS_PER_SEC - starttime); - length_file = length_file / (sampFreqKHz * 1000.); - - printf("\n\nLength of speech file: %.1f s\n", length_file); - printf("Time to run iSAC: %.2f s (%.2f %% of realtime)\n\n", runtime, - (100 * runtime / length_file)); - - if (maxStreamLen30 != 0) { - printf( - "Maximum payload size 30ms Frames %zu" - " bytes (%0.3f kbps)\n", - maxStreamLen30, maxStreamLen30 * 8 / 30.); - } - if (maxStreamLen60 != 0) { - printf( - "Maximum payload size 60ms Frames %zu" - " bytes (%0.3f kbps)\n", - maxStreamLen60, maxStreamLen60 * 8 / 60.); - } - // fprintf(stderr, "\n"); - - fprintf(stderr, " %.1f s", length_file); - fprintf(stderr, " %0.1f kbps", - (double)totalbits * (sampFreqKHz) / totalsmpls); - if (maxStreamLen30 != 0) { - fprintf(stderr, " plmax-30ms %zu bytes (%0.0f kbps)", maxStreamLen30, - maxStreamLen30 * 8 / 30.); - } - if (maxStreamLen60 != 0) { - fprintf(stderr, " plmax-60ms %zu bytes (%0.0f kbps)", maxStreamLen60, - maxStreamLen60 * 8 / 60.); - } - if (doTransCoding) { - fprintf(stderr, " transcoding rate %.0f kbps", - (double)numTransCodingBytes * 8.0 * (sampFreqKHz) / totalsmpls); - } - - fclose(inp); - fclose(outp); - WebRtcIsac_Free(ISAC_main_inst); - - exit(0); -} diff --git a/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc 
b/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc deleted file mode 100644 index 549163fc44..0000000000 --- a/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc +++ /dev/null @@ -1,425 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// SwitchingSampRate.cpp : Defines the entry point for the console -// application. -// - -#include - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -#define MAX_FILE_NAME 500 -#define MAX_NUM_CLIENTS 2 - -#define NUM_CLIENTS 2 - -int main(int argc, char* argv[]) { - char fileNameWB[MAX_FILE_NAME]; - char fileNameSWB[MAX_FILE_NAME]; - - char outFileName[MAX_NUM_CLIENTS][MAX_FILE_NAME]; - - FILE* inFile[MAX_NUM_CLIENTS]; - FILE* outFile[MAX_NUM_CLIENTS]; - - ISACStruct* codecInstance[MAX_NUM_CLIENTS]; - int32_t resamplerState[MAX_NUM_CLIENTS][8]; - - int encoderSampRate[MAX_NUM_CLIENTS]; - - int minBn = 16000; - int maxBn = 56000; - - int bnWB = 32000; - int bnSWB = 56000; - - strcpy(outFileName[0], "switchSampRate_out1.pcm"); - strcpy(outFileName[1], "switchSampRate_out2.pcm"); - - short clientCntr; - - size_t lenEncodedInBytes[MAX_NUM_CLIENTS]; - unsigned int lenAudioIn10ms[MAX_NUM_CLIENTS]; - size_t lenEncodedInBytesTmp[MAX_NUM_CLIENTS]; - unsigned int lenAudioIn10msTmp[MAX_NUM_CLIENTS]; - BottleNeckModel* packetData[MAX_NUM_CLIENTS]; - - char versionNumber[100]; - short samplesIn10ms[MAX_NUM_CLIENTS]; - int bottleneck[MAX_NUM_CLIENTS]; - 
- printf("\n\n"); - printf("____________________________________________\n\n"); - WebRtcIsac_version(versionNumber); - printf(" iSAC-swb version %s\n", versionNumber); - printf("____________________________________________\n"); - - fileNameWB[0] = '\0'; - fileNameSWB[0] = '\0'; - - char myFlag[20]; - strcpy(myFlag, "-wb"); - // READ THE WIDEBAND AND SUPER-WIDEBAND FILE NAMES - if (readParamString(argc, argv, myFlag, fileNameWB, MAX_FILE_NAME) <= 0) { - printf("No wideband file is specified"); - } - - strcpy(myFlag, "-swb"); - if (readParamString(argc, argv, myFlag, fileNameSWB, MAX_FILE_NAME) <= 0) { - printf("No super-wideband file is specified"); - } - - // THE FIRST CLIENT STARTS IN WIDEBAND - encoderSampRate[0] = 16000; - OPEN_FILE_RB(inFile[0], fileNameWB); - - // THE SECOND CLIENT STARTS IN SUPER-WIDEBAND - encoderSampRate[1] = 32000; - OPEN_FILE_RB(inFile[1], fileNameSWB); - - strcpy(myFlag, "-I"); - short codingMode = readSwitch(argc, argv, myFlag); - - for (clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++) { - codecInstance[clientCntr] = NULL; - - printf("\n"); - printf("Client %d\n", clientCntr + 1); - printf("---------\n"); - printf("Starting %s", (encoderSampRate[clientCntr] == 16000) - ? "wideband" - : "super-wideband"); - - // Open output File Name - OPEN_FILE_WB(outFile[clientCntr], outFileName[clientCntr]); - printf("Output File...................... %s\n", outFileName[clientCntr]); - - samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10; - - if (codingMode == 1) { - bottleneck[clientCntr] = (clientCntr) ? bnSWB : bnWB; - } else { - bottleneck[clientCntr] = (clientCntr) ? minBn : maxBn; - } - - printf("Bottleneck....................... %0.3f kbits/sec \n", - bottleneck[clientCntr] / 1000.0); - - // coding-mode - printf( - "Encoding Mode.................... %s\n", - (codingMode == 1) ? 
"Channel-Independent (Instantaneous)" : "Adaptive"); - - lenEncodedInBytes[clientCntr] = 0; - lenAudioIn10ms[clientCntr] = 0; - lenEncodedInBytesTmp[clientCntr] = 0; - lenAudioIn10msTmp[clientCntr] = 0; - - packetData[clientCntr] = (BottleNeckModel*)new (BottleNeckModel); - if (packetData[clientCntr] == NULL) { - printf("Could not allocate memory for packetData \n"); - return -1; - } - memset(packetData[clientCntr], 0, sizeof(BottleNeckModel)); - memset(resamplerState[clientCntr], 0, sizeof(int32_t) * 8); - } - - for (clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++) { - // Create - if (WebRtcIsac_Create(&codecInstance[clientCntr])) { - printf("Could not creat client %d\n", clientCntr + 1); - return -1; - } - - WebRtcIsac_SetEncSampRate(codecInstance[clientCntr], - encoderSampRate[clientCntr]); - - WebRtcIsac_SetDecSampRate( - codecInstance[clientCntr], - encoderSampRate[clientCntr + (1 - ((clientCntr & 1) << 1))]); - - // Initialize Encoder - if (WebRtcIsac_EncoderInit(codecInstance[clientCntr], codingMode) < 0) { - printf("Could not initialize client, %d\n", clientCntr + 1); - return -1; - } - - WebRtcIsac_DecoderInit(codecInstance[clientCntr]); - - // setup Rate if in Instantaneous mode - if (codingMode != 0) { - // ONLY Clients who are not in Adaptive mode - if (WebRtcIsac_Control(codecInstance[clientCntr], bottleneck[clientCntr], - 30) < 0) { - printf("Could not setup bottleneck and frame-size for client %d\n", - clientCntr + 1); - return -1; - } - } - } - - size_t streamLen; - short numSamplesRead; - size_t lenDecodedAudio; - short senderIdx; - short receiverIdx; - - printf("\n"); - short num10ms[MAX_NUM_CLIENTS]; - memset(num10ms, 0, sizeof(short) * MAX_NUM_CLIENTS); - FILE* arrivalTimeFile1 = fopen("arrivalTime1.dat", "wb"); - FILE* arrivalTimeFile2 = fopen("arrivalTime2.dat", "wb"); - short numPrint[MAX_NUM_CLIENTS]; - memset(numPrint, 0, sizeof(short) * MAX_NUM_CLIENTS); - - // Audio Buffers - short silence10ms[10 * 32]; - memset(silence10ms, 0, 320 
* sizeof(short)); - short audioBuff10ms[10 * 32]; - short audioBuff60ms[60 * 32]; - short resampledAudio60ms[60 * 32]; - - unsigned short bitStream[600 + 600]; - short speechType[1]; - - short numSampFreqChanged = 0; - while (numSampFreqChanged < 10) { - for (clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++) { - // Encoding/decoding for this pair of clients, if there is - // audio for any of them - // if(audioLeft[clientCntr] || audioLeft[clientCntr + 1]) - //{ - // for(pairCntr = 0; pairCntr < 2; pairCntr++) - //{ - senderIdx = clientCntr; // + pairCntr; - receiverIdx = 1 - clientCntr; // + (1 - pairCntr); - - // if(num10ms[senderIdx] > 6) - //{ - // printf("Too many frames read for client %d", - // senderIdx + 1); - // return -1; - //} - - numSamplesRead = - (short)fread(audioBuff10ms, sizeof(short), samplesIn10ms[senderIdx], - inFile[senderIdx]); - if (numSamplesRead != samplesIn10ms[senderIdx]) { - // file finished switch encoder sampling frequency. - printf("Changing Encoder Sampling frequency in client %d to ", - senderIdx + 1); - fclose(inFile[senderIdx]); - numSampFreqChanged++; - if (encoderSampRate[senderIdx] == 16000) { - printf("super-wideband.\n"); - OPEN_FILE_RB(inFile[senderIdx], fileNameSWB); - encoderSampRate[senderIdx] = 32000; - } else { - printf("wideband.\n"); - OPEN_FILE_RB(inFile[senderIdx], fileNameWB); - encoderSampRate[senderIdx] = 16000; - } - WebRtcIsac_SetEncSampRate(codecInstance[senderIdx], - encoderSampRate[senderIdx]); - WebRtcIsac_SetDecSampRate(codecInstance[receiverIdx], - encoderSampRate[senderIdx]); - - samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10; - - numSamplesRead = - (short)fread(audioBuff10ms, sizeof(short), samplesIn10ms[senderIdx], - inFile[senderIdx]); - if (numSamplesRead != samplesIn10ms[senderIdx]) { - printf(" File %s for client %d has not enough audio\n", - (encoderSampRate[senderIdx] == 16000) ? 
"wideband" - : "super-wideband", - senderIdx + 1); - return -1; - } - } - num10ms[senderIdx]++; - - // sanity check - // if(num10ms[senderIdx] > 6) - //{ - // printf("Client %d has got more than 60 ms audio and encoded no - // packet.\n", - // senderIdx); - // return -1; - //} - - // Encode - - int streamLen_int = WebRtcIsac_Encode(codecInstance[senderIdx], - audioBuff10ms, (uint8_t*)bitStream); - int16_t ggg; - if (streamLen_int > 0) { - if ((WebRtcIsac_ReadFrameLen( - codecInstance[receiverIdx], - reinterpret_cast(bitStream), &ggg)) < 0) - printf("ERROR\n"); - } - - // Sanity check - if (streamLen_int < 0) { - printf(" Encoder error in client %d \n", senderIdx + 1); - return -1; - } - streamLen = static_cast(streamLen_int); - - if (streamLen > 0) { - // Packet generated; model sending through a channel, do bandwidth - // estimation at the receiver and decode. - lenEncodedInBytes[senderIdx] += streamLen; - lenAudioIn10ms[senderIdx] += (unsigned int)num10ms[senderIdx]; - lenEncodedInBytesTmp[senderIdx] += streamLen; - lenAudioIn10msTmp[senderIdx] += (unsigned int)num10ms[senderIdx]; - - // Print after ~5 sec. - if (lenAudioIn10msTmp[senderIdx] >= 100) { - numPrint[senderIdx]++; - printf(" %d, %6.3f => %6.3f ", senderIdx + 1, - bottleneck[senderIdx] / 1000.0, - lenEncodedInBytesTmp[senderIdx] * 0.8 / - lenAudioIn10msTmp[senderIdx]); - - if (codingMode == 0) { - int32_t bn; - WebRtcIsac_GetUplinkBw(codecInstance[senderIdx], &bn); - printf("[%d] ", bn); - } - // int16_t rateIndexLB; - // int16_t rateIndexUB; - // WebRtcIsac_GetDownLinkBwIndex(codecInstance[receiverIdx], - // &rateIndexLB, &rateIndexUB); - // printf(" (%2d, %2d) ", rateIndexLB, rateIndexUB); - - std::cout << std::flush; - lenEncodedInBytesTmp[senderIdx] = 0; - lenAudioIn10msTmp[senderIdx] = 0; - // if(senderIdx == (NUM_CLIENTS - 1)) - //{ - printf(" %0.1f \n", lenAudioIn10ms[senderIdx] * 10. / 1000); - //} - - // After ~20 sec change the bottleneck. 
- // if((numPrint[senderIdx] == 4) && (codingMode == 0)) - // { - // numPrint[senderIdx] = 0; - // if(codingMode == 0) - // { - // int newBottleneck = bottleneck[senderIdx] + - // (bottleneckChange[senderIdx] * 1000); - - // if(bottleneckChange[senderIdx] > 0) - // { - // if(newBottleneck >maxBn) - // { - // bottleneckChange[senderIdx] = -1; - // newBottleneck = bottleneck[senderIdx] + - // (bottleneckChange[senderIdx] * 1000); - // if(newBottleneck > minBn) - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // else - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // else - // { - // if(newBottleneck < minBn) - // { - // bottleneckChange[senderIdx] = 1; - // newBottleneck = bottleneck[senderIdx] + - // (bottleneckChange[senderIdx] * 1000); - // if(newBottleneck < maxBn) - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // else - // { - // bottleneck[senderIdx] = newBottleneck; - // } - // } - // } - // } - } - - // model a channel of given bottleneck, to get the receive timestamp - get_arrival_time(num10ms[senderIdx] * samplesIn10ms[senderIdx], - streamLen, bottleneck[senderIdx], - packetData[senderIdx], - encoderSampRate[senderIdx] * 1000, - encoderSampRate[senderIdx] * 1000); - - // Write the arrival time. 
- if (senderIdx == 0) { - if (fwrite(&(packetData[senderIdx]->arrival_time), - sizeof(unsigned int), 1, arrivalTimeFile1) != 1) { - return -1; - } - } else { - if (fwrite(&(packetData[senderIdx]->arrival_time), - sizeof(unsigned int), 1, arrivalTimeFile2) != 1) { - return -1; - } - } - - // BWE - if (WebRtcIsac_UpdateBwEstimate( - codecInstance[receiverIdx], - reinterpret_cast(bitStream), streamLen, - packetData[senderIdx]->rtp_number, - packetData[senderIdx]->sample_count, - packetData[senderIdx]->arrival_time) < 0) { - printf(" BWE Error at client %d \n", receiverIdx + 1); - return -1; - } - /**/ - // Decode - int lenDecodedAudio_int = - WebRtcIsac_Decode(codecInstance[receiverIdx], - reinterpret_cast(bitStream), - streamLen, audioBuff60ms, speechType); - if (lenDecodedAudio_int < 0) { - printf(" Decoder error in client %d \n", receiverIdx + 1); - return -1; - } - lenDecodedAudio = static_cast(lenDecodedAudio_int); - - if (encoderSampRate[senderIdx] == 16000) { - WebRtcSpl_UpsampleBy2(audioBuff60ms, lenDecodedAudio, - resampledAudio60ms, - resamplerState[receiverIdx]); - if (fwrite(resampledAudio60ms, sizeof(short), lenDecodedAudio << 1, - outFile[receiverIdx]) != lenDecodedAudio << 1) { - return -1; - } - } else { - if (fwrite(audioBuff60ms, sizeof(short), lenDecodedAudio, - outFile[receiverIdx]) != lenDecodedAudio) { - return -1; - } - } - num10ms[senderIdx] = 0; - } - //} - //} - } - } - return 0; -} diff --git a/modules/audio_coding/codecs/isac/main/test/simpleKenny.c b/modules/audio_coding/codecs/isac/main/test/simpleKenny.c deleted file mode 100644 index 4446ff7806..0000000000 --- a/modules/audio_coding/codecs/isac/main/test/simpleKenny.c +++ /dev/null @@ -1,461 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* kenny.c - Main function for the iSAC coder */ - -#include -#include -#include -#include - -#ifdef WIN32 -#include "windows.h" -#ifndef CLOCKS_PER_SEC -#define CLOCKS_PER_SEC 1000 -#endif -#endif - -#include - -/* include API */ -#include "modules/audio_coding/codecs/isac/main/include/isac.h" -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -/* max number of samples per frame (= 60 ms frame) */ -#define MAX_FRAMESAMPLES_SWB 1920 -/* number of samples per 10ms frame */ -#define FRAMESAMPLES_SWB_10ms 320 -#define FRAMESAMPLES_WB_10ms 160 - -/* sampling frequency (Hz) */ -#define FS_SWB 32000 -#define FS_WB 16000 - -unsigned long framecnt = 0; - -int main(int argc, char* argv[]) { - //--- File IO ---- - FILE* inp; - FILE* outp; - char inname[500]; - char outname[500]; - - /* Runtime statistics */ - double rate; - double rateRCU; - size_t totalbits = 0; - unsigned long totalBitsRCU = 0; - unsigned long totalsmpls = 0; - - int32_t bottleneck = 39; - int frameSize = 30; /* ms */ - int16_t codingMode = 1; - int16_t shortdata[FRAMESAMPLES_SWB_10ms]; - int16_t decoded[MAX_FRAMESAMPLES_SWB]; - int16_t speechType[1]; - int16_t payloadLimit; - int32_t rateLimit; - ISACStruct* ISAC_main_inst; - - size_t stream_len = 0; - int declen = 0; - int16_t err; - int cur_framesmpls; - int endfile; -#ifdef WIN32 - double length_file; - double runtime; - char outDrive[10]; - char outPath[500]; - char outPrefix[500]; - char outSuffix[500]; - char bitrateFileName[500]; - FILE* bitrateFile; - double starttime; - double rateLB = 0; - double rateUB = 0; -#endif - FILE* histFile; - FILE* averageFile; - int sampFreqKHz; - int samplesIn10Ms; - size_t maxStreamLen = 0; - char histFileName[500]; - char averageFileName[500]; - unsigned int hist[600]; - double tmpSumStreamLen = 0; - 
unsigned int packetCntr = 0; - unsigned int lostPacketCntr = 0; - uint8_t payload[1200]; - uint8_t payloadRCU[1200]; - uint16_t packetLossPercent = 0; - int16_t rcuStreamLen = 0; - int onlyEncode; - int onlyDecode; - - BottleNeckModel packetData; - packetData.arrival_time = 0; - packetData.sample_count = 0; - packetData.rtp_number = 0; - memset(hist, 0, sizeof(hist)); - - /* handling wrong input arguments in the command line */ - if (argc < 5) { - printf("\n\nWrong number of arguments or flag values.\n\n"); - - printf("Usage:\n\n"); - printf("%s infile outfile -bn bottleneck [options]\n\n", argv[0]); - printf("with:\n"); - printf("-I.............. indicates encoding in instantaneous mode.\n"); - printf("-bn bottleneck.. the value of the bottleneck in bit/sec, e.g.\n"); - printf(" 39742, in instantaneous (channel-independent)\n"); - printf(" mode.\n\n"); - printf("infile.......... Normal speech input file\n\n"); - printf("outfile......... Speech output file\n\n"); - printf("OPTIONS\n"); - printf("-------\n"); - printf("-fs sampFreq.... sampling frequency of codec 16 or 32 (default)\n"); - printf(" kHz.\n"); - printf("-plim payloadLim payload limit in bytes, default is the maximum\n"); - printf(" possible.\n"); - printf("-rlim rateLim... rate limit in bits/sec, default is the maximum\n"); - printf(" possible.\n"); - printf("-h file......... record histogram and *append* to 'file'.\n"); - printf("-ave file....... record average rate of 3 sec intervales and\n"); - printf(" *append* to 'file'.\n"); - printf("-ploss.......... packet-loss percentage.\n"); - printf("-enc............ do only encoding and store the bit-stream\n"); - printf("-dec............ 
the input file is a bit-stream, decode it.\n\n"); - printf("Example usage:\n\n"); - printf("%s speechIn.pcm speechOut.pcm -B 40000 -fs 32\n\n", argv[0]); - - exit(0); - } - - /* Get Bottleneck value */ - bottleneck = readParamInt(argc, argv, "-bn", 50000); - fprintf(stderr, "\nfixed bottleneck rate of %d bits/s\n\n", bottleneck); - - /* Get Input and Output files */ - sscanf(argv[1], "%s", inname); - sscanf(argv[2], "%s", outname); - codingMode = readSwitch(argc, argv, "-I"); - sampFreqKHz = (int16_t)readParamInt(argc, argv, "-fs", 32); - if (readParamString(argc, argv, "-h", histFileName, 500) > 0) { - histFile = fopen(histFileName, "a"); - if (histFile == NULL) { - printf("cannot open hist file %s", histFileName); - exit(0); - } - } else { - // NO recording of hitstogram - histFile = NULL; - } - - packetLossPercent = readParamInt(argc, argv, "-ploss", 0); - - if (readParamString(argc, argv, "-ave", averageFileName, 500) > 0) { - averageFile = fopen(averageFileName, "a"); - if (averageFile == NULL) { - printf("cannot open file to write rate %s", averageFileName); - exit(0); - } - } else { - averageFile = NULL; - } - - onlyEncode = readSwitch(argc, argv, "-enc"); - onlyDecode = readSwitch(argc, argv, "-dec"); - - switch (sampFreqKHz) { - case 16: { - samplesIn10Ms = 160; - break; - } - case 32: { - samplesIn10Ms = 320; - break; - } - default: - printf("A sampling frequency of %d kHz is not supported, valid values are" - " 8 and 16.\n", sampFreqKHz); - exit(-1); - } - payloadLimit = (int16_t)readParamInt(argc, argv, "-plim", 400); - rateLimit = readParamInt(argc, argv, "-rlim", 106800); - - if ((inp = fopen(inname, "rb")) == NULL) { - printf(" iSAC: Cannot read file %s.\n", inname); - exit(1); - } - if ((outp = fopen(outname, "wb")) == NULL) { - printf(" iSAC: Cannot write file %s.\n", outname); - exit(1); - } - -#ifdef WIN32 - _splitpath(outname, outDrive, outPath, outPrefix, outSuffix); - _makepath(bitrateFileName, outDrive, outPath, "bitrate", ".txt"); - - 
bitrateFile = fopen(bitrateFileName, "a"); - fprintf(bitrateFile, "%% %s \n", inname); -#endif - - printf("\n"); - printf("Input.................... %s\n", inname); - printf("Output................... %s\n", outname); - printf("Encoding Mode............ %s\n", - (codingMode == 1) ? "Channel-Independent" : "Channel-Adaptive"); - printf("Bottleneck............... %d bits/sec\n", bottleneck); - printf("Packet-loss Percentage... %d\n", packetLossPercent); - printf("\n"); - -#ifdef WIN32 - starttime = clock() / (double)CLOCKS_PER_SEC; /* Runtime statistics */ -#endif - - /* Initialize the ISAC and BN structs */ - err = WebRtcIsac_Create(&ISAC_main_inst); - - WebRtcIsac_SetEncSampRate(ISAC_main_inst, sampFreqKHz * 1000); - WebRtcIsac_SetDecSampRate(ISAC_main_inst, sampFreqKHz >= 32 ? 32000 : 16000); - /* Error check */ - if (err < 0) { - fprintf(stderr, "\n\n Error in create.\n\n"); - exit(EXIT_FAILURE); - } - - framecnt = 0; - endfile = 0; - - /* Initialize encoder and decoder */ - if (WebRtcIsac_EncoderInit(ISAC_main_inst, codingMode) < 0) { - printf("cannot initialize encoder\n"); - return -1; - } - WebRtcIsac_DecoderInit(ISAC_main_inst); - - if (codingMode == 1) { - if (WebRtcIsac_Control(ISAC_main_inst, bottleneck, frameSize) < 0) { - printf("cannot set bottleneck\n"); - return -1; - } - } else { - if (WebRtcIsac_ControlBwe(ISAC_main_inst, 15000, 30, 1) < 0) { - printf("cannot configure BWE\n"); - return -1; - } - } - - if (WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadLimit) < 0) { - printf("cannot set maximum payload size %d.\n", payloadLimit); - return -1; - } - - if (rateLimit < 106800) { - if (WebRtcIsac_SetMaxRate(ISAC_main_inst, rateLimit) < 0) { - printf("cannot set the maximum rate %d.\n", rateLimit); - return -1; - } - } - - while (endfile == 0) { - fprintf(stderr, " \rframe = %7li", framecnt); - - //============== Readind from the file and encoding ================= - cur_framesmpls = 0; - stream_len = 0; - - if (onlyDecode) { - uint8_t auxUW8; - if 
(fread(&auxUW8, sizeof(uint8_t), 1, inp) < 1) { - break; - } - stream_len = auxUW8 << 8; - if (fread(&auxUW8, sizeof(uint8_t), 1, inp) < 1) { - break; - } - stream_len |= auxUW8; - if (fread(payload, 1, stream_len, inp) < stream_len) { - printf("last payload is corrupted\n"); - break; - } - } else { - while (stream_len == 0) { - int stream_len_int; - - // Read 10 ms speech block - endfile = readframe(shortdata, inp, samplesIn10Ms); - if (endfile) { - break; - } - cur_framesmpls += samplesIn10Ms; - - //-------- iSAC encoding --------- - stream_len_int = WebRtcIsac_Encode(ISAC_main_inst, shortdata, payload); - - if (stream_len_int < 0) { - // exit if returned with error - // errType=WebRtcIsac_GetErrorCode(ISAC_main_inst); - fprintf(stderr, "\nError in encoder\n"); - getc(stdin); - exit(EXIT_FAILURE); - } - stream_len = (size_t)stream_len_int; - } - //=================================================================== - if (endfile) { - break; - } - - rcuStreamLen = WebRtcIsac_GetRedPayload(ISAC_main_inst, payloadRCU); - if (rcuStreamLen < 0) { - fprintf(stderr, "\nError getting RED payload\n"); - getc(stdin); - exit(EXIT_FAILURE); - } - - get_arrival_time(cur_framesmpls, stream_len, bottleneck, &packetData, - sampFreqKHz * 1000, sampFreqKHz * 1000); - if (WebRtcIsac_UpdateBwEstimate( - ISAC_main_inst, payload, stream_len, packetData.rtp_number, - packetData.sample_count, packetData.arrival_time) < 0) { - printf(" BWE Error at client\n"); - return -1; - } - } - - if (endfile) { - break; - } - - maxStreamLen = (stream_len > maxStreamLen) ? 
stream_len : maxStreamLen; - packetCntr++; - - hist[stream_len]++; - if (averageFile != NULL) { - tmpSumStreamLen += stream_len; - if (packetCntr == 100) { - // kbps - fprintf(averageFile, "%8.3f ", - tmpSumStreamLen * 8.0 / (30.0 * packetCntr)); - packetCntr = 0; - tmpSumStreamLen = 0; - } - } - - if (onlyEncode) { - uint8_t auxUW8; - auxUW8 = (uint8_t)(((stream_len & 0x7F00) >> 8) & 0xFF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, outp) != 1) { - return -1; - } - - auxUW8 = (uint8_t)(stream_len & 0xFF); - if (fwrite(&auxUW8, sizeof(uint8_t), 1, outp) != 1) { - return -1; - } - if (fwrite(payload, 1, stream_len, outp) != stream_len) { - return -1; - } - } else { - //======================= iSAC decoding =========================== - - if ((rand() % 100) < packetLossPercent) { - declen = WebRtcIsac_DecodeRcu(ISAC_main_inst, payloadRCU, - (size_t)rcuStreamLen, decoded, - speechType); - lostPacketCntr++; - } else { - declen = WebRtcIsac_Decode(ISAC_main_inst, payload, stream_len, decoded, - speechType); - } - if (declen <= 0) { - // errType=WebRtcIsac_GetErrorCode(ISAC_main_inst); - fprintf(stderr, "\nError in decoder.\n"); - getc(stdin); - exit(1); - } - - // Write decoded speech frame to file - if (fwrite(decoded, sizeof(int16_t), declen, outp) != (size_t)declen) { - return -1; - } - cur_framesmpls = declen; - } - // Update Statistics - framecnt++; - totalsmpls += cur_framesmpls; - if (stream_len > 0) { - totalbits += 8 * stream_len; - } - if (rcuStreamLen > 0) { - totalBitsRCU += 8 * rcuStreamLen; - } - } - - rate = ((double)totalbits * (sampFreqKHz)) / (double)totalsmpls; - rateRCU = ((double)totalBitsRCU * (sampFreqKHz)) / (double)totalsmpls; - - printf("\n\n"); - printf("Sampling Rate............... %d kHz\n", sampFreqKHz); - printf("Payload Limit............... %d bytes \n", payloadLimit); - printf("Rate Limit.................. 
%d bits/sec \n", rateLimit); - -#ifdef WIN32 - fprintf(bitrateFile, "%d %10lu %d %6.3f %6.3f %6.3f\n", - sampFreqKHz, framecnt, bottleneck, rateLB, rateUB, rate); - fclose(bitrateFile); -#endif // WIN32 - - printf("\n"); - printf("Measured bit-rate........... %0.3f kbps\n", rate); - printf("Measured RCU bit-ratre...... %0.3f kbps\n", rateRCU); - printf("Maximum bit-rate/payloadsize %0.3f / %zu\n", - maxStreamLen * 8 / 0.03, maxStreamLen); - printf("Measured packet-loss........ %0.1f%% \n", - 100.0f * (float)lostPacketCntr / (float)packetCntr); - - printf("\n"); - -/* Runtime statistics */ -#ifdef WIN32 - runtime = (double)(clock() / (double)CLOCKS_PER_SEC - starttime); - length_file = ((double)framecnt * (double)declen / (sampFreqKHz * 1000)); - printf("Length of speech file....... %.1f s\n", length_file); - printf("Time to run iSAC............ %.2f s (%.2f %% of realtime)\n\n", - runtime, (100 * runtime / length_file)); -#endif - printf("\n\n_______________________________________________\n"); - - if (histFile != NULL) { - int n; - for (n = 0; n < 600; n++) { - fprintf(histFile, "%6d ", hist[n]); - } - fprintf(histFile, "\n"); - fclose(histFile); - } - if (averageFile != NULL) { - if (packetCntr > 0) { - fprintf(averageFile, "%8.3f ", - tmpSumStreamLen * 8.0 / (30.0 * packetCntr)); - } - fprintf(averageFile, "\n"); - fclose(averageFile); - } - - fclose(inp); - fclose(outp); - - WebRtcIsac_Free(ISAC_main_inst); - - exit(0); -} diff --git a/modules/audio_coding/codecs/isac/main/util/utility.c b/modules/audio_coding/codecs/isac/main/util/utility.c deleted file mode 100644 index 56547b11c2..0000000000 --- a/modules/audio_coding/codecs/isac/main/util/utility.c +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include -#include -#include - -#include "modules/audio_coding/codecs/isac/main/util/utility.h" - -/* function for reading audio data from PCM file */ -int -readframe( - short* data, - FILE* inp, - int length) -{ - short k, rlen, status = 0; - unsigned char* ptrUChar; - ptrUChar = (unsigned char*)data; - - rlen = (short)fread(data, sizeof(short), length, inp); - if (rlen < length) { - for (k = rlen; k < length; k++) - data[k] = 0; - status = 1; - } - - // Assuming that our PCM files are written in Intel machines - for(k = 0; k < length; k++) - { - data[k] = (short)ptrUChar[k<<1] | ((((short)ptrUChar[(k<<1) + 1]) << 8) & 0xFF00); - } - - return status; -} - -short -readSwitch( - int argc, - char* argv[], - char* strID) -{ - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - return 1; - } - } - return 0; -} - -double -readParamDouble( - int argc, - char* argv[], - char* strID, - double defaultVal) -{ - double returnVal = defaultVal; - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - n++; - if(n < argc) - { - returnVal = atof(argv[n]); - } - break; - } - } - return returnVal; -} - -int -readParamInt( - int argc, - char* argv[], - char* strID, - int defaultVal) -{ - int returnVal = defaultVal; - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - n++; - if(n < argc) - { - returnVal = atoi(argv[n]); - } - break; - } - } - return returnVal; -} - -int -readParamString( - int argc, - char* argv[], - char* strID, - char* stringParam, - int maxSize) -{ - int paramLenght = 0; - short n; - for(n = 0; n < argc; n++) - { - if(strcmp(argv[n], strID) == 0) - { - n++; - if(n < argc) - { - strncpy(stringParam, argv[n], maxSize); - paramLenght = (int)strlen(argv[n]); - } - break; - } - } - 
return paramLenght; -} - -void -get_arrival_time( - int current_framesamples, /* samples */ - size_t packet_size, /* bytes */ - int bottleneck, /* excluding headers; bits/s */ - BottleNeckModel* BN_data, - short senderSampFreqHz, - short receiverSampFreqHz) -{ - unsigned int travelTimeMs; - const int headerSizeByte = 35; - - int headerRate; - - BN_data->whenPackGeneratedMs += (current_framesamples / (senderSampFreqHz / 1000)); - - headerRate = headerSizeByte * 8 * senderSampFreqHz / current_framesamples; /* bits/s */ - - /* everything in samples */ - BN_data->sample_count = BN_data->sample_count + current_framesamples; - - //travelTimeMs = ((packet_size + HeaderSize) * 8 * sampFreqHz) / - // (bottleneck + HeaderRate) - travelTimeMs = (unsigned int)floor((double)((packet_size + headerSizeByte) * 8 * 1000) - / (double)(bottleneck + headerRate) + 0.5); - - if(BN_data->whenPrevPackLeftMs > BN_data->whenPackGeneratedMs) - { - BN_data->whenPrevPackLeftMs += travelTimeMs; - } - else - { - BN_data->whenPrevPackLeftMs = BN_data->whenPackGeneratedMs + - travelTimeMs; - } - - BN_data->arrival_time = (BN_data->whenPrevPackLeftMs * - (receiverSampFreqHz / 1000)); - -// if (BN_data->arrival_time < BN_data->sample_count) -// BN_data->arrival_time = BN_data->sample_count; - - BN_data->rtp_number++; -} diff --git a/modules/audio_coding/codecs/isac/main/util/utility.h b/modules/audio_coding/codecs/isac/main/util/utility.h deleted file mode 100644 index 1acc54251b..0000000000 --- a/modules/audio_coding/codecs/isac/main/util/utility.h +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_ -#define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_ - -#include -#include - -#if defined(__cplusplus) -extern "C" { -#endif - -#define OPEN_FILE_WB(filePtr, fullPath) \ - do { \ - if (fullPath != NULL) { \ - filePtr = fopen(fullPath, "wb"); \ - if (filePtr == NULL) { \ - printf("could not open %s to write to.", fullPath); \ - return -1; \ - } \ - } else { \ - filePtr = NULL; \ - } \ - } while (0) - -#define OPEN_FILE_AB(filePtr, fullPath) \ - do { \ - if (fullPath != NULL) { \ - filePtr = fopen(fullPath, "ab"); \ - if (filePtr == NULL) { \ - printf("could not open %s to write to.", fullPath); \ - return -1; \ - } \ - } else { \ - filePtr = NULL; \ - } \ - } while (0) - -#define OPEN_FILE_RB(filePtr, fullPath) \ - do { \ - if (fullPath != NULL) { \ - filePtr = fopen(fullPath, "rb"); \ - if (filePtr == NULL) { \ - printf("could not open %s to read from.", fullPath); \ - return -1; \ - } \ - } else { \ - filePtr = NULL; \ - } \ - } while (0) - -#define WRITE_FILE_D(bufferPtr, len, filePtr) \ - do { \ - if (filePtr != NULL) { \ - double dummy[1000]; \ - int cntr; \ - for (cntr = 0; cntr < (len); cntr++) { \ - dummy[cntr] = (double)bufferPtr[cntr]; \ - } \ - fwrite(dummy, sizeof(double), len, filePtr); \ - fflush(filePtr); \ - } \ - } while (0) - -typedef struct { - unsigned int whenPackGeneratedMs; - unsigned int whenPrevPackLeftMs; - unsigned int sendTimeMs; /* milisecond */ - unsigned int arrival_time; /* samples */ - unsigned int sample_count; /* samples, also used as "send time stamp" */ - unsigned int rtp_number; -} BottleNeckModel; - -void get_arrival_time(int current_framesamples, /* samples */ - size_t packet_size, /* bytes */ - int bottleneck, /* excluding headers; bits/s */ - BottleNeckModel* BN_data, - short senderSampFreqHz, - short receiverSampFreqHz); - -/* function for reading audio data from PCM file */ -int readframe(short* data, FILE* inp, int length); - -short 
readSwitch(int argc, char* argv[], char* strID); - -double readParamDouble(int argc, char* argv[], char* strID, double defaultVal); - -int readParamInt(int argc, char* argv[], char* strID, int defaultVal); - -int readParamString(int argc, - char* argv[], - char* strID, - char* stringParam, - int maxSize); - -#if defined(__cplusplus) -} -#endif - -#endif diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/modules/audio_coding/codecs/opus/audio_encoder_opus.cc index 6d9d9ae1d0..758287792e 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_opus.cc +++ b/modules/audio_coding/codecs/opus/audio_encoder_opus.cc @@ -362,8 +362,6 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl( const AudioNetworkAdaptorCreator& audio_network_adaptor_creator, std::unique_ptr bitrate_smoother) : payload_type_(payload_type), - send_side_bwe_with_overhead_( - !webrtc::field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead")), use_stable_target_for_adaptation_(!webrtc::field_trial::IsDisabled( "WebRTC-Audio-StableTargetAdaptation")), adjust_bandwidth_( @@ -521,7 +519,7 @@ void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( } ApplyAudioNetworkAdaptor(); - } else if (send_side_bwe_with_overhead_) { + } else { if (!overhead_bytes_per_packet_) { RTC_LOG(LS_INFO) << "AudioEncoderOpusImpl: Overhead unknown, target audio bitrate " @@ -534,8 +532,6 @@ void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( std::min(AudioEncoderOpusConfig::kMaxBitrateBps, std::max(AudioEncoderOpusConfig::kMinBitrateBps, target_audio_bitrate_bps - overhead_bps))); - } else { - SetTargetBitrate(target_audio_bitrate_bps); } } void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( @@ -813,9 +809,10 @@ ANAStats AudioEncoderOpusImpl::GetANAStats() const { absl::optional > AudioEncoderOpusImpl::GetFrameLengthRange() const { - if (config_.supported_frame_lengths_ms.empty()) { - return absl::nullopt; - } else if (audio_network_adaptor_) { + if (audio_network_adaptor_) { + if 
(config_.supported_frame_lengths_ms.empty()) { + return absl::nullopt; + } return {{TimeDelta::Millis(config_.supported_frame_lengths_ms.front()), TimeDelta::Millis(config_.supported_frame_lengths_ms.back())}}; } else { diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/modules/audio_coding/codecs/opus/audio_encoder_opus.h index 352a23107b..b9f777fe7a 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_opus.h +++ b/modules/audio_coding/codecs/opus/audio_encoder_opus.h @@ -157,7 +157,6 @@ class AudioEncoderOpusImpl final : public AudioEncoder { AudioEncoderOpusConfig config_; const int payload_type_; - const bool send_side_bwe_with_overhead_; const bool use_stable_target_for_adaptation_; const bool adjust_bandwidth_; bool bitrate_changed_; diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc index 43e8a7a80f..a2ebe43bbe 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc +++ b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc @@ -373,9 +373,6 @@ TEST_P(AudioEncoderOpusTest, PacketLossRateUpperBounded) { } TEST_P(AudioEncoderOpusTest, DoNotInvokeSetTargetBitrateIfOverheadUnknown) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); - auto states = CreateCodec(sample_rate_hz_, 2); states->encoder->OnReceivedUplinkBandwidth(kDefaultOpusRate * 2, @@ -670,6 +667,17 @@ TEST(AudioEncoderOpusTest, TestConfigFromInvalidParams) { config.supported_frame_lengths_ms); } +TEST(AudioEncoderOpusTest, GetFrameLenghtRange) { + AudioEncoderOpusConfig config = + CreateConfigWithParameters({{"maxptime", "10"}, {"ptime", "10"}}); + std::unique_ptr encoder = + AudioEncoderOpus::MakeAudioEncoder(config, kDefaultOpusPayloadType); + auto ptime = webrtc::TimeDelta::Millis(10); + absl::optional> range = { + {ptime, ptime}}; + EXPECT_EQ(encoder->GetFrameLengthRange(), range); +} + // Test that 
bitrate will be overridden by the "maxaveragebitrate" parameter. // Also test that the "maxaveragebitrate" can't be set to values outside the // range of 6000 and 510000 diff --git a/modules/audio_coding/neteq/audio_decoder_unittest.cc b/modules/audio_coding/neteq/audio_decoder_unittest.cc index bb5c6d167b..fef3c3c1e4 100644 --- a/modules/audio_coding/neteq/audio_decoder_unittest.cc +++ b/modules/audio_coding/neteq/audio_decoder_unittest.cc @@ -22,10 +22,6 @@ #include "modules/audio_coding/codecs/g722/audio_encoder_g722.h" #include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" #include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" -#include "modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" -#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" #include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" #include "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h" #include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" @@ -195,8 +191,8 @@ class AudioDecoderTest : public ::testing::Test { processed_samples += frame_size_; } // For some codecs it doesn't make sense to check expected number of bytes, - // since the number can vary for different platforms. Opus and iSAC are - // such codecs. In this case expected_bytes is set to 0. + // since the number can vary for different platforms. Opus is such a codec. + // In this case expected_bytes is set to 0. 
if (expected_bytes) { EXPECT_EQ(expected_bytes, encoded_bytes); } @@ -347,66 +343,6 @@ class AudioDecoderIlbcTest : public AudioDecoderTest { } }; -class AudioDecoderIsacFloatTest : public AudioDecoderTest { - protected: - AudioDecoderIsacFloatTest() : AudioDecoderTest() { - codec_input_rate_hz_ = 16000; - frame_size_ = 480; - data_length_ = 10 * frame_size_; - AudioEncoderIsacFloatImpl::Config config; - config.payload_type = payload_type_; - config.sample_rate_hz = codec_input_rate_hz_; - config.frame_size_ms = - 1000 * static_cast(frame_size_) / codec_input_rate_hz_; - audio_encoder_.reset(new AudioEncoderIsacFloatImpl(config)); - audio_encoder_->OnReceivedOverhead(kOverheadBytesPerPacket); - - AudioDecoderIsacFloatImpl::Config decoder_config; - decoder_config.sample_rate_hz = codec_input_rate_hz_; - decoder_ = new AudioDecoderIsacFloatImpl(decoder_config); - } -}; - -class AudioDecoderIsacSwbTest : public AudioDecoderTest { - protected: - AudioDecoderIsacSwbTest() : AudioDecoderTest() { - codec_input_rate_hz_ = 32000; - frame_size_ = 960; - data_length_ = 10 * frame_size_; - AudioEncoderIsacFloatImpl::Config config; - config.payload_type = payload_type_; - config.sample_rate_hz = codec_input_rate_hz_; - config.frame_size_ms = - 1000 * static_cast(frame_size_) / codec_input_rate_hz_; - audio_encoder_.reset(new AudioEncoderIsacFloatImpl(config)); - audio_encoder_->OnReceivedOverhead(kOverheadBytesPerPacket); - - AudioDecoderIsacFloatImpl::Config decoder_config; - decoder_config.sample_rate_hz = codec_input_rate_hz_; - decoder_ = new AudioDecoderIsacFloatImpl(decoder_config); - } -}; - -class AudioDecoderIsacFixTest : public AudioDecoderTest { - protected: - AudioDecoderIsacFixTest() : AudioDecoderTest() { - codec_input_rate_hz_ = 16000; - frame_size_ = 480; - data_length_ = 10 * frame_size_; - AudioEncoderIsacFixImpl::Config config; - config.payload_type = payload_type_; - config.sample_rate_hz = codec_input_rate_hz_; - config.frame_size_ms = - 1000 * 
static_cast(frame_size_) / codec_input_rate_hz_; - audio_encoder_.reset(new AudioEncoderIsacFixImpl(config)); - audio_encoder_->OnReceivedOverhead(kOverheadBytesPerPacket); - - AudioDecoderIsacFixImpl::Config decoder_config; - decoder_config.sample_rate_hz = codec_input_rate_hz_; - decoder_ = new AudioDecoderIsacFixImpl(decoder_config); - } -}; - class AudioDecoderG722Test : public AudioDecoderTest { protected: AudioDecoderG722Test() : AudioDecoderTest() { @@ -533,94 +469,6 @@ TEST_F(AudioDecoderIlbcTest, SetTargetBitrate) { TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 13333); } -TEST_F(AudioDecoderIsacFloatTest, EncodeDecode) { - int tolerance = 3399; - double mse = 434951.0; - int delay = 48; // Delay from input to output. - EncodeDecodeTest(0, tolerance, mse, delay); - ReInitTest(); - EXPECT_FALSE(decoder_->HasDecodePlc()); -} - -TEST_F(AudioDecoderIsacFloatTest, SetTargetBitrate) { - const int overhead_rate = - 8 * kOverheadBytesPerPacket * codec_input_rate_hz_ / frame_size_; - EXPECT_EQ(10000, - SetAndGetTargetBitrate(audio_encoder_.get(), 9999 + overhead_rate)); - EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), - 10000 + overhead_rate)); - EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), - 23456 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32000 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32001 + overhead_rate)); -} - -TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) { - int tolerance = 19757; - double mse = 8.18e6; - int delay = 160; // Delay from input to output. 
- EncodeDecodeTest(0, tolerance, mse, delay); - ReInitTest(); - EXPECT_FALSE(decoder_->HasDecodePlc()); -} - -TEST_F(AudioDecoderIsacSwbTest, SetTargetBitrate) { - const int overhead_rate = - 8 * kOverheadBytesPerPacket * codec_input_rate_hz_ / frame_size_; - EXPECT_EQ(10000, - SetAndGetTargetBitrate(audio_encoder_.get(), 9999 + overhead_rate)); - EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), - 10000 + overhead_rate)); - EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), - 23456 + overhead_rate)); - EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), - 56000 + overhead_rate)); - EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), - 56001 + overhead_rate)); -} - -// Run bit exactness test only for release builds. -#if defined(NDEBUG) -TEST_F(AudioDecoderIsacFixTest, EncodeDecode) { - int tolerance = 11034; - double mse = 3.46e6; - int delay = 54; // Delay from input to output. -#if defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM) - static const int kEncodedBytes = 685; -#elif defined(WEBRTC_MAC) && defined(WEBRTC_ARCH_ARM64) // M1 Mac - static const int kEncodedBytes = 673; -#elif defined(WEBRTC_ARCH_ARM64) - static const int kEncodedBytes = 673; -#elif defined(WEBRTC_WIN) && defined(_MSC_VER) && !defined(__clang__) - static const int kEncodedBytes = 671; -#elif defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_X86_64) - static const int kEncodedBytes = 671; -#else - static const int kEncodedBytes = 671; -#endif - EncodeDecodeTest(kEncodedBytes, tolerance, mse, delay); - ReInitTest(); - EXPECT_FALSE(decoder_->HasDecodePlc()); -} -#endif - -TEST_F(AudioDecoderIsacFixTest, SetTargetBitrate) { - const int overhead_rate = - 8 * kOverheadBytesPerPacket * codec_input_rate_hz_ / frame_size_; - EXPECT_EQ(10000, - SetAndGetTargetBitrate(audio_encoder_.get(), 9999 + overhead_rate)); - EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), - 10000 + overhead_rate)); - EXPECT_EQ(23456, 
SetAndGetTargetBitrate(audio_encoder_.get(), - 23456 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32000 + overhead_rate)); - EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), - 32001 + overhead_rate)); -} - TEST_F(AudioDecoderG722Test, EncodeDecode) { int tolerance = 6176; double mse = 238630.0; diff --git a/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc b/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc deleted file mode 100644 index 6a096c307c..0000000000 --- a/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "absl/flags/flag.h" -#include "modules/audio_coding/codecs/isac/fix/include/isacfix.h" -#include "modules/audio_coding/neteq/tools/neteq_quality_test.h" - -ABSL_FLAG(int, bit_rate_kbps, 32, "Target bit rate (kbps)."); - -using ::testing::InitGoogleTest; - -namespace webrtc { -namespace test { -namespace { -static const int kIsacBlockDurationMs = 30; -static const int kIsacInputSamplingKhz = 16; -static const int kIsacOutputSamplingKhz = 16; -} // namespace - -class NetEqIsacQualityTest : public NetEqQualityTest { - protected: - NetEqIsacQualityTest(); - void SetUp() override; - void TearDown() override; - int EncodeBlock(int16_t* in_data, - size_t block_size_samples, - rtc::Buffer* payload, - size_t max_bytes) override; - - private: - ISACFIX_MainStruct* isac_encoder_; - int bit_rate_kbps_; -}; - -NetEqIsacQualityTest::NetEqIsacQualityTest() - : NetEqQualityTest(kIsacBlockDurationMs, - kIsacInputSamplingKhz, - kIsacOutputSamplingKhz, - SdpAudioFormat("isac", 16000, 1)), - isac_encoder_(NULL), - bit_rate_kbps_(absl::GetFlag(FLAGS_bit_rate_kbps)) { - // Flag validation - RTC_CHECK(absl::GetFlag(FLAGS_bit_rate_kbps) >= 10 && - absl::GetFlag(FLAGS_bit_rate_kbps) <= 32) - << "Invalid bit rate, should be between 10 and 32 kbps."; -} - -void NetEqIsacQualityTest::SetUp() { - ASSERT_EQ(1u, channels_) << "iSAC supports only mono audio."; - // Create encoder memory. - WebRtcIsacfix_Create(&isac_encoder_); - ASSERT_TRUE(isac_encoder_ != NULL); - EXPECT_EQ(0, WebRtcIsacfix_EncoderInit(isac_encoder_, 1)); - // Set bitrate and block length. - EXPECT_EQ(0, WebRtcIsacfix_Control(isac_encoder_, bit_rate_kbps_ * 1000, - kIsacBlockDurationMs)); - NetEqQualityTest::SetUp(); -} - -void NetEqIsacQualityTest::TearDown() { - // Free memory. 
- EXPECT_EQ(0, WebRtcIsacfix_Free(isac_encoder_)); - NetEqQualityTest::TearDown(); -} - -int NetEqIsacQualityTest::EncodeBlock(int16_t* in_data, - size_t block_size_samples, - rtc::Buffer* payload, - size_t max_bytes) { - // ISAC takes 10 ms for every call. - const int subblocks = kIsacBlockDurationMs / 10; - const int subblock_length = 10 * kIsacInputSamplingKhz; - int value = 0; - - int pointer = 0; - for (int idx = 0; idx < subblocks; idx++, pointer += subblock_length) { - // The Isac encoder does not perform encoding (and returns 0) until it - // receives a sequence of sub-blocks that amount to the frame duration. - EXPECT_EQ(0, value); - payload->AppendData(max_bytes, [&](rtc::ArrayView payload) { - value = WebRtcIsacfix_Encode(isac_encoder_, &in_data[pointer], - payload.data()); - return (value >= 0) ? static_cast(value) : 0; - }); - } - EXPECT_GT(value, 0); - return value; -} - -TEST_F(NetEqIsacQualityTest, Test) { - Simulate(); -} - -} // namespace test -} // namespace webrtc diff --git a/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc b/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc index 91c3a1d96b..020199e9ac 100644 --- a/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc +++ b/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc @@ -284,6 +284,9 @@ void NetEqDelayAnalyzer::CreatePythonScript( output << " plt.ylabel('relative delay [ms]')" << std::endl; if (!ssrcs_.empty()) { auto ssrc_it = ssrcs_.cbegin(); + output << " plt.legend((\"arrival delay\", \"target delay\", \"playout " + "delay\"))" + << std::endl; output << " plt.title('SSRC: 0x" << std::hex << static_cast(*ssrc_it++); while (ssrc_it != ssrcs_.end()) { diff --git a/modules/audio_coding/neteq/tools/neteq_test.cc b/modules/audio_coding/neteq/tools/neteq_test.cc index 19b1df11a1..a567efe2de 100644 --- a/modules/audio_coding/neteq/tools/neteq_test.cc +++ b/modules/audio_coding/neteq/tools/neteq_test.cc @@ -67,11 +67,11 @@ NetEqTest::NetEqTest(const NetEq::Config& 
config, std::unique_ptr input, std::unique_ptr output, Callbacks callbacks) - : clock_(0), + : input_(std::move(input)), + clock_(Timestamp::Millis(input_->NextEventTime().value_or(0))), neteq_(neteq_factory ? neteq_factory->CreateNetEq(config, decoder_factory, &clock_) : CreateNetEq(config, &clock_, decoder_factory)), - input_(std::move(input)), output_(std::move(output)), callbacks_(callbacks), sample_rate_hz_(config.sample_rate_hz), @@ -99,7 +99,7 @@ int64_t NetEqTest::Run() { NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() { SimulationStepResult result; const int64_t start_time_ms = *input_->NextEventTime(); - int64_t time_now_ms = start_time_ms; + int64_t time_now_ms = clock_.CurrentTime().ms(); current_state_.packet_iat_ms.clear(); while (!input_->ended()) { @@ -311,10 +311,6 @@ NetEqTest::DecoderMap NetEqTest::StandardDecoderMap() { {8, SdpAudioFormat("pcma", 8000, 1)}, #ifdef WEBRTC_CODEC_ILBC {102, SdpAudioFormat("ilbc", 8000, 1)}, -#endif - {103, SdpAudioFormat("isac", 16000, 1)}, -#if !defined(WEBRTC_ANDROID) - {104, SdpAudioFormat("isac", 32000, 1)}, #endif #ifdef WEBRTC_CODEC_OPUS {111, SdpAudioFormat("opus", 48000, 2)}, diff --git a/modules/audio_coding/neteq/tools/neteq_test.h b/modules/audio_coding/neteq/tools/neteq_test.h index 0a6c24f3d6..1d3eeda453 100644 --- a/modules/audio_coding/neteq/tools/neteq_test.h +++ b/modules/audio_coding/neteq/tools/neteq_test.h @@ -109,11 +109,11 @@ class NetEqTest : public NetEqSimulator { private: void RegisterDecoders(const DecoderMap& codecs); + std::unique_ptr input_; SimulatedClock clock_; absl::optional next_action_; absl::optional last_packet_time_ms_; std::unique_ptr neteq_; - std::unique_ptr input_; std::unique_ptr output_; Callbacks callbacks_; int sample_rate_hz_; diff --git a/modules/audio_coding/neteq/tools/rtp_encode.cc b/modules/audio_coding/neteq/tools/rtp_encode.cc index 6aeeb6d129..8adca927f0 100644 --- a/modules/audio_coding/neteq/tools/rtp_encode.cc +++ 
b/modules/audio_coding/neteq/tools/rtp_encode.cc @@ -30,7 +30,6 @@ #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" #include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" -#include "api/audio_codecs/isac/audio_encoder_isac.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -71,7 +70,6 @@ enum class CodecType { kPcm16b32, kPcm16b48, kIlbc, - kIsac }; struct CodecTypeAndInfo { @@ -94,8 +92,7 @@ const std::map& CodecList() { {"pcm16b_16", {CodecType::kPcm16b16, 94, false}}, {"pcm16b_32", {CodecType::kPcm16b32, 95, false}}, {"pcm16b_48", {CodecType::kPcm16b48, 96, false}}, - {"ilbc", {CodecType::kIlbc, 102, false}}, - {"isac", {CodecType::kIsac, 103, false}}}; + {"ilbc", {CodecType::kIlbc, 102, false}}}; return *codec_list; } @@ -236,11 +233,6 @@ std::unique_ptr CreateEncoder(CodecType codec_type, return AudioEncoderIlbc::MakeAudioEncoder( GetCodecConfig(), payload_type); } - - case CodecType::kIsac: { - return AudioEncoderIsac::MakeAudioEncoder( - GetCodecConfig(), payload_type); - } } RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/modules/audio_coding/test/EncodeDecodeTest.cc b/modules/audio_coding/test/EncodeDecodeTest.cc index 8d4bcce8df..9f9c4aa74c 100644 --- a/modules/audio_coding/test/EncodeDecodeTest.cc +++ b/modules/audio_coding/test/EncodeDecodeTest.cc @@ -110,9 +110,7 @@ void Receiver::Setup(AudioCodingModule* acm, EXPECT_EQ(0, acm->InitializeReceiver()); if (channels == 1) { - acm->SetReceiveCodecs({{103, {"ISAC", 16000, 1}}, - {104, {"ISAC", 32000, 1}}, - {107, {"L16", 8000, 1}}, + acm->SetReceiveCodecs({{107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {0, {"PCMU", 8000, 1}}, @@ -232,7 +230,6 @@ EncodeDecodeTest::EncodeDecodeTest() = default; void EncodeDecodeTest::Perform() { const std::map send_codecs = { - {103, {"ISAC", 16000, 
1}}, {104, {"ISAC", 32000, 1}}, {107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {0, {"PCMU", 8000, 1}}, {8, {"PCMA", 8000, 1}}, diff --git a/modules/audio_coding/test/TestAllCodecs.cc b/modules/audio_coding/test/TestAllCodecs.cc index e93df346f1..b44037d732 100644 --- a/modules/audio_coding/test/TestAllCodecs.cc +++ b/modules/audio_coding/test/TestAllCodecs.cc @@ -130,9 +130,7 @@ void TestAllCodecs::Perform() { acm_a_->InitializeReceiver(); acm_b_->InitializeReceiver(); - acm_b_->SetReceiveCodecs({{103, {"ISAC", 16000, 1}}, - {104, {"ISAC", 32000, 1}}, - {107, {"L16", 8000, 1}}, + acm_b_->SetReceiveCodecs({{107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {111, {"L16", 8000, 2}}, @@ -186,33 +184,6 @@ void TestAllCodecs::Perform() { RegisterSendCodec('A', codec_ilbc, 8000, 15200, 320, 0); Run(channel_a_to_b_); outfile_b_.Close(); -#endif -#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) - test_count_++; - OpenOutFile(test_count_); - char codec_isac[] = "ISAC"; - RegisterSendCodec('A', codec_isac, 16000, -1, 480, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 16000, -1, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 16000, 15000, 480, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 16000, 32000, 960, kVariableSize); - Run(channel_a_to_b_); - outfile_b_.Close(); -#endif -#ifdef WEBRTC_CODEC_ISAC - test_count_++; - OpenOutFile(test_count_); - RegisterSendCodec('A', codec_isac, 32000, -1, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 32000, 56000, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 32000, 37000, 960, kVariableSize); - Run(channel_a_to_b_); - RegisterSendCodec('A', codec_isac, 32000, 32000, 960, kVariableSize); - Run(channel_a_to_b_); - outfile_b_.Close(); #endif test_count_++; OpenOutFile(test_count_); @@ -319,15 +290,11 @@ 
void TestAllCodecs::RegisterSendCodec(char side, // Store packet-size in samples, used to validate the received packet. // If G.722, store half the size to compensate for the timestamp bug in the // RFC for G.722. - // If iSAC runs in adaptive mode, packet size in samples can change on the - // fly, so we exclude this test by setting `packet_size_samples_` to -1. int clockrate_hz = sampling_freq_hz; size_t num_channels = 1; if (absl::EqualsIgnoreCase(codec_name, "G722")) { packet_size_samples_ = packet_size / 2; clockrate_hz = sampling_freq_hz / 2; - } else if (absl::EqualsIgnoreCase(codec_name, "ISAC") && (rate == -1)) { - packet_size_samples_ = -1; } else if (absl::EqualsIgnoreCase(codec_name, "OPUS")) { packet_size_samples_ = packet_size; num_channels = 2; diff --git a/modules/audio_coding/test/TestRedFec.cc b/modules/audio_coding/test/TestRedFec.cc index 892fbc83d6..fff48b27bc 100644 --- a/modules/audio_coding/test/TestRedFec.cc +++ b/modules/audio_coding/test/TestRedFec.cc @@ -22,8 +22,6 @@ #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_decoder_g722.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" @@ -38,12 +36,10 @@ namespace webrtc { TestRedFec::TestRedFec() : encoder_factory_(CreateAudioEncoderFactory()), decoder_factory_(CreateAudioDecoderFactory()), _acmA(AudioCodingModule::Create( @@ -95,19 +91,6 @@ void TestRedFec::Perform() { Run(); _outFileB.Close(); - RegisterSendCodec(_acmA, {"ISAC", 16000, 1}, Vad::kVadVeryAggressive, false); - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - // Switch to a 32 kHz codec; RED should be switched off. 
- RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, Vad::kVadVeryAggressive, false); - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, absl::nullopt, false); - _channelA2B->SetFECTestWithPacketLoss(true); // Following tests are under packet losses. @@ -118,22 +101,6 @@ void TestRedFec::Perform() { Run(); _outFileB.Close(); - // Switch to a 16 kHz codec, RED should have been switched off. - RegisterSendCodec(_acmA, {"ISAC", 16000, 1}, Vad::kVadVeryAggressive, false); - - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - // Switch to a 32 kHz codec, RED should have been switched off. - RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, Vad::kVadVeryAggressive, false); - - OpenOutFile(_testCntr); - Run(); - _outFileB.Close(); - - RegisterSendCodec(_acmA, {"ISAC", 32000, 1}, absl::nullopt, false); - RegisterSendCodec(_acmA, {"opus", 48000, 2}, absl::nullopt, false); // _channelA2B imposes 25% packet loss rate. diff --git a/modules/audio_coding/test/TestVADDTX.cc b/modules/audio_coding/test/TestVADDTX.cc index cb05deb92a..19367d9bde 100644 --- a/modules/audio_coding/test/TestVADDTX.cc +++ b/modules/audio_coding/test/TestVADDTX.cc @@ -18,8 +18,6 @@ #include "api/audio_codecs/audio_encoder_factory_template.h" #include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" #include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" -#include "api/audio_codecs/isac/audio_decoder_isac_float.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" @@ -68,12 +66,10 @@ void MonitoringAudioPacketizationCallback::GetStatistics(uint32_t* counter) { } TestVadDtx::TestVadDtx() - : encoder_factory_(CreateAudioEncoderFactory()), - decoder_factory_(CreateAudioDecoderFactory()), + : encoder_factory_( + CreateAudioEncoderFactory()), + decoder_factory_( + 
CreateAudioDecoderFactory()), acm_send_(AudioCodingModule::Create( AudioCodingModule::Config(decoder_factory_))), acm_receive_(AudioCodingModule::Create( @@ -182,8 +178,6 @@ void TestVadDtx::Run(absl::string_view in_filename, TestWebRtcVadDtx::TestWebRtcVadDtx() : output_file_num_(0) {} void TestWebRtcVadDtx::Perform() { - RunTestCases({"ISAC", 16000, 1}); - RunTestCases({"ISAC", 32000, 1}); RunTestCases({"ILBC", 8000, 1}); RunTestCases({"opus", 48000, 2}); } diff --git a/modules/audio_coding/test/Tester.cc b/modules/audio_coding/test/Tester.cc index 113dbe059e..7612aa43a3 100644 --- a/modules/audio_coding/test/Tester.cc +++ b/modules/audio_coding/test/Tester.cc @@ -21,7 +21,6 @@ #include "modules/audio_coding/test/TestStereo.h" #include "modules/audio_coding/test/TestVADDTX.h" #include "modules/audio_coding/test/TwoWayCommunication.h" -#include "modules/audio_coding/test/iSACTest.h" #include "modules/audio_coding/test/opus_test.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -42,25 +41,6 @@ TEST(AudioCodingModuleTest, TestRedFec) { webrtc::TestRedFec().Perform(); } -#if defined(WEBRTC_ANDROID) -TEST(AudioCodingModuleTest, DISABLED_TestIsac) { -#else -TEST(AudioCodingModuleTest, TestIsac) { -#endif - webrtc::ISACTest().Perform(); -} - -#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \ - defined(WEBRTC_CODEC_ILBC) -#if defined(WEBRTC_ANDROID) -TEST(AudioCodingModuleTest, DISABLED_TwoWayCommunication) { -#else -TEST(AudioCodingModuleTest, TwoWayCommunication) { -#endif - webrtc::TwoWayCommunication().Perform(); -} -#endif - // Disabled on ios as flaky, see https://crbug.com/webrtc/7057 #if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) TEST(AudioCodingModuleTest, DISABLED_TestStereo) { diff --git a/modules/audio_coding/test/iSACTest.cc b/modules/audio_coding/test/iSACTest.cc deleted file mode 100644 index 246c485afe..0000000000 --- a/modules/audio_coding/test/iSACTest.cc +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright (c) 
2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/test/iSACTest.h" - -#include -#include - -#include "absl/strings/match.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" -#include "test/gmock.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { - -using ::testing::AnyOf; -using ::testing::Eq; -using ::testing::StrCaseEq; - -namespace { - -constexpr int kISAC16kPayloadType = 103; -constexpr int kISAC32kPayloadType = 104; -const SdpAudioFormat kISAC16kFormat = {"ISAC", 16000, 1}; -const SdpAudioFormat kISAC32kFormat = {"ISAC", 32000, 1}; - -AudioEncoderIsacFloat::Config TweakConfig( - AudioEncoderIsacFloat::Config config, - const ACMTestISACConfig& test_config) { - if (test_config.currentRateBitPerSec > 0) { - config.bit_rate = test_config.currentRateBitPerSec; - } - if (test_config.currentFrameSizeMsec != 0) { - config.frame_size_ms = test_config.currentFrameSizeMsec; - } - EXPECT_THAT(config.IsOk(), Eq(true)); - return config; -} - -void SetISACConfigDefault(ACMTestISACConfig& isacConfig) { - isacConfig.currentRateBitPerSec = 0; - isacConfig.currentFrameSizeMsec = 0; - isacConfig.encodingMode = -1; - isacConfig.initRateBitPerSec = 0; - isacConfig.initFrameSizeInMsec = 0; - isacConfig.enforceFrameSize = false; -} - -} // namespace - -ISACTest::ISACTest() - : _acmA(AudioCodingModule::Create( - AudioCodingModule::Config(CreateBuiltinAudioDecoderFactory()))), - _acmB(AudioCodingModule::Create( - 
AudioCodingModule::Config(CreateBuiltinAudioDecoderFactory()))) {} - -ISACTest::~ISACTest() {} - -void ISACTest::Setup() { - // Register both iSAC-wb & iSAC-swb in both sides as receiver codecs. - std::map receive_codecs = { - {kISAC16kPayloadType, kISAC16kFormat}, - {kISAC32kPayloadType, kISAC32kFormat}}; - _acmA->SetReceiveCodecs(receive_codecs); - _acmB->SetReceiveCodecs(receive_codecs); - - //--- Set A-to-B channel - _channel_A2B.reset(new Channel); - EXPECT_EQ(0, _acmA->RegisterTransportCallback(_channel_A2B.get())); - _channel_A2B->RegisterReceiverACM(_acmB.get()); - - //--- Set B-to-A channel - _channel_B2A.reset(new Channel); - EXPECT_EQ(0, _acmB->RegisterTransportCallback(_channel_B2A.get())); - _channel_B2A->RegisterReceiverACM(_acmA.get()); - - file_name_swb_ = - webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); - - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - - _inFileA.Open(file_name_swb_, 32000, "rb"); - // Set test length to 500 ms (50 blocks of 10 ms each). - _inFileA.SetNum10MsBlocksToRead(50); - // Fast-forward 1 second (100 blocks) since the files start with silence. 
- _inFileA.FastForward(100); - std::string fileNameA = webrtc::test::OutputPath() + "testisac_a.pcm"; - std::string fileNameB = webrtc::test::OutputPath() + "testisac_b.pcm"; - _outFileA.Open(fileNameA, 32000, "wb"); - _outFileB.Open(fileNameB, 32000, "wb"); - - while (!_inFileA.EndOfFile()) { - Run10ms(); - } - - _inFileA.Close(); - _outFileA.Close(); - _outFileB.Close(); -} - -void ISACTest::Perform() { - Setup(); - - int16_t testNr = 0; - ACMTestISACConfig wbISACConfig; - ACMTestISACConfig swbISACConfig; - - SetISACConfigDefault(wbISACConfig); - SetISACConfigDefault(swbISACConfig); - - wbISACConfig.currentRateBitPerSec = -1; - swbISACConfig.currentRateBitPerSec = -1; - testNr++; - EncodeDecode(testNr, wbISACConfig, swbISACConfig); - - SetISACConfigDefault(wbISACConfig); - SetISACConfigDefault(swbISACConfig); - testNr++; - EncodeDecode(testNr, wbISACConfig, swbISACConfig); - - testNr++; - SwitchingSamplingRate(testNr, 4); -} - -void ISACTest::Run10ms() { - AudioFrame audioFrame; - EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0); - EXPECT_GE(_acmA->Add10MsData(audioFrame), 0); - EXPECT_GE(_acmB->Add10MsData(audioFrame), 0); - bool muted; - EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame, &muted)); - ASSERT_FALSE(muted); - _outFileA.Write10MsData(audioFrame); - EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame, &muted)); - ASSERT_FALSE(muted); - _outFileB.Write10MsData(audioFrame); -} - -void ISACTest::EncodeDecode(int testNr, - ACMTestISACConfig& wbISACConfig, - ACMTestISACConfig& swbISACConfig) { - // Files in Side A and B - _inFileA.Open(file_name_swb_, 32000, "rb", true); - _inFileB.Open(file_name_swb_, 32000, "rb", true); - - std::string file_name_out; - rtc::StringBuilder file_stream_a; - rtc::StringBuilder file_stream_b; - file_stream_a << webrtc::test::OutputPath(); - file_stream_b << webrtc::test::OutputPath(); - file_stream_a << "out_iSACTest_A_" << testNr << ".pcm"; - file_stream_b << "out_iSACTest_B_" << testNr << ".pcm"; - file_name_out = 
file_stream_a.str(); - _outFileA.Open(file_name_out, 32000, "wb"); - file_name_out = file_stream_b.str(); - _outFileB.Open(file_name_out, 32000, "wb"); - - // Side A is sending super-wideband, and side B is sending wideband. - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - TweakConfig(*AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - swbISACConfig), - kISAC32kPayloadType)); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - TweakConfig(*AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - wbISACConfig), - kISAC16kPayloadType)); - - _channel_A2B->ResetStats(); - _channel_B2A->ResetStats(); - - while (!(_inFileA.EndOfFile() || _inFileA.Rewinded())) { - Run10ms(); - } - - _channel_A2B->ResetStats(); - _channel_B2A->ResetStats(); - - _outFileA.Close(); - _outFileB.Close(); - _inFileA.Close(); - _inFileB.Close(); -} - -void ISACTest::SwitchingSamplingRate(int testNr, int maxSampRateChange) { - // Files in Side A - _inFileA.Open(file_name_swb_, 32000, "rb"); - _inFileB.Open(file_name_swb_, 32000, "rb"); - - std::string file_name_out; - rtc::StringBuilder file_stream_a; - rtc::StringBuilder file_stream_b; - file_stream_a << webrtc::test::OutputPath(); - file_stream_b << webrtc::test::OutputPath(); - file_stream_a << "out_iSACTest_A_" << testNr << ".pcm"; - file_stream_b << "out_iSACTest_B_" << testNr << ".pcm"; - file_name_out = file_stream_a.str(); - _outFileA.Open(file_name_out, 32000, "wb"); - file_name_out = file_stream_b.str(); - _outFileB.Open(file_name_out, 32000, "wb"); - - // Start with side A sending super-wideband and side B seding wideband. - // Toggle sending wideband/super-wideband in this test. 
- _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - - int numSendCodecChanged = 0; - while (numSendCodecChanged < (maxSampRateChange << 1)) { - Run10ms(); - if (_inFileA.EndOfFile()) { - if (_inFileA.SamplingFrequency() == 16000) { - // Switch side A to send super-wideband. - _inFileA.Close(); - _inFileA.Open(file_name_swb_, 32000, "rb"); - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - } else { - // Switch side A to send wideband. - _inFileA.Close(); - _inFileA.Open(file_name_swb_, 32000, "rb"); - _acmA->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - } - numSendCodecChanged++; - } - - if (_inFileB.EndOfFile()) { - if (_inFileB.SamplingFrequency() == 16000) { - // Switch side B to send super-wideband. - _inFileB.Close(); - _inFileB.Open(file_name_swb_, 32000, "rb"); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC32kFormat), - kISAC32kPayloadType)); - } else { - // Switch side B to send wideband. 
- _inFileB.Close(); - _inFileB.Open(file_name_swb_, 32000, "rb"); - _acmB->SetEncoder(AudioEncoderIsacFloat::MakeAudioEncoder( - *AudioEncoderIsacFloat::SdpToConfig(kISAC16kFormat), - kISAC16kPayloadType)); - } - numSendCodecChanged++; - } - } - _outFileA.Close(); - _outFileB.Close(); - _inFileA.Close(); - _inFileB.Close(); -} - -} // namespace webrtc diff --git a/modules/audio_coding/test/iSACTest.h b/modules/audio_coding/test/iSACTest.h deleted file mode 100644 index f6efeeac1c..0000000000 --- a/modules/audio_coding/test/iSACTest.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_TEST_ISACTEST_H_ -#define MODULES_AUDIO_CODING_TEST_ISACTEST_H_ - -#include - -#include - -#include "modules/audio_coding/include/audio_coding_module.h" -#include "modules/audio_coding/test/Channel.h" -#include "modules/audio_coding/test/PCMFile.h" - -namespace webrtc { - -struct ACMTestISACConfig { - int32_t currentRateBitPerSec; - int16_t currentFrameSizeMsec; - int16_t encodingMode; - uint32_t initRateBitPerSec; - int16_t initFrameSizeInMsec; - bool enforceFrameSize; -}; - -class ISACTest { - public: - ISACTest(); - ~ISACTest(); - - void Perform(); - - private: - void Setup(); - - void Run10ms(); - - void EncodeDecode(int testNr, - ACMTestISACConfig& wbISACConfig, - ACMTestISACConfig& swbISACConfig); - - void SwitchingSamplingRate(int testNr, int maxSampRateChange); - - std::unique_ptr _acmA; - std::unique_ptr _acmB; - - std::unique_ptr _channel_A2B; - std::unique_ptr _channel_B2A; - - PCMFile _inFileA; - PCMFile _inFileB; - - PCMFile _outFileA; - PCMFile 
_outFileB; - - std::string file_name_swb_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_TEST_ISACTEST_H_ diff --git a/modules/audio_mixer/frame_combiner.cc b/modules/audio_mixer/frame_combiner.cc index e31eea595f..96c62f6b0d 100644 --- a/modules/audio_mixer/frame_combiner.cc +++ b/modules/audio_mixer/frame_combiner.cc @@ -98,9 +98,7 @@ void MixToFloatFrame(rtc::ArrayView mix_list, RTC_DCHECK_LE(samples_per_channel, FrameCombiner::kMaximumChannelSize); RTC_DCHECK_LE(number_of_channels, FrameCombiner::kMaximumNumberOfChannels); // Clear the mixing buffer. - for (auto& one_channel_buffer : *mixing_buffer) { - std::fill(one_channel_buffer.begin(), one_channel_buffer.end(), 0.f); - } + *mixing_buffer = {}; // Convert to FloatS16 and mix. for (size_t i = 0; i < mix_list.size(); ++i) { @@ -166,8 +164,6 @@ void FrameCombiner::Combine(rtc::ArrayView mix_list, AudioFrame* audio_frame_for_mixing) { RTC_DCHECK(audio_frame_for_mixing); - LogMixingStats(mix_list, sample_rate, number_of_streams); - SetAudioFrameFields(mix_list, number_of_channels, sample_rate, number_of_streams, audio_frame_for_mixing); @@ -214,32 +210,4 @@ void FrameCombiner::Combine(rtc::ArrayView mix_list, InterleaveToAudioFrame(mixing_buffer_view, audio_frame_for_mixing); } -void FrameCombiner::LogMixingStats( - rtc::ArrayView mix_list, - int sample_rate, - size_t number_of_streams) const { - // Log every second. 
- uma_logging_counter_++; - if (uma_logging_counter_ > 1000 / AudioMixerImpl::kFrameDurationInMs) { - uma_logging_counter_ = 0; - RTC_HISTOGRAM_COUNTS_100("WebRTC.Audio.AudioMixer.NumIncomingStreams", - static_cast(number_of_streams)); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.AudioMixer.NumIncomingActiveStreams2", - rtc::dchecked_cast(mix_list.size()), /*min=*/1, /*max=*/16, - /*bucket_count=*/16); - - using NativeRate = AudioProcessing::NativeRate; - static constexpr NativeRate native_rates[] = { - NativeRate::kSampleRate8kHz, NativeRate::kSampleRate16kHz, - NativeRate::kSampleRate32kHz, NativeRate::kSampleRate48kHz}; - const auto* rate_position = std::lower_bound( - std::begin(native_rates), std::end(native_rates), sample_rate); - RTC_HISTOGRAM_ENUMERATION( - "WebRTC.Audio.AudioMixer.MixingRate", - std::distance(std::begin(native_rates), rate_position), - arraysize(native_rates)); - } -} - } // namespace webrtc diff --git a/modules/audio_mixer/frame_combiner.h b/modules/audio_mixer/frame_combiner.h index 9ddf81e41e..4c858e1d99 100644 --- a/modules/audio_mixer/frame_combiner.h +++ b/modules/audio_mixer/frame_combiner.h @@ -47,15 +47,10 @@ class FrameCombiner { kMaximumNumberOfChannels>; private: - void LogMixingStats(rtc::ArrayView mix_list, - int sample_rate, - size_t number_of_streams) const; - std::unique_ptr data_dumper_; std::unique_ptr mixing_buffer_; Limiter limiter_; const bool use_limiter_; - mutable int uma_logging_counter_ = 0; }; } // namespace webrtc diff --git a/modules/audio_processing/BUILD.gn b/modules/audio_processing/BUILD.gn index 18d99e54c1..3e6b201ab6 100644 --- a/modules/audio_processing/BUILD.gn +++ b/modules/audio_processing/BUILD.gn @@ -138,10 +138,15 @@ rtc_library("gain_controller2") { "../../rtc_base:logging", "../../rtc_base:stringutils", "../../system_wrappers:field_trial", - "agc2:adaptive_digital", + "agc2:adaptive_digital_gain_controller", + "agc2:common", "agc2:cpu_features", "agc2:fixed_digital", "agc2:gain_applier", + 
"agc2:input_volume_controller", + "agc2:noise_level_estimator", + "agc2:saturation_protector", + "agc2:speech_level_estimator", "agc2:vad_wrapper", ] } @@ -192,6 +197,7 @@ rtc_library("audio_processing") { "../../rtc_base:sanitizer", "../../rtc_base:swap_queue", "../../rtc_base:timeutils", + "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:rtc_export", "../../system_wrappers", @@ -202,9 +208,9 @@ rtc_library("audio_processing") { "aec_dump:aec_dump", "aecm:aecm_core", "agc", - "agc:analog_gain_stats_reporter", "agc:gain_control_interface", "agc:legacy_agc", + "agc2:input_volume_stats_reporter", "capture_levels_adjuster", "ns", "transient:transient_suppressor_api", @@ -415,11 +421,15 @@ if (rtc_include_tests) { "../audio_coding:neteq_input_audio_tools", "aec_dump:mock_aec_dump_unittests", "agc:agc_unittests", - "agc2:adaptive_digital_unittests", + "agc2:adaptive_digital_gain_controller_unittest", "agc2:biquad_filter_unittests", "agc2:fixed_digital_unittests", + "agc2:gain_applier_unittest", "agc2:input_volume_controller_unittests", + "agc2:input_volume_stats_reporter_unittests", "agc2:noise_estimator_unittests", + "agc2:saturation_protector_unittest", + "agc2:speech_level_estimator_unittest", "agc2:test_utils", "agc2:vad_wrapper_unittests", "agc2/rnn_vad:unittests", diff --git a/modules/audio_processing/OWNERS b/modules/audio_processing/OWNERS index ca9bc46323..f5dc59ea35 100644 --- a/modules/audio_processing/OWNERS +++ b/modules/audio_processing/OWNERS @@ -1,8 +1,8 @@ -aleloi@webrtc.org alessiob@webrtc.org +devicentepena@webrtc.org gustaf@webrtc.org henrik.lundin@webrtc.org ivoc@webrtc.org -minyue@webrtc.org +lionelk@webrtc.org peah@webrtc.org saza@webrtc.org diff --git a/modules/audio_processing/aec3/BUILD.gn b/modules/audio_processing/aec3/BUILD.gn index 679ce48747..f5eb5d5951 100644 --- a/modules/audio_processing/aec3/BUILD.gn +++ b/modules/audio_processing/aec3/BUILD.gn @@ -228,6 +228,7 @@ 
rtc_source_set("matched_filter") { deps = [ ":aec3_common", "../../../api:array_view", + "../../../rtc_base:gtest_prod", "../../../rtc_base/system:arch", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] diff --git a/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc b/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc index 810b0ae185..e2c101fb04 100644 --- a/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc +++ b/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc @@ -70,7 +70,6 @@ TEST(EchoPathDelayEstimator, DelayEstimation) { constexpr size_t kNumCaptureChannels = 1; constexpr int kSampleRateHz = 48000; constexpr size_t kNumBands = NumBandsForRate(kSampleRateHz); - Random random_generator(42U); Block render(kNumBands, kNumRenderChannels); Block capture(/*num_bands=*/1, kNumCaptureChannels); diff --git a/modules/audio_processing/aec3/matched_filter.cc b/modules/audio_processing/aec3/matched_filter.cc index c5e394ad2f..a9054825c6 100644 --- a/modules/audio_processing/aec3/matched_filter.cc +++ b/modules/audio_processing/aec3/matched_filter.cc @@ -29,7 +29,9 @@ #include "modules/audio_processing/aec3/downsampled_render_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "system_wrappers/include/field_trial.h" namespace { @@ -53,23 +55,89 @@ void UpdateAccumulatedError( } } -size_t ComputePreEchoLag(const rtc::ArrayView accumulated_error, - size_t lag, - size_t alignment_shift_winner) { +size_t ComputePreEchoLag( + const webrtc::MatchedFilter::PreEchoConfiguration& pre_echo_configuration, + const rtc::ArrayView accumulated_error, + size_t lag, + size_t alignment_shift_winner) { + RTC_DCHECK_GE(lag, alignment_shift_winner); size_t pre_echo_lag_estimate = lag - alignment_shift_winner; size_t maximum_pre_echo_lag = std::min(pre_echo_lag_estimate 
/ kAccumulatedErrorSubSampleRate, accumulated_error.size()); - for (size_t k = 1; k < maximum_pre_echo_lag; ++k) { - if (accumulated_error[k] < 0.5f * accumulated_error[k - 1] && - accumulated_error[k] < 0.5f) { - pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; + switch (pre_echo_configuration.mode) { + case 0: + // Mode 0: Pre echo lag is defined as the first coefficient with an error + // lower than a threshold with a certain decrease slope. + for (size_t k = 1; k < maximum_pre_echo_lag; ++k) { + if (accumulated_error[k] < + pre_echo_configuration.threshold * accumulated_error[k - 1] && + accumulated_error[k] < pre_echo_configuration.threshold) { + pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; + break; + } + } + break; + case 1: + // Mode 1: Pre echo lag is defined as the first coefficient with an error + // lower than a certain threshold. + for (size_t k = 0; k < maximum_pre_echo_lag; ++k) { + if (accumulated_error[k] < pre_echo_configuration.threshold) { + pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; + break; + } + } + break; + case 2: + // Mode 2: Pre echo lag is defined as the closest coefficient to the lag + // with an error lower than a certain threshold. 
+ for (int k = static_cast(maximum_pre_echo_lag) - 1; k >= 0; --k) { + if (accumulated_error[k] > pre_echo_configuration.threshold) { + break; + } + pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; + } + break; + default: + RTC_DCHECK_NOTREACHED(); break; - } } return pre_echo_lag_estimate + alignment_shift_winner; } +webrtc::MatchedFilter::PreEchoConfiguration FetchPreEchoConfiguration() { + float threshold = 0.5f; + int mode = 0; + const std::string pre_echo_configuration_field_trial = + webrtc::field_trial::FindFullName("WebRTC-Aec3PreEchoConfiguration"); + webrtc::FieldTrialParameter threshold_field_trial_parameter( + /*key=*/"threshold", /*default_value=*/threshold); + webrtc::FieldTrialParameter mode_field_trial_parameter( + /*key=*/"mode", /*default_value=*/mode); + webrtc::ParseFieldTrial( + {&threshold_field_trial_parameter, &mode_field_trial_parameter}, + pre_echo_configuration_field_trial); + float threshold_read = + static_cast(threshold_field_trial_parameter.Get()); + int mode_read = mode_field_trial_parameter.Get(); + if (threshold_read < 1.0f && threshold_read > 0.0f) { + threshold = threshold_read; + } else { + RTC_LOG(LS_ERROR) + << "AEC3: Pre echo configuration: wrong input, threshold = " + << threshold_read << "."; + } + if (mode_read >= 0 && mode_read <= 3) { + mode = mode_read; + } else { + RTC_LOG(LS_ERROR) << "AEC3: Pre echo configuration: wrong input, mode = " + << mode_read << "."; + } + RTC_LOG(LS_INFO) << "AEC3: Pre echo configuration: threshold = " << threshold + << ", mode = " << mode << "."; + return {.threshold = threshold, .mode = mode}; +} + } // namespace namespace webrtc { @@ -612,7 +680,8 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, smoothing_fast_(smoothing_fast), smoothing_slow_(smoothing_slow), matching_filter_threshold_(matching_filter_threshold), - detect_pre_echo_(detect_pre_echo) { + detect_pre_echo_(detect_pre_echo), + pre_echo_config_(FetchPreEchoConfiguration()) { 
RTC_DCHECK(data_dumper); RTC_DCHECK_LT(0, window_size_sub_blocks); RTC_DCHECK((kBlockSize % sub_block_size) == 0); @@ -753,7 +822,8 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, 1.0f / error_sum_anchor); } reported_lag_estimate_->pre_echo_lag = ComputePreEchoLag( - accumulated_error_[winner_index], winner_lag_.value(), + pre_echo_config_, accumulated_error_[winner_index], + winner_lag_.value(), winner_index * filter_intra_lag_shift_ /*alignment_shift_winner*/); } last_detected_best_lag_filter_ = winner_index; @@ -794,9 +864,10 @@ void MatchedFilter::Dump() { "aec3_correlator_error_" + std::to_string(n) + "_h"; data_dumper_->DumpRaw(dumper_error.c_str(), accumulated_error_[n]); - size_t pre_echo_lag = ComputePreEchoLag( - accumulated_error_[n], lag_estimate + n * filter_intra_lag_shift_, - n * filter_intra_lag_shift_); + size_t pre_echo_lag = + ComputePreEchoLag(pre_echo_config_, accumulated_error_[n], + lag_estimate + n * filter_intra_lag_shift_, + n * filter_intra_lag_shift_); std::string dumper_pre_lag = "aec3_correlator_pre_echo_lag_" + std::to_string(n); data_dumper_->DumpRaw(dumper_pre_lag.c_str(), pre_echo_lag); diff --git a/modules/audio_processing/aec3/matched_filter.h b/modules/audio_processing/aec3/matched_filter.h index 760d5e39fd..1560fb02f1 100644 --- a/modules/audio_processing/aec3/matched_filter.h +++ b/modules/audio_processing/aec3/matched_filter.h @@ -18,6 +18,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" +#include "rtc_base/gtest_prod_util.h" #include "rtc_base/system/arch.h" namespace webrtc { @@ -105,6 +106,11 @@ class MatchedFilter { size_t pre_echo_lag = 0; }; + struct PreEchoConfiguration { + const float threshold; + const int mode; + }; + MatchedFilter(ApmDataDumper* data_dumper, Aec3Optimization optimization, size_t sub_block_size, @@ -147,6 +153,15 @@ class MatchedFilter { size_t downsampling_factor) const; private: + 
FRIEND_TEST_ALL_PREFIXES(MatchedFilterFieldTrialTest, + PreEchoConfigurationTest); + FRIEND_TEST_ALL_PREFIXES(MatchedFilterFieldTrialTest, + WrongPreEchoConfigurationTest); + + // Only for testing. Gets the pre echo detection configuration. + const PreEchoConfiguration& GetPreEchoConfiguration() const { + return pre_echo_config_; + } void Dump(); ApmDataDumper* const data_dumper_; @@ -166,6 +181,7 @@ class MatchedFilter { const float smoothing_slow_; const float matching_filter_threshold_; const bool detect_pre_echo_; + const PreEchoConfiguration pre_echo_config_; }; } // namespace webrtc diff --git a/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc b/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc index 17f517a001..bea7868a91 100644 --- a/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc +++ b/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc @@ -18,6 +18,8 @@ namespace webrtc { namespace { +constexpr int kPreEchoHistogramDataNotUpdated = -1; + int GetDownSamplingBlockSizeLog2(int down_sampling_factor) { int down_sampling_factor_log2 = 0; down_sampling_factor >>= 1; @@ -129,7 +131,7 @@ MatchedFilterLagAggregator::PreEchoLagAggregator::PreEchoLagAggregator( void MatchedFilterLagAggregator::PreEchoLagAggregator::Reset() { std::fill(histogram_.begin(), histogram_.end(), 0); - histogram_data_.fill(0); + histogram_data_.fill(kPreEchoHistogramDataNotUpdated); histogram_data_index_ = 0; pre_echo_candidate_ = 0; } @@ -141,7 +143,10 @@ void MatchedFilterLagAggregator::PreEchoLagAggregator::Aggregate( pre_echo_block_size < static_cast(histogram_.size())); pre_echo_block_size = rtc::SafeClamp(pre_echo_block_size, 0, histogram_.size() - 1); - if (histogram_[histogram_data_[histogram_data_index_]] > 0) { + // Remove the oldest point from the `histogram_`, it ignores the initial + // points where no updates have been done to the `histogram_data_` array. 
+ if (histogram_data_[histogram_data_index_] != + kPreEchoHistogramDataNotUpdated) { --histogram_[histogram_data_[histogram_data_index_]]; } histogram_data_[histogram_data_index_] = pre_echo_block_size; diff --git a/modules/audio_processing/aec3/matched_filter_unittest.cc b/modules/audio_processing/aec3/matched_filter_unittest.cc index b080308191..0a04c7809c 100644 --- a/modules/audio_processing/aec3/matched_filter_unittest.cc +++ b/modules/audio_processing/aec3/matched_filter_unittest.cc @@ -27,6 +27,7 @@ #include "rtc_base/random.h" #include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/cpu_features_wrapper.h" +#include "test/field_trial.h" #include "test/gtest.h" namespace webrtc { @@ -555,4 +556,57 @@ INSTANTIATE_TEST_SUITE_P(_, #endif } // namespace aec3 + +TEST(MatchedFilterFieldTrialTest, PreEchoConfigurationTest) { + float threshold_in = 0.1f; + int mode_in = 2; + rtc::StringBuilder field_trial_name; + field_trial_name << "WebRTC-Aec3PreEchoConfiguration/threshold:" + << threshold_in << ",mode:" << mode_in << "/"; + webrtc::test::ScopedFieldTrials field_trials(field_trial_name.str()); + ApmDataDumper data_dumper(0); + EchoCanceller3Config config; + MatchedFilter matched_filter( + &data_dumper, DetectOptimization(), + kBlockSize / config.delay.down_sampling_factor, + aec3::kWindowSizeSubBlocks, aec3::kNumMatchedFilters, + aec3::kAlignmentShiftSubBlocks, + config.render_levels.poor_excitation_render_limit, + config.delay.delay_estimate_smoothing, + config.delay.delay_estimate_smoothing_delay_found, + config.delay.delay_candidate_detection_threshold, + config.delay.detect_pre_echo); + + auto& pre_echo_config = matched_filter.GetPreEchoConfiguration(); + EXPECT_EQ(pre_echo_config.threshold, threshold_in); + EXPECT_EQ(pre_echo_config.mode, mode_in); +} + +TEST(MatchedFilterFieldTrialTest, WrongPreEchoConfigurationTest) { + constexpr float kDefaultThreshold = 0.5f; + constexpr int kDefaultMode = 0; + float threshold_in = -0.1f; + int 
mode_in = 5; + rtc::StringBuilder field_trial_name; + field_trial_name << "WebRTC-Aec3PreEchoConfiguration/threshold:" + << threshold_in << ",mode:" << mode_in << "/"; + webrtc::test::ScopedFieldTrials field_trials(field_trial_name.str()); + ApmDataDumper data_dumper(0); + EchoCanceller3Config config; + MatchedFilter matched_filter( + &data_dumper, DetectOptimization(), + kBlockSize / config.delay.down_sampling_factor, + aec3::kWindowSizeSubBlocks, aec3::kNumMatchedFilters, + aec3::kAlignmentShiftSubBlocks, + config.render_levels.poor_excitation_render_limit, + config.delay.delay_estimate_smoothing, + config.delay.delay_estimate_smoothing_delay_found, + config.delay.delay_candidate_detection_threshold, + config.delay.detect_pre_echo); + + auto& pre_echo_config = matched_filter.GetPreEchoConfiguration(); + EXPECT_EQ(pre_echo_config.threshold, kDefaultThreshold); + EXPECT_EQ(pre_echo_config.mode, kDefaultMode); +} + } // namespace webrtc diff --git a/modules/audio_processing/agc/BUILD.gn b/modules/audio_processing/agc/BUILD.gn index 2a4ec059ff..508f901b08 100644 --- a/modules/audio_processing/agc/BUILD.gn +++ b/modules/audio_processing/agc/BUILD.gn @@ -36,25 +36,12 @@ rtc_library("agc") { "../../../system_wrappers:metrics", "../agc2:clipping_predictor", "../agc2:gain_map", + "../agc2:input_volume_stats_reporter", "../vad", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } -rtc_library("analog_gain_stats_reporter") { - sources = [ - "analog_gain_stats_reporter.cc", - "analog_gain_stats_reporter.h", - ] - deps = [ - "../../../rtc_base:gtest_prod", - "../../../rtc_base:logging", - "../../../rtc_base:safe_minmax", - "../../../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - rtc_library("level_estimation") { sources = [ "agc.cc", @@ -109,7 +96,6 @@ if (rtc_include_tests) { testonly = true sources = [ "agc_manager_direct_unittest.cc", - "analog_gain_stats_reporter_unittest.cc", 
"loudness_histogram_unittest.cc", "mock_agc.h", ] @@ -117,7 +103,6 @@ if (rtc_include_tests) { deps = [ ":agc", - ":analog_gain_stats_reporter", ":gain_control_interface", ":level_estimation", "..:mocks", diff --git a/modules/audio_processing/agc/agc_manager_direct.cc b/modules/audio_processing/agc/agc_manager_direct.cc index 4114eaacf9..b8ad4a8bb9 100644 --- a/modules/audio_processing/agc/agc_manager_direct.cc +++ b/modules/audio_processing/agc/agc_manager_direct.cc @@ -17,6 +17,7 @@ #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc/gain_control.h" #include "modules/audio_processing/agc2/gain_map_internal.h" +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -87,10 +88,6 @@ absl::optional GetMinMicLevelOverride() { } } -int ClampLevel(int mic_level, int min_mic_level) { - return rtc::SafeClamp(mic_level, min_mic_level, kMaxMicLevel); -} - int LevelFromGainError(int gain_error, int level, int min_mic_level) { RTC_DCHECK_GE(level, 0); RTC_DCHECK_LE(level, kMaxMicLevel); @@ -164,7 +161,6 @@ int GetSpeechLevelErrorDb(float speech_level_dbfs, float speech_probability) { } // namespace MonoAgc::MonoAgc(ApmDataDumper* data_dumper, - int startup_min_level, int clipped_level_min, bool disable_digital_adaptive, int min_mic_level) @@ -176,7 +172,6 @@ MonoAgc::MonoAgc(ApmDataDumper* data_dumper, target_compression_(kDefaultCompressionGain), compression_(target_compression_), compression_accumulator_(compression_), - startup_min_level_(ClampLevel(startup_min_level, min_mic_level_)), clipped_level_min_(clipped_level_min) {} MonoAgc::~MonoAgc() = default; @@ -347,9 +342,8 @@ int MonoAgc::CheckVolumeAndReset() { } RTC_DLOG(LS_INFO) << "[agc] Initial GetMicVolume()=" << level; - int minLevel = startup_ ? 
startup_min_level_ : min_mic_level_; - if (level < minLevel) { - level = minLevel; + if (level < min_mic_level_) { + level = min_mic_level_; RTC_DLOG(LS_INFO) << "[agc] Initial volume too low, raising to " << level; recommended_input_volume_ = level; } @@ -414,22 +408,12 @@ void MonoAgc::UpdateGain(int rms_error_db) { int old_level = level_; SetLevel(LevelFromGainError(residual_gain, level_, min_mic_level_)); if (old_level != level_) { - // level_ was updated by SetLevel; log the new value. - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.AgcSetLevel", level_, 1, - kMaxMicLevel, 50); // Reset the AGC since the level has changed. agc_->Reset(); } } void MonoAgc::UpdateCompressor() { - calls_since_last_gain_log_++; - if (calls_since_last_gain_log_ == 100) { - calls_since_last_gain_log_ = 0; - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc.DigitalGainApplied", - compression_, 0, kMaxCompressionGain, - kMaxCompressionGain + 1); - } if (compression_ == target_compression_) { return; } @@ -454,9 +438,6 @@ void MonoAgc::UpdateCompressor() { // Set the new compression gain. if (new_compression != compression_) { - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc.DigitalGainUpdated", - new_compression, 0, kMaxCompressionGain, - kMaxCompressionGain + 1); compression_ = new_compression; compression_accumulator_ = new_compression; new_compression_to_set_ = compression_; @@ -504,15 +485,12 @@ AgcManagerDirect::AgcManagerDirect(int num_capture_channels, << " (overridden: " << (min_mic_level_override_.has_value() ? "yes" : "no") << ")"; - RTC_LOG(LS_INFO) << "[agc] Startup min volume: " - << analog_config.startup_min_volume; for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { ApmDataDumper* data_dumper_ch = ch == 0 ? 
data_dumper_.get() : nullptr; channel_agcs_[ch] = std::make_unique<MonoAgc>( - data_dumper_ch, analog_config.startup_min_volume, - analog_config.clipped_level_min, disable_digital_adaptive_, - min_mic_level); + data_dumper_ch, analog_config.clipped_level_min, + disable_digital_adaptive_, min_mic_level); } RTC_DCHECK(!channel_agcs_.empty()); RTC_DCHECK_GT(clipped_level_step_, 0); @@ -647,6 +625,7 @@ void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, absl::optional<float> speech_probability, absl::optional<float> speech_level_dbfs) { AggregateChannelLevels(); + const int volume_after_clipping_handling = recommended_input_volume_; if (!capture_output_used_) { return; @@ -669,6 +648,12 @@ void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, } AggregateChannelLevels(); + if (volume_after_clipping_handling != recommended_input_volume_) { + // The recommended input volume was adjusted in order to match the target + // level. + UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget( + recommended_input_volume_); + } } absl::optional<int> AgcManagerDirect::GetDigitalComressionGain() { diff --git a/modules/audio_processing/agc/agc_manager_direct.h b/modules/audio_processing/agc/agc_manager_direct.h index 53d63adef4..adb2f5a63f 100644 --- a/modules/audio_processing/agc/agc_manager_direct.h +++ b/modules/audio_processing/agc/agc_manager_direct.h @@ -189,7 +189,6 @@ class AgcManagerDirect final { class MonoAgc { public: MonoAgc(ApmDataDumper* data_dumper, - int startup_min_level, int clipped_level_min, bool disable_digital_adaptive, int min_mic_level); @@ -228,7 +227,6 @@ class MonoAgc { // Only used for testing.
void set_agc(Agc* agc) { agc_.reset(agc); } int min_mic_level() const { return min_mic_level_; } - int startup_min_level() const { return startup_min_level_; } private: // Sets a new input volume, after first checking that it hasn't been updated @@ -256,8 +254,6 @@ class MonoAgc { bool capture_output_used_ = true; bool check_volume_on_next_process_ = true; bool startup_ = true; - int startup_min_level_; - int calls_since_last_gain_log_ = 0; // TODO(bugs.webrtc.org/7494): Create a separate member for the applied // input volume. diff --git a/modules/audio_processing/agc/agc_manager_direct_unittest.cc b/modules/audio_processing/agc/agc_manager_direct_unittest.cc index 685f6a9fec..379df85463 100644 --- a/modules/audio_processing/agc/agc_manager_direct_unittest.cc +++ b/modules/audio_processing/agc/agc_manager_direct_unittest.cc @@ -1443,8 +1443,6 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentDefault) { CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold, kClippedWaitFrames); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); - EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), - kInitialInputVolume); } TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentDisabled) { @@ -1455,8 +1453,6 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentDisabled) { CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold, kClippedWaitFrames); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); - EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), - kInitialInputVolume); } } @@ -1469,8 +1465,6 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentOutOfRangeAbove) { CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold, kClippedWaitFrames); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); - EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), - kInitialInputVolume); } // Checks that a field-trial parameter outside 
of the valid range [0,255] is @@ -1482,8 +1476,6 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentOutOfRangeBelow) { CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold, kClippedWaitFrames); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); - EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), - kInitialInputVolume); } // Verifies that a valid experiment changes the minimum microphone level. The @@ -1500,8 +1492,6 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentEnabled50) { CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold, kClippedWaitFrames); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevelOverride); - EXPECT_EQ(manager->channel_agcs_[0]->startup_min_level(), - kInitialInputVolume); } } diff --git a/modules/audio_processing/agc/analog_gain_stats_reporter.cc b/modules/audio_processing/agc/analog_gain_stats_reporter.cc deleted file mode 100644 index 0d8753a7c8..0000000000 --- a/modules/audio_processing/agc/analog_gain_stats_reporter.cc +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_processing/agc/analog_gain_stats_reporter.h" - -#include <cmath> - -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { -namespace { - -constexpr int kFramesIn60Seconds = 6000; -constexpr int kMinGain = 0; -constexpr int kMaxGain = 255; -constexpr int kMaxUpdate = kMaxGain - kMinGain; - -float ComputeAverageUpdate(int sum_updates, int num_updates) { - RTC_DCHECK_GE(sum_updates, 0); - RTC_DCHECK_LE(sum_updates, kMaxUpdate * kFramesIn60Seconds); - RTC_DCHECK_GE(num_updates, 0); - RTC_DCHECK_LE(num_updates, kFramesIn60Seconds); - if (num_updates == 0) { - return 0.0f; - } - return std::round(static_cast<float>(sum_updates) / - static_cast<float>(num_updates)); -} -} // namespace - -AnalogGainStatsReporter::AnalogGainStatsReporter() = default; - -AnalogGainStatsReporter::~AnalogGainStatsReporter() = default; - -void AnalogGainStatsReporter::UpdateStatistics(int analog_mic_level) { - RTC_DCHECK_GE(analog_mic_level, kMinGain); - RTC_DCHECK_LE(analog_mic_level, kMaxGain); - if (previous_analog_mic_level_.has_value() && - analog_mic_level != previous_analog_mic_level_.value()) { - const int level_change = - analog_mic_level - previous_analog_mic_level_.value(); - if (level_change < 0) { - ++level_update_stats_.num_decreases; - level_update_stats_.sum_decreases -= level_change; - } else { - ++level_update_stats_.num_increases; - level_update_stats_.sum_increases += level_change; - } - } - // Periodically log analog gain change metrics.
- if (++log_level_update_stats_counter_ >= kFramesIn60Seconds) { - LogLevelUpdateStats(); - level_update_stats_ = {}; - log_level_update_stats_counter_ = 0; - } - previous_analog_mic_level_ = analog_mic_level; -} - -void AnalogGainStatsReporter::LogLevelUpdateStats() const { - const float average_decrease = ComputeAverageUpdate( - level_update_stats_.sum_decreases, level_update_stats_.num_decreases); - const float average_increase = ComputeAverageUpdate( - level_update_stats_.sum_increases, level_update_stats_.num_increases); - const int num_updates = - level_update_stats_.num_decreases + level_update_stats_.num_increases; - const float average_update = ComputeAverageUpdate( - level_update_stats_.sum_decreases + level_update_stats_.sum_increases, - num_updates); - RTC_DLOG(LS_INFO) << "Analog gain update rate: " - << "num_updates=" << num_updates - << ", num_decreases=" << level_update_stats_.num_decreases - << ", num_increases=" << level_update_stats_.num_increases; - RTC_DLOG(LS_INFO) << "Analog gain update average: " - << "average_update=" << average_update - << ", average_decrease=" << average_decrease - << ", average_increase=" << average_increase; - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainDecreaseRate", - /*sample=*/level_update_stats_.num_decreases, - /*min=*/1, - /*max=*/kFramesIn60Seconds, - /*bucket_count=*/50); - if (level_update_stats_.num_decreases > 0) { - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainDecreaseAverage", - /*sample=*/average_decrease, - /*min=*/1, - /*max=*/kMaxUpdate, - /*bucket_count=*/50); - } - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainIncreaseRate", - /*sample=*/level_update_stats_.num_increases, - /*min=*/1, - /*max=*/kFramesIn60Seconds, - /*bucket_count=*/50); - if (level_update_stats_.num_increases > 0) { - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainIncreaseAverage", - /*sample=*/average_increase, - /*min=*/1, - /*max=*/kMaxUpdate, - 
/*bucket_count=*/50); - } - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainUpdateRate", - /*sample=*/num_updates, - /*min=*/1, - /*max=*/kFramesIn60Seconds, - /*bucket_count=*/50); - if (num_updates > 0) { - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainUpdateAverage", - /*sample=*/average_update, - /*min=*/1, - /*max=*/kMaxUpdate, - /*bucket_count=*/50); - } -} - -} // namespace webrtc diff --git a/modules/audio_processing/agc/analog_gain_stats_reporter.h b/modules/audio_processing/agc/analog_gain_stats_reporter.h deleted file mode 100644 index c9442e8a43..0000000000 --- a/modules/audio_processing/agc/analog_gain_stats_reporter.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC_ANALOG_GAIN_STATS_REPORTER_H_ -#define MODULES_AUDIO_PROCESSING_AGC_ANALOG_GAIN_STATS_REPORTER_H_ - -#include "absl/types/optional.h" -#include "rtc_base/gtest_prod_util.h" - -namespace webrtc { - -// Analog gain statistics calculator. Computes aggregate stats based on the -// framewise mic levels processed in `UpdateStatistics()`. Periodically logs the -// statistics into a histogram. -class AnalogGainStatsReporter { - public: - AnalogGainStatsReporter(); - AnalogGainStatsReporter(const AnalogGainStatsReporter&) = delete; - AnalogGainStatsReporter operator=(const AnalogGainStatsReporter&) = delete; - ~AnalogGainStatsReporter(); - - // Updates the stats based on the `analog_mic_level`. Periodically logs the - // stats into a histogram. 
- void UpdateStatistics(int analog_mic_level); - - private: - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsForEmptyStats); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterNoGainChange); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterGainIncrease); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterGainDecrease); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterReset); - - // Stores analog gain update stats to enable calculation of update rate and - // average update separately for gain increases and decreases. - struct LevelUpdateStats { - int num_decreases = 0; - int num_increases = 0; - int sum_decreases = 0; - int sum_increases = 0; - } level_update_stats_; - - // Returns a copy of the stored statistics. Use only for testing. - const LevelUpdateStats level_update_stats() const { - return level_update_stats_; - } - - // Computes aggregate stat and logs them into a histogram. - void LogLevelUpdateStats() const; - - int log_level_update_stats_counter_ = 0; - absl::optional previous_analog_mic_level_ = absl::nullopt; -}; -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC_ANALOG_GAIN_STATS_REPORTER_H_ diff --git a/modules/audio_processing/agc/analog_gain_stats_reporter_unittest.cc b/modules/audio_processing/agc/analog_gain_stats_reporter_unittest.cc deleted file mode 100644 index bc9559094b..0000000000 --- a/modules/audio_processing/agc/analog_gain_stats_reporter_unittest.cc +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc/analog_gain_stats_reporter.h" - -#include "system_wrappers/include/metrics.h" -#include "test/gmock.h" - -namespace webrtc { -namespace { - -constexpr int kFramesIn60Seconds = 6000; - -class AnalogGainStatsReporterTest : public ::testing::Test { - public: - AnalogGainStatsReporterTest() {} - - protected: - void SetUp() override { metrics::Reset(); } -}; - -TEST_F(AnalogGainStatsReporterTest, CheckLogLevelUpdateStatsEmpty) { - AnalogGainStatsReporter stats_reporter; - constexpr int kMicLevel = 10; - stats_reporter.UpdateStatistics(kMicLevel); - // Update almost until the periodic logging and reset. - for (int i = 0; i < kFramesIn60Seconds - 2; i += 2) { - stats_reporter.UpdateStatistics(kMicLevel + 2); - stats_reporter.UpdateStatistics(kMicLevel); - } - EXPECT_METRIC_THAT(metrics::Samples("WebRTC.Audio.ApmAnalogGainUpdateRate"), - ::testing::ElementsAre()); - EXPECT_METRIC_THAT(metrics::Samples("WebRTC.Audio.ApmAnalogGainDecreaseRate"), - ::testing::ElementsAre()); - EXPECT_METRIC_THAT(metrics::Samples("WebRTC.Audio.ApmAnalogGainIncreaseRate"), - ::testing::ElementsAre()); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainUpdateAverage"), - ::testing::ElementsAre()); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainDecreaseAverage"), - ::testing::ElementsAre()); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainIncreaseAverage"), - ::testing::ElementsAre()); -} - -TEST_F(AnalogGainStatsReporterTest, CheckLogLevelUpdateStatsNotEmpty) { - AnalogGainStatsReporter stats_reporter; - constexpr int kMicLevel = 10; - stats_reporter.UpdateStatistics(kMicLevel); - // Update until periodic logging. 
- for (int i = 0; i < kFramesIn60Seconds; i += 2) { - stats_reporter.UpdateStatistics(kMicLevel + 2); - stats_reporter.UpdateStatistics(kMicLevel); - } - // Update until periodic logging. - for (int i = 0; i < kFramesIn60Seconds; i += 2) { - stats_reporter.UpdateStatistics(kMicLevel + 3); - stats_reporter.UpdateStatistics(kMicLevel); - } - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainUpdateRate"), - ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds - 1, 1), - ::testing::Pair(kFramesIn60Seconds, 1))); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainDecreaseRate"), - ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds / 2 - 1, 1), - ::testing::Pair(kFramesIn60Seconds / 2, 1))); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainIncreaseRate"), - ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds / 2, 2))); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainUpdateAverage"), - ::testing::ElementsAre(::testing::Pair(2, 1), ::testing::Pair(3, 1))); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainDecreaseAverage"), - ::testing::ElementsAre(::testing::Pair(2, 1), ::testing::Pair(3, 1))); - EXPECT_METRIC_THAT( - metrics::Samples("WebRTC.Audio.ApmAnalogGainIncreaseAverage"), - ::testing::ElementsAre(::testing::Pair(2, 1), ::testing::Pair(3, 1))); -} -} // namespace - -TEST_F(AnalogGainStatsReporterTest, CheckLevelUpdateStatsForEmptyStats) { - AnalogGainStatsReporter stats_reporter; - const auto& update_stats = stats_reporter.level_update_stats(); - EXPECT_EQ(update_stats.num_decreases, 0); - EXPECT_EQ(update_stats.sum_decreases, 0); - EXPECT_EQ(update_stats.num_increases, 0); - EXPECT_EQ(update_stats.sum_increases, 0); -} - -TEST_F(AnalogGainStatsReporterTest, CheckLevelUpdateStatsAfterNoGainChange) { - constexpr int kMicLevel = 10; - AnalogGainStatsReporter stats_reporter; - stats_reporter.UpdateStatistics(kMicLevel); - 
stats_reporter.UpdateStatistics(kMicLevel); - stats_reporter.UpdateStatistics(kMicLevel); - const auto& update_stats = stats_reporter.level_update_stats(); - EXPECT_EQ(update_stats.num_decreases, 0); - EXPECT_EQ(update_stats.sum_decreases, 0); - EXPECT_EQ(update_stats.num_increases, 0); - EXPECT_EQ(update_stats.sum_increases, 0); -} - -TEST_F(AnalogGainStatsReporterTest, CheckLevelUpdateStatsAfterGainIncrease) { - constexpr int kMicLevel = 10; - AnalogGainStatsReporter stats_reporter; - stats_reporter.UpdateStatistics(kMicLevel); - stats_reporter.UpdateStatistics(kMicLevel + 4); - stats_reporter.UpdateStatistics(kMicLevel + 5); - const auto& update_stats = stats_reporter.level_update_stats(); - EXPECT_EQ(update_stats.num_decreases, 0); - EXPECT_EQ(update_stats.sum_decreases, 0); - EXPECT_EQ(update_stats.num_increases, 2); - EXPECT_EQ(update_stats.sum_increases, 5); -} - -TEST_F(AnalogGainStatsReporterTest, CheckLevelUpdateStatsAfterGainDecrease) { - constexpr int kMicLevel = 10; - AnalogGainStatsReporter stats_reporter; - stats_reporter.UpdateStatistics(kMicLevel); - stats_reporter.UpdateStatistics(kMicLevel - 4); - stats_reporter.UpdateStatistics(kMicLevel - 5); - const auto& stats_update = stats_reporter.level_update_stats(); - EXPECT_EQ(stats_update.num_decreases, 2); - EXPECT_EQ(stats_update.sum_decreases, 5); - EXPECT_EQ(stats_update.num_increases, 0); - EXPECT_EQ(stats_update.sum_increases, 0); -} - -TEST_F(AnalogGainStatsReporterTest, CheckLevelUpdateStatsAfterReset) { - AnalogGainStatsReporter stats_reporter; - constexpr int kMicLevel = 10; - stats_reporter.UpdateStatistics(kMicLevel); - // Update until the periodic reset. 
- for (int i = 0; i < kFramesIn60Seconds - 2; i += 2) { - stats_reporter.UpdateStatistics(kMicLevel + 2); - stats_reporter.UpdateStatistics(kMicLevel); - } - const auto& stats_before_reset = stats_reporter.level_update_stats(); - EXPECT_EQ(stats_before_reset.num_decreases, kFramesIn60Seconds / 2 - 1); - EXPECT_EQ(stats_before_reset.sum_decreases, kFramesIn60Seconds - 2); - EXPECT_EQ(stats_before_reset.num_increases, kFramesIn60Seconds / 2 - 1); - EXPECT_EQ(stats_before_reset.sum_increases, kFramesIn60Seconds - 2); - stats_reporter.UpdateStatistics(kMicLevel + 2); - const auto& stats_during_reset = stats_reporter.level_update_stats(); - EXPECT_EQ(stats_during_reset.num_decreases, 0); - EXPECT_EQ(stats_during_reset.sum_decreases, 0); - EXPECT_EQ(stats_during_reset.num_increases, 0); - EXPECT_EQ(stats_during_reset.sum_increases, 0); - stats_reporter.UpdateStatistics(kMicLevel); - stats_reporter.UpdateStatistics(kMicLevel + 3); - const auto& stats_after_reset = stats_reporter.level_update_stats(); - EXPECT_EQ(stats_after_reset.num_decreases, 1); - EXPECT_EQ(stats_after_reset.sum_decreases, 2); - EXPECT_EQ(stats_after_reset.num_increases, 1); - EXPECT_EQ(stats_after_reset.sum_increases, 3); -} - -} // namespace webrtc diff --git a/modules/audio_processing/agc2/BUILD.gn b/modules/audio_processing/agc2/BUILD.gn index d39e3279b4..bd59ad3dae 100644 --- a/modules/audio_processing/agc2/BUILD.gn +++ b/modules/audio_processing/agc2/BUILD.gn @@ -8,21 +8,59 @@ import("../../../webrtc.gni") -group("agc2") { +rtc_library("speech_level_estimator") { + sources = [ + "speech_level_estimator.cc", + "speech_level_estimator.h", + ] + + visibility = [ + "..:gain_controller2", + "./*", + ] + + configs += [ "..:apm_debug_dump" ] + deps = [ - ":adaptive_digital", - ":fixed_digital", + ":common", + "..:api", + "..:apm_logging", + "../../../api:array_view", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + "../../../rtc_base:safe_minmax", ] } -rtc_library("adaptive_digital") { 
+rtc_library("adaptive_digital_gain_controller") { sources = [ - "adaptive_digital_gain_applier.cc", - "adaptive_digital_gain_applier.h", "adaptive_digital_gain_controller.cc", "adaptive_digital_gain_controller.h", - "adaptive_mode_level_estimator.cc", - "adaptive_mode_level_estimator.h", + ] + + visibility = [ + "..:gain_controller2", + "./*", + ] + + configs += [ "..:apm_debug_dump" ] + + deps = [ + ":common", + ":gain_applier", + "..:api", + "..:apm_logging", + "..:audio_frame_view", + "../../../common_audio", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + "../../../rtc_base:safe_minmax", + "../../../system_wrappers:metrics", + ] +} + +rtc_library("saturation_protector") { + sources = [ "saturation_protector.cc", "saturation_protector.h", "saturation_protector_buffer.cc", @@ -38,20 +76,10 @@ rtc_library("adaptive_digital") { deps = [ ":common", - ":cpu_features", - ":gain_applier", - ":noise_level_estimator", - ":vad_wrapper", - "..:api", "..:apm_logging", - "..:audio_frame_view", - "../../../api:array_view", - "../../../common_audio", "../../../rtc_base:checks", - "../../../rtc_base:logging", "../../../rtc_base:safe_compare", "../../../rtc_base:safe_minmax", - "../../../system_wrappers:metrics", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] @@ -164,6 +192,8 @@ rtc_source_set("gain_map") { rtc_library("input_volume_controller") { sources = [ + "input_volume_controller.cc", + "input_volume_controller.h", "speech_probability_buffer.cc", "speech_probability_buffer.h", ] @@ -173,10 +203,27 @@ rtc_library("input_volume_controller") { "./*", ] + configs += [ "..:apm_debug_dump" ] + deps = [ + ":clipping_predictor", + ":gain_map", + ":input_volume_stats_reporter", + "..:api", + "..:audio_buffer", + "..:audio_frame_view", + "../../../api:array_view", + "../../../rtc_base:checks", "../../../rtc_base:checks", "../../../rtc_base:gtest_prod", + "../../../rtc_base:gtest_prod", + "../../../rtc_base:logging", + 
"../../../rtc_base:safe_minmax", + "../../../system_wrappers:field_trial", + "../../../system_wrappers:metrics", ] + + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("noise_level_estimator") { @@ -184,7 +231,6 @@ rtc_library("noise_level_estimator") { "noise_level_estimator.cc", "noise_level_estimator.h", ] - visibility = [ "./*" ] deps = [ ":biquad_filter", "..:apm_logging", @@ -194,6 +240,11 @@ rtc_library("noise_level_estimator") { "../../../system_wrappers", ] + visibility = [ + "..:gain_controller2", + "./*", + ] + configs += [ "..:apm_debug_dump" ] } @@ -244,28 +295,66 @@ rtc_library("cpu_features") { ] } -rtc_library("adaptive_digital_unittests") { +rtc_library("speech_level_estimator_unittest") { testonly = true configs += [ "..:apm_debug_dump" ] - sources = [ - "adaptive_digital_gain_applier_unittest.cc", - "adaptive_mode_level_estimator_unittest.cc", - "gain_applier_unittest.cc", - "saturation_protector_buffer_unittest.cc", - "saturation_protector_unittest.cc", - ] + sources = [ "speech_level_estimator_unittest.cc" ] deps = [ - ":adaptive_digital", ":common", - ":gain_applier", + ":speech_level_estimator", + "..:api", + "..:apm_logging", + "../../../rtc_base:gunit_helpers", + "../../../test:test_support", + ] +} + +rtc_library("adaptive_digital_gain_controller_unittest") { + testonly = true + configs += [ "..:apm_debug_dump" ] + + sources = [ "adaptive_digital_gain_controller_unittest.cc" ] + + deps = [ + ":adaptive_digital_gain_controller", + ":common", ":test_utils", "..:api", "..:apm_logging", "..:audio_frame_view", - "../../../api:array_view", "../../../common_audio", - "../../../rtc_base:checks", + "../../../rtc_base:gunit_helpers", + "../../../test:test_support", + ] +} + +rtc_library("gain_applier_unittest") { + testonly = true + configs += [ "..:apm_debug_dump" ] + + sources = [ "gain_applier_unittest.cc" ] + deps = [ + ":gain_applier", + ":test_utils", + "..:audio_frame_view", + "../../../rtc_base:gunit_helpers", + 
"../../../test:test_support", + ] +} + +rtc_library("saturation_protector_unittest") { + testonly = true + configs += [ "..:apm_debug_dump" ] + + sources = [ + "saturation_protector_buffer_unittest.cc", + "saturation_protector_unittest.cc", + ] + deps = [ + ":common", + ":saturation_protector", + "..:apm_logging", "../../../rtc_base:gunit_helpers", "../../../test:test_support", ] @@ -314,6 +403,7 @@ rtc_library("input_volume_controller_unittests") { sources = [ "clipping_predictor_level_buffer_unittest.cc", "clipping_predictor_unittest.cc", + "input_volume_controller_unittest.cc", "speech_probability_buffer_unittest.cc", ] @@ -323,10 +413,18 @@ rtc_library("input_volume_controller_unittests") { ":clipping_predictor", ":gain_map", ":input_volume_controller", + "..:api", + "../../../api:array_view", "../../../rtc_base:checks", "../../../rtc_base:random", "../../../rtc_base:safe_conversions", + "../../../rtc_base:safe_minmax", + "../../../rtc_base:stringutils", + "../../../system_wrappers:metrics", + "../../../test:field_trial", + "../../../test:fileutils", "../../../test:test_support", + "//testing/gtest", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] @@ -381,3 +479,33 @@ rtc_library("test_utils") { "../../../rtc_base:random", ] } + +rtc_library("input_volume_stats_reporter") { + sources = [ + "input_volume_stats_reporter.cc", + "input_volume_stats_reporter.h", + ] + deps = [ + "../../../rtc_base:gtest_prod", + "../../../rtc_base:logging", + "../../../rtc_base:safe_minmax", + "../../../rtc_base:stringutils", + "../../../system_wrappers:metrics", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("input_volume_stats_reporter_unittests") { + testonly = true + sources = [ "input_volume_stats_reporter_unittest.cc" ] + deps = [ + ":input_volume_stats_reporter", + "../../../rtc_base:stringutils", + "../../../system_wrappers:metrics", + "../../../test:test_support", + ] + 
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc b/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc deleted file mode 100644 index a34f598874..0000000000 --- a/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" - -#include - -#include "common_audio/include/audio_util.h" -#include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { -namespace { - -using AdaptiveDigitalConfig = - AudioProcessing::Config::GainController2::AdaptiveDigital; - -constexpr int kHeadroomHistogramMin = 0; -constexpr int kHeadroomHistogramMax = 50; -constexpr int kGainDbHistogramMax = 30; - -// Computes the gain for `input_level_dbfs` to reach `-config.headroom_db`. -// Clamps the gain in [0, `config.max_gain_db`]. `config.headroom_db` is a -// safety margin to allow transient peaks to exceed the target peak level -// without clipping. -float ComputeGainDb(float input_level_dbfs, - const AdaptiveDigitalConfig& config) { - // If the level is very low, apply the maximum gain. 
- if (input_level_dbfs < -(config.headroom_db + config.max_gain_db)) { - return config.max_gain_db; - } - // We expect to end up here most of the time: the level is below - // -headroom, but we can boost it to -headroom. - if (input_level_dbfs < -config.headroom_db) { - return -config.headroom_db - input_level_dbfs; - } - // The level is too high and we can't boost. - RTC_DCHECK_GE(input_level_dbfs, -config.headroom_db); - return 0.0f; -} - -// Returns `target_gain_db` if applying such a gain to `input_noise_level_dbfs` -// does not exceed `max_output_noise_level_dbfs`. Otherwise lowers and returns -// `target_gain_db` so that the output noise level equals -// `max_output_noise_level_dbfs`. -float LimitGainByNoise(float target_gain_db, - float input_noise_level_dbfs, - float max_output_noise_level_dbfs, - ApmDataDumper& apm_data_dumper) { - const float max_allowed_gain_db = - max_output_noise_level_dbfs - input_noise_level_dbfs; - apm_data_dumper.DumpRaw("agc2_adaptive_gain_applier_max_allowed_gain_db", - max_allowed_gain_db); - return std::min(target_gain_db, std::max(max_allowed_gain_db, 0.0f)); -} - -float LimitGainByLowConfidence(float target_gain_db, - float last_gain_db, - float limiter_audio_level_dbfs, - bool estimate_is_confident) { - if (estimate_is_confident || - limiter_audio_level_dbfs <= kLimiterThresholdForAgcGainDbfs) { - return target_gain_db; - } - const float limiter_level_dbfs_before_gain = - limiter_audio_level_dbfs - last_gain_db; - - // Compute a new gain so that `limiter_level_dbfs_before_gain` + - // `new_target_gain_db` is not great than `kLimiterThresholdForAgcGainDbfs`. - const float new_target_gain_db = std::max( - kLimiterThresholdForAgcGainDbfs - limiter_level_dbfs_before_gain, 0.0f); - return std::min(new_target_gain_db, target_gain_db); -} - -// Computes how the gain should change during this frame. -// Return the gain difference in db to 'last_gain_db'. 
-float ComputeGainChangeThisFrameDb(float target_gain_db, - float last_gain_db, - bool gain_increase_allowed, - float max_gain_decrease_db, - float max_gain_increase_db) { - RTC_DCHECK_GT(max_gain_decrease_db, 0); - RTC_DCHECK_GT(max_gain_increase_db, 0); - float target_gain_difference_db = target_gain_db - last_gain_db; - if (!gain_increase_allowed) { - target_gain_difference_db = std::min(target_gain_difference_db, 0.0f); - } - return rtc::SafeClamp(target_gain_difference_db, -max_gain_decrease_db, - max_gain_increase_db); -} - -// Copies the (multichannel) audio samples from `src` into `dst`. -void CopyAudio(AudioFrameView src, - std::vector>& dst) { - RTC_DCHECK_GT(src.num_channels(), 0); - RTC_DCHECK_GT(src.samples_per_channel(), 0); - RTC_DCHECK_EQ(dst.size(), src.num_channels()); - for (int c = 0; c < src.num_channels(); ++c) { - rtc::ArrayView channel_view = src.channel(c); - RTC_DCHECK_EQ(channel_view.size(), src.samples_per_channel()); - RTC_DCHECK_EQ(dst[c].size(), src.samples_per_channel()); - std::copy(channel_view.begin(), channel_view.end(), dst[c].begin()); - } -} - -} // namespace - -AdaptiveDigitalGainApplier::AdaptiveDigitalGainApplier( - ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels) - : apm_data_dumper_(apm_data_dumper), - gain_applier_( - /*hard_clip_samples=*/false, - /*initial_gain_factor=*/DbToRatio(config.initial_gain_db)), - config_(config), - max_gain_change_db_per_10ms_(config_.max_gain_change_db_per_second * - kFrameDurationMs / 1000.0f), - calls_since_last_gain_log_(0), - frames_to_gain_increase_allowed_( - config_.adjacent_speech_frames_threshold), - last_gain_db_(config_.initial_gain_db) { - RTC_DCHECK_GT(max_gain_change_db_per_10ms_, 0.0f); - RTC_DCHECK_GE(frames_to_gain_increase_allowed_, 1); - RTC_DCHECK_GE(config_.max_output_noise_level_dbfs, -90.0f); - RTC_DCHECK_LE(config_.max_output_noise_level_dbfs, 0.0f); - 
Initialize(sample_rate_hz, num_channels); -} - -void AdaptiveDigitalGainApplier::Initialize(int sample_rate_hz, - int num_channels) { - if (!config_.dry_run) { - return; - } - RTC_DCHECK_GT(sample_rate_hz, 0); - RTC_DCHECK_GT(num_channels, 0); - int frame_size = rtc::CheckedDivExact(sample_rate_hz, 100); - bool sample_rate_changed = - dry_run_frame_.empty() || // Handle initialization. - dry_run_frame_[0].size() != static_cast(frame_size); - bool num_channels_changed = - dry_run_channels_.size() != static_cast(num_channels); - if (sample_rate_changed || num_channels_changed) { - // Resize the multichannel audio vector and update the channel pointers. - dry_run_frame_.resize(num_channels); - dry_run_channels_.resize(num_channels); - for (int c = 0; c < num_channels; ++c) { - dry_run_frame_[c].resize(frame_size); - dry_run_channels_[c] = dry_run_frame_[c].data(); - } - } -} - -void AdaptiveDigitalGainApplier::Process(const FrameInfo& info, - AudioFrameView frame) { - RTC_DCHECK_GE(info.speech_level_dbfs, -150.0f); - RTC_DCHECK_GE(frame.num_channels(), 1); - RTC_DCHECK( - frame.samples_per_channel() == 80 || frame.samples_per_channel() == 160 || - frame.samples_per_channel() == 320 || frame.samples_per_channel() == 480) - << "`frame` does not look like a 10 ms frame for an APM supported sample " - "rate"; - - // Compute the input level used to select the desired gain. - RTC_DCHECK_GT(info.headroom_db, 0.0f); - const float input_level_dbfs = info.speech_level_dbfs + info.headroom_db; - - const float target_gain_db = LimitGainByLowConfidence( - LimitGainByNoise(ComputeGainDb(input_level_dbfs, config_), - info.noise_rms_dbfs, config_.max_output_noise_level_dbfs, - *apm_data_dumper_), - last_gain_db_, info.limiter_envelope_dbfs, info.speech_level_reliable); - - // Forbid increasing the gain until enough adjacent speech frames are - // observed. 
- bool first_confident_speech_frame = false; - if (info.speech_probability < kVadConfidenceThreshold) { - frames_to_gain_increase_allowed_ = config_.adjacent_speech_frames_threshold; - } else if (frames_to_gain_increase_allowed_ > 0) { - frames_to_gain_increase_allowed_--; - first_confident_speech_frame = frames_to_gain_increase_allowed_ == 0; - } - apm_data_dumper_->DumpRaw( - "agc2_adaptive_gain_applier_frames_to_gain_increase_allowed", - frames_to_gain_increase_allowed_); - - const bool gain_increase_allowed = frames_to_gain_increase_allowed_ == 0; - - float max_gain_increase_db = max_gain_change_db_per_10ms_; - if (first_confident_speech_frame) { - // No gain increase happened while waiting for a long enough speech - // sequence. Therefore, temporarily allow a faster gain increase. - RTC_DCHECK(gain_increase_allowed); - max_gain_increase_db *= config_.adjacent_speech_frames_threshold; - } - - const float gain_change_this_frame_db = ComputeGainChangeThisFrameDb( - target_gain_db, last_gain_db_, gain_increase_allowed, - /*max_gain_decrease_db=*/max_gain_change_db_per_10ms_, - max_gain_increase_db); - - apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_want_to_change_by_db", - target_gain_db - last_gain_db_); - apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_will_change_by_db", - gain_change_this_frame_db); - - // Optimization: avoid calling math functions if gain does not - // change. - if (gain_change_this_frame_db != 0.f) { - gain_applier_.SetGainFactor( - DbToRatio(last_gain_db_ + gain_change_this_frame_db)); - } - - // Modify `frame` only if not running in "dry run" mode. - if (!config_.dry_run) { - gain_applier_.ApplyGain(frame); - } else { - // Copy `frame` so that `ApplyGain()` is called (on a copy). 
- CopyAudio(frame, dry_run_frame_); - RTC_DCHECK(!dry_run_channels_.empty()); - AudioFrameView frame_copy(&dry_run_channels_[0], - frame.num_channels(), - frame.samples_per_channel()); - gain_applier_.ApplyGain(frame_copy); - } - - // Remember that the gain has changed for the next iteration. - last_gain_db_ = last_gain_db_ + gain_change_this_frame_db; - apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_applied_gain_db", - last_gain_db_); - - // Log every 10 seconds. - calls_since_last_gain_log_++; - if (calls_since_last_gain_log_ == 1000) { - calls_since_last_gain_log_ = 0; - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedSpeechLevel", - -info.speech_level_dbfs, 0, 100, 101); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", - -info.noise_rms_dbfs, 0, 100, 101); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.Agc2.Headroom", info.headroom_db, kHeadroomHistogramMin, - kHeadroomHistogramMax, - kHeadroomHistogramMax - kHeadroomHistogramMin + 1); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.DigitalGainApplied", - last_gain_db_, 0, kGainDbHistogramMax, - kGainDbHistogramMax + 1); - RTC_LOG(LS_INFO) << "AGC2 adaptive digital" - << " | speech_dbfs: " << info.speech_level_dbfs - << " | noise_dbfs: " << info.noise_rms_dbfs - << " | headroom_db: " << info.headroom_db - << " | gain_db: " << last_gain_db_; - } -} - -} // namespace webrtc diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_applier.h b/modules/audio_processing/agc2/adaptive_digital_gain_applier.h deleted file mode 100644 index dc84c1e238..0000000000 --- a/modules/audio_processing/agc2/adaptive_digital_gain_applier.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ -#define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ - -#include - -#include "modules/audio_processing/agc2/gain_applier.h" -#include "modules/audio_processing/include/audio_frame_view.h" -#include "modules/audio_processing/include/audio_processing.h" - -namespace webrtc { - -class ApmDataDumper; - -// TODO(bugs.webrtc.org/7494): Split into `GainAdaptor` and `GainApplier`. -// Selects the target digital gain, decides when and how quickly to adapt to the -// target and applies the current gain to 10 ms frames. -class AdaptiveDigitalGainApplier { - public: - // Information about a frame to process. - struct FrameInfo { - float speech_probability; // Probability of speech in the [0, 1] range. - float speech_level_dbfs; // Estimated speech level (dBFS). - bool speech_level_reliable; // True with reliable speech level estimation. - float noise_rms_dbfs; // Estimated noise RMS level (dBFS). - float headroom_db; // Headroom (dB). - float limiter_envelope_dbfs; // Envelope level from the limiter (dBFS). - }; - - AdaptiveDigitalGainApplier( - ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels); - AdaptiveDigitalGainApplier(const AdaptiveDigitalGainApplier&) = delete; - AdaptiveDigitalGainApplier& operator=(const AdaptiveDigitalGainApplier&) = - delete; - - void Initialize(int sample_rate_hz, int num_channels); - - // Analyzes `info`, updates the digital gain and applies it to a 10 ms - // `frame`. Supports any sample rate supported by APM. 
- void Process(const FrameInfo& info, AudioFrameView frame); - - private: - ApmDataDumper* const apm_data_dumper_; - GainApplier gain_applier_; - - const AudioProcessing::Config::GainController2::AdaptiveDigital config_; - const float max_gain_change_db_per_10ms_; - - int calls_since_last_gain_log_; - int frames_to_gain_increase_allowed_; - float last_gain_db_; - - std::vector> dry_run_frame_; - std::vector dry_run_channels_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc b/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc index c579ced55d..e8edab602c 100644 --- a/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc +++ b/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc @@ -13,30 +13,91 @@ #include #include "common_audio/include/audio_util.h" -#include "modules/audio_processing/agc2/vad_wrapper.h" +#include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "system_wrappers/include/metrics.h" namespace webrtc { namespace { -// Peak and RMS audio levels in dBFS. -struct AudioLevels { - float peak_dbfs; - float rms_dbfs; -}; +using AdaptiveDigitalConfig = + AudioProcessing::Config::GainController2::AdaptiveDigital; -// Computes the audio levels for the first channel in `frame`. -AudioLevels ComputeAudioLevels(AudioFrameView frame) { - float peak = 0.0f; - float rms = 0.0f; - for (const auto& x : frame.channel(0)) { - peak = std::max(std::fabs(x), peak); - rms += x * x; +constexpr int kHeadroomHistogramMin = 0; +constexpr int kHeadroomHistogramMax = 50; +constexpr int kGainDbHistogramMax = 30; + +// Computes the gain for `input_level_dbfs` to reach `-config.headroom_db`. +// Clamps the gain in [0, `config.max_gain_db`]. 
`config.headroom_db` is a +// safety margin to allow transient peaks to exceed the target peak level +// without clipping. +float ComputeGainDb(float input_level_dbfs, + const AdaptiveDigitalConfig& config) { + // If the level is very low, apply the maximum gain. + if (input_level_dbfs < -(config.headroom_db + config.max_gain_db)) { + return config.max_gain_db; } - return {FloatS16ToDbfs(peak), - FloatS16ToDbfs(std::sqrt(rms / frame.samples_per_channel()))}; + // We expect to end up here most of the time: the level is below + // -headroom, but we can boost it to -headroom. + if (input_level_dbfs < -config.headroom_db) { + return -config.headroom_db - input_level_dbfs; + } + // The level is too high and we can't boost. + RTC_DCHECK_GE(input_level_dbfs, -config.headroom_db); + return 0.0f; +} + +// Returns `target_gain_db` if applying such a gain to `input_noise_level_dbfs` +// does not exceed `max_output_noise_level_dbfs`. Otherwise lowers and returns +// `target_gain_db` so that the output noise level equals +// `max_output_noise_level_dbfs`. +float LimitGainByNoise(float target_gain_db, + float input_noise_level_dbfs, + float max_output_noise_level_dbfs, + ApmDataDumper& apm_data_dumper) { + const float max_allowed_gain_db = + max_output_noise_level_dbfs - input_noise_level_dbfs; + apm_data_dumper.DumpRaw("agc2_adaptive_gain_applier_max_allowed_gain_db", + max_allowed_gain_db); + return std::min(target_gain_db, std::max(max_allowed_gain_db, 0.0f)); +} + +float LimitGainByLowConfidence(float target_gain_db, + float last_gain_db, + float limiter_audio_level_dbfs, + bool estimate_is_confident) { + if (estimate_is_confident || + limiter_audio_level_dbfs <= kLimiterThresholdForAgcGainDbfs) { + return target_gain_db; + } + const float limiter_level_dbfs_before_gain = + limiter_audio_level_dbfs - last_gain_db; + + // Compute a new gain so that `limiter_level_dbfs_before_gain` + + // `new_target_gain_db` is not great than `kLimiterThresholdForAgcGainDbfs`. 
+ const float new_target_gain_db = std::max( + kLimiterThresholdForAgcGainDbfs - limiter_level_dbfs_before_gain, 0.0f); + return std::min(new_target_gain_db, target_gain_db); +} + +// Computes how the gain should change during this frame. +// Return the gain difference in db to 'last_gain_db'. +float ComputeGainChangeThisFrameDb(float target_gain_db, + float last_gain_db, + bool gain_increase_allowed, + float max_gain_decrease_db, + float max_gain_increase_db) { + RTC_DCHECK_GT(max_gain_decrease_db, 0); + RTC_DCHECK_GT(max_gain_increase_db, 0); + float target_gain_difference_db = target_gain_db - last_gain_db; + if (!gain_increase_allowed) { + target_gain_difference_db = std::min(target_gain_difference_db, 0.0f); + } + return rtc::SafeClamp(target_gain_difference_db, -max_gain_decrease_db, + max_gain_increase_db); } } // namespace @@ -44,72 +105,112 @@ AudioLevels ComputeAudioLevels(AudioFrameView frame) { AdaptiveDigitalGainController::AdaptiveDigitalGainController( ApmDataDumper* apm_data_dumper, const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels) - : speech_level_estimator_(apm_data_dumper, config), - gain_controller_(apm_data_dumper, config, sample_rate_hz, num_channels), - apm_data_dumper_(apm_data_dumper), - noise_level_estimator_(CreateNoiseFloorEstimator(apm_data_dumper)), - saturation_protector_( - CreateSaturationProtector(kSaturationProtectorInitialHeadroomDb, - config.adjacent_speech_frames_threshold, - apm_data_dumper)) { - RTC_DCHECK(apm_data_dumper); - RTC_DCHECK(noise_level_estimator_); - RTC_DCHECK(saturation_protector_); + int adjacent_speech_frames_threshold) + : apm_data_dumper_(apm_data_dumper), + gain_applier_( + /*hard_clip_samples=*/false, + /*initial_gain_factor=*/DbToRatio(config.initial_gain_db)), + config_(config), + adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold), + max_gain_change_db_per_10ms_(config_.max_gain_change_db_per_second * + kFrameDurationMs / 
1000.0f), + calls_since_last_gain_log_(0), + frames_to_gain_increase_allowed_(adjacent_speech_frames_threshold), + last_gain_db_(config_.initial_gain_db) { + RTC_DCHECK_GT(max_gain_change_db_per_10ms_, 0.0f); + RTC_DCHECK_GE(frames_to_gain_increase_allowed_, 1); + RTC_DCHECK_GE(config_.max_output_noise_level_dbfs, -90.0f); + RTC_DCHECK_LE(config_.max_output_noise_level_dbfs, 0.0f); } -AdaptiveDigitalGainController::~AdaptiveDigitalGainController() = default; +void AdaptiveDigitalGainController::Process(const FrameInfo& info, + AudioFrameView frame) { + RTC_DCHECK_GE(info.speech_level_dbfs, -150.0f); + RTC_DCHECK_GE(frame.num_channels(), 1); + RTC_DCHECK( + frame.samples_per_channel() == 80 || frame.samples_per_channel() == 160 || + frame.samples_per_channel() == 320 || frame.samples_per_channel() == 480) + << "`frame` does not look like a 10 ms frame for an APM supported sample " + "rate"; -void AdaptiveDigitalGainController::Initialize(int sample_rate_hz, - int num_channels) { - gain_controller_.Initialize(sample_rate_hz, num_channels); -} + // Compute the input level used to select the desired gain. + RTC_DCHECK_GT(info.headroom_db, 0.0f); + const float input_level_dbfs = info.speech_level_dbfs + info.headroom_db; -void AdaptiveDigitalGainController::Process(AudioFrameView frame, - float speech_probability, - float limiter_envelope) { - AudioLevels levels = ComputeAudioLevels(frame); - apm_data_dumper_->DumpRaw("agc2_input_rms_dbfs", levels.rms_dbfs); - apm_data_dumper_->DumpRaw("agc2_input_peak_dbfs", levels.peak_dbfs); + const float target_gain_db = LimitGainByLowConfidence( + LimitGainByNoise(ComputeGainDb(input_level_dbfs, config_), + info.noise_rms_dbfs, config_.max_output_noise_level_dbfs, + *apm_data_dumper_), + last_gain_db_, info.limiter_envelope_dbfs, info.speech_level_reliable); - AdaptiveDigitalGainApplier::FrameInfo info; + // Forbid increasing the gain until enough adjacent speech frames are + // observed. 
+ bool first_confident_speech_frame = false; + if (info.speech_probability < kVadConfidenceThreshold) { + frames_to_gain_increase_allowed_ = adjacent_speech_frames_threshold_; + } else if (frames_to_gain_increase_allowed_ > 0) { + frames_to_gain_increase_allowed_--; + first_confident_speech_frame = frames_to_gain_increase_allowed_ == 0; + } + apm_data_dumper_->DumpRaw( + "agc2_adaptive_gain_applier_frames_to_gain_increase_allowed", + frames_to_gain_increase_allowed_); - info.speech_probability = speech_probability; + const bool gain_increase_allowed = frames_to_gain_increase_allowed_ == 0; - speech_level_estimator_.Update(levels.rms_dbfs, levels.peak_dbfs, - info.speech_probability); - info.speech_level_dbfs = speech_level_estimator_.level_dbfs(); - info.speech_level_reliable = speech_level_estimator_.IsConfident(); - apm_data_dumper_->DumpRaw("agc2_speech_level_dbfs", info.speech_level_dbfs); - apm_data_dumper_->DumpRaw("agc2_speech_level_reliable", - info.speech_level_reliable); + float max_gain_increase_db = max_gain_change_db_per_10ms_; + if (first_confident_speech_frame) { + // No gain increase happened while waiting for a long enough speech + // sequence. Therefore, temporarily allow a faster gain increase. 
+ RTC_DCHECK(gain_increase_allowed); + max_gain_increase_db *= adjacent_speech_frames_threshold_; + } - info.noise_rms_dbfs = noise_level_estimator_->Analyze(frame); - apm_data_dumper_->DumpRaw("agc2_noise_rms_dbfs", info.noise_rms_dbfs); + const float gain_change_this_frame_db = ComputeGainChangeThisFrameDb( + target_gain_db, last_gain_db_, gain_increase_allowed, + /*max_gain_decrease_db=*/max_gain_change_db_per_10ms_, + max_gain_increase_db); - saturation_protector_->Analyze(info.speech_probability, levels.peak_dbfs, - info.speech_level_dbfs); - info.headroom_db = saturation_protector_->HeadroomDb(); - apm_data_dumper_->DumpRaw("agc2_headroom_db", info.headroom_db); + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_want_to_change_by_db", + target_gain_db - last_gain_db_); + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_will_change_by_db", + gain_change_this_frame_db); - info.limiter_envelope_dbfs = FloatS16ToDbfs(limiter_envelope); - apm_data_dumper_->DumpRaw("agc2_limiter_envelope_dbfs", - info.limiter_envelope_dbfs); + // Optimization: avoid calling math functions if gain does not + // change. + if (gain_change_this_frame_db != 0.f) { + gain_applier_.SetGainFactor( + DbToRatio(last_gain_db_ + gain_change_this_frame_db)); + } - gain_controller_.Process(info, frame); -} + gain_applier_.ApplyGain(frame); -void AdaptiveDigitalGainController::HandleInputGainChange() { - speech_level_estimator_.Reset(); - saturation_protector_->Reset(); -} + // Remember that the gain has changed for the next iteration. + last_gain_db_ = last_gain_db_ + gain_change_this_frame_db; + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_applied_gain_db", + last_gain_db_); -absl::optional -AdaptiveDigitalGainController::GetSpeechLevelDbfsIfConfident() const { - return speech_level_estimator_.IsConfident() - ? absl::optional(speech_level_estimator_.level_dbfs()) - : absl::nullopt; + // Log every 10 seconds. 
+ calls_since_last_gain_log_++; + if (calls_since_last_gain_log_ == 1000) { + calls_since_last_gain_log_ = 0; + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedSpeechLevel", + -info.speech_level_dbfs, 0, 100, 101); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", + -info.noise_rms_dbfs, 0, 100, 101); + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.Agc2.Headroom", info.headroom_db, kHeadroomHistogramMin, + kHeadroomHistogramMax, + kHeadroomHistogramMax - kHeadroomHistogramMin + 1); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.DigitalGainApplied", + last_gain_db_, 0, kGainDbHistogramMax, + kGainDbHistogramMax + 1); + RTC_LOG(LS_INFO) << "AGC2 adaptive digital" + << " | speech_dbfs: " << info.speech_level_dbfs + << " | noise_dbfs: " << info.noise_rms_dbfs + << " | headroom_db: " << info.headroom_db + << " | gain_db: " << last_gain_db_; + } } } // namespace webrtc diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_controller.h b/modules/audio_processing/agc2/adaptive_digital_gain_controller.h index af7f0238ec..01335e79db 100644 --- a/modules/audio_processing/agc2/adaptive_digital_gain_controller.h +++ b/modules/audio_processing/agc2/adaptive_digital_gain_controller.h @@ -11,56 +11,54 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_CONTROLLER_H_ #define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_CONTROLLER_H_ -#include +#include -#include "absl/types/optional.h" -#include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator.h" -#include "modules/audio_processing/agc2/noise_level_estimator.h" -#include "modules/audio_processing/agc2/saturation_protector.h" +#include "modules/audio_processing/agc2/gain_applier.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/include/audio_processing.h" namespace webrtc { + class ApmDataDumper; -// Gain controller that adapts and applies 
a variable digital gain to meet the -// target level, which is determined by the given configuration. +// Selects the target digital gain, decides when and how quickly to adapt to the +// target and applies the current gain to 10 ms frames. class AdaptiveDigitalGainController { public: + // Information about a frame to process. + struct FrameInfo { + float speech_probability; // Probability of speech in the [0, 1] range. + float speech_level_dbfs; // Estimated speech level (dBFS). + bool speech_level_reliable; // True with reliable speech level estimation. + float noise_rms_dbfs; // Estimated noise RMS level (dBFS). + float headroom_db; // Headroom (dB). + // TODO(bugs.webrtc.org/7494): Remove `limiter_envelope_dbfs`. + float limiter_envelope_dbfs; // Envelope level from the limiter (dBFS). + }; + AdaptiveDigitalGainController( ApmDataDumper* apm_data_dumper, const AudioProcessing::Config::GainController2::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels); + int adjacent_speech_frames_threshold); AdaptiveDigitalGainController(const AdaptiveDigitalGainController&) = delete; AdaptiveDigitalGainController& operator=( const AdaptiveDigitalGainController&) = delete; - ~AdaptiveDigitalGainController(); - // Detects and handles changes of sample rate and or number of channels. - void Initialize(int sample_rate_hz, int num_channels); - - // Analyzes `frame`, adapts the current digital gain and applies it to - // `frame`. - // TODO(bugs.webrtc.org/7494): Remove `limiter_envelope`. - void Process(AudioFrameView frame, - float speech_probability, - float limiter_envelope); - - // Handles a gain change applied to the input signal (e.g., analog gain). - void HandleInputGainChange(); - - // Returns the most recent speech level (dBFs) if the estimator is confident. - // Otherwise returns absl::nullopt. - absl::optional GetSpeechLevelDbfsIfConfident() const; + // Analyzes `info`, updates the digital gain and applies it to a 10 ms + // `frame`. 
Supports any sample rate supported by APM. + void Process(const FrameInfo& info, AudioFrameView frame); private: - AdaptiveModeLevelEstimator speech_level_estimator_; - AdaptiveDigitalGainApplier gain_controller_; ApmDataDumper* const apm_data_dumper_; - std::unique_ptr noise_level_estimator_; - std::unique_ptr saturation_protector_; + GainApplier gain_applier_; + + const AudioProcessing::Config::GainController2::AdaptiveDigital config_; + const int adjacent_speech_frames_threshold_; + const float max_gain_change_db_per_10ms_; + + int calls_since_last_gain_log_; + int frames_to_gain_increase_allowed_; + float last_gain_db_; }; } // namespace webrtc diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc b/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc similarity index 61% rename from modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc rename to modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc index ea7485f512..e95cbb5067 100644 --- a/modules/audio_processing/agc2/adaptive_digital_gain_applier_unittest.cc +++ b/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" +#include "modules/audio_processing/agc2/adaptive_digital_gain_controller.h" #include #include @@ -48,28 +48,26 @@ using AdaptiveDigitalConfig = constexpr AdaptiveDigitalConfig kDefaultConfig{}; -// Helper to create initialized `AdaptiveDigitalGainApplier` objects. +// Helper to create initialized `AdaptiveDigitalGainController` objects. 
struct GainApplierHelper { GainApplierHelper(const AdaptiveDigitalConfig& config, - int sample_rate_hz, - int num_channels) + int adjacent_speech_frames_threshold) : apm_data_dumper(0), - gain_applier( - std::make_unique(&apm_data_dumper, - config, - sample_rate_hz, - num_channels)) {} + gain_applier(std::make_unique( + &apm_data_dumper, + config, + adjacent_speech_frames_threshold)) {} ApmDataDumper apm_data_dumper; - std::unique_ptr gain_applier; + std::unique_ptr gain_applier; }; // Returns a `FrameInfo` sample to simulate noiseless speech detected with // maximum probability and with level, headroom and limiter envelope chosen // so that the resulting gain equals the default initial adaptive digital gain // i.e., no gain adaptation is expected. -AdaptiveDigitalGainApplier::FrameInfo GetFrameInfoToNotAdapt( +AdaptiveDigitalGainController::FrameInfo GetFrameInfoToNotAdapt( const AdaptiveDigitalConfig& config) { - AdaptiveDigitalGainApplier::FrameInfo info; + AdaptiveDigitalGainController::FrameInfo info; info.speech_probability = kMaxSpeechProbability; info.speech_level_dbfs = -config.initial_gain_db - config.headroom_db; info.speech_level_reliable = true; @@ -79,8 +77,9 @@ AdaptiveDigitalGainApplier::FrameInfo GetFrameInfoToNotAdapt( return info; } -TEST(GainController2AdaptiveGainApplier, GainApplierShouldNotCrash) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kStereo); +TEST(GainController2AdaptiveDigitalGainControllerTest, + GainApplierShouldNotCrash) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); // Make one call with reasonable audio level values and settings. VectorFloatFrame fake_audio(kStereo, kFrameLen10ms48kHz, 10000.0f); helper.gain_applier->Process(GetFrameInfoToNotAdapt(kDefaultConfig), @@ -88,15 +87,15 @@ TEST(GainController2AdaptiveGainApplier, GainApplierShouldNotCrash) { } // Checks that the maximum allowed gain is applied. 
-TEST(GainController2AdaptiveGainApplier, MaxGainApplied) { +TEST(GainController2AdaptiveDigitalGainControllerTest, MaxGainApplied) { constexpr int kNumFramesToAdapt = static_cast(kDefaultConfig.max_gain_db / GetMaxGainChangePerFrameDb( kDefaultConfig.max_gain_change_db_per_second)) + kNumExtraFrames; - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/8000, kMono); - AdaptiveDigitalGainApplier::FrameInfo info = + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = -60.0f; float applied_gain; @@ -109,8 +108,8 @@ TEST(GainController2AdaptiveGainApplier, MaxGainApplied) { EXPECT_NEAR(applied_gain_db, kDefaultConfig.max_gain_db, 0.1f); } -TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/8000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, GainDoesNotChangeFast) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; constexpr float kMaxGainChangeDbPerFrame = @@ -125,7 +124,7 @@ TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { for (int i = 0; i < kNumFramesToAdapt; ++i) { SCOPED_TRACE(i); VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; helper.gain_applier->Process(info, fake_audio.float_frame_view()); @@ -139,7 +138,7 @@ TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { for (int i = 0; i < kNumFramesToAdapt; ++i) { SCOPED_TRACE(i); VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); 
info.speech_level_dbfs = 0.f; helper.gain_applier->Process(info, fake_audio.float_frame_view()); @@ -150,13 +149,13 @@ TEST(GainController2AdaptiveGainApplier, GainDoesNotChangeFast) { } } -TEST(GainController2AdaptiveGainApplier, GainIsRampedInAFrame) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, GainIsRampedInAFrame) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; VectorFloatFrame fake_audio(kMono, kFrameLen10ms48kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; helper.gain_applier->Process(info, fake_audio.float_frame_view()); @@ -176,8 +175,8 @@ TEST(GainController2AdaptiveGainApplier, GainIsRampedInAFrame) { EXPECT_LE(maximal_difference, max_change_per_sample); } -TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, NoiseLimitsGain) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; constexpr int num_initial_frames = @@ -190,7 +189,7 @@ TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) { for (int i = 0; i < num_initial_frames + num_frames; ++i) { VectorFloatFrame fake_audio(kMono, kFrameLen10ms48kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; info.noise_rms_dbfs = kWithNoiseDbfs; @@ -207,19 +206,20 @@ TEST(GainController2AdaptiveGainApplier, NoiseLimitsGain) { } } -TEST(GainController2GainApplier, CanHandlePositiveSpeechLevels) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, 
kStereo); +TEST(GainController2AdaptiveDigitalGainControllerTest, + CanHandlePositiveSpeechLevels) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); // Make one call with positive audio level values and settings. VectorFloatFrame fake_audio(kStereo, kFrameLen10ms48kHz, 10000.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = 5.0f; helper.gain_applier->Process(info, fake_audio.float_frame_view()); } -TEST(GainController2GainApplier, AudioLevelLimitsGain) { - GainApplierHelper helper(kDefaultConfig, /*sample_rate_hz=*/48000, kMono); +TEST(GainController2AdaptiveDigitalGainControllerTest, AudioLevelLimitsGain) { + GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); constexpr float initial_level_dbfs = -25.0f; constexpr int num_initial_frames = @@ -232,7 +232,7 @@ TEST(GainController2GainApplier, AudioLevelLimitsGain) { for (int i = 0; i < num_initial_frames + num_frames; ++i) { VectorFloatFrame fake_audio(kMono, kFrameLen10ms48kHz, 1.0f); - AdaptiveDigitalGainApplier::FrameInfo info = + AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; info.limiter_envelope_dbfs = 1.0f; @@ -250,23 +250,23 @@ TEST(GainController2GainApplier, AudioLevelLimitsGain) { } } -class AdaptiveDigitalGainApplierTest : public ::testing::TestWithParam { +class AdaptiveDigitalGainControllerParametrizedTest + : public ::testing::TestWithParam { protected: int adjacent_speech_frames_threshold() const { return GetParam(); } }; -TEST_P(AdaptiveDigitalGainApplierTest, +TEST_P(AdaptiveDigitalGainControllerParametrizedTest, DoNotIncreaseGainWithTooFewSpeechFrames) { - AdaptiveDigitalConfig config; - config.adjacent_speech_frames_threshold = adjacent_speech_frames_threshold(); - GainApplierHelper helper(config, /*sample_rate_hz=*/48000, kMono); + 
GainApplierHelper helper(kDefaultConfig, adjacent_speech_frames_threshold()); // Lower the speech level so that the target gain will be increased. - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); + AdaptiveDigitalGainController::FrameInfo info = + GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs -= 12.0f; float prev_gain = 0.0f; - for (int i = 0; i < config.adjacent_speech_frames_threshold; ++i) { + for (int i = 0; i < adjacent_speech_frames_threshold(); ++i) { SCOPED_TRACE(i); VectorFloatFrame audio(kMono, kFrameLen10ms48kHz, 1.0f); helper.gain_applier->Process(info, audio.float_frame_view()); @@ -278,17 +278,17 @@ TEST_P(AdaptiveDigitalGainApplierTest, } } -TEST_P(AdaptiveDigitalGainApplierTest, IncreaseGainWithEnoughSpeechFrames) { - AdaptiveDigitalConfig config; - config.adjacent_speech_frames_threshold = adjacent_speech_frames_threshold(); - GainApplierHelper helper(config, /*sample_rate_hz=*/48000, kMono); +TEST_P(AdaptiveDigitalGainControllerParametrizedTest, + IncreaseGainWithEnoughSpeechFrames) { + GainApplierHelper helper(kDefaultConfig, adjacent_speech_frames_threshold()); // Lower the speech level so that the target gain will be increased. 
- AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); + AdaptiveDigitalGainController::FrameInfo info = + GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs -= 12.0f; float prev_gain = 0.0f; - for (int i = 0; i < config.adjacent_speech_frames_threshold; ++i) { + for (int i = 0; i < adjacent_speech_frames_threshold(); ++i) { SCOPED_TRACE(i); VectorFloatFrame audio(kMono, kFrameLen10ms48kHz, 1.0f); helper.gain_applier->Process(info, audio.float_frame_view()); @@ -303,71 +303,10 @@ TEST_P(AdaptiveDigitalGainApplierTest, IncreaseGainWithEnoughSpeechFrames) { EXPECT_GT(audio.float_frame_view().channel(0)[0], prev_gain); } -INSTANTIATE_TEST_SUITE_P(GainController2, - AdaptiveDigitalGainApplierTest, - ::testing::Values(1, 7, 31)); - -// Checks that the input is never modified when running in dry run mode. -TEST(GainController2GainApplier, DryRunDoesNotChangeInput) { - AdaptiveDigitalConfig config; - config.dry_run = true; - GainApplierHelper helper(config, /*sample_rate_hz=*/8000, kMono); - - // Simulate an input signal with log speech level. - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); - info.speech_level_dbfs = -60.0f; - const int num_frames_to_adapt = - static_cast( - config.max_gain_db / - GetMaxGainChangePerFrameDb(config.max_gain_change_db_per_second)) + - kNumExtraFrames; - constexpr float kPcmSamples = 123.456f; - // Run the gain applier and check that the PCM samples are not modified. - for (int i = 0; i < num_frames_to_adapt; ++i) { - SCOPED_TRACE(i); - VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio.float_frame_view().channel(0)[0], kPcmSamples); - } -} - -// Checks that no sample is modified before and after the sample rate changes. 
-TEST(GainController2GainApplier, DryRunHandlesSampleRateChange) { - AdaptiveDigitalConfig config; - config.dry_run = true; - GainApplierHelper helper(config, /*sample_rate_hz=*/8000, kMono); - - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); - info.speech_level_dbfs = -60.0f; - constexpr float kPcmSamples = 123.456f; - VectorFloatFrame fake_audio_8k(kMono, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio_8k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_8k.float_frame_view().channel(0)[0], kPcmSamples); - helper.gain_applier->Initialize(/*sample_rate_hz=*/48000, kMono); - VectorFloatFrame fake_audio_48k(kMono, kFrameLen10ms48kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio_48k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(0)[0], kPcmSamples); -} - -// Checks that no sample is modified before and after the number of channels -// changes. -TEST(GainController2GainApplier, DryRunHandlesNumChannelsChange) { - AdaptiveDigitalConfig config; - config.dry_run = true; - GainApplierHelper helper(config, /*sample_rate_hz=*/8000, kMono); - - AdaptiveDigitalGainApplier::FrameInfo info = GetFrameInfoToNotAdapt(config); - info.speech_level_dbfs = -60.0f; - constexpr float kPcmSamples = 123.456f; - VectorFloatFrame fake_audio_8k(kMono, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Process(info, fake_audio_8k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_8k.float_frame_view().channel(0)[0], kPcmSamples); - VectorFloatFrame fake_audio_48k(kStereo, kFrameLen10ms8kHz, kPcmSamples); - helper.gain_applier->Initialize(/*sample_rate_hz=*/8000, kStereo); - helper.gain_applier->Process(info, fake_audio_48k.float_frame_view()); - EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(0)[0], kPcmSamples); - EXPECT_FLOAT_EQ(fake_audio_48k.float_frame_view().channel(1)[0], kPcmSamples); -} +INSTANTIATE_TEST_SUITE_P( + GainController2, + 
AdaptiveDigitalGainControllerParametrizedTest, + ::testing::Values(1, 7, 31, kAdjacentSpeechFramesThreshold)); } // namespace } // namespace webrtc diff --git a/modules/audio_processing/agc2/agc2_common.h b/modules/audio_processing/agc2/agc2_common.h index 4af85527b8..4597bcd015 100644 --- a/modules/audio_processing/agc2/agc2_common.h +++ b/modules/audio_processing/agc2/agc2_common.h @@ -29,11 +29,16 @@ constexpr int kMaximalNumberOfSamplesPerChannel = 480; // At what limiter levels should we start decreasing the adaptive digital gain. constexpr float kLimiterThresholdForAgcGainDbfs = -1.0f; -// This is the threshold for speech. Speech frames are used for updating the -// speech level, measuring the amount of speech, and decide when to allow target -// gain changes. +// Number of milliseconds to wait to periodically reset the VAD. +constexpr int kVadResetPeriodMs = 1500; + +// Speech probability threshold to detect speech activity. constexpr float kVadConfidenceThreshold = 0.95f; +// Minimum number of adjacent speech frames having a sufficiently high speech +// probability to reliably detect speech activity. +constexpr int kAdjacentSpeechFramesThreshold = 12; + // Number of milliseconds of speech frames to observe to make the estimator // confident. constexpr float kLevelEstimatorTimeToConfidenceMs = 400; diff --git a/modules/audio_processing/agc2/clipping_predictor.cc b/modules/audio_processing/agc2/clipping_predictor.cc index 2bf5fb2e32..fd759c63e8 100644 --- a/modules/audio_processing/agc2/clipping_predictor.cc +++ b/modules/audio_processing/agc2/clipping_predictor.cc @@ -25,30 +25,31 @@ namespace { constexpr int kClippingPredictorMaxGainChange = 15; -// Estimates the new level from the gain error; a copy of the function -// `LevelFromGainError` in agc_manager_direct.cc. 
-int LevelFromGainError(int gain_error, - int level, - int min_mic_level, - int max_mic_level) { - RTC_DCHECK_GE(level, 0); - RTC_DCHECK_LE(level, max_mic_level); - if (gain_error == 0) { - return level; +// Returns an input volume in the [`min_input_volume`, `max_input_volume`] range +// that reduces `gain_error_db`, which is a gain error estimated when +// `input_volume` was applied, according to a fixed gain map. +int ComputeVolumeUpdate(int gain_error_db, + int input_volume, + int min_input_volume, + int max_input_volume) { + RTC_DCHECK_GE(input_volume, 0); + RTC_DCHECK_LE(input_volume, max_input_volume); + if (gain_error_db == 0) { + return input_volume; } - int new_level = level; - if (gain_error > 0) { - while (kGainMap[new_level] - kGainMap[level] < gain_error && - new_level < max_mic_level) { - ++new_level; + int new_volume = input_volume; + if (gain_error_db > 0) { + while (kGainMap[new_volume] - kGainMap[input_volume] < gain_error_db && + new_volume < max_input_volume) { + ++new_volume; } } else { - while (kGainMap[new_level] - kGainMap[level] > gain_error && - new_level > min_mic_level) { - --new_level; + while (kGainMap[new_volume] - kGainMap[input_volume] > gain_error_db && + new_volume > min_input_volume) { + --new_volume; } } - return new_level; + return new_volume; } float ComputeCrestFactor(const ClippingPredictorLevelBuffer::Level& level) { @@ -298,8 +299,8 @@ class ClippingPeakPredictor : public ClippingPredictor { rtc::SafeClamp(-static_cast(std::ceil(estimate_db.value())), -kClippingPredictorMaxGainChange, 0); step = - std::max(level - LevelFromGainError(estimated_gain_change, level, - min_mic_level, max_mic_level), + std::max(level - ComputeVolumeUpdate(estimated_gain_change, level, + min_mic_level, max_mic_level), default_step); } const int new_level = @@ -354,10 +355,10 @@ std::unique_ptr CreateClippingPredictor( const AudioProcessing::Config::GainController1::AnalogGainController:: ClippingPredictor& config) { if (!config.enabled) { - 
RTC_LOG(LS_INFO) << "[agc] Clipping prediction disabled."; + RTC_LOG(LS_INFO) << "[AGC2] Clipping prediction disabled."; return nullptr; } - RTC_LOG(LS_INFO) << "[agc] Clipping prediction enabled."; + RTC_LOG(LS_INFO) << "[AGC2] Clipping prediction enabled."; using ClippingPredictorMode = AudioProcessing::Config::GainController1:: AnalogGainController::ClippingPredictor::Mode; switch (config.mode) { diff --git a/modules/audio_processing/agc2/gain_map_internal.h b/modules/audio_processing/agc2/gain_map_internal.h index 75e421899f..7c669fc9dd 100644 --- a/modules/audio_processing/agc2/gain_map_internal.h +++ b/modules/audio_processing/agc2/gain_map_internal.h @@ -13,8 +13,14 @@ namespace webrtc { -static const int kGainMapSize = 256; -// Uses parameters: si = 2, sf = 0.25, D = 8/256 +static constexpr int kGainMapSize = 256; +// Maps input volumes, which are values in the [0, 255] range, to gains in dB. +// The values below are generated with numpy as follows: +// SI = 2 # Initial slope. +// SF = 0.25 # Final slope. +// D = 8/256 # Quantization factor. +// x = np.linspace(0, 255, 256) # Input volumes. +// y = (SF * x + (SI - SF) * (1 - np.exp(-D*x)) / D - 56).round() static const int kGainMap[kGainMapSize] = { -56, -54, -52, -50, -48, -47, -45, -43, -42, -40, -38, -37, -35, -34, -33, -31, -30, -29, -27, -26, -25, -24, -23, -22, -20, -19, -18, -17, -16, -15, diff --git a/modules/audio_processing/agc2/input_volume_controller.cc b/modules/audio_processing/agc2/input_volume_controller.cc new file mode 100644 index 0000000000..bcc650fb3e --- /dev/null +++ b/modules/audio_processing/agc2/input_volume_controller.cc @@ -0,0 +1,580 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/agc2/input_volume_controller.h" + +#include +#include + +#include "api/array_view.h" +#include "modules/audio_processing/agc2/gain_map_internal.h" +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" +#include "modules/audio_processing/include/audio_frame_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +namespace { + +// Amount of error we tolerate in the microphone input volume (presumably due to +// OS quantization) before we assume the user has manually adjusted the volume. +constexpr int kVolumeQuantizationSlack = 25; + +constexpr int kMaxInputVolume = 255; +static_assert(kGainMapSize > kMaxInputVolume, "gain map too small"); + +// Maximum absolute RMS error. +constexpr int KMaxAbsRmsErrorDbfs = 15; +static_assert(KMaxAbsRmsErrorDbfs > 0, ""); + +using Agc1ClippingPredictorConfig = AudioProcessing::Config::GainController1:: + AnalogGainController::ClippingPredictor; + +// TODO(webrtc:7494): Hardcode clipping predictor parameters and remove this +// function after no longer needed in the ctor. +Agc1ClippingPredictorConfig CreateClippingPredictorConfig(bool enabled) { + Agc1ClippingPredictorConfig config; + config.enabled = enabled; + + return config; +} + +// Returns an input volume in the [`min_input_volume`, `kMaxInputVolume`] range +// that reduces `gain_error_db`, which is a gain error estimated when +// `input_volume` was applied, according to a fixed gain map. 
+int ComputeVolumeUpdate(int gain_error_db, + int input_volume, + int min_input_volume) { + RTC_DCHECK_GE(input_volume, 0); + RTC_DCHECK_LE(input_volume, kMaxInputVolume); + if (gain_error_db == 0) { + return input_volume; + } + + int new_volume = input_volume; + if (gain_error_db > 0) { + while (kGainMap[new_volume] - kGainMap[input_volume] < gain_error_db && + new_volume < kMaxInputVolume) { + ++new_volume; + } + } else { + while (kGainMap[new_volume] - kGainMap[input_volume] > gain_error_db && + new_volume > min_input_volume) { + --new_volume; + } + } + return new_volume; +} + +// Returns the proportion of samples in the buffer which are at full-scale +// (and presumably clipped). +float ComputeClippedRatio(const float* const* audio, + size_t num_channels, + size_t samples_per_channel) { + RTC_DCHECK_GT(samples_per_channel, 0); + int num_clipped = 0; + for (size_t ch = 0; ch < num_channels; ++ch) { + int num_clipped_in_ch = 0; + for (size_t i = 0; i < samples_per_channel; ++i) { + RTC_DCHECK(audio[ch]); + if (audio[ch][i] >= 32767.0f || audio[ch][i] <= -32768.0f) { + ++num_clipped_in_ch; + } + } + num_clipped = std::max(num_clipped, num_clipped_in_ch); + } + return static_cast(num_clipped) / (samples_per_channel); +} + +void LogClippingMetrics(int clipping_rate) { + RTC_LOG(LS_INFO) << "[AGC2] Input clipping rate: " << clipping_rate << "%"; + RTC_HISTOGRAM_COUNTS_LINEAR(/*name=*/"WebRTC.Audio.Agc.InputClippingRate", + /*sample=*/clipping_rate, /*min=*/0, /*max=*/100, + /*bucket_count=*/50); +} + +// Compares `speech_level_dbfs` to the [`target_range_min_dbfs`, +// `target_range_max_dbfs`] range and returns the error to be compensated via +// input volume adjustment. Returns a positive value when the level is below +// the range, a negative value when the level is above the range, zero +// otherwise. 
+int GetSpeechLevelRmsErrorDb(float speech_level_dbfs, + int target_range_min_dbfs, + int target_range_max_dbfs) { + constexpr float kMinSpeechLevelDbfs = -90.0f; + constexpr float kMaxSpeechLevelDbfs = 30.0f; + RTC_DCHECK_GE(speech_level_dbfs, kMinSpeechLevelDbfs); + RTC_DCHECK_LE(speech_level_dbfs, kMaxSpeechLevelDbfs); + speech_level_dbfs = rtc::SafeClamp( + speech_level_dbfs, kMinSpeechLevelDbfs, kMaxSpeechLevelDbfs); + + int rms_error_db = 0; + if (speech_level_dbfs > target_range_max_dbfs) { + rms_error_db = std::round(target_range_max_dbfs - speech_level_dbfs); + } else if (speech_level_dbfs < target_range_min_dbfs) { + rms_error_db = std::round(target_range_min_dbfs - speech_level_dbfs); + } + + return rms_error_db; +} + +} // namespace + +MonoInputVolumeController::MonoInputVolumeController( + int min_input_volume_after_clipping, + int min_input_volume, + int update_input_volume_wait_frames, + float speech_probability_threshold, + float speech_ratio_threshold) + : min_input_volume_(min_input_volume), + min_input_volume_after_clipping_(min_input_volume_after_clipping), + max_input_volume_(kMaxInputVolume), + update_input_volume_wait_frames_( + std::max(update_input_volume_wait_frames, 1)), + speech_probability_threshold_(speech_probability_threshold), + speech_ratio_threshold_(speech_ratio_threshold) { + RTC_DCHECK_GE(min_input_volume_, 0); + RTC_DCHECK_LE(min_input_volume_, 255); + RTC_DCHECK_GE(min_input_volume_after_clipping_, 0); + RTC_DCHECK_LE(min_input_volume_after_clipping_, 255); + RTC_DCHECK_GE(max_input_volume_, 0); + RTC_DCHECK_LE(max_input_volume_, 255); + RTC_DCHECK_GE(update_input_volume_wait_frames_, 0); + RTC_DCHECK_GE(speech_probability_threshold_, 0.0f); + RTC_DCHECK_LE(speech_probability_threshold_, 1.0f); + RTC_DCHECK_GE(speech_ratio_threshold_, 0.0f); + RTC_DCHECK_LE(speech_ratio_threshold_, 1.0f); +} + +MonoInputVolumeController::~MonoInputVolumeController() = default; + +void MonoInputVolumeController::Initialize() { + 
max_input_volume_ = kMaxInputVolume;
+  capture_output_used_ = true;
+  check_volume_on_next_process_ = true;
+  frames_since_update_input_volume_ = 0;
+  speech_frames_since_update_input_volume_ = 0;
+  is_first_frame_ = true;
+}
+
+// A speech segment is considered active if at least
+// `update_input_volume_wait_frames_` new frames have been processed since the
+// previous update and the ratio of non-silence frames (i.e., frames with a
+// `speech_probability` higher than `speech_probability_threshold_`) is at least
+// `speech_ratio_threshold_`.
+void MonoInputVolumeController::Process(absl::optional rms_error_db,
+                                        float speech_probability) {
+  if (check_volume_on_next_process_) {
+    check_volume_on_next_process_ = false;
+    // We have to wait until the first process call to check the volume,
+    // because Chromium doesn't guarantee it to be valid any earlier.
+    CheckVolumeAndReset();
+  }
+
+  // Count frames with a high speech probability as speech.
+  if (speech_probability >= speech_probability_threshold_) {
+    ++speech_frames_since_update_input_volume_;
+  }
+
+  // Reset the counters and maybe update the input volume.
+  if (++frames_since_update_input_volume_ >= update_input_volume_wait_frames_) {
+    const float speech_ratio =
+        static_cast(speech_frames_since_update_input_volume_) /
+        static_cast(update_input_volume_wait_frames_);
+
+    // Always reset the counters regardless of whether the volume changes or
+    // not.
+    frames_since_update_input_volume_ = 0;
+    speech_frames_since_update_input_volume_ = 0;
+
+    // Update the input volume if allowed.
+    if (!is_first_frame_ && speech_ratio >= speech_ratio_threshold_ &&
+        rms_error_db.has_value()) {
+      UpdateInputVolume(*rms_error_db);
+    }
+  }
+
+  is_first_frame_ = false;
+}
+
+void MonoInputVolumeController::HandleClipping(int clipped_level_step) {
+  RTC_DCHECK_GT(clipped_level_step, 0);
+  // Always decrease the maximum input volume, even if the current input volume
+  // is below threshold.
+ SetMaxLevel(std::max(min_input_volume_after_clipping_, + max_input_volume_ - clipped_level_step)); + if (log_to_histograms_) { + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.AgcClippingAdjustmentAllowed", + last_recommended_input_volume_ - clipped_level_step >= + min_input_volume_after_clipping_); + } + if (last_recommended_input_volume_ > min_input_volume_after_clipping_) { + // Don't try to adjust the input volume if we're already below the limit. As + // a consequence, if the user has brought the input volume above the limit, + // we will still not react until the postproc updates the input volume. + SetInputVolume( + std::max(min_input_volume_after_clipping_, + last_recommended_input_volume_ - clipped_level_step)); + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; + is_first_frame_ = false; + } +} + +void MonoInputVolumeController::SetInputVolume(int new_volume) { + int applied_input_volume = recommended_input_volume_; + if (applied_input_volume == 0) { + RTC_DLOG(LS_INFO) + << "[AGC2] The applied input volume is zero, taking no action."; + return; + } + if (applied_input_volume < 0 || applied_input_volume > kMaxInputVolume) { + RTC_LOG(LS_ERROR) << "[AGC2] Invalid value for the applied input volume: " + << applied_input_volume; + return; + } + + // Detect manual input volume adjustments by checking if the + // `applied_input_volume` is outside of the `[last_recommended_input_volume_ - + // kVolumeQuantizationSlack, last_recommended_input_volume_ + + // kVolumeQuantizationSlack]` range. + if (applied_input_volume > + last_recommended_input_volume_ + kVolumeQuantizationSlack || + applied_input_volume < + last_recommended_input_volume_ - kVolumeQuantizationSlack) { + RTC_DLOG(LS_INFO) + << "[AGC2] The input volume was manually adjusted. 
Updating " + "stored input volume from " + << last_recommended_input_volume_ << " to " << applied_input_volume; + last_recommended_input_volume_ = applied_input_volume; + // Always allow the user to increase the volume. + if (last_recommended_input_volume_ > max_input_volume_) { + SetMaxLevel(last_recommended_input_volume_); + } + // Take no action in this case, since we can't be sure when the volume + // was manually adjusted. + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; + is_first_frame_ = false; + return; + } + + new_volume = std::min(new_volume, max_input_volume_); + if (new_volume == last_recommended_input_volume_) { + return; + } + + recommended_input_volume_ = new_volume; + RTC_DLOG(LS_INFO) << "[AGC2] Applied input volume: " << applied_input_volume + << " | last recommended input volume: " + << last_recommended_input_volume_ + << " | newly recommended input volume: " << new_volume; + last_recommended_input_volume_ = new_volume; +} + +void MonoInputVolumeController::SetMaxLevel(int input_volume) { + RTC_DCHECK_GE(input_volume, min_input_volume_after_clipping_); + max_input_volume_ = input_volume; + RTC_DLOG(LS_INFO) << "[AGC2] Maximum input volume updated: " + << max_input_volume_; +} + +void MonoInputVolumeController::HandleCaptureOutputUsedChange( + bool capture_output_used) { + if (capture_output_used_ == capture_output_used) { + return; + } + capture_output_used_ = capture_output_used; + + if (capture_output_used) { + // When we start using the output, we should reset things to be safe. + check_volume_on_next_process_ = true; + } +} + +int MonoInputVolumeController::CheckVolumeAndReset() { + int input_volume = recommended_input_volume_; + // Reasons for taking action at startup: + // 1) A person starting a call is expected to be heard. + // 2) Independent of interpretation of `input_volume` == 0 we should raise it + // so the AGC can do its job properly. 
+ if (input_volume == 0 && !startup_) { + RTC_DLOG(LS_INFO) + << "[AGC2] The applied input volume is zero, taking no action."; + return 0; + } + if (input_volume < 0 || input_volume > kMaxInputVolume) { + RTC_LOG(LS_ERROR) << "[AGC2] Invalid value for the applied input volume: " + << input_volume; + return -1; + } + RTC_DLOG(LS_INFO) << "[AGC2] Initial input volume: " << input_volume; + + if (input_volume < min_input_volume_) { + input_volume = min_input_volume_; + RTC_DLOG(LS_INFO) + << "[AGC2] The initial input volume is too low, raising to " + << input_volume; + recommended_input_volume_ = input_volume; + } + + last_recommended_input_volume_ = input_volume; + startup_ = false; + frames_since_update_input_volume_ = 0; + speech_frames_since_update_input_volume_ = 0; + is_first_frame_ = true; + + return 0; +} + +void MonoInputVolumeController::UpdateInputVolume(int rms_error_db) { + RTC_DLOG(LS_INFO) << "[AGC2] RMS error: " << rms_error_db << " dB"; + // Prevent too large microphone input volume changes by clamping the RMS + // error. 
+ rms_error_db = + rtc::SafeClamp(rms_error_db, -KMaxAbsRmsErrorDbfs, KMaxAbsRmsErrorDbfs); + if (rms_error_db == 0) { + return; + } + SetInputVolume(ComputeVolumeUpdate( + rms_error_db, last_recommended_input_volume_, min_input_volume_)); +} + +InputVolumeController::InputVolumeController(int num_capture_channels, + const Config& config) + : num_capture_channels_(num_capture_channels), + min_input_volume_(config.min_input_volume), + capture_output_used_(true), + clipped_level_step_(config.clipped_level_step), + clipped_ratio_threshold_(config.clipped_ratio_threshold), + clipped_wait_frames_(config.clipped_wait_frames), + clipping_predictor_(CreateClippingPredictor( + num_capture_channels, + CreateClippingPredictorConfig(config.enable_clipping_predictor))), + use_clipping_predictor_step_( + !!clipping_predictor_ && + CreateClippingPredictorConfig(config.enable_clipping_predictor) + .use_predicted_step), + frames_since_clipped_(config.clipped_wait_frames), + clipping_rate_log_counter_(0), + clipping_rate_log_(0.0f), + target_range_max_dbfs_(config.target_range_max_dbfs), + target_range_min_dbfs_(config.target_range_min_dbfs), + channel_controllers_(num_capture_channels) { + RTC_LOG(LS_INFO) + << "[AGC2] Input volume controller enabled. 
Minimum input volume: " + << min_input_volume_; + + for (auto& controller : channel_controllers_) { + controller = std::make_unique( + config.clipped_level_min, min_input_volume_, + config.update_input_volume_wait_frames, + config.speech_probability_threshold, config.speech_ratio_threshold); + } + + RTC_DCHECK(!channel_controllers_.empty()); + RTC_DCHECK_GT(clipped_level_step_, 0); + RTC_DCHECK_LE(clipped_level_step_, 255); + RTC_DCHECK_GT(clipped_ratio_threshold_, 0.0f); + RTC_DCHECK_LT(clipped_ratio_threshold_, 1.0f); + RTC_DCHECK_GT(clipped_wait_frames_, 0); + channel_controllers_[0]->ActivateLogging(); +} + +InputVolumeController::~InputVolumeController() {} + +void InputVolumeController::Initialize() { + for (auto& controller : channel_controllers_) { + controller->Initialize(); + } + capture_output_used_ = true; + + AggregateChannelLevels(); + clipping_rate_log_ = 0.0f; + clipping_rate_log_counter_ = 0; + + applied_input_volume_ = absl::nullopt; +} + +void InputVolumeController::AnalyzeInputAudio(int applied_input_volume, + const AudioBuffer& audio_buffer) { + RTC_DCHECK_GE(applied_input_volume, 0); + RTC_DCHECK_LE(applied_input_volume, 255); + + SetAppliedInputVolume(applied_input_volume); + + RTC_DCHECK_EQ(audio_buffer.num_channels(), channel_controllers_.size()); + const float* const* audio = audio_buffer.channels_const(); + size_t samples_per_channel = audio_buffer.num_frames(); + RTC_DCHECK(audio); + + AggregateChannelLevels(); + if (!capture_output_used_) { + return; + } + + if (!!clipping_predictor_) { + AudioFrameView frame = AudioFrameView( + audio, num_capture_channels_, static_cast(samples_per_channel)); + clipping_predictor_->Analyze(frame); + } + + // Check for clipped samples. We do this in the preprocessing phase in order + // to catch clipped echo as well. 
+ // + // If we find a sufficiently clipped frame, drop the current microphone + // input volume and enforce a new maximum input volume, dropped the same + // amount from the current maximum. This harsh treatment is an effort to avoid + // repeated clipped echo events. + float clipped_ratio = + ComputeClippedRatio(audio, num_capture_channels_, samples_per_channel); + clipping_rate_log_ = std::max(clipped_ratio, clipping_rate_log_); + clipping_rate_log_counter_++; + constexpr int kNumFramesIn30Seconds = 3000; + if (clipping_rate_log_counter_ == kNumFramesIn30Seconds) { + LogClippingMetrics(std::round(100.0f * clipping_rate_log_)); + clipping_rate_log_ = 0.0f; + clipping_rate_log_counter_ = 0; + } + + if (frames_since_clipped_ < clipped_wait_frames_) { + ++frames_since_clipped_; + return; + } + + const bool clipping_detected = clipped_ratio > clipped_ratio_threshold_; + bool clipping_predicted = false; + int predicted_step = 0; + if (!!clipping_predictor_) { + for (int channel = 0; channel < num_capture_channels_; ++channel) { + const auto step = clipping_predictor_->EstimateClippedLevelStep( + channel, recommended_input_volume_, clipped_level_step_, + channel_controllers_[channel]->min_input_volume_after_clipping(), + kMaxInputVolume); + if (step.has_value()) { + predicted_step = std::max(predicted_step, step.value()); + clipping_predicted = true; + } + } + } + + if (clipping_detected) { + RTC_DLOG(LS_INFO) << "[AGC2] Clipping detected (ratio: " << clipped_ratio + << ")"; + } + + int step = clipped_level_step_; + if (clipping_predicted) { + predicted_step = std::max(predicted_step, clipped_level_step_); + RTC_DLOG(LS_INFO) << "[AGC2] Clipping predicted (volume down step: " + << predicted_step << ")"; + if (use_clipping_predictor_step_) { + step = predicted_step; + } + } + + if (clipping_detected || + (clipping_predicted && use_clipping_predictor_step_)) { + for (auto& state_ch : channel_controllers_) { + state_ch->HandleClipping(step); + } + frames_since_clipped_ = 
0; + if (!!clipping_predictor_) { + clipping_predictor_->Reset(); + } + } + + AggregateChannelLevels(); +} + +absl::optional InputVolumeController::RecommendInputVolume( + float speech_probability, + absl::optional speech_level_dbfs) { + // Only process if applied input volume is set. + if (!applied_input_volume_.has_value()) { + RTC_LOG(LS_ERROR) << "[AGC2] Applied input volume not set."; + return absl::nullopt; + } + + AggregateChannelLevels(); + const int volume_after_clipping_handling = recommended_input_volume_; + + if (!capture_output_used_) { + return applied_input_volume_; + } + + absl::optional rms_error_db; + if (speech_level_dbfs.has_value()) { + // Compute the error for all frames (both speech and non-speech frames). + rms_error_db = GetSpeechLevelRmsErrorDb( + *speech_level_dbfs, target_range_min_dbfs_, target_range_max_dbfs_); + } + + for (auto& controller : channel_controllers_) { + controller->Process(rms_error_db, speech_probability); + } + + AggregateChannelLevels(); + if (volume_after_clipping_handling != recommended_input_volume_) { + // The recommended input volume was adjusted in order to match the target + // level. 
+ UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget( + recommended_input_volume_); + } + + applied_input_volume_ = absl::nullopt; + return recommended_input_volume(); +} + +void InputVolumeController::HandleCaptureOutputUsedChange( + bool capture_output_used) { + for (auto& controller : channel_controllers_) { + controller->HandleCaptureOutputUsedChange(capture_output_used); + } + + capture_output_used_ = capture_output_used; +} + +void InputVolumeController::SetAppliedInputVolume(int input_volume) { + applied_input_volume_ = input_volume; + + for (auto& controller : channel_controllers_) { + controller->set_stream_analog_level(input_volume); + } + + AggregateChannelLevels(); +} + +void InputVolumeController::AggregateChannelLevels() { + int new_recommended_input_volume = + channel_controllers_[0]->recommended_analog_level(); + channel_controlling_gain_ = 0; + for (size_t ch = 1; ch < channel_controllers_.size(); ++ch) { + int input_volume = channel_controllers_[ch]->recommended_analog_level(); + if (input_volume < new_recommended_input_volume) { + new_recommended_input_volume = input_volume; + channel_controlling_gain_ = static_cast(ch); + } + } + + // Enforce the minimum input volume when a recommendation is made. + if (applied_input_volume_.has_value() && *applied_input_volume_ > 0) { + new_recommended_input_volume = + std::max(new_recommended_input_volume, min_input_volume_); + } + + recommended_input_volume_ = new_recommended_input_volume; +} + +} // namespace webrtc diff --git a/modules/audio_processing/agc2/input_volume_controller.h b/modules/audio_processing/agc2/input_volume_controller.h new file mode 100644 index 0000000000..40eae8879e --- /dev/null +++ b/modules/audio_processing/agc2/input_volume_controller.h @@ -0,0 +1,282 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_CONTROLLER_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_CONTROLLER_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "modules/audio_processing/agc2/clipping_predictor.h" +#include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/gtest_prod_util.h" + +namespace webrtc { + +class MonoInputVolumeController; + +// The input volume controller recommends what volume to use, handles volume +// changes and clipping detection and prediction. In particular, it handles +// changes triggered by the user (e.g., volume set to zero by a HW mute button). +// This class is not thread-safe. +// TODO(bugs.webrtc.org/7494): Use applied/recommended input volume naming +// convention. +class InputVolumeController final { + public: + // Config for the constructor. + struct Config { + // Minimum input volume that can be recommended. Not enforced when the + // applied input volume is zero outside startup. + int min_input_volume = 20; + // Lowest input volume level that will be applied in response to clipping. + int clipped_level_min = 70; + // Amount input volume level is lowered with every clipping event. Limited + // to (0, 255]. + int clipped_level_step = 15; + // Proportion of clipped samples required to declare a clipping event. + // Limited to (0.0f, 1.0f). + float clipped_ratio_threshold = 0.1f; + // Time in frames to wait after a clipping event before checking again. + // Limited to values higher than 0. + int clipped_wait_frames = 300; + // Enables clipping prediction functionality. + bool enable_clipping_predictor = false; + // Speech level target range (dBFS). 
If the speech level is in the range + // [`target_range_min_dbfs`, `target_range_max_dbfs`], no input volume + // adjustments are done based on the speech level. For speech levels below + // and above the range, the targets `target_range_min_dbfs` and + // `target_range_max_dbfs` are used, respectively. + int target_range_max_dbfs = -30; + int target_range_min_dbfs = -50; + // Number of wait frames between the recommended input volume updates. + int update_input_volume_wait_frames = 100; + // Speech probability threshold: speech probabilities below the threshold + // are considered silence. Limited to [0.0f, 1.0f]. + float speech_probability_threshold = 0.7f; + // Minimum speech frame ratio for volume updates to be allowed. Limited to + // [0.0f, 1.0f]. + float speech_ratio_threshold = 0.9f; + }; + + // Ctor. `num_capture_channels` specifies the number of channels for the audio + // passed to `AnalyzePreProcess()` and `Process()`. Clamps + // `config.startup_min_level` in the [12, 255] range. + InputVolumeController(int num_capture_channels, const Config& config); + + ~InputVolumeController(); + InputVolumeController(const InputVolumeController&) = delete; + InputVolumeController& operator=(const InputVolumeController&) = delete; + + // TODO(webrtc:7494): Integrate initialization into ctor and remove. + void Initialize(); + + // Analyzes `audio_buffer` before `RecommendInputVolume()` is called so tha + // the analysis can be performed before digital processing operations take + // place (e.g., echo cancellation). The analysis consists of input clipping + // detection and prediction (if enabled). + void AnalyzeInputAudio(int applied_input_volume, + const AudioBuffer& audio_buffer); + + // Adjusts the recommended input volume upwards/downwards based on the result + // of `AnalyzeInputAudio()` and on `speech_level_dbfs` (if specified). Must + // be called after `AnalyzeInputAudio()`. 
The value of `speech_probability` + // is expected to be in the range [0, 1] and `speech_level_dbfs` in the range + // [-90, 30] and both should be estimated after echo cancellation and noise + // suppression are applied. Returns a non-empty input volume recommendation if + // available. If `capture_output_used_` is true, returns the applied input + // volume. + absl::optional RecommendInputVolume( + float speech_probability, + absl::optional speech_level_dbfs); + + // Stores whether the capture output will be used or not. Call when the + // capture stream output has been flagged to be used/not-used. If unused, the + // controller disregards all incoming audio. + void HandleCaptureOutputUsedChange(bool capture_output_used); + + // Returns true if clipping prediction is enabled. + // TODO(bugs.webrtc.org/7494): Deprecate this method. + bool clipping_predictor_enabled() const { return !!clipping_predictor_; } + + // Returns true if clipping prediction is used to adjust the input volume. + // TODO(bugs.webrtc.org/7494): Deprecate this method. + bool use_clipping_predictor_step() const { + return use_clipping_predictor_step_; + } + + // Only use for testing: Use `RecommendInputVolume()` elsewhere. + // Returns the value of a member variable, needed for testing + // `AnalyzeInputAudio()`. + int recommended_input_volume() const { return recommended_input_volume_; } + + // Only use for testing. 
+ bool capture_output_used() const { return capture_output_used_; } + + private: + friend class InputVolumeControllerTestHelper; + + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, MinInputVolumeDefault); + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, MinInputVolumeDisabled); + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, + MinInputVolumeOutOfRangeAbove); + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, + MinInputVolumeOutOfRangeBelow); + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerTest, MinInputVolumeEnabled50); + FRIEND_TEST_ALL_PREFIXES(InputVolumeControllerParametrizedTest, + ClippingParametersVerified); + + // Sets the applied input volume and resets the recommended input volume. + void SetAppliedInputVolume(int level); + + void AggregateChannelLevels(); + + const int num_capture_channels_; + + // Minimum input volume that can be recommended. + const int min_input_volume_; + + // TODO(bugs.webrtc.org/7494): Once + // `AudioProcessingImpl::recommended_stream_analog_level()` becomes a trivial + // getter, leave uninitialized. + // Recommended input volume. After `SetAppliedInputVolume()` is called it + // holds holds the observed input volume. Possibly updated by + // `AnalyzePreProcess()` and `Process()`; after these calls, holds the + // recommended input volume. + int recommended_input_volume_ = 0; + // Applied input volume. After `SetAppliedInputVolume()` is called it holds + // the current applied volume. + absl::optional applied_input_volume_; + + bool capture_output_used_; + + // Clipping detection and prediction. + const int clipped_level_step_; + const float clipped_ratio_threshold_; + const int clipped_wait_frames_; + const std::unique_ptr clipping_predictor_; + const bool use_clipping_predictor_step_; + int frames_since_clipped_; + int clipping_rate_log_counter_; + float clipping_rate_log_; + + // Target range minimum and maximum. 
If the seech level is in the range + // [`target_range_min_dbfs`, `target_range_max_dbfs`], no volume adjustments + // take place. Instead, the digital gain controller is assumed to adapt to + // compensate for the speech level RMS error. + const int target_range_max_dbfs_; + const int target_range_min_dbfs_; + + // Channel controllers updating the gain upwards/downwards. + std::vector> channel_controllers_; + int channel_controlling_gain_ = 0; +}; + +// TODO(bugs.webrtc.org/7494): Use applied/recommended input volume naming +// convention. +class MonoInputVolumeController { + public: + MonoInputVolumeController(int min_input_volume_after_clipping, + int min_input_volume, + int update_input_volume_wait_frames, + float speech_probability_threshold, + float speech_ratio_threshold); + ~MonoInputVolumeController(); + MonoInputVolumeController(const MonoInputVolumeController&) = delete; + MonoInputVolumeController& operator=(const MonoInputVolumeController&) = + delete; + + void Initialize(); + void HandleCaptureOutputUsedChange(bool capture_output_used); + + // Sets the current input volume. + void set_stream_analog_level(int input_volume) { + recommended_input_volume_ = input_volume; + } + + // Lowers the recommended input volume in response to clipping based on the + // suggested reduction `clipped_level_step`. Must be called after + // `set_stream_analog_level()`. + void HandleClipping(int clipped_level_step); + + // TODO(bugs.webrtc.org/7494): Rename, audio not passed to the method anymore. + // Adjusts the recommended input volume upwards/downwards depending on the + // result of `HandleClipping()` and on `rms_error_dbfs`. Updates are only + // allowed for active speech segments and when `rms_error_dbfs` is not empty. + // Must be called after `HandleClipping()`. + void Process(absl::optional rms_error_dbfs, float speech_probability); + + // Returns the recommended input volume. Must be called after `Process()`. 
+ int recommended_analog_level() const { return recommended_input_volume_; } + + void ActivateLogging() { log_to_histograms_ = true; } + + int min_input_volume_after_clipping() const { + return min_input_volume_after_clipping_; + } + + // Only used for testing. + int min_input_volume() const { return min_input_volume_; } + + private: + // Sets a new input volume, after first checking that it hasn't been updated + // by the user, in which case no action is taken. + void SetInputVolume(int new_volume); + + // Sets the maximum input volume that the input volume controller is allowed + // to apply. The volume must be at least `kClippedLevelMin`. + void SetMaxLevel(int level); + + int CheckVolumeAndReset(); + + // Updates the recommended input volume. If the volume slider needs to be + // moved, we check first if the user has adjusted it, in which case we take no + // action and cache the updated level. + void UpdateInputVolume(int rms_error_dbfs); + + const int min_input_volume_; + const int min_input_volume_after_clipping_; + int max_input_volume_; + + int last_recommended_input_volume_ = 0; + + bool capture_output_used_ = true; + bool check_volume_on_next_process_ = true; + bool startup_ = true; + + // TODO(bugs.webrtc.org/7494): Create a separate member for the applied + // input volume. + // Recommended input volume. After `set_stream_analog_level()` is + // called, it holds the observed applied input volume. Possibly updated by + // `HandleClipping()` and `Process()`; after these calls, holds the + // recommended input volume. + int recommended_input_volume_ = 0; + + bool log_to_histograms_ = false; + + // Counters for frames and speech frames since the last update in the + // recommended input volume. 
+ const int update_input_volume_wait_frames_; + int frames_since_update_input_volume_ = 0; + int speech_frames_since_update_input_volume_ = 0; + bool is_first_frame_ = true; + + // Speech probability threshold for a frame to be considered speech (instead + // of silence). Limited to [0.0f, 1.0f]. + const float speech_probability_threshold_; + // Minimum ratio of speech frames. Limited to [0.0f, 1.0f]. + const float speech_ratio_threshold_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_CONTROLLER_H_ diff --git a/modules/audio_processing/agc2/input_volume_controller_unittest.cc b/modules/audio_processing/agc2/input_volume_controller_unittest.cc new file mode 100644 index 0000000000..638cfd1df3 --- /dev/null +++ b/modules/audio_processing/agc2/input_volume_controller_unittest.cc @@ -0,0 +1,1787 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/agc2/input_volume_controller.h" + +#include +#include +#include +#include +#include + +#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/metrics.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +using ::testing::_; +using ::testing::AtLeast; +using ::testing::DoAll; +using ::testing::Return; +using ::testing::SetArgPointee; + +namespace webrtc { +namespace { + +constexpr int kSampleRateHz = 32000; +constexpr int kNumChannels = 1; +constexpr int kInitialInputVolume = 128; +constexpr int kClippedMin = 165; // Arbitrary, but different from the default. +constexpr float kAboveClippedThreshold = 0.2f; +constexpr int kMinMicLevel = 20; +constexpr int kClippedLevelStep = 15; +constexpr float kClippedRatioThreshold = 0.1f; +constexpr int kClippedWaitFrames = 300; +constexpr float kHighSpeechProbability = 0.7f; +constexpr float kLowSpeechProbability = 0.1f; +constexpr float kSpeechLevel = -25.0f; +constexpr float kSpeechProbabilityThreshold = 0.5f; +constexpr float kSpeechRatioThreshold = 0.8f; + +constexpr float kMinSample = std::numeric_limits::min(); +constexpr float kMaxSample = std::numeric_limits::max(); + +using ClippingPredictorConfig = AudioProcessing::Config::GainController1:: + AnalogGainController::ClippingPredictor; + +using InputVolumeControllerConfig = InputVolumeController::Config; + +constexpr ClippingPredictorConfig kDefaultClippingPredictorConfig{}; + +std::unique_ptr CreateInputVolumeController( + int clipped_level_step = kClippedLevelStep, + float clipped_ratio_threshold = kClippedRatioThreshold, + int clipped_wait_frames = kClippedWaitFrames, + bool enable_clipping_predictor = false, + int update_input_volume_wait_frames = 0) { + InputVolumeControllerConfig config{ + .min_input_volume = kMinMicLevel, + .clipped_level_min = kClippedMin, + .clipped_level_step = 
clipped_level_step, + .clipped_ratio_threshold = clipped_ratio_threshold, + .clipped_wait_frames = clipped_wait_frames, + .enable_clipping_predictor = enable_clipping_predictor, + .target_range_max_dbfs = -18, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = update_input_volume_wait_frames, + .speech_probability_threshold = kSpeechProbabilityThreshold, + .speech_ratio_threshold = kSpeechRatioThreshold, + }; + + return std::make_unique(/*num_capture_channels=*/1, + config); +} + +// (Over)writes `samples_value` for the samples in `audio_buffer`. +// When `clipped_ratio`, a value in [0, 1], is greater than 0, the corresponding +// fraction of the frame is set to a full scale value to simulate clipping. +void WriteAudioBufferSamples(float samples_value, + float clipped_ratio, + AudioBuffer& audio_buffer) { + RTC_DCHECK_GE(samples_value, kMinSample); + RTC_DCHECK_LE(samples_value, kMaxSample); + RTC_DCHECK_GE(clipped_ratio, 0.0f); + RTC_DCHECK_LE(clipped_ratio, 1.0f); + int num_channels = audio_buffer.num_channels(); + int num_samples = audio_buffer.num_frames(); + int num_clipping_samples = clipped_ratio * num_samples; + for (int ch = 0; ch < num_channels; ++ch) { + int i = 0; + for (; i < num_clipping_samples; ++i) { + audio_buffer.channels()[ch][i] = 32767.0f; + } + for (; i < num_samples; ++i) { + audio_buffer.channels()[ch][i] = samples_value; + } + } +} + +// (Over)writes samples in `audio_buffer`. Alternates samples `samples_value` +// and zero. 
+void WriteAlternatingAudioBufferSamples(float samples_value, + AudioBuffer& audio_buffer) { + RTC_DCHECK_GE(samples_value, kMinSample); + RTC_DCHECK_LE(samples_value, kMaxSample); + const int num_channels = audio_buffer.num_channels(); + const int num_frames = audio_buffer.num_frames(); + for (int ch = 0; ch < num_channels; ++ch) { + for (int i = 0; i < num_frames; i += 2) { + audio_buffer.channels()[ch][i] = samples_value; + audio_buffer.channels()[ch][i + 1] = 0.0f; + } + } +} + +// Reads a given number of 10 ms chunks from a PCM file and feeds them to +// `InputVolumeController`. +class SpeechSamplesReader { + private: + // Recording properties. + static constexpr int kPcmSampleRateHz = 16000; + static constexpr int kPcmNumChannels = 1; + static constexpr int kPcmBytesPerSamples = sizeof(int16_t); + + public: + SpeechSamplesReader() + : is_(test::ResourcePath("audio_processing/agc/agc_audio", "pcm"), + std::ios::binary | std::ios::ate), + audio_buffer_(kPcmSampleRateHz, + kPcmNumChannels, + kPcmSampleRateHz, + kPcmNumChannels, + kPcmSampleRateHz, + kPcmNumChannels), + buffer_(audio_buffer_.num_frames()), + buffer_num_bytes_(buffer_.size() * kPcmBytesPerSamples) { + RTC_CHECK(is_); + } + + // Reads `num_frames` 10 ms frames from the beginning of the PCM file, applies + // `gain_db` and feeds the frames into `controller` by calling + // `AnalyzeInputAudio()` and `RecommendInputVolume()` for each frame. Reads + // the number of 10 ms frames available in the PCM file if `num_frames` is too + // large - i.e., does not loop. `speech_probability` and `speech_level_dbfs` + // are passed to `RecommendInputVolume()`. + int Feed(int num_frames, + int applied_input_volume, + int gain_db, + float speech_probability, + absl::optional speech_level_dbfs, + InputVolumeController& controller) { + RTC_DCHECK(controller.capture_output_used()); + + float gain = std::pow(10.0f, gain_db / 20.0f); // From dB to linear gain. 
+ is_.seekg(0, is_.beg); // Start from the beginning of the PCM file. + + // Read and feed frames. + for (int i = 0; i < num_frames; ++i) { + is_.read(reinterpret_cast(buffer_.data()), buffer_num_bytes_); + if (is_.gcount() < buffer_num_bytes_) { + // EOF reached. Stop. + break; + } + // Apply gain and copy samples into `audio_buffer_`. + std::transform(buffer_.begin(), buffer_.end(), + audio_buffer_.channels()[0], [gain](int16_t v) -> float { + return rtc::SafeClamp(static_cast(v) * gain, + kMinSample, kMaxSample); + }); + controller.AnalyzeInputAudio(applied_input_volume, audio_buffer_); + const auto recommended_input_volume = controller.RecommendInputVolume( + speech_probability, speech_level_dbfs); + + // Expect no errors: Applied volume set for every frame; + // `RecommendInputVolume()` returns a non-empty value. + EXPECT_TRUE(recommended_input_volume.has_value()); + + applied_input_volume = *recommended_input_volume; + } + return applied_input_volume; + } + + private: + std::ifstream is_; + AudioBuffer audio_buffer_; + std::vector buffer_; + const std::streamsize buffer_num_bytes_; +}; + +// Runs the MonoInputVolumeControl processing sequence following the API +// contract. Returns the updated recommended input volume. +float UpdateRecommendedInputVolume(MonoInputVolumeController& mono_controller, + int applied_input_volume, + float speech_probability, + absl::optional rms_error_dbfs) { + mono_controller.set_stream_analog_level(applied_input_volume); + EXPECT_EQ(mono_controller.recommended_analog_level(), applied_input_volume); + mono_controller.Process(rms_error_dbfs, speech_probability); + return mono_controller.recommended_analog_level(); +} + +} // namespace + +// TODO(bugs.webrtc.org/12874): Use constexpr struct with designated +// initializers once fixed. 
+constexpr InputVolumeControllerConfig GetInputVolumeControllerTestConfig() { + InputVolumeControllerConfig config{ + .clipped_level_min = kClippedMin, + .clipped_level_step = kClippedLevelStep, + .clipped_ratio_threshold = kClippedRatioThreshold, + .clipped_wait_frames = kClippedWaitFrames, + .enable_clipping_predictor = kDefaultClippingPredictorConfig.enabled, + .target_range_max_dbfs = -18, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 0, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 1.0f, + }; + return config; +} + +// Helper class that provides an `InputVolumeController` instance with an +// `AudioBuffer` instance and `CallAgcSequence()`, a helper method that runs the +// `InputVolumeController` instance on the `AudioBuffer` one by sticking to the +// API contract. +class InputVolumeControllerTestHelper { + public: + // Ctor. Initializes `audio_buffer` with zeros. + // TODO(bugs.webrtc.org/7494): Remove the default argument. + InputVolumeControllerTestHelper(const InputVolumeController::Config& config = + GetInputVolumeControllerTestConfig()) + : audio_buffer(kSampleRateHz, + kNumChannels, + kSampleRateHz, + kNumChannels, + kSampleRateHz, + kNumChannels), + controller(/*num_capture_channels=*/1, config) { + controller.Initialize(); + WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipped_ratio=*/0.0f, + audio_buffer); + } + + // Calls the sequence of `InputVolumeController` methods according to the API + // contract, namely: + // - Sets the applied input volume; + // - Uses `audio_buffer` to call `AnalyzeInputAudio()` and + // `RecommendInputVolume()`; + // Returns the recommended input volume. 
+ absl::optional CallAgcSequence(int applied_input_volume, + float speech_probability, + absl::optional speech_level_dbfs, + int num_calls = 1) { + RTC_DCHECK_GE(num_calls, 1); + absl::optional volume = applied_input_volume; + for (int i = 0; i < num_calls; ++i) { + // Repeat the initial volume if `RecommendInputVolume()` doesn't return a + // value. + controller.AnalyzeInputAudio(volume.value_or(applied_input_volume), + audio_buffer); + volume = controller.RecommendInputVolume(speech_probability, + speech_level_dbfs); + + // Allow deviation from the API contract: `RecommendInputVolume()` doesn't + // return a recommended input volume. + if (volume.has_value()) { + EXPECT_EQ(*volume, controller.recommended_input_volume()); + } + } + return volume; + } + + // Deprecated. + // TODO(bugs.webrtc.org/7494): Let the caller write `audio_buffer` and use + // `CallAgcSequence()`. + int CallRecommendInputVolume(int num_calls, + int initial_volume, + float speech_probability, + absl::optional speech_level_dbfs) { + RTC_DCHECK(controller.capture_output_used()); + + // Create non-clipping audio for `AnalyzeInputAudio()`. + WriteAlternatingAudioBufferSamples(0.1f * kMaxSample, audio_buffer); + int volume = initial_volume; + for (int i = 0; i < num_calls; ++i) { + controller.AnalyzeInputAudio(volume, audio_buffer); + const auto recommended_input_volume = controller.RecommendInputVolume( + speech_probability, speech_level_dbfs); + + // Expect no errors: Applied volume set for every frame; + // `RecommendInputVolume()` returns a non-empty value. + EXPECT_TRUE(recommended_input_volume.has_value()); + + volume = *recommended_input_volume; + } + return volume; + } + + // Deprecated. + // TODO(bugs.webrtc.org/7494): Let the caller write `audio_buffer` and use + // `CallAgcSequence()`. 
+ void CallAnalyzeInputAudio(int num_calls, float clipped_ratio) { + RTC_DCHECK(controller.capture_output_used()); + + RTC_DCHECK_GE(clipped_ratio, 0.0f); + RTC_DCHECK_LE(clipped_ratio, 1.0f); + WriteAudioBufferSamples(/*samples_value=*/0.0f, clipped_ratio, + audio_buffer); + for (int i = 0; i < num_calls; ++i) { + controller.AnalyzeInputAudio(controller.recommended_input_volume(), + audio_buffer); + } + } + + AudioBuffer audio_buffer; + InputVolumeController controller; +}; + +class InputVolumeControllerParametrizedTest + : public ::testing::TestWithParam {}; + +TEST_P(InputVolumeControllerParametrizedTest, + StartupMinVolumeConfigurationRespectedWhenAppliedInputVolumeAboveMin) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + + EXPECT_EQ(*helper.CallAgcSequence(/*applied_input_volume=*/128, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80), + 128); +} + +TEST_P( + InputVolumeControllerParametrizedTest, + StartupMinVolumeConfigurationRespectedWhenAppliedInputVolumeMaybeBelowMin) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + + EXPECT_GE(*helper.CallAgcSequence(/*applied_input_volume=*/10, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80), + 10); +} + +TEST_P(InputVolumeControllerParametrizedTest, + StartupMinVolumeRespectedWhenAppliedVolumeNonZero) { + const int kMinInputVolume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = kMinInputVolume, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 1, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 0.5f}); + + // Volume change possible; speech level below the digital gain window. 
+ int volume = *helper.CallAgcSequence(/*applied_input_volume=*/1, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80); + + EXPECT_EQ(volume, kMinInputVolume); +} + +TEST_P(InputVolumeControllerParametrizedTest, + MinVolumeRepeatedlyRespectedWhenAppliedVolumeNonZero) { + const int kMinInputVolume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = kMinInputVolume, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 1, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 0.5f}); + + // Volume change possible; speech level below the digital gain window. + for (int i = 0; i < 100; ++i) { + const int volume = *helper.CallAgcSequence(/*applied_input_volume=*/1, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80); + EXPECT_GE(volume, kMinInputVolume); + } +} + +TEST_P(InputVolumeControllerParametrizedTest, + StartupMinVolumeRespectedOnceWhenAppliedVolumeZero) { + const int kMinInputVolume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = kMinInputVolume, + .target_range_min_dbfs = -30, + .update_input_volume_wait_frames = 1, + .speech_probability_threshold = 0.5f, + .speech_ratio_threshold = 0.5f}); + + int volume = *helper.CallAgcSequence(/*applied_input_volume=*/0, + /*speech_probability=*/0.9f, + /*speech_level_dbfs=*/-80); + + EXPECT_EQ(volume, kMinInputVolume); + + // No change of volume regardless of a speech level below the digital gain + // window; applied volume is zero. 
+ volume = *helper.CallAgcSequence(/*applied_input_volume=*/0,
+ /*speech_probability=*/0.9f,
+ /*speech_level_dbfs=*/-80);
+
+ EXPECT_EQ(volume, 0);
+}
+
+TEST_P(InputVolumeControllerParametrizedTest, MicVolumeResponseToRmsError) {
+ InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig();
+ config.min_input_volume = GetParam();
+ InputVolumeControllerTestHelper helper(config);
+ int volume = *helper.CallAgcSequence(kInitialInputVolume,
+ kHighSpeechProbability, kSpeechLevel);
+
+ // Inside the digital gain's window; no change of volume.
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -23.0f);
+
+ // Inside the digital gain's window; no change of volume.
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -28.0f);
+
+ // Above the digital gain's window; volume should be increased.
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -29.0f);
+ EXPECT_EQ(volume, 128);
+
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -38.0f);
+ EXPECT_EQ(volume, 156);
+
+ // Inside the digital gain's window; no change of volume.
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -23.0f);
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -18.0f);
+
+ // Below the digital gain's window; volume should be decreased.
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 155); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 151); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -9.0f); + EXPECT_EQ(volume, 119); +} + +TEST_P(InputVolumeControllerParametrizedTest, MicVolumeIsLimited) { + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + const int min_input_volume = GetParam(); + config.min_input_volume = min_input_volume; + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); + + // Maximum upwards change is limited. + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 183); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 243); + + // Won't go higher than the maximum. + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 255); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 254); + + // Maximum downwards change is limited. 
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 194); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 137); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 88); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 54); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, 33); + + // Won't go lower than the minimum. + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, std::max(18, min_input_volume)); + + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, 22.0f); + EXPECT_EQ(volume, std::max(12, min_input_volume)); +} + +TEST_P(InputVolumeControllerParametrizedTest, NoActionWhileMuted) { + InputVolumeControllerTestHelper helper_1( + /*config=*/{.min_input_volume = GetParam()}); + InputVolumeControllerTestHelper helper_2( + /*config=*/{.min_input_volume = GetParam()}); + + int volume_1 = *helper_1.CallAgcSequence(/*applied_input_volume=*/255, + kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); + int volume_2 = *helper_2.CallAgcSequence(/*applied_input_volume=*/255, + kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); + + EXPECT_EQ(volume_1, 255); + EXPECT_EQ(volume_2, 255); + + helper_2.controller.HandleCaptureOutputUsedChange(false); + + WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + + volume_1 = + *helper_1.CallAgcSequence(volume_1, kHighSpeechProbability, kSpeechLevel, + /*num_calls=*/1); + volume_2 = + *helper_2.CallAgcSequence(volume_2, kHighSpeechProbability, kSpeechLevel, + 
/*num_calls=*/1); + + EXPECT_LT(volume_1, 255); + EXPECT_EQ(volume_2, 255); +} + +TEST_P(InputVolumeControllerParametrizedTest, + UnmutingChecksVolumeWithoutRaising) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); + + helper.controller.HandleCaptureOutputUsedChange(false); + helper.controller.HandleCaptureOutputUsedChange(true); + + constexpr int kInputVolume = 127; + + // SetMicVolume should not be called. + EXPECT_EQ( + helper.CallRecommendInputVolume(/*num_calls=*/1, kInputVolume, + kHighSpeechProbability, kSpeechLevel), + kInputVolume); +} + +TEST_P(InputVolumeControllerParametrizedTest, UnmutingRaisesTooLowVolume) { + const int min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = min_input_volume}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); + + helper.controller.HandleCaptureOutputUsedChange(false); + helper.controller.HandleCaptureOutputUsedChange(true); + + constexpr int kInputVolume = 11; + + EXPECT_EQ( + helper.CallRecommendInputVolume(/*num_calls=*/1, kInputVolume, + kHighSpeechProbability, kSpeechLevel), + min_input_volume); +} + +TEST_P(InputVolumeControllerParametrizedTest, + ManualLevelChangeResultsInNoSetMicCall) { + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); + + // GetMicVolume returns a value outside of the quantization slack, indicating + // a manual volume change. + ASSERT_NE(volume, 154); + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/154, kHighSpeechProbability, -29.0f); + EXPECT_EQ(volume, 154); + + // Do the same thing, except downwards now. 
+ volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/100, kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 100); + + // And finally verify the AGC continues working without a manual change. + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, 99); +} + +TEST_P(InputVolumeControllerParametrizedTest, + RecoveryAfterManualLevelChangeFromMax) { + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); + + // Force the mic up to max volume. Takes a few steps due to the residual + // gain limitation. + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 183); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 243); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(volume, 255); + + // Manual change does not result in SetMicVolume call. + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/50, kHighSpeechProbability, -17.0f); + EXPECT_EQ(helper.controller.recommended_input_volume(), 50); + + // Continues working as usual afterwards. + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -38.0f); + + EXPECT_EQ(volume, 65); +} + +// Checks that the minimum input volume is enforced during the upward adjustment +// of the input volume. 
+TEST_P(InputVolumeControllerParametrizedTest, + EnforceMinInputVolumeDuringUpwardsAdjustment) { + const int min_input_volume = GetParam(); + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = min_input_volume; + InputVolumeControllerTestHelper helper(config); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); + + // Manual change below min, but strictly positive, otherwise no action will be + // taken. + volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/1, kHighSpeechProbability, -17.0f); + + // Trigger an upward adjustment of the input volume. + EXPECT_EQ(volume, min_input_volume); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -29.0f); + EXPECT_EQ(volume, min_input_volume); + volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume, + kHighSpeechProbability, -30.0f); + EXPECT_EQ(volume, min_input_volume); + + // After a number of consistently low speech level observations, the input + // volume is eventually raised above the minimum. + volume = helper.CallRecommendInputVolume(/*num_calls=*/10, volume, + kHighSpeechProbability, -38.0f); + EXPECT_GT(volume, min_input_volume); +} + +// Checks that, when the min mic level override is specified, AGC immediately +// applies the minimum mic level after the mic level is manually set below the +// minimum gain to enforce. +TEST_P(InputVolumeControllerParametrizedTest, + RecoveryAfterManualLevelChangeBelowMin) { + const int min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = min_input_volume}); + int volume = *helper.CallAgcSequence(kInitialInputVolume, + kHighSpeechProbability, kSpeechLevel); + + // Manual change below min, but strictly positive, otherwise + // AGC won't take any action. 
+ volume = helper.CallRecommendInputVolume( + /*num_calls=*/1, /*initial_volume=*/1, kHighSpeechProbability, -17.0f); + EXPECT_EQ(volume, min_input_volume); +} + +TEST_P(InputVolumeControllerParametrizedTest, NoClippingHasNoImpact) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); + + helper.CallAnalyzeInputAudio(/*num_calls=*/100, /*clipped_ratio=*/0); + EXPECT_EQ(helper.controller.recommended_input_volume(), 128); +} + +TEST_P(InputVolumeControllerParametrizedTest, + ClippingUnderThresholdHasNoImpact) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + kSpeechLevel); + + helper.CallAnalyzeInputAudio(/*num_calls=*/1, /*clipped_ratio=*/0.099); + EXPECT_EQ(helper.controller.recommended_input_volume(), 128); +} + +TEST_P(InputVolumeControllerParametrizedTest, ClippingLowersVolume) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(/*applied_input_volume=*/255, kHighSpeechProbability, + kSpeechLevel); + + helper.CallAnalyzeInputAudio(/*num_calls=*/1, /*clipped_ratio=*/0.2); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); +} + +TEST_P(InputVolumeControllerParametrizedTest, + WaitingPeriodBetweenClippingChecks) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(/*applied_input_volume=*/255, kHighSpeechProbability, + kSpeechLevel); + + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); + + helper.CallAnalyzeInputAudio(/*num_calls=*/300, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); + + 
helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 225); +} + +TEST_P(InputVolumeControllerParametrizedTest, ClippingLoweringIsLimited) { + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + helper.CallAgcSequence(/*applied_input_volume=*/180, kHighSpeechProbability, + kSpeechLevel); + + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), kClippedMin); + + helper.CallAnalyzeInputAudio(/*num_calls=*/1000, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), kClippedMin); +} + +TEST_P(InputVolumeControllerParametrizedTest, + ClippingMaxIsRespectedWhenEqualToLevel) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = GetParam()}); + helper.CallAgcSequence(/*applied_input_volume=*/255, kHighSpeechProbability, + kSpeechLevel); + + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); + + helper.CallRecommendInputVolume(/*num_calls=*/10, /*initial_volume=*/240, + kHighSpeechProbability, -48.0f); + EXPECT_EQ(helper.controller.recommended_input_volume(), 240); +} + +TEST_P(InputVolumeControllerParametrizedTest, + ClippingMaxIsRespectedWhenHigherThanLevel) { + InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); + config.min_input_volume = GetParam(); + InputVolumeControllerTestHelper helper(config); + helper.CallAgcSequence(/*applied_input_volume=*/200, kHighSpeechProbability, + kSpeechLevel); + + helper.CallAnalyzeInputAudio(/*num_calls=*/1, + /*clipped_ratio=*/kAboveClippedThreshold); + int volume = helper.controller.recommended_input_volume(); + EXPECT_EQ(volume, 185); + 
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -58.0f);
+ EXPECT_EQ(volume, 240);
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/10, volume,
+ kHighSpeechProbability, -58.0f);
+ EXPECT_EQ(volume, 240);
+}
+
+TEST_P(InputVolumeControllerParametrizedTest, UserCanRaiseVolumeAfterClipping) {
+ InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig();
+ config.min_input_volume = GetParam();
+ InputVolumeControllerTestHelper helper(config);
+ helper.CallAgcSequence(/*applied_input_volume=*/225, kHighSpeechProbability,
+ kSpeechLevel);
+
+ helper.CallAnalyzeInputAudio(/*num_calls=*/1,
+ /*clipped_ratio=*/kAboveClippedThreshold);
+ EXPECT_EQ(helper.controller.recommended_input_volume(), 210);
+
+ // User changed the volume.
+ int volume = helper.CallRecommendInputVolume(
+ /*num_calls=*/1, /*initial_volume=*/250, kHighSpeechProbability, -32.0f);
+ EXPECT_EQ(volume, 250);
+
+ // Move down...
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -8.0f);
+ EXPECT_EQ(volume, 210);
+ // And back up to the new max established by the user.
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -58.0f);
+ EXPECT_EQ(volume, 250);
+ // Will not move above new maximum.
+ volume = helper.CallRecommendInputVolume(/*num_calls=*/1, volume,
+ kHighSpeechProbability, -48.0f);
+ EXPECT_EQ(volume, 250);
+}
+
+TEST_P(InputVolumeControllerParametrizedTest,
+ ClippingDoesNotPullLowVolumeBackUp) {
+ InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig();
+ config.min_input_volume = GetParam();
+ InputVolumeControllerTestHelper helper(config);
+ helper.CallAgcSequence(/*applied_input_volume=*/80, kHighSpeechProbability,
+ kSpeechLevel);
+
+ int initial_volume = helper.controller.recommended_input_volume();
+ helper.CallAnalyzeInputAudio(/*num_calls=*/1,
+ /*clipped_ratio=*/kAboveClippedThreshold);
+ EXPECT_EQ(helper.controller.recommended_input_volume(), initial_volume);
+}
+
+TEST_P(InputVolumeControllerParametrizedTest, TakesNoActionOnZeroMicVolume) {
+ InputVolumeControllerTestHelper helper(
+ /*config=*/{.min_input_volume = GetParam()});
+ helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability,
+ kSpeechLevel);
+
+ EXPECT_EQ(
+ helper.CallRecommendInputVolume(/*num_calls=*/10, /*initial_volume=*/0,
+ kHighSpeechProbability, -48.0f),
+ 0);
+}
+
+TEST_P(InputVolumeControllerParametrizedTest, ClippingDetectionLowersVolume) {
+ InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig();
+ config.min_input_volume = GetParam();
+ InputVolumeControllerTestHelper helper(config);
+ int volume = *helper.CallAgcSequence(/*applied_input_volume=*/255,
+ kHighSpeechProbability, kSpeechLevel,
+ /*num_calls=*/1);
+
+ EXPECT_EQ(volume, 255);
+
+ WriteAlternatingAudioBufferSamples(0.99f * kMaxSample, helper.audio_buffer);
+ volume = *helper.CallAgcSequence(volume, kHighSpeechProbability, kSpeechLevel,
+ /*num_calls=*/100);
+
+ EXPECT_EQ(volume, 255);
+
+ WriteAlternatingAudioBufferSamples(kMaxSample, helper.audio_buffer);
+ volume = *helper.CallAgcSequence(volume, kHighSpeechProbability, kSpeechLevel,
+ /*num_calls=*/100);
+
+ EXPECT_EQ(volume, 240);
+}
+
+// TODO(bugs.webrtc.org/12774): Test the behavior of
`clipped_level_step`.
+// TODO(bugs.webrtc.org/12774): Test the behavior of `clipped_ratio_threshold`.
+// TODO(bugs.webrtc.org/12774): Test the behavior of `clipped_wait_frames`.
+// Verifies that configurable clipping parameters are initialized as intended.
+TEST_P(InputVolumeControllerParametrizedTest, ClippingParametersVerified) {
+ std::unique_ptr<InputVolumeController> controller =
+ CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+ kClippedWaitFrames);
+ controller->Initialize();
+ EXPECT_EQ(controller->clipped_level_step_, kClippedLevelStep);
+ EXPECT_EQ(controller->clipped_ratio_threshold_, kClippedRatioThreshold);
+ EXPECT_EQ(controller->clipped_wait_frames_, kClippedWaitFrames);
+ std::unique_ptr<InputVolumeController> controller_custom =
+ CreateInputVolumeController(/*clipped_level_step=*/10,
+ /*clipped_ratio_threshold=*/0.2f,
+ /*clipped_wait_frames=*/50);
+ controller_custom->Initialize();
+ EXPECT_EQ(controller_custom->clipped_level_step_, 10);
+ EXPECT_EQ(controller_custom->clipped_ratio_threshold_, 0.2f);
+ EXPECT_EQ(controller_custom->clipped_wait_frames_, 50);
+}
+
+TEST_P(InputVolumeControllerParametrizedTest,
+ DisableClippingPredictorDisablesClippingPredictor) {
+ std::unique_ptr<InputVolumeController> controller =
+ CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+ kClippedWaitFrames,
+ /*enable_clipping_predictor=*/false);
+ controller->Initialize();
+
+ EXPECT_FALSE(controller->clipping_predictor_enabled());
+ EXPECT_FALSE(controller->use_clipping_predictor_step());
+}
+
+TEST_P(InputVolumeControllerParametrizedTest,
+ EnableClippingPredictorEnablesClippingPredictor) {
+ std::unique_ptr<InputVolumeController> controller =
+ CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+ kClippedWaitFrames,
+ /*enable_clipping_predictor=*/true);
+ controller->Initialize();
+
+ EXPECT_TRUE(controller->clipping_predictor_enabled());
+ EXPECT_TRUE(controller->use_clipping_predictor_step());
+}
+
+TEST_P(InputVolumeControllerParametrizedTest,
+ 
DisableClippingPredictorDoesNotLowerVolume) {
+ int volume = 255;
+ InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig();
+ config.enable_clipping_predictor = false;
+ auto helper = InputVolumeControllerTestHelper(config);
+ helper.controller.Initialize();
+
+ EXPECT_FALSE(helper.controller.clipping_predictor_enabled());
+ EXPECT_FALSE(helper.controller.use_clipping_predictor_step());
+
+ // Expect no change if clipping prediction is disabled.
+ for (int j = 0; j < 31; ++j) {
+ WriteAlternatingAudioBufferSamples(0.99f * kMaxSample, helper.audio_buffer);
+ volume =
+ *helper.CallAgcSequence(volume, kLowSpeechProbability, kSpeechLevel,
+ /*num_calls=*/5);
+
+ WriteAudioBufferSamples(0.99f * kMaxSample, /*clipped_ratio=*/0.0f,
+ helper.audio_buffer);
+ volume =
+ *helper.CallAgcSequence(volume, kLowSpeechProbability, kSpeechLevel,
+ /*num_calls=*/5);
+
+ EXPECT_EQ(volume, 255);
+ }
+}
+
+// TODO(bugs.webrtc.org/7494): Split into several smaller tests.
+TEST_P(InputVolumeControllerParametrizedTest,
+ UsedClippingPredictionsProduceLowerAnalogLevels) {
+ constexpr int kInitialLevel = 255;
+ constexpr float kCloseToClippingPeakRatio = 0.99f;
+ int volume_1 = kInitialLevel;
+ int volume_2 = kInitialLevel;
+
+ // Create two helpers, one with clipping prediction and one without.
+ auto config_1 = GetInputVolumeControllerTestConfig();
+ auto config_2 = GetInputVolumeControllerTestConfig();
+ config_1.enable_clipping_predictor = true;
+ config_2.enable_clipping_predictor = false;
+ auto helper_1 = InputVolumeControllerTestHelper(config_1);
+ auto helper_2 = InputVolumeControllerTestHelper(config_2);
+ helper_1.controller.Initialize();
+ helper_2.controller.Initialize();
+
+ EXPECT_TRUE(helper_1.controller.clipping_predictor_enabled());
+ EXPECT_FALSE(helper_2.controller.clipping_predictor_enabled());
+ EXPECT_TRUE(helper_1.controller.use_clipping_predictor_step());
+
+ // Expect a change if clipping prediction is enabled.
+ WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_1.audio_buffer); + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + EXPECT_EQ(volume_1, kInitialLevel - kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); + + // Expect no change during waiting. + for (int i = 0; i < kClippedWaitFrames / 10; ++i) { + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_1.audio_buffer); + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + EXPECT_EQ(volume_1, kInitialLevel - kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); + } + + // Expect a change when the prediction step is used. 
+ WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_1.audio_buffer); + WriteAudioBufferSamples(kCloseToClippingPeakRatio * kMaxSample, + /*clipped_ratio=*/0.0f, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + EXPECT_EQ(volume_1, kInitialLevel - 2 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); + + // Expect no change when clipping is not detected or predicted. + for (int i = 0; i < 2 * kClippedWaitFrames / 10; ++i) { + WriteAlternatingAudioBufferSamples(/*samples_value=*/0.0f, + helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(/*samples_value=*/0.0f, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipped_ratio=*/0.0f, + helper_1.audio_buffer); + WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipped_ratio=*/0.0f, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + } + + EXPECT_EQ(volume_1, kInitialLevel - 2 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel); + + // Expect a change for clipping frames. 
+ WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 1); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 1); + + EXPECT_EQ(volume_1, kInitialLevel - 3 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel - kClippedLevelStep); + + // Expect no change during waiting. + for (int i = 0; i < kClippedWaitFrames / 10; ++i) { + WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + + WriteAudioBufferSamples(kMaxSample, /*clipped_ratio=*/1.0f, + helper_1.audio_buffer); + WriteAudioBufferSamples(kMaxSample, /*clipped_ratio=*/1.0f, + helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 5); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 5); + } + + EXPECT_EQ(volume_1, kInitialLevel - 3 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel - kClippedLevelStep); + + // Expect a change for clipping frames. + WriteAlternatingAudioBufferSamples(kMaxSample, helper_1.audio_buffer); + WriteAlternatingAudioBufferSamples(kMaxSample, helper_2.audio_buffer); + volume_1 = *helper_1.CallAgcSequence(volume_1, kLowSpeechProbability, + kSpeechLevel, 1); + volume_2 = *helper_2.CallAgcSequence(volume_2, kLowSpeechProbability, + kSpeechLevel, 1); + + EXPECT_EQ(volume_1, kInitialLevel - 4 * kClippedLevelStep); + EXPECT_EQ(volume_2, kInitialLevel - 2 * kClippedLevelStep); +} + +// Checks that passing an empty speech level has no effect on the input volume. 
+TEST_P(InputVolumeControllerParametrizedTest, EmptyRmsErrorHasNoEffect) {
+ InputVolumeController controller(kNumChannels,
+ GetInputVolumeControllerTestConfig());
+ controller.Initialize();
+
+ // Feed speech with low energy that would trigger an upward adaptation of
+ // the analog level if a speech level was not low and the RMS level empty.
+ constexpr int kNumFrames = 125;
+ constexpr int kGainDb = -20;
+ SpeechSamplesReader reader;
+ int volume = reader.Feed(kNumFrames, kInitialInputVolume, kGainDb,
+ kLowSpeechProbability, absl::nullopt, controller);
+
+ // Check that no adaptation occurs.
+ ASSERT_EQ(volume, kInitialInputVolume);
+}
+
+// Checks that the recommended input volume is not updated unless enough
+// frames have been processed after the previous update.
+TEST(InputVolumeControllerTest, UpdateInputVolumeWaitFramesIsEffective) {
+ constexpr int kInputVolume = kInitialInputVolume;
+ std::unique_ptr<InputVolumeController> controller_wait_0 =
+ CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+ kClippedWaitFrames,
+ /*enable_clipping_predictor=*/false,
+ /*update_input_volume_wait_frames=*/0);
+ std::unique_ptr<InputVolumeController> controller_wait_100 =
+ CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold,
+ kClippedWaitFrames,
+ /*enable_clipping_predictor=*/false,
+ /*update_input_volume_wait_frames=*/100);
+ controller_wait_0->Initialize();
+ controller_wait_100->Initialize();
+
+ SpeechSamplesReader reader_1;
+ SpeechSamplesReader reader_2;
+ int volume_wait_0 = reader_1.Feed(
+ /*num_frames=*/99, kInputVolume, /*gain_db=*/0, kHighSpeechProbability,
+ /*speech_level_dbfs=*/-42.0f, *controller_wait_0);
+ int volume_wait_100 = reader_2.Feed(
+ /*num_frames=*/99, kInputVolume, /*gain_db=*/0, kHighSpeechProbability,
+ /*speech_level_dbfs=*/-42.0f, *controller_wait_100);
+
+ // Check that adaptation only occurs if enough frames have been processed.
+ ASSERT_GT(volume_wait_0, kInputVolume); + ASSERT_EQ(volume_wait_100, kInputVolume); + + volume_wait_0 = + reader_1.Feed(/*num_frames=*/1, volume_wait_0, + /*gain_db=*/0, kHighSpeechProbability, + /*speech_level_dbfs=*/-42.0f, *controller_wait_0); + volume_wait_100 = + reader_2.Feed(/*num_frames=*/1, volume_wait_100, + /*gain_db=*/0, kHighSpeechProbability, + /*speech_level_dbfs=*/-42.0f, *controller_wait_100); + + // Check that adaptation only occurs when enough frames have been processed. + ASSERT_GT(volume_wait_0, kInputVolume); + ASSERT_GT(volume_wait_100, kInputVolume); +} + +INSTANTIATE_TEST_SUITE_P(, + InputVolumeControllerParametrizedTest, + ::testing::Values(12, 20)); + +TEST(InputVolumeControllerTest, + MinInputVolumeEnforcedWithClippingWhenAboveClippedLevelMin) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = 80, .clipped_level_min = 70}); + + // Trigger a downward adjustment caused by clipping input. Use a low speech + // probability to limit the volume changes to clipping handling. + WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, + helper.audio_buffer); + constexpr int kNumCalls = 800; + helper.CallAgcSequence(/*applied_input_volume=*/100, kLowSpeechProbability, + /*speech_level_dbfs=*/-18.0f, kNumCalls); + + EXPECT_EQ(helper.controller.recommended_input_volume(), 80); +} + +TEST(InputVolumeControllerTest, + ClippedlevelMinEnforcedWithClippingWhenAboveMinInputVolume) { + InputVolumeControllerTestHelper helper( + /*config=*/{.min_input_volume = 70, .clipped_level_min = 80}); + + // Trigger a downward adjustment caused by clipping input. Use a low speech + // probability to limit the volume changes to clipping handling. 
+ WriteAudioBufferSamples(/*samples_value=*/4000.0f, /*clipped_ratio=*/0.8f, + helper.audio_buffer); + constexpr int kNumCalls = 800; + helper.CallAgcSequence(/*applied_input_volume=*/100, kLowSpeechProbability, + /*speech_level_dbfs=*/-18.0f, kNumCalls); + + EXPECT_EQ(helper.controller.recommended_input_volume(), 80); +} + +TEST(InputVolumeControllerTest, SpeechRatioThresholdIsEffective) { + constexpr int kInputVolume = kInitialInputVolume; + // Create two input volume controllers with 10 frames between volume updates + // and the minimum speech ratio of 0.8 and speech probability threshold 0.5. + std::unique_ptr controller_1 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + std::unique_ptr controller_2 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + controller_1->Initialize(); + controller_2->Initialize(); + + SpeechSamplesReader reader_1; + SpeechSamplesReader reader_2; + + int volume_1 = reader_1.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.7f, + /*speech_level_dbfs=*/-42.0f, *controller_1); + int volume_2 = reader_2.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.4f, + /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + volume_1 = reader_1.Feed(/*num_frames=*/2, volume_1, /*gain_db=*/0, + /*speech_probability=*/0.4f, + /*speech_level_dbfs=*/-42.0f, *controller_1); + volume_2 = reader_2.Feed(/*num_frames=*/2, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.4f, + /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + volume_1 = reader_1.Feed( + /*num_frames=*/7, volume_1, /*gain_db=*/0, + 
/*speech_probability=*/0.7f, /*speech_level_dbfs=*/-42.0f, *controller_1); + volume_2 = reader_2.Feed( + /*num_frames=*/7, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.7f, /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_GT(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); +} + +TEST(InputVolumeControllerTest, SpeechProbabilityThresholdIsEffective) { + constexpr int kInputVolume = kInitialInputVolume; + // Create two input volume controllers with the exact same settings and + // 10 frames between volume updates. + std::unique_ptr<InputVolumeController> controller_1 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + std::unique_ptr<InputVolumeController> controller_2 = + CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, + kClippedWaitFrames, + /*enable_clipping_predictor=*/false, + /*update_input_volume_wait_frames=*/10); + controller_1->Initialize(); + controller_2->Initialize(); + + SpeechSamplesReader reader_1; + SpeechSamplesReader reader_2; + + // Process with two sets of inputs: Use `reader_1` to process inputs + // that make the volume to be adjusted after enough frames have been + // processed and `reader_2` to process inputs that won't make the volume + // to be adjusted. 
+ int volume_1 = reader_1.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.5f, + /*speech_level_dbfs=*/-42.0f, *controller_1); + int volume_2 = reader_2.Feed(/*num_frames=*/1, kInputVolume, /*gain_db=*/0, + /*speech_probability=*/0.49f, + /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + reader_1.Feed(/*num_frames=*/2, volume_1, /*gain_db=*/0, + /*speech_probability=*/0.49f, /*speech_level_dbfs=*/-42.0f, + *controller_1); + reader_2.Feed(/*num_frames=*/2, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.49f, /*speech_level_dbfs=*/-42.0f, + *controller_2); + + ASSERT_EQ(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); + + volume_1 = reader_1.Feed( + /*num_frames=*/7, volume_1, /*gain_db=*/0, + /*speech_probability=*/0.5f, /*speech_level_dbfs=*/-42.0f, *controller_1); + volume_2 = reader_2.Feed( + /*num_frames=*/7, volume_2, /*gain_db=*/0, + /*speech_probability=*/0.5f, /*speech_level_dbfs=*/-42.0f, *controller_2); + + ASSERT_GT(volume_1, kInputVolume); + ASSERT_EQ(volume_2, kInputVolume); +} + +TEST(InputVolumeControllerTest, + DoNotLogRecommendedInputVolumeOnChangeToMatchTarget) { + metrics::Reset(); + + SpeechSamplesReader reader; + auto controller = CreateInputVolumeController(); + controller->Initialize(); + // Trigger a downward volume change by inputting audio that clips. Pass a + // speech level that falls in the target range to make sure that the + // adaptation is not made to match the target range. 
+ constexpr int kStartupVolume = 255; + const int volume = reader.Feed(/*num_frames=*/14, kStartupVolume, + /*gain_db=*/50, kHighSpeechProbability, + /*speech_level_dbfs=*/-20.0f, *controller); + ASSERT_LT(volume, kStartupVolume); + EXPECT_METRIC_THAT( + metrics::Samples( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget"), + ::testing::IsEmpty()); +} + +TEST(InputVolumeControllerTest, + LogRecommendedInputVolumeOnUpwardChangeToMatchTarget) { + metrics::Reset(); + + SpeechSamplesReader reader; + auto controller = CreateInputVolumeController(); + controller->Initialize(); + constexpr int kStartupVolume = 100; + // Trigger an upward volume change by inputting audio that does not clip and + // by passing a speech level below the target range. + const int volume = reader.Feed(/*num_frames=*/14, kStartupVolume, + /*gain_db=*/-6, kHighSpeechProbability, + /*speech_level_dbfs=*/-50.0f, *controller); + ASSERT_GT(volume, kStartupVolume); + EXPECT_METRIC_THAT( + metrics::Samples( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget"), + ::testing::Not(::testing::IsEmpty())); +} + +TEST(InputVolumeControllerTest, + LogRecommendedInputVolumeOnDownwardChangeToMatchTarget) { + metrics::Reset(); + + SpeechSamplesReader reader; + auto controller = CreateInputVolumeController(); + controller->Initialize(); + constexpr int kStartupVolume = 100; + // Trigger a downward volume change by inputting audio that does not clip and + // by passing a speech level above the target range. 
+ const int volume = reader.Feed(/*num_frames=*/14, kStartupVolume, + /*gain_db=*/-6, kHighSpeechProbability, + /*speech_level_dbfs=*/-5.0f, *controller); + ASSERT_LT(volume, kStartupVolume); + EXPECT_METRIC_THAT( + metrics::Samples( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget"), + ::testing::Not(::testing::IsEmpty())); +} + +TEST(MonoInputVolumeControllerTest, CheckHandleClippingLowersVolume) { + constexpr int kInitialInputVolume = 100; + constexpr int kInputVolumeStep = 29; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/70, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + UpdateRecommendedInputVolume(mono_controller, kInitialInputVolume, + kLowSpeechProbability, + /*rms_error_dbfs*/ -10.0f); + + mono_controller.HandleClipping(kInputVolumeStep); + + EXPECT_EQ(mono_controller.recommended_analog_level(), + kInitialInputVolume - kInputVolumeStep); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessNegativeRmsErrorDecreasesInputVolume) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, -10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, -10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume, kInitialInputVolume); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessPositiveRmsErrorIncreasesInputVolume) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + 
/*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, 10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, 10.0f); + volume = UpdateRecommendedInputVolume(mono_controller, volume, + kHighSpeechProbability, 10.0f); + + EXPECT_GT(volume, kInitialInputVolume); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessNegativeRmsErrorDecreasesInputVolumeWithLimit) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_3( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, + /*speech_probability_threshold=*/0.7, + /*speech_ratio_threshold=*/0.8); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + mono_controller_3.Initialize(); + + // Process RMS errors in the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -14.0f); + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -14.0f); + // Process RMS errors outside the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. 
+ int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -15.0f); + int volume_3 = UpdateRecommendedInputVolume( + mono_controller_3, kInitialInputVolume, kHighSpeechProbability, -30.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -15.0f); + volume_3 = UpdateRecommendedInputVolume(mono_controller_3, volume_3, + kHighSpeechProbability, -30.0f); + + EXPECT_LT(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); + EXPECT_EQ(volume_2, volume_3); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessPositiveRmsErrorIncreasesInputVolumeWithLimit) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_3( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + mono_controller_3.Initialize(); + + // Process RMS errors in the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, 14.0f); + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, 14.0f); + // Process RMS errors outside the range + // [`-kMaxResidualGainChange`, `kMaxResidualGainChange`]. 
+ int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, 15.0f); + int volume_3 = UpdateRecommendedInputVolume( + mono_controller_3, kInitialInputVolume, kHighSpeechProbability, 30.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, 15.0f); + volume_3 = UpdateRecommendedInputVolume(mono_controller_3, volume_3, + kHighSpeechProbability, 30.0f); + + EXPECT_GT(volume_1, kInitialInputVolume); + EXPECT_GT(volume_2, volume_1); + EXPECT_EQ(volume_2, volume_3); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessRmsErrorDecreasesInputVolumeRepeatedly) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume_before = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, -10.0f); + volume_before = UpdateRecommendedInputVolume(mono_controller, volume_before, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_before, kInitialInputVolume); + + int volume_after = UpdateRecommendedInputVolume( + mono_controller, volume_before, kHighSpeechProbability, -10.0f); + volume_after = UpdateRecommendedInputVolume(mono_controller, volume_after, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_after, volume_before); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessPositiveRmsErrorIncreasesInputVolumeRepeatedly) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller( + /*clipped_level_min=*/64, + /*min_mic_level=*/32, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller.Initialize(); + + int volume_before = UpdateRecommendedInputVolume( + mono_controller, kInitialInputVolume, kHighSpeechProbability, 10.0f); + 
volume_before = UpdateRecommendedInputVolume(mono_controller, volume_before, + kHighSpeechProbability, 10.0f); + + EXPECT_GT(volume_before, kInitialInputVolume); + + int volume_after = UpdateRecommendedInputVolume( + mono_controller, volume_before, kHighSpeechProbability, 10.0f); + volume_after = UpdateRecommendedInputVolume(mono_controller, volume_after, + kHighSpeechProbability, 10.0f); + + EXPECT_GT(volume_after, volume_before); +} + +TEST(MonoInputVolumeControllerTest, CheckClippedLevelMinIsEffective) { + constexpr int kInitialInputVolume = 100; + constexpr int kClippedLevelMin = 70; + MonoInputVolumeController mono_controller_1( + kClippedLevelMin, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + kClippedLevelMin, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + // Process one frame to reset the state for `HandleClipping()`. 
+ EXPECT_EQ(UpdateRecommendedInputVolume(mono_controller_1, kInitialInputVolume, + kLowSpeechProbability, -10.0f), + kInitialInputVolume); + EXPECT_EQ(UpdateRecommendedInputVolume(mono_controller_2, kInitialInputVolume, + kLowSpeechProbability, -10.0f), + kInitialInputVolume); + + mono_controller_1.HandleClipping(29); + mono_controller_2.HandleClipping(31); + + EXPECT_EQ(mono_controller_2.recommended_analog_level(), kClippedLevelMin); + EXPECT_LT(mono_controller_2.recommended_analog_level(), + mono_controller_1.recommended_analog_level()); +} + +TEST(MonoInputVolumeControllerTest, CheckMinMicLevelIsEffective) { + constexpr int kInitialInputVolume = 100; + constexpr int kMinMicLevel = 64; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, kMinMicLevel, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, kMinMicLevel, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -30.0f); + + EXPECT_LT(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); + EXPECT_EQ(volume_2, kMinMicLevel); +} + +TEST(MonoInputVolumeControllerTest, + CheckUpdateInputVolumeWaitFramesIsEffective) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, 
+ /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/1, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/3, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_LT(volume_2, kInitialInputVolume); +} + +TEST(MonoInputVolumeControllerTest, + CheckSpeechProbabilityThresholdIsEffective) { + constexpr int kInitialInputVolume = 100; + constexpr float kSpeechProbabilityThreshold = 0.8f; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kSpeechProbabilityThreshold, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kSpeechProbabilityThreshold, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = + UpdateRecommendedInputVolume(mono_controller_1, kInitialInputVolume, + kSpeechProbabilityThreshold, -10.0f); + int volume_2 = + 
UpdateRecommendedInputVolume(mono_controller_2, kInitialInputVolume, + kSpeechProbabilityThreshold, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, volume_1, kSpeechProbabilityThreshold - 0.1f, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kSpeechProbabilityThreshold, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); +} + +TEST(MonoInputVolumeControllerTest, CheckSpeechRatioThresholdIsEffective) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/4, kHighSpeechProbability, + /*speech_ratio_threshold=*/0.75f); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/4, kHighSpeechProbability, + /*speech_ratio_threshold=*/0.75f); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kLowSpeechProbability, -10.0f); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kLowSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kLowSpeechProbability, -10.0f); + volume_2 = 
UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); +} + +TEST(MonoInputVolumeControllerTest, + CheckProcessEmptyRmsErrorDoesNotLowerVolume) { + constexpr int kInitialInputVolume = 100; + MonoInputVolumeController mono_controller_1( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + MonoInputVolumeController mono_controller_2( + /*clipped_level_min=*/64, + /*min_mic_level=*/84, + /*update_input_volume_wait_frames=*/2, kHighSpeechProbability, + kSpeechRatioThreshold); + mono_controller_1.Initialize(); + mono_controller_2.Initialize(); + + int volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, kInitialInputVolume, kHighSpeechProbability, -10.0f); + int volume_2 = UpdateRecommendedInputVolume( + mono_controller_2, kInitialInputVolume, kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_EQ(volume_2, kInitialInputVolume); + + volume_1 = UpdateRecommendedInputVolume( + mono_controller_1, volume_1, kHighSpeechProbability, absl::nullopt); + volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, + kHighSpeechProbability, -10.0f); + + EXPECT_EQ(volume_1, kInitialInputVolume); + EXPECT_LT(volume_2, volume_1); +} + +} // namespace webrtc diff --git a/modules/audio_processing/agc2/input_volume_stats_reporter.cc b/modules/audio_processing/agc2/input_volume_stats_reporter.cc new file mode 100644 index 0000000000..05624b1f92 --- /dev/null +++ b/modules/audio_processing/agc2/input_volume_stats_reporter.cc @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
 An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" + +#include <cmath> + +#include "absl/strings/string_view.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace { + +using InputVolumeType = InputVolumeStatsReporter::InputVolumeType; + +constexpr int kFramesIn60Seconds = 6000; +constexpr int kMinInputVolume = 0; +constexpr int kMaxInputVolume = 255; +constexpr int kMaxUpdate = kMaxInputVolume - kMinInputVolume; + +int ComputeAverageUpdate(int sum_updates, int num_updates) { + RTC_DCHECK_GE(sum_updates, 0); + RTC_DCHECK_LE(sum_updates, kMaxUpdate * kFramesIn60Seconds); + RTC_DCHECK_GE(num_updates, 0); + RTC_DCHECK_LE(num_updates, kFramesIn60Seconds); + if (num_updates == 0) { + return 0; + } + return std::round(static_cast<float>(sum_updates) / + static_cast<float>(num_updates)); +} + +constexpr absl::string_view MetricNamePrefix( + InputVolumeType input_volume_type) { + switch (input_volume_type) { + case InputVolumeType::kApplied: + return "WebRTC.Audio.Apm.AppliedInputVolume."; + case InputVolumeType::kRecommended: + return "WebRTC.Audio.Apm.RecommendedInputVolume."; + } +} + +metrics::Histogram* CreateVolumeHistogram(InputVolumeType input_volume_type) { + char buffer[64]; + rtc::SimpleStringBuilder builder(buffer); + builder << MetricNamePrefix(input_volume_type) << "OnChange"; + return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), + /*min=*/1, + /*max=*/kMaxInputVolume, + /*bucket_count=*/50); +} + +metrics::Histogram* CreateRateHistogram(InputVolumeType input_volume_type, + absl::string_view name) { + char buffer[64]; + rtc::SimpleStringBuilder builder(buffer); + builder << 
MetricNamePrefix(input_volume_type) << name; + return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), + /*min=*/1, + /*max=*/kFramesIn60Seconds, + /*bucket_count=*/50); +} + +metrics::Histogram* CreateAverageHistogram(InputVolumeType input_volume_type, + absl::string_view name) { + char buffer[64]; + rtc::SimpleStringBuilder builder(buffer); + builder << MetricNamePrefix(input_volume_type) << name; + return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), + /*min=*/1, + /*max=*/kMaxUpdate, + /*bucket_count=*/50); +} + +} // namespace + +InputVolumeStatsReporter::InputVolumeStatsReporter(InputVolumeType type) + : histograms_( + {.on_volume_change = CreateVolumeHistogram(type), + .decrease_rate = CreateRateHistogram(type, "DecreaseRate"), + .decrease_average = CreateAverageHistogram(type, "DecreaseAverage"), + .increase_rate = CreateRateHistogram(type, "IncreaseRate"), + .increase_average = CreateAverageHistogram(type, "IncreaseAverage"), + .update_rate = CreateRateHistogram(type, "UpdateRate"), + .update_average = CreateAverageHistogram(type, "UpdateAverage")}), + cannot_log_stats_(!histograms_.AllPointersSet()) { + if (cannot_log_stats_) { + RTC_LOG(LS_WARNING) << "Will not log any `" << MetricNamePrefix(type) + << "*` histogram stats."; + } +} + +InputVolumeStatsReporter::~InputVolumeStatsReporter() = default; + +void InputVolumeStatsReporter::UpdateStatistics(int input_volume) { + if (cannot_log_stats_) { + // Since the stats cannot be logged, do not bother updating them. + return; + } + + RTC_DCHECK_GE(input_volume, kMinInputVolume); + RTC_DCHECK_LE(input_volume, kMaxInputVolume); + if (previous_input_volume_.has_value() && + input_volume != previous_input_volume_.value()) { + // Update stats when the input volume changes. + metrics::HistogramAdd(histograms_.on_volume_change, input_volume); + // Update stats that are periodically logged. 
+ const int volume_change = input_volume - previous_input_volume_.value(); + if (volume_change < 0) { + ++volume_update_stats_.num_decreases; + volume_update_stats_.sum_decreases -= volume_change; + } else { + ++volume_update_stats_.num_increases; + volume_update_stats_.sum_increases += volume_change; + } + } + // Periodically log input volume change metrics. + if (++log_volume_update_stats_counter_ >= kFramesIn60Seconds) { + LogVolumeUpdateStats(); + volume_update_stats_ = {}; + log_volume_update_stats_counter_ = 0; + } + previous_input_volume_ = input_volume; +} + +void InputVolumeStatsReporter::LogVolumeUpdateStats() const { + // Decrease rate and average. + metrics::HistogramAdd(histograms_.decrease_rate, + volume_update_stats_.num_decreases); + if (volume_update_stats_.num_decreases > 0) { + int average_decrease = ComputeAverageUpdate( + volume_update_stats_.sum_decreases, volume_update_stats_.num_decreases); + metrics::HistogramAdd(histograms_.decrease_average, average_decrease); + } + // Increase rate and average. + metrics::HistogramAdd(histograms_.increase_rate, + volume_update_stats_.num_increases); + if (volume_update_stats_.num_increases > 0) { + int average_increase = ComputeAverageUpdate( + volume_update_stats_.sum_increases, volume_update_stats_.num_increases); + metrics::HistogramAdd(histograms_.increase_average, average_increase); + } + // Update rate and average. 
+ int num_updates = + volume_update_stats_.num_decreases + volume_update_stats_.num_increases; + metrics::HistogramAdd(histograms_.update_rate, num_updates); + if (num_updates > 0) { + int average_update = ComputeAverageUpdate( + volume_update_stats_.sum_decreases + volume_update_stats_.sum_increases, + num_updates); + metrics::HistogramAdd(histograms_.update_average, average_update); + } +} + +void UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget(int volume) { + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.Apm.RecommendedInputVolume.OnChangeToMatchTarget", volume, + 1, kMaxInputVolume, 50); +} + +} // namespace webrtc diff --git a/modules/audio_processing/agc2/input_volume_stats_reporter.h b/modules/audio_processing/agc2/input_volume_stats_reporter.h new file mode 100644 index 0000000000..31b110031c --- /dev/null +++ b/modules/audio_processing/agc2/input_volume_stats_reporter.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ + +#include "absl/types/optional.h" +#include "rtc_base/gtest_prod_util.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +// Input volume statistics calculator. Computes aggregate stats based on the +// framewise input volume observed by `UpdateStatistics()`. Periodically logs +// the statistics into a histogram. 
+class InputVolumeStatsReporter { + public: + enum class InputVolumeType { + kApplied = 0, + kRecommended = 1, + }; + + explicit InputVolumeStatsReporter(InputVolumeType input_volume_type); + InputVolumeStatsReporter(const InputVolumeStatsReporter&) = delete; + InputVolumeStatsReporter operator=(const InputVolumeStatsReporter&) = delete; + ~InputVolumeStatsReporter(); + + // Updates the stats based on `input_volume`. Periodically logs the stats into + // a histogram. + void UpdateStatistics(int input_volume); + + private: + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsForEmptyStats); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterNoVolumeChange); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterVolumeIncrease); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterVolumeDecrease); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterReset); + + // Stores input volume update stats to enable calculation of update rate and + // average update separately for volume increases and decreases. + struct VolumeUpdateStats { + int num_decreases = 0; + int num_increases = 0; + int sum_decreases = 0; + int sum_increases = 0; + } volume_update_stats_; + + // Returns a copy of the stored statistics. Use only for testing. + VolumeUpdateStats volume_update_stats() const { return volume_update_stats_; } + + // Computes aggregate stat and logs them into a histogram. + void LogVolumeUpdateStats() const; + + // Histograms. 
+ struct Histograms { + metrics::Histogram* const on_volume_change; + metrics::Histogram* const decrease_rate; + metrics::Histogram* const decrease_average; + metrics::Histogram* const increase_rate; + metrics::Histogram* const increase_average; + metrics::Histogram* const update_rate; + metrics::Histogram* const update_average; + bool AllPointersSet() const { + return !!on_volume_change && !!decrease_rate && !!decrease_average && + !!increase_rate && !!increase_average && !!update_rate && + !!update_average; + } + } histograms_; + + // True if the stats cannot be logged. + const bool cannot_log_stats_; + + int log_volume_update_stats_counter_ = 0; + absl::optional<int> previous_input_volume_ = absl::nullopt; +}; + +// Updates the histogram that keeps track of recommended input volume changes +// required in order to match the target level in the input volume adaptation +// process. +void UpdateHistogramOnRecommendedInputVolumeChangeToMatchTarget(int volume); + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ diff --git a/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc b/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc new file mode 100644 index 0000000000..e762c1fb59 --- /dev/null +++ b/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc @@ -0,0 +1,246 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" + +#include "absl/strings/string_view.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/metrics.h" +#include "test/gmock.h" + +namespace webrtc { +namespace { + +using InputVolumeType = InputVolumeStatsReporter::InputVolumeType; + +constexpr int kFramesIn60Seconds = 6000; + +constexpr absl::string_view kLabelPrefix = "WebRTC.Audio.Apm."; + +class InputVolumeStatsReporterTest + : public ::testing::TestWithParam<InputVolumeType> { + public: + InputVolumeStatsReporterTest() { metrics::Reset(); } + + protected: + InputVolumeType InputVolumeType() const { return GetParam(); } + std::string VolumeLabel() const { + return (rtc::StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "OnChange") + .str(); + } + std::string DecreaseRateLabel() const { + return (rtc::StringBuilder(kLabelPrefix) + << VolumeTypeLabel() << "DecreaseRate") + .str(); + } + std::string DecreaseAverageLabel() const { + return (rtc::StringBuilder(kLabelPrefix) + << VolumeTypeLabel() << "DecreaseAverage") + .str(); + } + std::string IncreaseRateLabel() const { + return (rtc::StringBuilder(kLabelPrefix) + << VolumeTypeLabel() << "IncreaseRate") + .str(); + } + std::string IncreaseAverageLabel() const { + return (rtc::StringBuilder(kLabelPrefix) + << VolumeTypeLabel() << "IncreaseAverage") + .str(); + } + std::string UpdateRateLabel() const { + return (rtc::StringBuilder(kLabelPrefix) + << VolumeTypeLabel() << "UpdateRate") + .str(); + } + std::string UpdateAverageLabel() const { + return (rtc::StringBuilder(kLabelPrefix) + << VolumeTypeLabel() << "UpdateAverage") + .str(); + } + + private: + absl::string_view VolumeTypeLabel() const { + switch (InputVolumeType()) { + case InputVolumeType::kApplied: + return "AppliedInputVolume."; + case InputVolumeType::kRecommended: + return "RecommendedInputVolume."; + } + } +}; + +TEST_P(InputVolumeStatsReporterTest, CheckVolumeOnChangeIsEmpty) { + InputVolumeStatsReporter 
stats_reporter(InputVolumeType()); + stats_reporter.UpdateStatistics(10); + EXPECT_METRIC_THAT(metrics::Samples(VolumeLabel()), ::testing::ElementsAre()); +} + +TEST_P(InputVolumeStatsReporterTest, CheckRateAverageStatsEmpty) { + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + constexpr int kInputVolume = 10; + stats_reporter.UpdateStatistics(kInputVolume); + // Update almost until the periodic logging and reset. + for (int i = 0; i < kFramesIn60Seconds - 2; i += 2) { + stats_reporter.UpdateStatistics(kInputVolume + 2); + stats_reporter.UpdateStatistics(kInputVolume); + } + EXPECT_METRIC_THAT(metrics::Samples(UpdateRateLabel()), + ::testing::ElementsAre()); + EXPECT_METRIC_THAT(metrics::Samples(DecreaseRateLabel()), + ::testing::ElementsAre()); + EXPECT_METRIC_THAT(metrics::Samples(IncreaseRateLabel()), + ::testing::ElementsAre()); + EXPECT_METRIC_THAT(metrics::Samples(UpdateAverageLabel()), + ::testing::ElementsAre()); + EXPECT_METRIC_THAT(metrics::Samples(DecreaseAverageLabel()), + ::testing::ElementsAre()); + EXPECT_METRIC_THAT(metrics::Samples(IncreaseAverageLabel()), + ::testing::ElementsAre()); +} + +TEST_P(InputVolumeStatsReporterTest, CheckSamples) { + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + + constexpr int kInputVolume1 = 10; + stats_reporter.UpdateStatistics(kInputVolume1); + // Update until periodic logging. + constexpr int kInputVolume2 = 12; + for (int i = 0; i < kFramesIn60Seconds; i += 2) { + stats_reporter.UpdateStatistics(kInputVolume2); + stats_reporter.UpdateStatistics(kInputVolume1); + } + // Update until periodic logging. + constexpr int kInputVolume3 = 13; + for (int i = 0; i < kFramesIn60Seconds; i += 2) { + stats_reporter.UpdateStatistics(kInputVolume3); + stats_reporter.UpdateStatistics(kInputVolume1); + } + + // Check volume changes stats. 
+ EXPECT_METRIC_THAT( + metrics::Samples(VolumeLabel()), + ::testing::ElementsAre( + ::testing::Pair(kInputVolume1, kFramesIn60Seconds), + ::testing::Pair(kInputVolume2, kFramesIn60Seconds / 2), + ::testing::Pair(kInputVolume3, kFramesIn60Seconds / 2))); + + // Check volume change rate stats. + EXPECT_METRIC_THAT( + metrics::Samples(UpdateRateLabel()), + ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds - 1, 1), + ::testing::Pair(kFramesIn60Seconds, 1))); + EXPECT_METRIC_THAT( + metrics::Samples(DecreaseRateLabel()), + ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds / 2 - 1, 1), + ::testing::Pair(kFramesIn60Seconds / 2, 1))); + EXPECT_METRIC_THAT( + metrics::Samples(IncreaseRateLabel()), + ::testing::ElementsAre(::testing::Pair(kFramesIn60Seconds / 2, 2))); + + // Check volume change average stats. + EXPECT_METRIC_THAT( + metrics::Samples(UpdateAverageLabel()), + ::testing::ElementsAre(::testing::Pair(2, 1), ::testing::Pair(3, 1))); + EXPECT_METRIC_THAT( + metrics::Samples(DecreaseAverageLabel()), + ::testing::ElementsAre(::testing::Pair(2, 1), ::testing::Pair(3, 1))); + EXPECT_METRIC_THAT( + metrics::Samples(IncreaseAverageLabel()), + ::testing::ElementsAre(::testing::Pair(2, 1), ::testing::Pair(3, 1))); +} +} // namespace + +TEST_P(InputVolumeStatsReporterTest, CheckVolumeUpdateStatsForEmptyStats) { + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + const auto& update_stats = stats_reporter.volume_update_stats(); + EXPECT_EQ(update_stats.num_decreases, 0); + EXPECT_EQ(update_stats.sum_decreases, 0); + EXPECT_EQ(update_stats.num_increases, 0); + EXPECT_EQ(update_stats.sum_increases, 0); +} + +TEST_P(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterNoVolumeChange) { + constexpr int kInputVolume = 10; + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + stats_reporter.UpdateStatistics(kInputVolume); + stats_reporter.UpdateStatistics(kInputVolume); + stats_reporter.UpdateStatistics(kInputVolume); + const auto& 
update_stats = stats_reporter.volume_update_stats(); + EXPECT_EQ(update_stats.num_decreases, 0); + EXPECT_EQ(update_stats.sum_decreases, 0); + EXPECT_EQ(update_stats.num_increases, 0); + EXPECT_EQ(update_stats.sum_increases, 0); +} + +TEST_P(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterVolumeIncrease) { + constexpr int kInputVolume = 10; + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + stats_reporter.UpdateStatistics(kInputVolume); + stats_reporter.UpdateStatistics(kInputVolume + 4); + stats_reporter.UpdateStatistics(kInputVolume + 5); + const auto& update_stats = stats_reporter.volume_update_stats(); + EXPECT_EQ(update_stats.num_decreases, 0); + EXPECT_EQ(update_stats.sum_decreases, 0); + EXPECT_EQ(update_stats.num_increases, 2); + EXPECT_EQ(update_stats.sum_increases, 5); +} + +TEST_P(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterVolumeDecrease) { + constexpr int kInputVolume = 10; + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + stats_reporter.UpdateStatistics(kInputVolume); + stats_reporter.UpdateStatistics(kInputVolume - 4); + stats_reporter.UpdateStatistics(kInputVolume - 5); + const auto& stats_update = stats_reporter.volume_update_stats(); + EXPECT_EQ(stats_update.num_decreases, 2); + EXPECT_EQ(stats_update.sum_decreases, 5); + EXPECT_EQ(stats_update.num_increases, 0); + EXPECT_EQ(stats_update.sum_increases, 0); +} + +TEST_P(InputVolumeStatsReporterTest, CheckVolumeUpdateStatsAfterReset) { + InputVolumeStatsReporter stats_reporter(InputVolumeType()); + constexpr int kInputVolume = 10; + stats_reporter.UpdateStatistics(kInputVolume); + // Update until the periodic reset. 
+ for (int i = 0; i < kFramesIn60Seconds - 2; i += 2) { + stats_reporter.UpdateStatistics(kInputVolume + 2); + stats_reporter.UpdateStatistics(kInputVolume); + } + const auto& stats_before_reset = stats_reporter.volume_update_stats(); + EXPECT_EQ(stats_before_reset.num_decreases, kFramesIn60Seconds / 2 - 1); + EXPECT_EQ(stats_before_reset.sum_decreases, kFramesIn60Seconds - 2); + EXPECT_EQ(stats_before_reset.num_increases, kFramesIn60Seconds / 2 - 1); + EXPECT_EQ(stats_before_reset.sum_increases, kFramesIn60Seconds - 2); + stats_reporter.UpdateStatistics(kInputVolume + 2); + const auto& stats_during_reset = stats_reporter.volume_update_stats(); + EXPECT_EQ(stats_during_reset.num_decreases, 0); + EXPECT_EQ(stats_during_reset.sum_decreases, 0); + EXPECT_EQ(stats_during_reset.num_increases, 0); + EXPECT_EQ(stats_during_reset.sum_increases, 0); + stats_reporter.UpdateStatistics(kInputVolume); + stats_reporter.UpdateStatistics(kInputVolume + 3); + const auto& stats_after_reset = stats_reporter.volume_update_stats(); + EXPECT_EQ(stats_after_reset.num_decreases, 1); + EXPECT_EQ(stats_after_reset.sum_decreases, 2); + EXPECT_EQ(stats_after_reset.num_increases, 1); + EXPECT_EQ(stats_after_reset.sum_increases, 3); +} + +INSTANTIATE_TEST_SUITE_P(, + InputVolumeStatsReporterTest, + ::testing::Values(InputVolumeType::kApplied, + InputVolumeType::kRecommended)); + +} // namespace webrtc diff --git a/modules/audio_processing/agc2/noise_level_estimator.cc b/modules/audio_processing/agc2/noise_level_estimator.cc index 9fb1c24b65..691513b509 100644 --- a/modules/audio_processing/agc2/noise_level_estimator.cc +++ b/modules/audio_processing/agc2/noise_level_estimator.cc @@ -72,6 +72,7 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { "noise levels."); NoiseFloorEstimator(ApmDataDumper* data_dumper) : data_dumper_(data_dumper) { + RTC_DCHECK(data_dumper_); // Initially assume that 48 kHz will be used. 
`Analyze()` will detect the // used sample rate and call `Initialize()` again if needed. Initialize(/*sample_rate_hz=*/48000); @@ -91,8 +92,9 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { const float frame_energy = FrameEnergy(frame); if (frame_energy <= min_noise_energy_) { // Ignore frames when muted or below the minimum measurable energy. - data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", - noise_energy_); + if (data_dumper_) + data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", + noise_energy_); return EnergyToDbfs(noise_energy_, static_cast<int>(frame.samples_per_channel())); } @@ -104,8 +106,9 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { preliminary_noise_energy_ = frame_energy; preliminary_noise_energy_set_ = true; } - data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", - preliminary_noise_energy_); + if (data_dumper_) + data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", + preliminary_noise_energy_); if (counter_ == 0) { // Full period observed.
@@ -128,8 +131,13 @@ class NoiseFloorEstimator : public NoiseLevelEstimator { noise_energy_ = std::min(noise_energy_, preliminary_noise_energy_); counter_--; } - return EnergyToDbfs(noise_energy_, - static_cast<int>(frame.samples_per_channel())); + + float noise_rms_dbfs = EnergyToDbfs( + noise_energy_, static_cast<int>(frame.samples_per_channel())); + if (data_dumper_) + data_dumper_->DumpRaw("agc2_noise_rms_dbfs", noise_rms_dbfs); + + return noise_rms_dbfs; } private: diff --git a/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc b/modules/audio_processing/agc2/speech_level_estimator.cc similarity index 79% rename from modules/audio_processing/agc2/adaptive_mode_level_estimator.cc rename to modules/audio_processing/agc2/speech_level_estimator.cc index fe021fec05..7bf3252116 100644 --- a/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc +++ b/modules/audio_processing/agc2/speech_level_estimator.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree.
*/ -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator.h" +#include "modules/audio_processing/agc2/speech_level_estimator.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" @@ -32,34 +32,37 @@ float GetInitialSpeechLevelEstimateDbfs( } // namespace -bool AdaptiveModeLevelEstimator::LevelEstimatorState::operator==( - const AdaptiveModeLevelEstimator::LevelEstimatorState& b) const { +bool SpeechLevelEstimator::LevelEstimatorState::operator==( + const SpeechLevelEstimator::LevelEstimatorState& b) const { return time_to_confidence_ms == b.time_to_confidence_ms && level_dbfs.numerator == b.level_dbfs.numerator && level_dbfs.denominator == b.level_dbfs.denominator; } -float AdaptiveModeLevelEstimator::LevelEstimatorState::Ratio::GetRatio() const { +float SpeechLevelEstimator::LevelEstimatorState::Ratio::GetRatio() const { RTC_DCHECK_NE(denominator, 0.f); return numerator / denominator; } -AdaptiveModeLevelEstimator::AdaptiveModeLevelEstimator( +SpeechLevelEstimator::SpeechLevelEstimator( ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config) + const AudioProcessing::Config::GainController2::AdaptiveDigital& config, + int adjacent_speech_frames_threshold) : apm_data_dumper_(apm_data_dumper), initial_speech_level_dbfs_(GetInitialSpeechLevelEstimateDbfs(config)), - adjacent_speech_frames_threshold_( - config.adjacent_speech_frames_threshold), - level_dbfs_(initial_speech_level_dbfs_) { + adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold), + level_dbfs_(initial_speech_level_dbfs_), + // TODO(bugs.webrtc.org/7494): Remove init below when AGC2 input volume + // controller temporal dependency removed. 
+ is_confident_(false) { RTC_DCHECK(apm_data_dumper_); RTC_DCHECK_GE(adjacent_speech_frames_threshold_, 1); Reset(); } -void AdaptiveModeLevelEstimator::Update(float rms_dbfs, - float peak_dbfs, - float speech_probability) { +void SpeechLevelEstimator::Update(float rms_dbfs, + float peak_dbfs, + float speech_probability) { RTC_DCHECK_GT(rms_dbfs, -150.0f); RTC_DCHECK_LT(rms_dbfs, 50.0f); RTC_DCHECK_GT(peak_dbfs, -150.0f); @@ -110,40 +113,47 @@ void AdaptiveModeLevelEstimator::Update(float rms_dbfs, level_dbfs_ = ClampLevelEstimateDbfs(level_dbfs); } } + UpdateIsConfident(); DumpDebugData(); } -bool AdaptiveModeLevelEstimator::IsConfident() const { +void SpeechLevelEstimator::UpdateIsConfident() { if (adjacent_speech_frames_threshold_ == 1) { // Ignore `reliable_state_` when a single frame is enough to update the // level estimate (because it is not used). - return preliminary_state_.time_to_confidence_ms == 0; + is_confident_ = preliminary_state_.time_to_confidence_ms == 0; + return; } // Once confident, it remains confident. RTC_DCHECK(reliable_state_.time_to_confidence_ms != 0 || preliminary_state_.time_to_confidence_ms == 0); // During the first long enough speech sequence, `reliable_state_` must be // ignored since `preliminary_state_` is used. 
- return reliable_state_.time_to_confidence_ms == 0 || - (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && - preliminary_state_.time_to_confidence_ms == 0); + is_confident_ = + reliable_state_.time_to_confidence_ms == 0 || + (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && + preliminary_state_.time_to_confidence_ms == 0); } -void AdaptiveModeLevelEstimator::Reset() { +void SpeechLevelEstimator::Reset() { ResetLevelEstimatorState(preliminary_state_); ResetLevelEstimatorState(reliable_state_); level_dbfs_ = initial_speech_level_dbfs_; num_adjacent_speech_frames_ = 0; } -void AdaptiveModeLevelEstimator::ResetLevelEstimatorState( +void SpeechLevelEstimator::ResetLevelEstimatorState( LevelEstimatorState& state) const { state.time_to_confidence_ms = kLevelEstimatorTimeToConfidenceMs; state.level_dbfs.numerator = initial_speech_level_dbfs_; state.level_dbfs.denominator = 1.0f; } -void AdaptiveModeLevelEstimator::DumpDebugData() const { +void SpeechLevelEstimator::DumpDebugData() const { + if (!apm_data_dumper_) + return; + apm_data_dumper_->DumpRaw("agc2_speech_level_dbfs", level_dbfs_); + apm_data_dumper_->DumpRaw("agc2_speech_level_is_confident", is_confident_); apm_data_dumper_->DumpRaw( "agc2_adaptive_level_estimator_num_adjacent_speech_frames", num_adjacent_speech_frames_); diff --git a/modules/audio_processing/agc2/adaptive_mode_level_estimator.h b/modules/audio_processing/agc2/speech_level_estimator.h similarity index 74% rename from modules/audio_processing/agc2/adaptive_mode_level_estimator.h rename to modules/audio_processing/agc2/speech_level_estimator.h index 989c8c3572..4d9f106ba9 100644 --- a/modules/audio_processing/agc2/adaptive_mode_level_estimator.h +++ b/modules/audio_processing/agc2/speech_level_estimator.h @@ -8,36 +8,37 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_H_ -#define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_H_ +#ifndef MODULES_AUDIO_PROCESSING_AGC2_SPEECH_LEVEL_ESTIMATOR_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_SPEECH_LEVEL_ESTIMATOR_H_ #include #include #include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/agc2/vad_wrapper.h" #include "modules/audio_processing/include/audio_processing.h" namespace webrtc { class ApmDataDumper; -// Level estimator for the digital adaptive gain controller. -class AdaptiveModeLevelEstimator { +// Active speech level estimator based on the analysis of the following +// framewise properties: RMS level (dBFS), peak level (dBFS), speech +// probability. +class SpeechLevelEstimator { public: - AdaptiveModeLevelEstimator( + SpeechLevelEstimator( ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config); - AdaptiveModeLevelEstimator(const AdaptiveModeLevelEstimator&) = delete; - AdaptiveModeLevelEstimator& operator=(const AdaptiveModeLevelEstimator&) = - delete; + const AudioProcessing::Config::GainController2::AdaptiveDigital& config, + int adjacent_speech_frames_threshold); + SpeechLevelEstimator(const SpeechLevelEstimator&) = delete; + SpeechLevelEstimator& operator=(const SpeechLevelEstimator&) = delete; // Updates the level estimation. void Update(float rms_dbfs, float peak_dbfs, float speech_probability); // Returns the estimated speech plus noise level. float level_dbfs() const { return level_dbfs_; } // Returns true if the estimator is confident on its current estimate. 
- bool IsConfident() const; + bool is_confident() const { return is_confident_; } void Reset(); @@ -58,6 +59,8 @@ class AdaptiveModeLevelEstimator { }; static_assert(std::is_trivially_copyable<LevelEstimatorState>::value, ""); + void UpdateIsConfident(); + void ResetLevelEstimatorState(LevelEstimatorState& state) const; void DumpDebugData() const; @@ -69,9 +72,10 @@ class AdaptiveModeLevelEstimator { LevelEstimatorState preliminary_state_; LevelEstimatorState reliable_state_; float level_dbfs_; + bool is_confident_; int num_adjacent_speech_frames_; }; } // namespace webrtc -#endif // MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_H_ +#endif // MODULES_AUDIO_PROCESSING_AGC2_SPEECH_LEVEL_ESTIMATOR_H_ diff --git a/modules/audio_processing/agc2/adaptive_mode_level_estimator_unittest.cc b/modules/audio_processing/agc2/speech_level_estimator_unittest.cc similarity index 84% rename from modules/audio_processing/agc2/adaptive_mode_level_estimator_unittest.cc rename to modules/audio_processing/agc2/speech_level_estimator_unittest.cc index 684fca188a..e1c5f85434 100644 --- a/modules/audio_processing/agc2/adaptive_mode_level_estimator_unittest.cc +++ b/modules/audio_processing/agc2/speech_level_estimator_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree.
*/ -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator.h" +#include "modules/audio_processing/agc2/speech_level_estimator.h" #include @@ -36,19 +36,12 @@ void RunOnConstantLevel(int num_iterations, float rms_dbfs, float peak_dbfs, float speech_probability, - AdaptiveModeLevelEstimator& level_estimator) { + SpeechLevelEstimator& level_estimator) { for (int i = 0; i < num_iterations; ++i) { level_estimator.Update(rms_dbfs, peak_dbfs, speech_probability); } } -constexpr AdaptiveDigitalConfig GetAdaptiveDigitalConfig( - int adjacent_speech_frames_threshold) { - AdaptiveDigitalConfig config; - config.adjacent_speech_frames_threshold = adjacent_speech_frames_threshold; - return config; -} - constexpr float kNoSpeechProbability = 0.0f; constexpr float kLowSpeechProbability = kVadConfidenceThreshold / 2.0f; constexpr float kMaxSpeechProbability = 1.0f; @@ -57,9 +50,10 @@ constexpr float kMaxSpeechProbability = 1.0f; struct TestLevelEstimator { explicit TestLevelEstimator(int adjacent_speech_frames_threshold) : data_dumper(0), - estimator(std::make_unique( + estimator(std::make_unique( &data_dumper, - GetAdaptiveDigitalConfig(adjacent_speech_frames_threshold))), + AdaptiveDigitalConfig{}, + adjacent_speech_frames_threshold)), initial_speech_level_dbfs(estimator->level_dbfs()), level_rms_dbfs(initial_speech_level_dbfs / 2.0f), level_peak_dbfs(initial_speech_level_dbfs / 3.0f) { @@ -70,14 +64,14 @@ struct TestLevelEstimator { "level is wide enough for the tests"; } ApmDataDumper data_dumper; - std::unique_ptr estimator; + std::unique_ptr estimator; const float initial_speech_level_dbfs; const float level_rms_dbfs; const float level_peak_dbfs; }; // Checks that the level estimator converges to a constant input speech level. 
-TEST(GainController2AdaptiveModeLevelEstimator, LevelStabilizes) { +TEST(GainController2SpeechLevelEstimator, LevelStabilizes) { TestLevelEstimator level_estimator(/*adjacent_speech_frames_threshold=*/1); RunOnConstantLevel(/*num_iterations=*/kNumFramesToConfidence, level_estimator.level_rms_dbfs, @@ -93,30 +87,29 @@ TEST(GainController2AdaptiveModeLevelEstimator, LevelStabilizes) { // Checks that the level controller does not become confident when too few // speech frames are observed. -TEST(GainController2AdaptiveModeLevelEstimator, IsNotConfident) { +TEST(GainController2SpeechLevelEstimator, IsNotConfident) { TestLevelEstimator level_estimator(/*adjacent_speech_frames_threshold=*/1); RunOnConstantLevel(/*num_iterations=*/kNumFramesToConfidence / 2, level_estimator.level_rms_dbfs, level_estimator.level_peak_dbfs, kMaxSpeechProbability, *level_estimator.estimator); - EXPECT_FALSE(level_estimator.estimator->IsConfident()); + EXPECT_FALSE(level_estimator.estimator->is_confident()); } // Checks that the level controller becomes confident when enough speech frames // are observed. -TEST(GainController2AdaptiveModeLevelEstimator, IsConfident) { +TEST(GainController2SpeechLevelEstimator, IsConfident) { TestLevelEstimator level_estimator(/*adjacent_speech_frames_threshold=*/1); RunOnConstantLevel(/*num_iterations=*/kNumFramesToConfidence, level_estimator.level_rms_dbfs, level_estimator.level_peak_dbfs, kMaxSpeechProbability, *level_estimator.estimator); - EXPECT_TRUE(level_estimator.estimator->IsConfident()); + EXPECT_TRUE(level_estimator.estimator->is_confident()); } // Checks that the estimated level is not affected by the level of non-speech // frames. -TEST(GainController2AdaptiveModeLevelEstimator, - EstimatorIgnoresNonSpeechFrames) { +TEST(GainController2SpeechLevelEstimator, EstimatorIgnoresNonSpeechFrames) { TestLevelEstimator level_estimator(/*adjacent_speech_frames_threshold=*/1); // Simulate speech. 
RunOnConstantLevel(/*num_iterations=*/kNumFramesToConfidence, @@ -134,8 +127,7 @@ TEST(GainController2AdaptiveModeLevelEstimator, } // Checks the convergence speed of the estimator before it becomes confident. -TEST(GainController2AdaptiveModeLevelEstimator, - ConvergenceSpeedBeforeConfidence) { +TEST(GainController2SpeechLevelEstimator, ConvergenceSpeedBeforeConfidence) { TestLevelEstimator level_estimator(/*adjacent_speech_frames_threshold=*/1); RunOnConstantLevel(/*num_iterations=*/kNumFramesToConfidence, level_estimator.level_rms_dbfs, @@ -147,8 +139,7 @@ TEST(GainController2AdaptiveModeLevelEstimator, } // Checks the convergence speed of the estimator after it becomes confident. -TEST(GainController2AdaptiveModeLevelEstimator, - ConvergenceSpeedAfterConfidence) { +TEST(GainController2SpeechLevelEstimator, ConvergenceSpeedAfterConfidence) { TestLevelEstimator level_estimator(/*adjacent_speech_frames_threshold=*/1); // Reach confidence using the initial level estimate. RunOnConstantLevel( @@ -159,7 +150,7 @@ TEST(GainController2AdaptiveModeLevelEstimator, // No estimate change should occur, but confidence is achieved. ASSERT_FLOAT_EQ(level_estimator.estimator->level_dbfs(), level_estimator.initial_speech_level_dbfs); - ASSERT_TRUE(level_estimator.estimator->IsConfident()); + ASSERT_TRUE(level_estimator.estimator->is_confident()); // After confidence. constexpr float kConvergenceTimeAfterConfidenceNumFrames = 600; // 6 seconds. 
static_assert( @@ -173,14 +164,13 @@ TEST(GainController2AdaptiveModeLevelEstimator, kConvergenceSpeedTestsLevelTolerance); } -class AdaptiveModeLevelEstimatorParametrization +class SpeechLevelEstimatorParametrization : public ::testing::TestWithParam { protected: int adjacent_speech_frames_threshold() const { return GetParam(); } }; -TEST_P(AdaptiveModeLevelEstimatorParametrization, - DoNotAdaptToShortSpeechSegments) { +TEST_P(SpeechLevelEstimatorParametrization, DoNotAdaptToShortSpeechSegments) { TestLevelEstimator level_estimator(adjacent_speech_frames_threshold()); const float initial_level = level_estimator.estimator->level_dbfs(); ASSERT_LT(initial_level, level_estimator.level_peak_dbfs); @@ -197,7 +187,7 @@ TEST_P(AdaptiveModeLevelEstimatorParametrization, EXPECT_EQ(initial_level, level_estimator.estimator->level_dbfs()); } -TEST_P(AdaptiveModeLevelEstimatorParametrization, AdaptToEnoughSpeechSegments) { +TEST_P(SpeechLevelEstimatorParametrization, AdaptToEnoughSpeechSegments) { TestLevelEstimator level_estimator(adjacent_speech_frames_threshold()); const float initial_level = level_estimator.estimator->level_dbfs(); ASSERT_LT(initial_level, level_estimator.level_peak_dbfs); @@ -210,7 +200,7 @@ TEST_P(AdaptiveModeLevelEstimatorParametrization, AdaptToEnoughSpeechSegments) { } INSTANTIATE_TEST_SUITE_P(GainController2, - AdaptiveModeLevelEstimatorParametrization, + SpeechLevelEstimatorParametrization, ::testing::Values(1, 9, 17)); } // namespace diff --git a/modules/audio_processing/agc2/vad_wrapper.cc b/modules/audio_processing/agc2/vad_wrapper.cc index 91448f8d86..af6325dea7 100644 --- a/modules/audio_processing/agc2/vad_wrapper.cc +++ b/modules/audio_processing/agc2/vad_wrapper.cc @@ -52,6 +52,13 @@ class MonoVadImpl : public VoiceActivityDetectorWrapper::MonoVad { } // namespace +VoiceActivityDetectorWrapper::VoiceActivityDetectorWrapper( + const AvailableCpuFeatures& cpu_features, + int sample_rate_hz) + : VoiceActivityDetectorWrapper(kVadResetPeriodMs, + 
cpu_features, + sample_rate_hz) {} + VoiceActivityDetectorWrapper::VoiceActivityDetectorWrapper( int vad_reset_period_ms, const AvailableCpuFeatures& cpu_features, diff --git a/modules/audio_processing/agc2/vad_wrapper.h b/modules/audio_processing/agc2/vad_wrapper.h index 6df0ead271..459c471630 100644 --- a/modules/audio_processing/agc2/vad_wrapper.h +++ b/modules/audio_processing/agc2/vad_wrapper.h @@ -40,6 +40,10 @@ class VoiceActivityDetectorWrapper { virtual float Analyze(rtc::ArrayView frame) = 0; }; + // Ctor. Uses `cpu_features` to instantiate the default VAD. + VoiceActivityDetectorWrapper(const AvailableCpuFeatures& cpu_features, + int sample_rate_hz); + // Ctor. `vad_reset_period_ms` indicates the period in milliseconds to call // `MonoVad::Reset()`; it must be equal to or greater than the duration of two // frames. Uses `cpu_features` to instantiate the default VAD. diff --git a/modules/audio_processing/audio_buffer.h b/modules/audio_processing/audio_buffer.h index d866b8bce5..b9ea3000a2 100644 --- a/modules/audio_processing/audio_buffer.h +++ b/modules/audio_processing/audio_buffer.h @@ -32,7 +32,7 @@ enum Band { kBand0To8kHz = 0, kBand8To16kHz = 1, kBand16To24kHz = 2 }; class AudioBuffer { public: static const int kSplitBandSize = 160; - static const size_t kMaxSampleRate = 384000; + static const int kMaxSampleRate = 384000; AudioBuffer(size_t input_rate, size_t input_num_channels, size_t buffer_rate, diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc index 4faa929d84..df31dc0c4a 100644 --- a/modules/audio_processing/audio_processing_impl.cc +++ b/modules/audio_processing/audio_processing_impl.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include #include @@ -30,6 +31,7 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "modules/audio_processing/optionally_built_submodule_creators.h" #include "rtc_base/checks.h" +#include 
"rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -67,29 +69,6 @@ bool UseSetupSpecificDefaultAec3Congfig() { "WebRTC-Aec3SetupSpecificDefaultConfigDefaultsKillSwitch"); } -// If the "WebRTC-Audio-TransientSuppressorVadMode" field trial is unspecified, -// returns `TransientSuppressor::VadMode::kDefault`, otherwise parses the field -// trial and returns the specified mode: -// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-Default returns `kDefault`; -// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-RnnVad returns `kRnnVad`; -// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-NoVad returns `kNoVad`. -TransientSuppressor::VadMode GetTransientSuppressorVadMode() { - constexpr char kFieldTrial[] = "WebRTC-Audio-TransientSuppressorVadMode"; - std::string full_name = webrtc::field_trial::FindFullName(kFieldTrial); - if (full_name.empty() || absl::EndsWith(full_name, "-Default")) { - return TransientSuppressor::VadMode::kDefault; - } - if (absl::EndsWith(full_name, "-RnnVad")) { - return TransientSuppressor::VadMode::kRnnVad; - } - if (absl::EndsWith(full_name, "-NoVad")) { - return TransientSuppressor::VadMode::kNoVad; - } - // Fallback to default. - RTC_LOG(LS_WARNING) << "Invalid parameter for " << kFieldTrial; - return TransientSuppressor::VadMode::kDefault; -} - // Identify the native processing rate that best handles a sample rate. int SuitableProcessRate(int minimum_rate, int max_splitting_rate, @@ -143,6 +122,199 @@ void PackRenderAudioBufferForEchoDetector(const AudioBuffer& audio, audio.channels_const()[0] + audio.num_frames()); } +// Options for gracefully handling processing errors. +enum class FormatErrorOutputOption { + kOutputExactCopyOfInput, + kOutputBroadcastCopyOfFirstInputChannel, + kOutputSilence, + kDoNothing +}; + +enum class AudioFormatValidity { + // Format is supported by APM. 
+ kValidAndSupported, + // Format has a reasonable interpretation but is not supported. + kValidButUnsupportedSampleRate, + // The remaining enums values signal that the audio does not have a reasonable + // interpretation and cannot be used. + kInvalidSampleRate, + kInvalidChannelCount +}; + +AudioFormatValidity ValidateAudioFormat(const StreamConfig& config) { + if (config.sample_rate_hz() < 0) + return AudioFormatValidity::kInvalidSampleRate; + if (config.num_channels() == 0) + return AudioFormatValidity::kInvalidChannelCount; + + // Format has a reasonable interpretation, but may still be unsupported. + if (config.sample_rate_hz() < 8000 || + config.sample_rate_hz() > AudioBuffer::kMaxSampleRate) + return AudioFormatValidity::kValidButUnsupportedSampleRate; + + // Format is fully supported. + return AudioFormatValidity::kValidAndSupported; +} + +int AudioFormatValidityToErrorCode(AudioFormatValidity validity) { + switch (validity) { + case AudioFormatValidity::kValidAndSupported: + return AudioProcessing::kNoError; + case AudioFormatValidity::kValidButUnsupportedSampleRate: // fall-through + case AudioFormatValidity::kInvalidSampleRate: + return AudioProcessing::kBadSampleRateError; + case AudioFormatValidity::kInvalidChannelCount: + return AudioProcessing::kBadNumberChannelsError; + } + RTC_DCHECK(false); +} + +// Returns an AudioProcessing::Error together with the best possible option for +// output audio content. 
+std::pair<int, FormatErrorOutputOption> ChooseErrorOutputOption( + const StreamConfig& input_config, + const StreamConfig& output_config) { + AudioFormatValidity input_validity = ValidateAudioFormat(input_config); + AudioFormatValidity output_validity = ValidateAudioFormat(output_config); + + if (input_validity == AudioFormatValidity::kValidAndSupported && + output_validity == AudioFormatValidity::kValidAndSupported && + (output_config.num_channels() == 1 || + output_config.num_channels() == input_config.num_channels())) { + return {AudioProcessing::kNoError, FormatErrorOutputOption::kDoNothing}; + } + + int error_code = AudioFormatValidityToErrorCode(input_validity); + if (error_code == AudioProcessing::kNoError) { + error_code = AudioFormatValidityToErrorCode(output_validity); + } + if (error_code == AudioProcessing::kNoError) { + // The individual formats are valid but there is some error - must be + // channel mismatch. + error_code = AudioProcessing::kBadNumberChannelsError; + } + + FormatErrorOutputOption output_option; + if (output_validity != AudioFormatValidity::kValidAndSupported && + output_validity != AudioFormatValidity::kValidButUnsupportedSampleRate) { + // The output format is uninterpretable: cannot do anything. + output_option = FormatErrorOutputOption::kDoNothing; + } else if (input_validity != AudioFormatValidity::kValidAndSupported && + input_validity != + AudioFormatValidity::kValidButUnsupportedSampleRate) { + // The input format is uninterpretable: cannot use it, must output silence. + output_option = FormatErrorOutputOption::kOutputSilence; + } else if (input_config.sample_rate_hz() != output_config.sample_rate_hz()) { + // Sample rates do not match: Cannot copy input into output, output silence. + // Note: If the sample rates are in a supported range, we could resample. + // However, that would significantly increase complexity of this error + // handling code.
+ output_option = FormatErrorOutputOption::kOutputSilence; + } else if (input_config.num_channels() != output_config.num_channels()) { + // Channel counts do not match: We cannot easily map input channels to + // output channels. + output_option = + FormatErrorOutputOption::kOutputBroadcastCopyOfFirstInputChannel; + } else { + // The formats match exactly. + RTC_DCHECK(input_config == output_config); + output_option = FormatErrorOutputOption::kOutputExactCopyOfInput; + } + return std::make_pair(error_code, output_option); +} + +// Checks if the audio format is supported. If not, the output is populated in a +// best-effort manner and an APM error code is returned. +int HandleUnsupportedAudioFormats(const int16_t* const src, + const StreamConfig& input_config, + const StreamConfig& output_config, + int16_t* const dest) { + RTC_DCHECK(src); + RTC_DCHECK(dest); + + auto [error_code, output_option] = + ChooseErrorOutputOption(input_config, output_config); + if (error_code == AudioProcessing::kNoError) + return AudioProcessing::kNoError; + + const size_t num_output_channels = output_config.num_channels(); + switch (output_option) { + case FormatErrorOutputOption::kOutputSilence: + memset(dest, 0, output_config.num_samples() * sizeof(int16_t)); + break; + case FormatErrorOutputOption::kOutputBroadcastCopyOfFirstInputChannel: + for (size_t i = 0; i < output_config.num_frames(); ++i) { + int16_t sample = src[input_config.num_channels() * i]; + for (size_t ch = 0; ch < num_output_channels; ++ch) { + dest[ch + num_output_channels * i] = sample; + } + } + break; + case FormatErrorOutputOption::kOutputExactCopyOfInput: + memcpy(dest, src, output_config.num_samples() * sizeof(int16_t)); + break; + case FormatErrorOutputOption::kDoNothing: + break; + } + return error_code; +} + +// Checks if the audio format is supported. If not, the output is populated in a +// best-effort manner and an APM error code is returned. 
+int HandleUnsupportedAudioFormats(const float* const* src, + const StreamConfig& input_config, + const StreamConfig& output_config, + float* const* dest) { + RTC_DCHECK(src); + RTC_DCHECK(dest); + for (size_t i = 0; i < input_config.num_channels(); ++i) { + RTC_DCHECK(src[i]); + } + for (size_t i = 0; i < output_config.num_channels(); ++i) { + RTC_DCHECK(dest[i]); + } + + auto [error_code, output_option] = + ChooseErrorOutputOption(input_config, output_config); + if (error_code == AudioProcessing::kNoError) + return AudioProcessing::kNoError; + + const size_t num_output_channels = output_config.num_channels(); + switch (output_option) { + case FormatErrorOutputOption::kOutputSilence: + for (size_t ch = 0; ch < num_output_channels; ++ch) { + memset(dest[ch], 0, output_config.num_frames() * sizeof(float)); + } + break; + case FormatErrorOutputOption::kOutputBroadcastCopyOfFirstInputChannel: + for (size_t ch = 0; ch < num_output_channels; ++ch) { + memcpy(dest[ch], src[0], output_config.num_frames() * sizeof(float)); + } + break; + case FormatErrorOutputOption::kOutputExactCopyOfInput: + for (size_t ch = 0; ch < num_output_channels; ++ch) { + memcpy(dest[ch], src[ch], output_config.num_frames() * sizeof(float)); + } + break; + case FormatErrorOutputOption::kDoNothing: + break; + } + return error_code; +} + +using DownmixMethod = AudioProcessing::Config::Pipeline::DownmixMethod; + +void SetDownmixMethod(AudioBuffer& buffer, DownmixMethod method) { + switch (method) { + case DownmixMethod::kAverageChannels: + buffer.set_downmixing_by_averaging(); + break; + case DownmixMethod::kUseFirstChannel: + buffer.set_downmixing_to_specific_channel(/*channel=*/0); + break; + } +} + constexpr int kUnspecifiedDataDumpInputVolume = -100; } // namespace @@ -150,6 +322,229 @@ constexpr int kUnspecifiedDataDumpInputVolume = -100; // Throughout webrtc, it's assumed that success is represented by zero. 
static_assert(AudioProcessing::kNoError == 0, "kNoError must be zero"); +absl::optional +AudioProcessingImpl::GetGainController2ExperimentParams() { + constexpr char kFieldTrialName[] = "WebRTC-Audio-GainController2"; + + if (!field_trial::IsEnabled(kFieldTrialName)) { + return absl::nullopt; + } + + FieldTrialFlag enabled("Enabled", false); + + // Whether the gain control should switch to AGC2. Enabled by default. + FieldTrialParameter switch_to_agc2("switch_to_agc2", true); + + // AGC2 input volume controller configuration. + constexpr InputVolumeController::Config kDefaultInputVolumeControllerConfig; + FieldTrialConstrained min_input_volume( + "min_input_volume", kDefaultInputVolumeControllerConfig.min_input_volume, + 0, 255); + FieldTrialConstrained clipped_level_min( + "clipped_level_min", + kDefaultInputVolumeControllerConfig.clipped_level_min, 0, 255); + FieldTrialConstrained clipped_level_step( + "clipped_level_step", + kDefaultInputVolumeControllerConfig.clipped_level_step, 0, 255); + FieldTrialConstrained clipped_ratio_threshold( + "clipped_ratio_threshold", + kDefaultInputVolumeControllerConfig.clipped_ratio_threshold, 0, 1); + FieldTrialConstrained clipped_wait_frames( + "clipped_wait_frames", + kDefaultInputVolumeControllerConfig.clipped_wait_frames, 0, + absl::nullopt); + FieldTrialParameter enable_clipping_predictor( + "enable_clipping_predictor", + kDefaultInputVolumeControllerConfig.enable_clipping_predictor); + FieldTrialConstrained target_range_max_dbfs( + "target_range_max_dbfs", + kDefaultInputVolumeControllerConfig.target_range_max_dbfs, -90, 30); + FieldTrialConstrained target_range_min_dbfs( + "target_range_min_dbfs", + kDefaultInputVolumeControllerConfig.target_range_min_dbfs, -90, 30); + FieldTrialConstrained update_input_volume_wait_frames( + "update_input_volume_wait_frames", + kDefaultInputVolumeControllerConfig.update_input_volume_wait_frames, 0, + absl::nullopt); + FieldTrialConstrained speech_probability_threshold( + 
"speech_probability_threshold", + kDefaultInputVolumeControllerConfig.speech_probability_threshold, 0, 1); + FieldTrialConstrained speech_ratio_threshold( + "speech_ratio_threshold", + kDefaultInputVolumeControllerConfig.speech_ratio_threshold, 0, 1); + + // AGC2 adaptive digital controller configuration. + constexpr AudioProcessing::Config::GainController2::AdaptiveDigital + kDefaultAdaptiveDigitalConfig; + FieldTrialConstrained headroom_db( + "headroom_db", kDefaultAdaptiveDigitalConfig.headroom_db, 0, + absl::nullopt); + FieldTrialConstrained max_gain_db( + "max_gain_db", kDefaultAdaptiveDigitalConfig.max_gain_db, 0, + absl::nullopt); + FieldTrialConstrained initial_gain_db( + "initial_gain_db", kDefaultAdaptiveDigitalConfig.initial_gain_db, 0, + absl::nullopt); + FieldTrialConstrained max_gain_change_db_per_second( + "max_gain_change_db_per_second", + kDefaultAdaptiveDigitalConfig.max_gain_change_db_per_second, 0, + absl::nullopt); + FieldTrialConstrained max_output_noise_level_dbfs( + "max_output_noise_level_dbfs", + kDefaultAdaptiveDigitalConfig.max_output_noise_level_dbfs, absl::nullopt, + 0); + + // Transient suppressor. + FieldTrialParameter disallow_transient_suppressor_usage( + "disallow_transient_suppressor_usage", false); + + // Field-trial based override for the input volume controller and adaptive + // digital configs. + ParseFieldTrial( + {&enabled, &switch_to_agc2, &min_input_volume, &clipped_level_min, + &clipped_level_step, &clipped_ratio_threshold, &clipped_wait_frames, + &enable_clipping_predictor, &target_range_max_dbfs, + &target_range_min_dbfs, &update_input_volume_wait_frames, + &speech_probability_threshold, &speech_ratio_threshold, &headroom_db, + &max_gain_db, &initial_gain_db, &max_gain_change_db_per_second, + &max_output_noise_level_dbfs, &disallow_transient_suppressor_usage}, + field_trial::FindFullName(kFieldTrialName)); + // Checked already by `IsEnabled()` before parsing, therefore always true. 
+ RTC_DCHECK(enabled); + + const bool do_not_change_agc_config = !switch_to_agc2.Get(); + if (do_not_change_agc_config && !disallow_transient_suppressor_usage.Get()) { + // Return an unspecifed value since, in this case, both the AGC2 and TS + // configurations won't be adjusted. + return absl::nullopt; + } + using Params = AudioProcessingImpl::GainController2ExperimentParams; + if (do_not_change_agc_config) { + // Return a value that leaves the AGC2 config unchanged and that always + // disables TS. + return Params{.agc2_config = absl::nullopt, + .disallow_transient_suppressor_usage = true}; + } + // Return a value that switches all the gain control to AGC2. + return Params{ + .agc2_config = + Params::Agc2Config{ + .input_volume_controller = + { + .min_input_volume = min_input_volume.Get(), + .clipped_level_min = clipped_level_min.Get(), + .clipped_level_step = clipped_level_step.Get(), + .clipped_ratio_threshold = + static_cast(clipped_ratio_threshold.Get()), + .clipped_wait_frames = clipped_wait_frames.Get(), + .enable_clipping_predictor = + enable_clipping_predictor.Get(), + .target_range_max_dbfs = target_range_max_dbfs.Get(), + .target_range_min_dbfs = target_range_min_dbfs.Get(), + .update_input_volume_wait_frames = + update_input_volume_wait_frames.Get(), + .speech_probability_threshold = static_cast( + speech_probability_threshold.Get()), + .speech_ratio_threshold = + static_cast(speech_ratio_threshold.Get()), + }, + .adaptive_digital_controller = + { + .headroom_db = static_cast(headroom_db.Get()), + .max_gain_db = static_cast(max_gain_db.Get()), + .initial_gain_db = + static_cast(initial_gain_db.Get()), + .max_gain_change_db_per_second = static_cast( + max_gain_change_db_per_second.Get()), + .max_output_noise_level_dbfs = + static_cast(max_output_noise_level_dbfs.Get()), + }}, + .disallow_transient_suppressor_usage = + disallow_transient_suppressor_usage.Get()}; +} + +AudioProcessing::Config AudioProcessingImpl::AdjustConfig( + const 
AudioProcessing::Config& config, + const absl::optional& + experiment_params) { + if (!experiment_params.has_value() || + (!experiment_params->agc2_config.has_value() && + !experiment_params->disallow_transient_suppressor_usage)) { + // When the experiment parameters are unspecified or when the AGC and TS + // configuration are not overridden, return the unmodified configuration. + return config; + } + + AudioProcessing::Config adjusted_config = config; + + // Override the transient suppressor configuration. + if (experiment_params->disallow_transient_suppressor_usage) { + adjusted_config.transient_suppression.enabled = false; + } + + // Override the auto gain control configuration if the AGC1 analog gain + // controller is active and `experiment_params->agc2_config` is specified. + const bool agc1_analog_enabled = + config.gain_controller1.enabled && + (config.gain_controller1.mode == + AudioProcessing::Config::GainController1::kAdaptiveAnalog || + config.gain_controller1.analog_gain_controller.enabled); + if (agc1_analog_enabled && experiment_params->agc2_config.has_value()) { + // Check that the unadjusted AGC config meets the preconditions. 
+ const bool hybrid_agc_config_detected = + config.gain_controller1.enabled && + config.gain_controller1.analog_gain_controller.enabled && + !config.gain_controller1.analog_gain_controller + .enable_digital_adaptive && + config.gain_controller2.enabled && + config.gain_controller2.adaptive_digital.enabled; + const bool full_agc1_config_detected = + config.gain_controller1.enabled && + config.gain_controller1.analog_gain_controller.enabled && + config.gain_controller1.analog_gain_controller + .enable_digital_adaptive && + !config.gain_controller2.enabled; + const bool one_and_only_one_input_volume_controller = + hybrid_agc_config_detected != full_agc1_config_detected; + if (!one_and_only_one_input_volume_controller || + config.gain_controller2.input_volume_controller.enabled) { + RTC_LOG(LS_ERROR) << "Cannot adjust AGC config (precondition failed)"; + if (!one_and_only_one_input_volume_controller) + RTC_LOG(LS_ERROR) + << "One and only one input volume controller must be enabled."; + if (config.gain_controller2.input_volume_controller.enabled) + RTC_LOG(LS_ERROR) + << "The AGC2 input volume controller must be disabled."; + } else { + adjusted_config.gain_controller1.enabled = false; + adjusted_config.gain_controller1.analog_gain_controller.enabled = false; + + adjusted_config.gain_controller2.enabled = true; + adjusted_config.gain_controller2.input_volume_controller.enabled = true; + adjusted_config.gain_controller2.adaptive_digital = + experiment_params->agc2_config->adaptive_digital_controller; + adjusted_config.gain_controller2.adaptive_digital.enabled = true; + } + } + + return adjusted_config; +} + +TransientSuppressor::VadMode AudioProcessingImpl::GetTransientSuppressorVadMode( + const absl::optional& + params) { + if (params.has_value() && params->agc2_config.has_value() && + !params->disallow_transient_suppressor_usage) { + // When the experiment is active, the gain control switches to AGC2 and TS + // can be active, use the RNN VAD to control TS. 
This choice will also + // disable the internal RNN VAD in AGC2. + return TransientSuppressor::VadMode::kRnnVad; + } + // If TS is disabled, the returned value does not matter. If enabled, use the + // default VAD. + return TransientSuppressor::VadMode::kDefault; +} + AudioProcessingImpl::SubmoduleStates::SubmoduleStates( bool capture_post_processor_enabled, bool render_pre_processor_enabled, @@ -267,15 +662,17 @@ AudioProcessingImpl::AudioProcessingImpl( : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), use_setup_specific_default_aec3_config_( UseSetupSpecificDefaultAec3Congfig()), + gain_controller2_experiment_params_(GetGainController2ExperimentParams()), use_denormal_disabler_( !field_trial::IsEnabled("WebRTC-ApmDenormalDisablerKillSwitch")), - transient_suppressor_vad_mode_(GetTransientSuppressorVadMode()), + transient_suppressor_vad_mode_( + GetTransientSuppressorVadMode(gain_controller2_experiment_params_)), capture_runtime_settings_(RuntimeSettingQueueSize()), render_runtime_settings_(RuntimeSettingQueueSize()), capture_runtime_settings_enqueuer_(&capture_runtime_settings_), render_runtime_settings_enqueuer_(&render_runtime_settings_), echo_control_factory_(std::move(echo_control_factory)), - config_(config), + config_(AdjustConfig(config, gain_controller2_experiment_params_)), submodule_states_(!!capture_post_processor, !!render_pre_processor, !!capture_analyzer), @@ -291,7 +688,11 @@ AudioProcessingImpl::AudioProcessingImpl( MinimizeProcessingForUnusedOutput(), field_trial::IsEnabled("WebRTC-TransientSuppressorForcedOff")), capture_(), - capture_nonlocked_() { + capture_nonlocked_(), + applied_input_volume_stats_reporter_( + InputVolumeStatsReporter::InputVolumeType::kApplied), + recommended_input_volume_stats_reporter_( + InputVolumeStatsReporter::InputVolumeType::kRecommended) { RTC_LOG(LS_INFO) << "Injected APM submodules:" "\nEcho control factory: " << !!echo_control_factory_ @@ -301,9 +702,11 @@ 
AudioProcessingImpl::AudioProcessingImpl( << !!submodules_.capture_post_processor << "\nRender pre processor: " << !!submodules_.render_pre_processor; - RTC_LOG(LS_INFO) << "Denormal disabler: " - << (DenormalDisabler::IsSupported() ? "supported" - : "unsupported"); + if (!DenormalDisabler::IsSupported()) { + RTC_LOG(LS_INFO) << "Denormal disabler unsupported"; + } + + RTC_LOG(LS_INFO) << "AudioProcessing: " << config_.ToString(); // Mark Echo Controller enabled if a factory is injected. capture_nonlocked_.echo_controller_enabled = @@ -326,18 +729,23 @@ int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) { // Run in a single-threaded manner during initialization. MutexLock lock_render(&mutex_render_); MutexLock lock_capture(&mutex_capture_); - return InitializeLocked(processing_config); + InitializeLocked(processing_config); + return kNoError; } -int AudioProcessingImpl::MaybeInitializeRender( - const ProcessingConfig& processing_config) { - // Called from both threads. Thread check is therefore not possible. 
+void AudioProcessingImpl::MaybeInitializeRender( + const StreamConfig& input_config, + const StreamConfig& output_config) { + ProcessingConfig processing_config = formats_.api_format; + processing_config.reverse_input_stream() = input_config; + processing_config.reverse_output_stream() = output_config; + if (processing_config == formats_.api_format) { - return kNoError; + return; } MutexLock lock_capture(&mutex_capture_); - return InitializeLocked(processing_config); + InitializeLocked(processing_config); } void AudioProcessingImpl::InitializeLocked() { @@ -377,6 +785,8 @@ void AudioProcessingImpl::InitializeLocked() { formats_.api_format.output_stream().num_channels(), formats_.api_format.output_stream().sample_rate_hz(), formats_.api_format.output_stream().num_channels())); + SetDownmixMethod(*capture_.capture_audio, + config_.pipeline.capture_downmix_method); if (capture_nonlocked_.capture_processing_format.sample_rate_hz() < formats_.api_format.output_stream().sample_rate_hz() && @@ -388,6 +798,8 @@ void AudioProcessingImpl::InitializeLocked() { formats_.api_format.output_stream().num_channels(), formats_.api_format.output_stream().sample_rate_hz(), formats_.api_format.output_stream().num_channels())); + SetDownmixMethod(*capture_.capture_fullband_audio, + config_.pipeline.capture_downmix_method); } else { capture_.capture_fullband_audio.reset(); } @@ -412,25 +824,9 @@ void AudioProcessingImpl::InitializeLocked() { } } -int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { +void AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { UpdateActiveSubmoduleStates(); - for (const auto& stream : config.streams) { - if (stream.num_channels() > 0 && stream.sample_rate_hz() <= 0) { - return kBadSampleRateError; - } - } - - const size_t num_in_channels = config.input_stream().num_channels(); - const size_t num_out_channels = config.output_stream().num_channels(); - - // Need at least one input channel. 
- // Need either one output channel or as many outputs as there are inputs. - if (num_in_channels == 0 || - !(num_out_channels == 1 || num_out_channels == num_in_channels)) { - return kBadNumberChannelsError; - } - formats_.api_format = config; // Choose maximum rate to use for the split filtering. @@ -504,50 +900,59 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) { } InitializeLocked(); - return kNoError; } void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { - RTC_LOG(LS_INFO) << "AudioProcessing::ApplyConfig: " << config.ToString(); - // Run in a single-threaded manner when applying the settings. MutexLock lock_render(&mutex_render_); MutexLock lock_capture(&mutex_capture_); + const auto adjusted_config = + AdjustConfig(config, gain_controller2_experiment_params_); + RTC_LOG(LS_INFO) << "AudioProcessing::ApplyConfig: " + << adjusted_config.ToString(); + const bool pipeline_config_changed = config_.pipeline.multi_channel_render != - config.pipeline.multi_channel_render || + adjusted_config.pipeline.multi_channel_render || config_.pipeline.multi_channel_capture != - config.pipeline.multi_channel_capture || + adjusted_config.pipeline.multi_channel_capture || config_.pipeline.maximum_internal_processing_rate != - config.pipeline.maximum_internal_processing_rate; + adjusted_config.pipeline.maximum_internal_processing_rate || + config_.pipeline.capture_downmix_method != + adjusted_config.pipeline.capture_downmix_method; const bool aec_config_changed = - config_.echo_canceller.enabled != config.echo_canceller.enabled || - config_.echo_canceller.mobile_mode != config.echo_canceller.mobile_mode; + config_.echo_canceller.enabled != + adjusted_config.echo_canceller.enabled || + config_.echo_canceller.mobile_mode != + adjusted_config.echo_canceller.mobile_mode; const bool agc1_config_changed = - config_.gain_controller1 != config.gain_controller1; + config_.gain_controller1 != adjusted_config.gain_controller1; const 
bool agc2_config_changed = - config_.gain_controller2 != config.gain_controller2; + config_.gain_controller2 != adjusted_config.gain_controller2; const bool ns_config_changed = - config_.noise_suppression.enabled != config.noise_suppression.enabled || - config_.noise_suppression.level != config.noise_suppression.level; + config_.noise_suppression.enabled != + adjusted_config.noise_suppression.enabled || + config_.noise_suppression.level != + adjusted_config.noise_suppression.level; const bool ts_config_changed = config_.transient_suppression.enabled != - config.transient_suppression.enabled; + adjusted_config.transient_suppression.enabled; const bool pre_amplifier_config_changed = - config_.pre_amplifier.enabled != config.pre_amplifier.enabled || + config_.pre_amplifier.enabled != adjusted_config.pre_amplifier.enabled || config_.pre_amplifier.fixed_gain_factor != - config.pre_amplifier.fixed_gain_factor; + adjusted_config.pre_amplifier.fixed_gain_factor; const bool gain_adjustment_config_changed = - config_.capture_level_adjustment != config.capture_level_adjustment; + config_.capture_level_adjustment != + adjusted_config.capture_level_adjustment; - config_ = config; + config_ = adjusted_config; if (aec_config_changed) { InitializeEchoController(); @@ -663,6 +1068,10 @@ void AudioProcessingImpl::HandleCaptureOutputUsedSetting( submodules_.noise_suppressor->SetCaptureOutputUsage( capture_.capture_output_used); } + if (submodules_.gain_controller2) { + submodules_.gain_controller2->SetCaptureOutputUsed( + capture_.capture_output_used); + } } void AudioProcessingImpl::SetRuntimeSetting(RuntimeSetting setting) { @@ -713,13 +1122,12 @@ bool AudioProcessingImpl::RuntimeSettingEnqueuer::Enqueue( const bool successful_insert = runtime_settings_.Insert(&setting); if (!successful_insert) { - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.ApmRuntimeSettingCannotEnqueue", 1); RTC_LOG(LS_ERROR) << "Cannot enqueue a new runtime setting."; } return successful_insert; } -int 
AudioProcessingImpl::MaybeInitializeCapture( +void AudioProcessingImpl::MaybeInitializeCapture( const StreamConfig& input_config, const StreamConfig& output_config) { ProcessingConfig processing_config; @@ -748,9 +1156,8 @@ int AudioProcessingImpl::MaybeInitializeCapture( processing_config = formats_.api_format; processing_config.input_stream() = input_config; processing_config.output_stream() = output_config; - RETURN_ON_ERR(InitializeLocked(processing_config)); + InitializeLocked(processing_config); } - return kNoError; } int AudioProcessingImpl::ProcessStream(const float* const* src, @@ -758,14 +1165,12 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, const StreamConfig& output_config, float* const* dest) { TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_StreamConfig"); - if (!src || !dest) { - return kNullPointerError; - } - - RETURN_ON_ERR(MaybeInitializeCapture(input_config, output_config)); + DenormalDisabler denormal_disabler(use_denormal_disabler_); + RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + MaybeInitializeCapture(input_config, output_config); MutexLock lock_capture(&mutex_capture_); - DenormalDisabler denormal_disabler(use_denormal_disabler_); if (aec_dump_) { RecordUnprocessedCaptureStream(src); @@ -838,7 +1243,9 @@ void AudioProcessingImpl::HandleCaptureRuntimeSettings() { // TODO(bugs.chromium.org/9138): Log setting handling by Aec Dump. 
break; case RuntimeSetting::Type::kCaptureCompressionGain: { - if (!submodules_.agc_manager) { + if (!submodules_.agc_manager && + !(submodules_.gain_controller2 && + config_.gain_controller2.input_volume_controller.enabled)) { float value; setting.GetFloat(&value); int int_value = static_cast(value + .5f); @@ -1057,7 +1464,10 @@ int AudioProcessingImpl::ProcessStream(const int16_t* const src, const StreamConfig& output_config, int16_t* const dest) { TRACE_EVENT0("webrtc", "AudioProcessing::ProcessStream_AudioFrame"); - RETURN_ON_ERR(MaybeInitializeCapture(input_config, output_config)); + + RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + MaybeInitializeCapture(input_config, output_config); MutexLock lock_capture(&mutex_capture_); DenormalDisabler denormal_disabler(use_denormal_disabler_); @@ -1137,8 +1547,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } if (capture_.applied_input_volume.has_value()) { - // Log the applied input volume only when available. - input_volume_stats_reporter_.UpdateStatistics( + applied_input_volume_stats_reporter_.UpdateStatistics( *capture_.applied_input_volume); } @@ -1172,6 +1581,16 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { submodules_.agc_manager->AnalyzePreProcess(*capture_buffer); } + if (submodules_.gain_controller2 && + config_.gain_controller2.input_volume_controller.enabled) { + // Expect the volume to be available if the input controller is enabled. 
+ RTC_DCHECK(capture_.applied_input_volume.has_value()); + if (capture_.applied_input_volume.has_value()) { + submodules_.gain_controller2->Analyze(*capture_.applied_input_volume, + *capture_buffer); + } + } + if (submodule_states_.CaptureMultiBandSubModulesActive() && SampleRateSupportsMultiBand( capture_nonlocked_.capture_processing_format.sample_rate_hz())) { @@ -1325,6 +1744,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } if (submodules_.gain_controller2) { + // TODO(bugs.webrtc.org/7494): Let AGC2 detect applied input volume + // changes. submodules_.gain_controller2->Process( voice_probability, capture_.applied_input_volume_changed, capture_buffer); @@ -1368,6 +1789,10 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { stats_reporter_.UpdateStatistics(capture_.stats); UpdateRecommendedInputVolumeLocked(); + if (capture_.recommended_input_volume.has_value()) { + recommended_input_volume_stats_reporter_.UpdateStatistics( + *capture_.recommended_input_volume); + } if (submodules_.capture_levels_adjuster) { submodules_.capture_levels_adjuster->ApplyPostLevelAdjustment( @@ -1411,6 +1836,15 @@ int AudioProcessingImpl::AnalyzeReverseStream( const StreamConfig& reverse_config) { TRACE_EVENT0("webrtc", "AudioProcessing::AnalyzeReverseStream_StreamConfig"); MutexLock lock(&mutex_render_); + DenormalDisabler denormal_disabler(use_denormal_disabler_); + RTC_DCHECK(data); + for (size_t i = 0; i < reverse_config.num_channels(); ++i) { + RTC_DCHECK(data[i]); + } + RETURN_ON_ERR( + AudioFormatValidityToErrorCode(ValidateAudioFormat(reverse_config))); + + MaybeInitializeRender(reverse_config, reverse_config); return AnalyzeReverseStreamLocked(data, reverse_config, reverse_config); } @@ -1421,8 +1855,13 @@ int AudioProcessingImpl::ProcessReverseStream(const float* const* src, TRACE_EVENT0("webrtc", "AudioProcessing::ProcessReverseStream_StreamConfig"); MutexLock lock(&mutex_render_); DenormalDisabler denormal_disabler(use_denormal_disabler_); + 
RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + + MaybeInitializeRender(input_config, output_config); RETURN_ON_ERR(AnalyzeReverseStreamLocked(src, input_config, output_config)); + if (submodule_states_.RenderMultiBandProcessingActive() || submodule_states_.RenderFullBandProcessingActive()) { render_.render_audio->CopyTo(formats_.api_format.reverse_output_stream(), @@ -1443,24 +1882,6 @@ int AudioProcessingImpl::AnalyzeReverseStreamLocked( const float* const* src, const StreamConfig& input_config, const StreamConfig& output_config) { - if (src == nullptr) { - return kNullPointerError; - } - - if (input_config.num_channels() == 0) { - return kBadNumberChannelsError; - } - - ProcessingConfig processing_config = formats_.api_format; - processing_config.reverse_input_stream() = input_config; - processing_config.reverse_output_stream() = output_config; - - RETURN_ON_ERR(MaybeInitializeRender(processing_config)); - RTC_DCHECK_EQ(input_config.num_frames(), - formats_.api_format.reverse_input_stream().num_frames()); - - DenormalDisabler denormal_disabler(use_denormal_disabler_); - if (aec_dump_) { const size_t channel_size = formats_.api_format.reverse_input_stream().num_frames(); @@ -1480,28 +1901,12 @@ int AudioProcessingImpl::ProcessReverseStream(const int16_t* const src, int16_t* const dest) { TRACE_EVENT0("webrtc", "AudioProcessing::ProcessReverseStream_AudioFrame"); - if (input_config.num_channels() <= 0) { - return AudioProcessing::Error::kBadNumberChannelsError; - } - MutexLock lock(&mutex_render_); DenormalDisabler denormal_disabler(use_denormal_disabler_); - ProcessingConfig processing_config = formats_.api_format; - processing_config.reverse_input_stream().set_sample_rate_hz( - input_config.sample_rate_hz()); - processing_config.reverse_input_stream().set_num_channels( - input_config.num_channels()); - processing_config.reverse_output_stream().set_sample_rate_hz( - output_config.sample_rate_hz()); - 
processing_config.reverse_output_stream().set_num_channels( - output_config.num_channels()); - - RETURN_ON_ERR(MaybeInitializeRender(processing_config)); - if (input_config.num_frames() != - formats_.api_format.reverse_input_stream().num_frames()) { - return kBadDataLengthError; - } + RETURN_ON_ERR( + HandleUnsupportedAudioFormats(src, input_config, output_config, dest)); + MaybeInitializeRender(input_config, output_config); if (aec_dump_) { aec_dump_->WriteRenderStreamMessage(src, input_config.num_frames(), @@ -1609,12 +2014,6 @@ void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) { } void AudioProcessingImpl::set_stream_analog_level(int level) { - // Check that input volume emulation is disabled since, when enabled, there is - // no externally applied input volume to notify to APM. - RTC_DCHECK( - !submodules_.capture_levels_adjuster || - !config_.capture_level_adjustment.analog_mic_gain_emulation.enabled); - MutexLock lock_capture(&mutex_capture_); set_stream_analog_level_locked(level); } @@ -1676,6 +2075,13 @@ void AudioProcessingImpl::UpdateRecommendedInputVolumeLocked() { return; } + if (submodules_.gain_controller2 && + config_.gain_controller2.input_volume_controller.enabled) { + capture_.recommended_input_volume = + submodules_.gain_controller2->recommended_input_volume(); + return; + } + capture_.recommended_input_volume = capture_.applied_input_volume; } @@ -1874,6 +2280,16 @@ void AudioProcessingImpl::InitializeEchoController() { } void AudioProcessingImpl::InitializeGainController1() { + if (config_.gain_controller2.enabled && + config_.gain_controller2.input_volume_controller.enabled && + config_.gain_controller1.enabled && + (config_.gain_controller1.mode == + AudioProcessing::Config::GainController1::kAdaptiveAnalog || + config_.gain_controller1.analog_gain_controller.enabled)) { + RTC_LOG(LS_ERROR) << "APM configuration not valid: " + << "Multiple input volume controllers enabled."; + } + if (!config_.gain_controller1.enabled) { 
submodules_.agc_manager.reset(); submodules_.gain_control.reset(); @@ -1944,9 +2360,19 @@ void AudioProcessingImpl::InitializeGainController2(bool config_has_changed) { if (!submodules_.gain_controller2 || config_has_changed) { const bool use_internal_vad = transient_suppressor_vad_mode_ != TransientSuppressor::VadMode::kRnnVad; + const bool input_volume_controller_config_overridden = + gain_controller2_experiment_params_.has_value() && + gain_controller2_experiment_params_->agc2_config.has_value(); + const InputVolumeController::Config input_volume_controller_config = + input_volume_controller_config_overridden + ? gain_controller2_experiment_params_->agc2_config + ->input_volume_controller + : InputVolumeController::Config{}; submodules_.gain_controller2 = std::make_unique( - config_.gain_controller2, proc_fullband_sample_rate_hz(), - num_input_channels(), use_internal_vad); + config_.gain_controller2, input_volume_controller_config, + proc_fullband_sample_rate_hz(), num_proc_channels(), use_internal_vad); + submodules_.gain_controller2->SetCaptureOutputUsed( + capture_.capture_output_used); } } @@ -1958,7 +2384,8 @@ void AudioProcessingImpl::InitializeVoiceActivityDetector( const bool use_vad = transient_suppressor_vad_mode_ == TransientSuppressor::VadMode::kRnnVad && config_.gain_controller2.enabled && - config_.gain_controller2.adaptive_digital.enabled; + (config_.gain_controller2.adaptive_digital.enabled || + config_.gain_controller2.input_volume_controller.enabled); if (!use_vad) { submodules_.voice_activity_detector.reset(); return; @@ -1968,7 +2395,6 @@ void AudioProcessingImpl::InitializeVoiceActivityDetector( // TODO(bugs.webrtc.org/13663): Cache CPU features in APM and use here. 
submodules_.voice_activity_detector = std::make_unique( - config_.gain_controller2.adaptive_digital.vad_reset_period_ms, submodules_.gain_controller2->GetCpuFeatures(), proc_fullband_sample_rate_hz()); } @@ -2064,10 +2490,6 @@ void AudioProcessingImpl::WriteAecDumpConfigMessage(bool forced) { std::string experiments_description = ""; // TODO(peah): Add semicolon-separated concatenations of experiment // descriptions for other submodules. - if (config_.gain_controller1.analog_gain_controller.clipped_level_min != - kClippedLevelMin) { - experiments_description += "AgcClippingLevelExperiment;"; - } if (!!submodules_.capture_post_processor) { experiments_description += "CapturePostProcessor;"; } diff --git a/modules/audio_processing/audio_processing_impl.h b/modules/audio_processing/audio_processing_impl.h index db123bed27..acbe5397d4 100644 --- a/modules/audio_processing/audio_processing_impl.h +++ b/modules/audio_processing/audio_processing_impl.h @@ -26,8 +26,8 @@ #include "api/function_view.h" #include "modules/audio_processing/aec3/echo_canceller3.h" #include "modules/audio_processing/agc/agc_manager_direct.h" -#include "modules/audio_processing/agc/analog_gain_stats_reporter.h" #include "modules/audio_processing/agc/gain_control.h" +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h" #include "modules/audio_processing/echo_control_mobile_impl.h" @@ -163,6 +163,9 @@ class AudioProcessingImpl : public AudioProcessing { ReinitializeTransientSuppressor); FRIEND_TEST_ALL_PREFIXES(ApmWithSubmodulesExcludedTest, BitexactWithDisabledModules); + FRIEND_TEST_ALL_PREFIXES( + AudioProcessingImplGainController2FieldTrialParametrizedTest, + ConfigAdjustedWhenExperimentEnabled); void set_stream_analog_level_locked(int level) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -191,6 +194,45 @@ class AudioProcessingImpl : public 
AudioProcessing { static std::atomic instance_count_; const bool use_setup_specific_default_aec3_config_; + // Parameters for the "GainController2" experiment which determines whether + // the following APM sub-modules are created and, if so, their configurations: + // AGC2 (`gain_controller2`), AGC1 (`gain_control`, `agc_manager`) and TS + // (`transient_suppressor`). + // TODO(bugs.webrtc.org/7494): Remove when the "WebRTC-Audio-GainController2" + // field trial is removed. + struct GainController2ExperimentParams { + struct Agc2Config { + InputVolumeController::Config input_volume_controller; + AudioProcessing::Config::GainController2::AdaptiveDigital + adaptive_digital_controller; + }; + // When `agc2_config` is specified, all gain control switches to AGC2 and + // the configuration is overridden. + absl::optional agc2_config; + // When true, the transient suppressor submodule is never created regardless + // of the APM configuration. + bool disallow_transient_suppressor_usage; + }; + // Specified when the "WebRTC-Audio-GainController2" field trial is specified. + // TODO(bugs.webrtc.org/7494): Remove when the "WebRTC-Audio-GainController2" + // field trial is removed. + const absl::optional + gain_controller2_experiment_params_; + + // Parses the "WebRTC-Audio-GainController2" field trial. If disabled, returns + // an unspecified value. + static absl::optional + GetGainController2ExperimentParams(); + + // When `experiment_params` is specified, returns an APM configuration + // modified according to the experiment parameters. Otherwise returns + // `config`. 
+ static AudioProcessing::Config AdjustConfig( + const AudioProcessing::Config& config, + const absl::optional& experiment_params); + static TransientSuppressor::VadMode GetTransientSuppressorVadMode( + const absl::optional& experiment_params); + const bool use_denormal_disabler_; const TransientSuppressor::VadMode transient_suppressor_vad_mode_; @@ -251,12 +293,13 @@ class AudioProcessingImpl : public AudioProcessing { // capture thread blocks the render thread. // Called by render: Holds the render lock when reading the format struct and // acquires both locks if reinitialization is required. - int MaybeInitializeRender(const ProcessingConfig& processing_config) + void MaybeInitializeRender(const StreamConfig& input_config, + const StreamConfig& output_config) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_); - // Called by capture: Holds the capture lock when reading the format struct - // and acquires both locks if reinitialization is needed. - int MaybeInitializeCapture(const StreamConfig& input_config, - const StreamConfig& output_config); + // Called by capture: Acquires and releases the capture lock to read the + // format struct and acquires both locks if reinitialization is needed. + void MaybeInitializeCapture(const StreamConfig& input_config, + const StreamConfig& output_config); // Method for updating the state keeping track of the active submodules. // Returns a bool indicating whether the state has changed. @@ -265,7 +308,7 @@ class AudioProcessingImpl : public AudioProcessing { // Methods requiring APM running in a single-threaded manner, requiring both // the render and capture lock to be acquired. 
- int InitializeLocked(const ProcessingConfig& config) + void InitializeLocked(const ProcessingConfig& config) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_); void InitializeResidualEchoDetector() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_render_, mutex_capture_); @@ -324,7 +367,6 @@ class AudioProcessingImpl : public AudioProcessing { // Render-side exclusive methods possibly running APM in a multi-threaded // manner that are called with the render lock already acquired. - // TODO(ekm): Remove once all clients updated to new interface. int AnalyzeReverseStreamLocked(const float* const* src, const StreamConfig& input_config, const StreamConfig& output_config) @@ -542,7 +584,9 @@ class AudioProcessingImpl : public AudioProcessing { RmsLevel capture_output_rms_ RTC_GUARDED_BY(mutex_capture_); int capture_rms_interval_counter_ RTC_GUARDED_BY(mutex_capture_) = 0; - AnalogGainStatsReporter input_volume_stats_reporter_ + InputVolumeStatsReporter applied_input_volume_stats_reporter_ + RTC_GUARDED_BY(mutex_capture_); + InputVolumeStatsReporter recommended_input_volume_stats_reporter_ RTC_GUARDED_BY(mutex_capture_); // RingRTC change to RingRTC change to make it possible to share an APM. diff --git a/modules/audio_processing/audio_processing_impl_unittest.cc b/modules/audio_processing/audio_processing_impl_unittest.cc index e67c5ee4e5..e48a5d8883 100644 --- a/modules/audio_processing/audio_processing_impl_unittest.cc +++ b/modules/audio_processing/audio_processing_impl_unittest.cc @@ -10,6 +10,7 @@ #include "modules/audio_processing/audio_processing_impl.h" +#include #include #include #include @@ -131,26 +132,6 @@ class TestRenderPreProcessor : public CustomProcessing { static constexpr float ProcessSample(float x) { return 2.f * x; } }; -// Creates a simple `AudioProcessing` instance for APM input volume testing -// with analog and digital AGC enabled and minimum volume `startup_min_volume` -// at the startup. 
-rtc::scoped_refptr CreateApmForInputVolumeTest( - int startup_min_volume) { - webrtc::AudioProcessing::Config config; - // Enable AGC1 analog. - config.gain_controller1.enabled = true; - config.gain_controller1.analog_gain_controller.enabled = true; - config.gain_controller1.analog_gain_controller.startup_min_volume = - startup_min_volume; - // Enable AGC2 digital. - config.gain_controller2.enabled = true; - config.gain_controller2.adaptive_digital.enabled = true; - - auto apm(AudioProcessingBuilder().Create()); - apm->ApplyConfig(config); - return apm; -} - // Runs `apm` input processing for volume adjustments for `num_frames` random // frames starting from the volume `initial_volume`. This includes three steps: // 1) Set the input volume 2) Process the stream 3) Set the new recommended @@ -177,77 +158,6 @@ int ProcessInputVolume(AudioProcessing& apm, return recommended_input_volume; } -constexpr char kMinMicLevelFieldTrial[] = - "WebRTC-Audio-2ndAgcMinMicLevelExperiment"; -constexpr int kMinInputVolume = 12; - -std::string GetMinMicLevelExperimentFieldTrial(absl::optional value) { - char field_trial_buffer[64]; - rtc::SimpleStringBuilder builder(field_trial_buffer); - if (value.has_value()) { - RTC_DCHECK_GE(*value, 0); - RTC_DCHECK_LE(*value, 255); - builder << kMinMicLevelFieldTrial << "/Enabled-" << *value << "/"; - } else { - builder << kMinMicLevelFieldTrial << "/Disabled/"; - } - return builder.str(); -} - -// TODO(webrtc:7494): Remove the fieldtrial from the input volume tests when -// "WebRTC-Audio-2ndAgcMinMicLevelExperiment" is removed. 
-class InputVolumeStartupParameterizedTest - : public ::testing::TestWithParam< - std::tuple>> { - protected: - InputVolumeStartupParameterizedTest() - : field_trials_( - GetMinMicLevelExperimentFieldTrial(std::get<2>(GetParam()))) {} - int GetMinStartupVolume() const { return std::get<0>(GetParam()); } - int GetStartupVolume() const { return std::get<1>(GetParam()); } - int GetMinVolume() const { - return std::get<2>(GetParam()).value_or(kMinInputVolume); - } - - private: - test::ScopedFieldTrials field_trials_; -}; - -class InputVolumeNotZeroParameterizedTest - : public ::testing::TestWithParam< - std::tuple>> { - protected: - InputVolumeNotZeroParameterizedTest() - : field_trials_( - GetMinMicLevelExperimentFieldTrial(std::get<2>(GetParam()))) {} - int GetStartupVolume() const { return std::get<0>(GetParam()); } - int GetVolume() const { return std::get<1>(GetParam()); } - int GetMinVolume() const { - return std::get<2>(GetParam()).value_or(kMinInputVolume); - } - bool GetMinMicLevelExperimentEnabled() { - return std::get<2>(GetParam()).has_value(); - } - - private: - test::ScopedFieldTrials field_trials_; -}; - -class InputVolumeZeroParameterizedTest - : public ::testing::TestWithParam>> { - protected: - InputVolumeZeroParameterizedTest() - : field_trials_( - GetMinMicLevelExperimentFieldTrial(std::get<1>(GetParam()))) {} - int GetStartupVolume() const { return std::get<0>(GetParam()); } - int GetMinVolume() const { - return std::get<1>(GetParam()).value_or(kMinInputVolume); - } - - private: - test::ScopedFieldTrials field_trials_; -}; - } // namespace TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) { @@ -275,11 +185,9 @@ TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) { EXPECT_NOERR(mock.ProcessStream(frame.data(), config, config, frame.data())); // New number of channels. - // TODO(peah): Investigate why this causes 2 inits. 
config = StreamConfig(32000, 2); EXPECT_CALL(mock, InitializeLocked).Times(2); EXPECT_NOERR(mock.ProcessStream(frame.data(), config, config, frame.data())); - // ProcessStream sets num_channels_ == num_output_channels. EXPECT_NOERR( mock.ProcessReverseStream(frame.data(), config, config, frame.data())); @@ -618,15 +526,16 @@ TEST(AudioProcessingImplTest, TEST(AudioProcessingImplTest, ProcessWithAgc2AndTransientSuppressorVadModeDefault) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-TransientSuppressorVadMode/Enabled-Default/"); - rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); + "WebRTC-Audio-GainController2/Disabled/"); + auto apm = AudioProcessingBuilder() + .SetConfig({.gain_controller1{.enabled = false}}) + .Create(); ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); webrtc::AudioProcessing::Config apm_config; - // Disable AGC1 analog. apm_config.gain_controller1.enabled = false; - // Enable AGC2 digital. apm_config.gain_controller2.enabled = true; apm_config.gain_controller2.adaptive_digital.enabled = true; + apm_config.transient_suppression.enabled = true; apm->ApplyConfig(apm_config); constexpr int kSampleRateHz = 48000; constexpr int kNumChannels = 1; @@ -650,15 +559,14 @@ TEST(AudioProcessingImplTest, TEST(AudioProcessingImplTest, ProcessWithAgc2AndTransientSuppressorVadModeRnnVad) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-TransientSuppressorVadMode/Enabled-RnnVad/"); + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); webrtc::AudioProcessing::Config apm_config; - // Disable AGC1 analog. apm_config.gain_controller1.enabled = false; - // Enable AGC2 digital. 
apm_config.gain_controller2.enabled = true; apm_config.gain_controller2.adaptive_digital.enabled = true; + apm_config.transient_suppression.enabled = true; apm->ApplyConfig(apm_config); constexpr int kSampleRateHz = 48000; constexpr int kNumChannels = 1; @@ -934,118 +842,650 @@ TEST(ApmWithSubmodulesExcludedTest, ToggleTransientSuppressor) { } } -// Tests that the minimum startup volume is applied at the startup. -TEST_P(InputVolumeStartupParameterizedTest, - VerifyStartupMinVolumeAppliedAtStartup) { - const int applied_startup_input_volume = GetStartupVolume(); - const int startup_min_volume = GetMinStartupVolume(); - const int min_volume = std::max(startup_min_volume, GetMinVolume()); - const int expected_volume = - std::max(applied_startup_input_volume, min_volume); - auto apm(CreateApmForInputVolumeTest(startup_min_volume)); +class ApmInputVolumeControllerParametrizedTest + : public ::testing::TestWithParam< + std::tuple> { + protected: + ApmInputVolumeControllerParametrizedTest() + : sample_rate_hz_(std::get<0>(GetParam())), + num_channels_(std::get<1>(GetParam())), + channels_(num_channels_), + channel_pointers_(num_channels_) { + const int frame_size = sample_rate_hz_ / 100; + for (int c = 0; c < num_channels_; ++c) { + channels_[c].resize(frame_size); + channel_pointers_[c] = channels_[c].data(); + std::fill(channels_[c].begin(), channels_[c].end(), 0.0f); + } + } - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); + int sample_rate_hz() const { return sample_rate_hz_; } + int num_channels() const { return num_channels_; } + AudioProcessing::Config GetConfig() const { return std::get<2>(GetParam()); } - ASSERT_EQ(recommended_input_volume, expected_volume); + float* const* channel_pointers() { return channel_pointers_.data(); } + + private: + const int sample_rate_hz_; + const int num_channels_; + std::vector> channels_; + std::vector channel_pointers_; +}; + 
+TEST_P(ApmInputVolumeControllerParametrizedTest, + EnforceMinInputVolumeAtStartupWithZeroVolume) { + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + + apm->set_stream_analog_level(0); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_GT(apm->recommended_stream_analog_level(), 0); } -// Tests that the minimum input volume is applied if the volume is manually -// adjusted to a non-zero value only if -// "WebRTC-Audio-2ndAgcMinMicLevelExperiment" is enabled. -TEST_P(InputVolumeNotZeroParameterizedTest, - VerifyMinVolumeMaybeAppliedAfterManualVolumeAdjustments) { - constexpr int kStartupMinVolume = 0; - const int applied_startup_input_volume = GetStartupVolume(); - const int applied_input_volume = GetVolume(); - const int expected_volume = std::max(applied_input_volume, GetMinVolume()); - auto apm(CreateApmForInputVolumeTest(kStartupMinVolume)); +TEST_P(ApmInputVolumeControllerParametrizedTest, + EnforceMinInputVolumeAtStartupWithNonZeroVolume) { + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_input_volume); + constexpr int kStartupVolume = 3; + apm->set_stream_analog_level(kStartupVolume); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_GT(apm->recommended_stream_analog_level(), kStartupVolume); +} - ASSERT_NE(applied_input_volume, 0); - if (GetMinMicLevelExperimentEnabled()) { - ASSERT_EQ(recommended_input_volume, expected_volume); - } else { - ASSERT_EQ(recommended_input_volume, applied_input_volume); +TEST_P(ApmInputVolumeControllerParametrizedTest, + EnforceMinInputVolumeAfterManualVolumeAdjustment) { + const 
auto config = GetConfig(); + if (config.gain_controller1.enabled) { + // After a downward manual adjustment, AGC1 slowly converges to the minimum + // input volume. + GTEST_SKIP() << "Does not apply to AGC1"; + } + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + + apm->set_stream_analog_level(20); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + constexpr int kManuallyAdjustedVolume = 3; + apm->set_stream_analog_level(kManuallyAdjustedVolume); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_GT(apm->recommended_stream_analog_level(), kManuallyAdjustedVolume); +} + +TEST_P(ApmInputVolumeControllerParametrizedTest, + DoNotEnforceMinInputVolumeAfterManualVolumeAdjustmentToZero) { + const StreamConfig stream_config(sample_rate_hz(), num_channels()); + auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + + apm->set_stream_analog_level(100); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + apm->set_stream_analog_level(0); + apm->ProcessStream(channel_pointers(), stream_config, stream_config, + channel_pointers()); + EXPECT_EQ(apm->recommended_stream_analog_level(), 0); +} + +INSTANTIATE_TEST_SUITE_P( + AudioProcessingImplTest, + ApmInputVolumeControllerParametrizedTest, + ::testing::Combine( + ::testing::Values(8000, 16000, 32000, 48000), // Sample rates. + ::testing::Values(1, 2), // Number of channels. + ::testing::Values( + // Full AGC1. + AudioProcessing::Config{ + .gain_controller1 = {.enabled = true, + .analog_gain_controller = + {.enabled = true, + .enable_digital_adaptive = true}}, + .gain_controller2 = {.enabled = false}}, + // Hybrid AGC. 
+ AudioProcessing::Config{ + .gain_controller1 = {.enabled = true, + .analog_gain_controller = + {.enabled = true, + .enable_digital_adaptive = false}}, + .gain_controller2 = {.enabled = true, + .adaptive_digital = {.enabled = true}}}))); + +// When the input volume is not emulated and no input volume controller is +// active, the recommended volume must always be the applied volume. +TEST(AudioProcessingImplTest, + RecommendAppliedInputVolumeWithNoAgcWithNoEmulation) { + auto apm = AudioProcessingBuilder() + .SetConfig({.capture_level_adjustment = {.enabled = false}, + .gain_controller1 = {.enabled = false}}) + .Create(); + + constexpr int kOneFrame = 1; + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/59), 59); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/135), 135); +} + +// When the input volume is emulated, the recommended volume must always be the +// applied volume and at any time it must not be that set in the input volume +// emulator. +// TODO(bugs.webrtc.org/14581): Enable when APM fixed to let this test pass. 
+TEST(AudioProcessingImplTest, + DISABLED_RecommendAppliedInputVolumeWithNoAgcWithEmulation) { + auto apm = + AudioProcessingBuilder() + .SetConfig({.capture_level_adjustment = {.enabled = true, + .analog_mic_gain_emulation{ + .enabled = true, + .initial_level = 255}}, + .gain_controller1 = {.enabled = false}}) + .Create(); + + constexpr int kOneFrame = 1; + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/59), 59); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/135), 135); +} + +// Even if there is an enabled input volume controller, when the input volume is +// emulated, the recommended volume is always the applied volume because the +// active controller must only adjust the internally emulated volume and leave +// the externally applied volume unchanged. +// TODO(bugs.webrtc.org/14581): Enable when APM fixed to let this test pass. +TEST(AudioProcessingImplTest, + DISABLED_RecommendAppliedInputVolumeWithAgcWithEmulation) { + auto apm = + AudioProcessingBuilder() + .SetConfig({.capture_level_adjustment = {.enabled = true, + .analog_mic_gain_emulation{ + .enabled = true}}, + .gain_controller1 = {.enabled = true, + .analog_gain_controller{ + .enabled = true, + }}}) + .Create(); + + constexpr int kOneFrame = 1; + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/59), 59); + EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/135), 135); +} + +TEST(AudioProcessingImplTest, + Agc2FieldTrialDoNotSwitchToFullAgc2WhenNoAgcIsActive) { + constexpr AudioProcessing::Config kOriginal{ + .gain_controller1{.enabled = false}, + .gain_controller2{.enabled = false}, + }; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); + + // Test config application via `AudioProcessing` ctor. 
+ auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); +} + +TEST(AudioProcessingImplTest, + Agc2FieldTrialDoNotSwitchToFullAgc2WithAgc1Agc2InputVolumeControllers) { + constexpr AudioProcessing::Config kOriginal{ + .gain_controller1{.enabled = true, + .analog_gain_controller{.enabled = true}}, + .gain_controller2{.enabled = true, + .input_volume_controller{.enabled = true}}, + }; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); +} + +class Agc2FieldTrialParametrizedTest + : public ::testing::TestWithParam {}; + +TEST_P(Agc2FieldTrialParametrizedTest, DoNotChangeConfigIfDisabled) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + + // Test config application via `AudioProcessing` ctor. 
+ auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); +} + +TEST_P(Agc2FieldTrialParametrizedTest, DoNotChangeConfigIfNoOverride) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:false," + "disallow_transient_suppressor_usage:false/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); +} + +TEST_P(Agc2FieldTrialParametrizedTest, DoNotSwitchToFullAgc2) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:false/"); + + // Test config application via `AudioProcessing` ctor. 
+ auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); + EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); +} + +TEST_P(Agc2FieldTrialParametrizedTest, SwitchToFullAgc2) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); +} + +TEST_P(Agc2FieldTrialParametrizedTest, + SwitchToFullAgc2AndOverrideInputVolumeControllerParameters) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true," + "min_input_volume:123," + "clipped_level_min:20," + "clipped_level_step:30," + "clipped_ratio_threshold:0.4," + "clipped_wait_frames:50," + "enable_clipping_predictor:true," + "target_range_max_dbfs:-6," + "target_range_min_dbfs:-70," + "update_input_volume_wait_frames:80," + "speech_probability_threshold:0.9," + "speech_ratio_threshold:1.0/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); +} + +TEST_P(Agc2FieldTrialParametrizedTest, + SwitchToFullAgc2AndOverrideAdaptiveDigitalControllerParameters) { + const AudioProcessing::Config original = GetParam(); + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true," + "headroom_db:10," + "max_gain_db:20," + "initial_gain_db:7," + "max_gain_change_db_per_second:5," + "max_output_noise_level_dbfs:-40/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + ASSERT_NE(adjusted.gain_controller2.adaptive_digital, + original.gain_controller2.adaptive_digital); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.headroom_db, 10); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.max_gain_db, 20); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.initial_gain_db, 7); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_gain_change_db_per_second, + 5); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_output_noise_level_dbfs, + -40); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(original); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.gain_controller1.enabled); + EXPECT_TRUE(adjusted.gain_controller2.enabled); + EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); + EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); + ASSERT_NE(adjusted.gain_controller2.adaptive_digital, + original.gain_controller2.adaptive_digital); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.headroom_db, 10); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.max_gain_db, 20); + EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.initial_gain_db, 7); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_gain_change_db_per_second, + 5); + EXPECT_EQ( + adjusted.gain_controller2.adaptive_digital.max_output_noise_level_dbfs, + -40); +} + +TEST_P(Agc2FieldTrialParametrizedTest, ProcessSucceedsWithTs) { + AudioProcessing::Config config = GetParam(); + config.transient_suppression.enabled = true; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + auto apm = AudioProcessingBuilder().SetConfig(config).Create(); + + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); } } -// Tests that the minimum input volume is not applied if the volume is manually -// adjusted to zero. 
-TEST_P(InputVolumeZeroParameterizedTest, - VerifyMinVolumeNotAppliedAfterManualVolumeAdjustments) { - constexpr int kStartupMinVolume = 0; - constexpr int kZeroVolume = 0; - const int applied_startup_input_volume = GetStartupVolume(); - auto apm(CreateApmForInputVolumeTest(kStartupMinVolume)); +TEST_P(Agc2FieldTrialParametrizedTest, ProcessSucceedsWithoutTs) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:false," + "disallow_transient_suppressor_usage:true/"); + auto apm = AudioProcessingBuilder().SetConfig(GetParam()).Create(); - const int recommended_input_volume_after_startup = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/1, kZeroVolume); - - ASSERT_NE(recommended_input_volume, recommended_input_volume_after_startup); - ASSERT_EQ(recommended_input_volume, kZeroVolume); -} - -// Tests that the minimum input volume is applied if the volume is not zero -// before it is automatically adjusted. 
-TEST_P(InputVolumeNotZeroParameterizedTest, - VerifyMinVolumeAppliedAfterAutomaticVolumeAdjustments) { - constexpr int kStartupMinVolume = 0; - const int applied_startup_input_volume = GetStartupVolume(); - const int applied_input_volume = GetVolume(); - auto apm(CreateApmForInputVolumeTest(kStartupMinVolume)); - - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/400, applied_input_volume); - - ASSERT_NE(applied_input_volume, 0); - if (recommended_input_volume != applied_input_volume) { - ASSERT_GE(recommended_input_volume, GetMinVolume()); + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); } } -// Tests that the minimum input volume is not applied if the volume is zero -// before it is automatically adjusted. 
-TEST_P(InputVolumeZeroParameterizedTest, - VerifyMinVolumeNotAppliedAfterAutomaticVolumeAdjustments) { - constexpr int kStartupMinVolume = 0; - constexpr int kZeroVolume = 0; - const int applied_startup_input_volume = GetStartupVolume(); - auto apm(CreateApmForInputVolumeTest(kStartupMinVolume)); +TEST_P(Agc2FieldTrialParametrizedTest, + ProcessSucceedsWhenSwitchToFullAgc2WithTs) { + AudioProcessing::Config config = GetParam(); + config.transient_suppression.enabled = true; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:true," + "disallow_transient_suppressor_usage:false/"); + auto apm = AudioProcessingBuilder().SetConfig(config).Create(); - const int recommended_input_volume_after_startup = - ProcessInputVolume(*apm, /*num_frames=*/1, applied_startup_input_volume); - const int recommended_input_volume = - ProcessInputVolume(*apm, /*num_frames=*/400, kZeroVolume); - - ASSERT_NE(recommended_input_volume, recommended_input_volume_after_startup); - ASSERT_EQ(recommended_input_volume, kZeroVolume); + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); + } } -INSTANTIATE_TEST_SUITE_P(AudioProcessingImplTest, - InputVolumeStartupParameterizedTest, - ::testing::Combine(::testing::Values(0, 5, 15), - ::testing::Values(0, 5, 30), - ::testing::Values(absl::nullopt, - 20))); +TEST_P(Agc2FieldTrialParametrizedTest, + ProcessSucceedsWhenSwitchToFullAgc2WithoutTs) { + 
webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "switch_to_agc2:true," + "disallow_transient_suppressor_usage:true/"); + auto apm = AudioProcessingBuilder().SetConfig(GetParam()).Create(); -INSTANTIATE_TEST_SUITE_P(AudioProcessingImplTest, - InputVolumeNotZeroParameterizedTest, - ::testing::Combine(::testing::Values(0, 5, 15), - ::testing::Values(1, 5, 30), - ::testing::Values(absl::nullopt, - 20))); + constexpr int kSampleRateHz = 48000; + constexpr int kNumChannels = 1; + std::array buffer; + float* channel_pointers[] = {buffer.data()}; + StreamConfig stream_config(kSampleRateHz, kNumChannels); + Random random_generator(2341U); + constexpr int kFramesToProcess = 10; + int volume = 100; + for (int i = 0; i < kFramesToProcess; ++i) { + SCOPED_TRACE(i); + RandomizeSampleVector(&random_generator, buffer); + apm->set_stream_analog_level(volume); + ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, + channel_pointers), + kNoErr); + volume = apm->recommended_stream_analog_level(); + } +} -INSTANTIATE_TEST_SUITE_P(AudioProcessingImplTest, - InputVolumeZeroParameterizedTest, - ::testing::Combine(::testing::Values(0, 5, 15), - ::testing::Values(absl::nullopt, - 20))); +INSTANTIATE_TEST_SUITE_P( + AudioProcessingImplTest, + Agc2FieldTrialParametrizedTest, + ::testing::Values( + // Full AGC1. + AudioProcessing::Config{ + .gain_controller1 = + {.enabled = true, + .analog_gain_controller = {.enabled = true, + .enable_digital_adaptive = true}}, + .gain_controller2 = {.enabled = false}}, + // Hybrid AGC. 
+ AudioProcessing::Config{ + .gain_controller1 = + {.enabled = true, + .analog_gain_controller = {.enabled = true, + .enable_digital_adaptive = false}}, + .gain_controller2 = {.enabled = true, + .adaptive_digital = {.enabled = true}}})); + +TEST(AudioProcessingImplTest, CanDisableTransientSuppressor) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = false}}; + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, CanEnableTs) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, CanDisableTsWithAgc2FieldTrialDisabled) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = false}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, CanEnableTsWithAgc2FieldTrialDisabled) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Disabled/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, + CanDisableTsWithAgc2FieldTrialEnabledAndUsageAllowed) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = false}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "disallow_transient_suppressor_usage:false/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, + CanEnableTsWithAgc2FieldTrialEnabledAndUsageAllowed) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "disallow_transient_suppressor_usage:false/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. + auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_TRUE(adjusted.transient_suppression.enabled); +} + +TEST(AudioProcessingImplTest, + CannotEnableTsWithAgc2FieldTrialEnabledAndUsageDisallowed) { + constexpr AudioProcessing::Config kOriginal = { + .transient_suppression = {.enabled = true}}; + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Audio-GainController2/Enabled," + "disallow_transient_suppressor_usage:true/"); + + // Test config application via `AudioProcessing` ctor. + auto adjusted = + AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); + EXPECT_FALSE(adjusted.transient_suppression.enabled); + + // Test config application via `AudioProcessing::ApplyConfig()`. 
+ auto apm = AudioProcessingBuilder().Create(); + apm->ApplyConfig(kOriginal); + adjusted = apm->GetConfig(); + EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); +} } // namespace webrtc diff --git a/modules/audio_processing/audio_processing_unittest.cc b/modules/audio_processing/audio_processing_unittest.cc index 326ae4871e..e320e71405 100644 --- a/modules/audio_processing/audio_processing_unittest.cc +++ b/modules/audio_processing/audio_processing_unittest.cc @@ -3062,10 +3062,6 @@ TEST(AudioProcessing, GainController2ConfigEqual) { b_adaptive.enabled = a_adaptive.enabled; EXPECT_EQ(a, b); - Toggle(a_adaptive.dry_run); - b_adaptive.dry_run = a_adaptive.dry_run; - EXPECT_EQ(a, b); - a_adaptive.headroom_db += 1.0f; b_adaptive.headroom_db = a_adaptive.headroom_db; EXPECT_EQ(a, b); @@ -3078,15 +3074,6 @@ TEST(AudioProcessing, GainController2ConfigEqual) { b_adaptive.initial_gain_db = a_adaptive.initial_gain_db; EXPECT_EQ(a, b); - a_adaptive.vad_reset_period_ms++; - b_adaptive.vad_reset_period_ms = a_adaptive.vad_reset_period_ms; - EXPECT_EQ(a, b); - - a_adaptive.adjacent_speech_frames_threshold++; - b_adaptive.adjacent_speech_frames_threshold = - a_adaptive.adjacent_speech_frames_threshold; - EXPECT_EQ(a, b); - a_adaptive.max_gain_change_db_per_second += 1.0f; b_adaptive.max_gain_change_db_per_second = a_adaptive.max_gain_change_db_per_second; @@ -3119,10 +3106,6 @@ TEST(AudioProcessing, GainController2ConfigNotEqual) { EXPECT_NE(a, b); a_adaptive = b_adaptive; - Toggle(a_adaptive.dry_run); - EXPECT_NE(a, b); - a_adaptive = b_adaptive; - a_adaptive.headroom_db += 1.0f; EXPECT_NE(a, b); a_adaptive = b_adaptive; @@ -3135,14 +3118,6 @@ TEST(AudioProcessing, GainController2ConfigNotEqual) { EXPECT_NE(a, b); a_adaptive = b_adaptive; - a_adaptive.vad_reset_period_ms++; - EXPECT_NE(a, b); - a_adaptive = b_adaptive; - - a_adaptive.adjacent_speech_frames_threshold++; - EXPECT_NE(a, b); - a_adaptive = b_adaptive; - a_adaptive.max_gain_change_db_per_second += 1.0f; 
EXPECT_NE(a, b); a_adaptive = b_adaptive; @@ -3152,4 +3127,315 @@ TEST(AudioProcessing, GainController2ConfigNotEqual) { a_adaptive = b_adaptive; } +struct ApmFormatHandlingTestParams { + enum class ExpectedOutput { + kErrorAndUnmodified, + kErrorAndSilence, + kErrorAndCopyOfFirstChannel, + kErrorAndExactCopy, + kNoError + }; + + StreamConfig input_config; + StreamConfig output_config; + ExpectedOutput expected_output; +}; + +class ApmFormatHandlingTest + : public ::testing::TestWithParam< + std::tuple> { + public: + ApmFormatHandlingTest() + : stream_direction_(std::get<0>(GetParam())), + test_params_(std::get<1>(GetParam())) {} + + protected: + ::testing::Message ProduceDebugMessage() { + return ::testing::Message() + << "input sample_rate_hz=" + << test_params_.input_config.sample_rate_hz() + << " num_channels=" << test_params_.input_config.num_channels() + << ", output sample_rate_hz=" + << test_params_.output_config.sample_rate_hz() + << " num_channels=" << test_params_.output_config.num_channels() + << ", stream_direction=" << stream_direction_ << ", expected_output=" + << static_cast(test_params_.expected_output); + } + + StreamDirection stream_direction_; + ApmFormatHandlingTestParams test_params_; +}; + +INSTANTIATE_TEST_SUITE_P( + FormatValidation, + ApmFormatHandlingTest, + testing::Combine( + ::testing::Values(kForward, kReverse), + ::testing::Values( + // Test cases with values on the boundary of legal ranges. 
+ ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(8000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(8000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(384000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(384000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 2), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 3), StreamConfig(16000, 3), + ApmFormatHandlingTestParams::ExpectedOutput::kNoError}, + + // Supported but incompatible formats. + ApmFormatHandlingTestParams{ + StreamConfig(16000, 3), StreamConfig(16000, 2), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 3), StreamConfig(16000, 4), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + + // Unsupported format and input / output mismatch. 
+ ApmFormatHandlingTestParams{ + StreamConfig(7900, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(7900, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(390000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(390000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + ApmFormatHandlingTestParams{ + StreamConfig(-16000, 1), StreamConfig(16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence}, + + // Unsupported format but input / output formats match. + ApmFormatHandlingTestParams{StreamConfig(7900, 1), + StreamConfig(7900, 1), + ApmFormatHandlingTestParams:: + ExpectedOutput::kErrorAndExactCopy}, + ApmFormatHandlingTestParams{StreamConfig(390000, 1), + StreamConfig(390000, 1), + ApmFormatHandlingTestParams:: + ExpectedOutput::kErrorAndExactCopy}, + + // Unsupported but identical sample rate, channel mismatch. + ApmFormatHandlingTestParams{ + StreamConfig(7900, 1), StreamConfig(7900, 2), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + ApmFormatHandlingTestParams{ + StreamConfig(7900, 2), StreamConfig(7900, 1), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel}, + + // Test cases with meaningless output format. + ApmFormatHandlingTestParams{ + StreamConfig(16000, 1), StreamConfig(-16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndUnmodified}, + ApmFormatHandlingTestParams{ + StreamConfig(-16000, 1), StreamConfig(-16000, 1), + ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndUnmodified}))); + +TEST_P(ApmFormatHandlingTest, IntApi) { + SCOPED_TRACE(ProduceDebugMessage()); + + // Set up input and output data. 
+ const size_t num_input_samples = + test_params_.input_config.num_channels() * + std::abs(test_params_.input_config.sample_rate_hz() / 100); + const size_t num_output_samples = + test_params_.output_config.num_channels() * + std::abs(test_params_.output_config.sample_rate_hz() / 100); + std::vector input_block(num_input_samples); + for (int i = 0; i < static_cast(input_block.size()); ++i) { + input_block[i] = i; + } + std::vector output_block(num_output_samples); + constexpr int kUnlikelyOffset = 37; + for (int i = 0; i < static_cast(output_block.size()); ++i) { + output_block[i] = i - kUnlikelyOffset; + } + + // Call APM. + rtc::scoped_refptr ap = + AudioProcessingBuilderForTesting().Create(); + int error; + if (stream_direction_ == kForward) { + error = ap->ProcessStream(input_block.data(), test_params_.input_config, + test_params_.output_config, output_block.data()); + } else { + error = ap->ProcessReverseStream( + input_block.data(), test_params_.input_config, + test_params_.output_config, output_block.data()); + } + + // Check output. 
+ switch (test_params_.expected_output) { + case ApmFormatHandlingTestParams::ExpectedOutput::kNoError: + EXPECT_EQ(error, AudioProcessing::kNoError); + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndUnmodified: + EXPECT_NE(error, AudioProcessing::kNoError); + for (int i = 0; i < static_cast(output_block.size()); ++i) { + EXPECT_EQ(output_block[i], i - kUnlikelyOffset); + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence: + EXPECT_NE(error, AudioProcessing::kNoError); + for (int i = 0; i < static_cast(output_block.size()); ++i) { + EXPECT_EQ(output_block[i], 0); + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < test_params_.output_config.num_channels(); + ++ch) { + for (size_t i = 0; i < test_params_.output_config.num_frames(); ++i) { + EXPECT_EQ( + output_block[ch + i * test_params_.output_config.num_channels()], + static_cast(i * + test_params_.input_config.num_channels())); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndExactCopy: + EXPECT_NE(error, AudioProcessing::kNoError); + for (int i = 0; i < static_cast(output_block.size()); ++i) { + EXPECT_EQ(output_block[i], i); + } + break; + } +} + +TEST_P(ApmFormatHandlingTest, FloatApi) { + SCOPED_TRACE(ProduceDebugMessage()); + + // Set up input and output data. 
+ const size_t input_samples_per_channel = + std::abs(test_params_.input_config.sample_rate_hz()) / 100; + const size_t output_samples_per_channel = + std::abs(test_params_.output_config.sample_rate_hz()) / 100; + const size_t input_num_channels = test_params_.input_config.num_channels(); + const size_t output_num_channels = test_params_.output_config.num_channels(); + ChannelBuffer input_block(input_samples_per_channel, + input_num_channels); + ChannelBuffer output_block(output_samples_per_channel, + output_num_channels); + for (size_t ch = 0; ch < input_num_channels; ++ch) { + for (size_t i = 0; i < input_samples_per_channel; ++i) { + input_block.channels()[ch][i] = ch + i * input_num_channels; + } + } + constexpr int kUnlikelyOffset = 37; + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + output_block.channels()[ch][i] = + ch + i * output_num_channels - kUnlikelyOffset; + } + } + + // Call APM. + rtc::scoped_refptr ap = + AudioProcessingBuilderForTesting().Create(); + int error; + if (stream_direction_ == kForward) { + error = + ap->ProcessStream(input_block.channels(), test_params_.input_config, + test_params_.output_config, output_block.channels()); + } else { + error = ap->ProcessReverseStream( + input_block.channels(), test_params_.input_config, + test_params_.output_config, output_block.channels()); + } + + // Check output. 
+ switch (test_params_.expected_output) { + case ApmFormatHandlingTestParams::ExpectedOutput::kNoError: + EXPECT_EQ(error, AudioProcessing::kNoError); + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndUnmodified: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], + ch + i * output_num_channels - kUnlikelyOffset); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndSilence: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], 0); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput:: + kErrorAndCopyOfFirstChannel: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], + input_block.channels()[0][i]); + } + } + break; + case ApmFormatHandlingTestParams::ExpectedOutput::kErrorAndExactCopy: + EXPECT_NE(error, AudioProcessing::kNoError); + for (size_t ch = 0; ch < output_num_channels; ++ch) { + for (size_t i = 0; i < output_samples_per_channel; ++i) { + EXPECT_EQ(output_block.channels()[ch][i], + input_block.channels()[ch][i]); + } + } + break; + } +} + +TEST(ApmAnalyzeReverseStreamFormatTest, AnalyzeReverseStream) { + for (auto&& [input_config, expect_error] : + {std::tuple(StreamConfig(16000, 2), /*expect_error=*/false), + std::tuple(StreamConfig(8000, 1), /*expect_error=*/false), + std::tuple(StreamConfig(384000, 1), /*expect_error=*/false), + std::tuple(StreamConfig(7900, 1), /*expect_error=*/true), + std::tuple(StreamConfig(390000, 1), /*expect_error=*/true), + std::tuple(StreamConfig(16000, 0), /*expect_error=*/true), + std::tuple(StreamConfig(-16000, 
0), /*expect_error=*/true)}) { + SCOPED_TRACE(::testing::Message() + << "sample_rate_hz=" << input_config.sample_rate_hz() + << " num_channels=" << input_config.num_channels()); + + // Set up input data. + ChannelBuffer input_block( + std::abs(input_config.sample_rate_hz()) / 100, + input_config.num_channels()); + + // Call APM. + rtc::scoped_refptr ap = + AudioProcessingBuilderForTesting().Create(); + int error = ap->AnalyzeReverseStream(input_block.channels(), input_config); + + // Check output. + if (expect_error) { + EXPECT_NE(error, AudioProcessing::kNoError); + } else { + EXPECT_EQ(error, AudioProcessing::kNoError); + } + } +} + } // namespace webrtc diff --git a/modules/audio_processing/gain_control_impl.cc b/modules/audio_processing/gain_control_impl.cc index 3fac1f7f56..edc49d1401 100644 --- a/modules/audio_processing/gain_control_impl.cc +++ b/modules/audio_processing/gain_control_impl.cc @@ -39,12 +39,8 @@ int16_t MapSetting(GainControl::Mode mode) { return -1; } -// Checks whether the legacy digital gain application should be used. -bool UseLegacyDigitalGainApplier() { - return field_trial::IsEnabled("WebRTC-UseLegacyDigitalGainApplier"); -} - -// Floating point variant of WebRtcAgc_Process. +// Applies the sub-frame `gains` to all the bands in `out` and clamps the output +// in the signed 16 bit range. 
void ApplyDigitalGain(const int32_t gains[11], size_t num_bands, float* const* out) { @@ -97,7 +93,6 @@ int GainControlImpl::instance_counter_ = 0; GainControlImpl::GainControlImpl() : data_dumper_(new ApmDataDumper(instance_counter_)), - use_legacy_gain_applier_(UseLegacyDigitalGainApplier()), mode_(kAdaptiveAnalog), minimum_capture_level_(0), maximum_capture_level_(255), @@ -236,26 +231,9 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio, } } - if (use_legacy_gain_applier_) { - for (size_t ch = 0; ch < mono_agcs_.size(); ++ch) { - int16_t split_band_data[AudioBuffer::kMaxNumBands] - [AudioBuffer::kMaxSplitFrameLength]; - int16_t* split_bands[AudioBuffer::kMaxNumBands] = { - split_band_data[0], split_band_data[1], split_band_data[2]}; - audio->ExportSplitChannelData(ch, split_bands); - - int err_process = WebRtcAgc_Process( - mono_agcs_[ch]->state, mono_agcs_[index_to_apply]->gains, split_bands, - audio->num_bands(), split_bands); - RTC_DCHECK_EQ(err_process, 0); - - audio->ImportSplitChannelData(ch, split_bands); - } - } else { - for (size_t ch = 0; ch < mono_agcs_.size(); ++ch) { - ApplyDigitalGain(mono_agcs_[index_to_apply]->gains, audio->num_bands(), - audio->split_bands(ch)); - } + for (size_t ch = 0; ch < mono_agcs_.size(); ++ch) { + ApplyDigitalGain(mono_agcs_[index_to_apply]->gains, audio->num_bands(), + audio->split_bands(ch)); } RTC_DCHECK_LT(0ul, *num_proc_channels_); diff --git a/modules/audio_processing/gain_control_impl.h b/modules/audio_processing/gain_control_impl.h index b65d697945..8aea8f2e95 100644 --- a/modules/audio_processing/gain_control_impl.h +++ b/modules/audio_processing/gain_control_impl.h @@ -68,7 +68,6 @@ class GainControlImpl : public GainControl { std::unique_ptr data_dumper_; - const bool use_legacy_gain_applier_; Mode mode_; int minimum_capture_level_; int maximum_capture_level_; diff --git a/modules/audio_processing/gain_controller2.cc b/modules/audio_processing/gain_controller2.cc index aebac525a1..9beaf00823 100644 
--- a/modules/audio_processing/gain_controller2.cc +++ b/modules/audio_processing/gain_controller2.cc @@ -14,6 +14,7 @@ #include #include "common_audio/include/audio_util.h" +#include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/include/audio_frame_view.h" @@ -27,6 +28,7 @@ namespace webrtc { namespace { using Agc2Config = AudioProcessing::Config::GainController2; +using InputVolumeControllerConfig = InputVolumeController::Config; constexpr int kLogLimiterStatsPeriodMs = 30'000; constexpr int kFrameLengthMs = 10; @@ -48,68 +50,95 @@ AvailableCpuFeatures GetAllowedCpuFeatures() { return features; } -// Creates an adaptive digital gain controller if enabled. -std::unique_ptr CreateAdaptiveDigitalController( - const Agc2Config::AdaptiveDigital& config, - int sample_rate_hz, - int num_channels, - ApmDataDumper* data_dumper) { - if (config.enabled) { - return std::make_unique( - data_dumper, config, sample_rate_hz, num_channels); +// Peak and RMS audio levels in dBFS. +struct AudioLevels { + float peak_dbfs; + float rms_dbfs; +}; + +// Speech level info. +struct SpeechLevel { + bool is_confident; + float rms_dbfs; +}; + +// Computes the audio levels for the first channel in `frame`. 
+AudioLevels ComputeAudioLevels(AudioFrameView frame, + ApmDataDumper& data_dumper) { + float peak = 0.0f; + float rms = 0.0f; + for (const auto& x : frame.channel(0)) { + peak = std::max(std::fabs(x), peak); + rms += x * x; } - return nullptr; + AudioLevels levels{ + FloatS16ToDbfs(peak), + FloatS16ToDbfs(std::sqrt(rms / frame.samples_per_channel()))}; + data_dumper.DumpRaw("agc2_input_rms_dbfs", levels.rms_dbfs); + data_dumper.DumpRaw("agc2_input_peak_dbfs", levels.peak_dbfs); + return levels; } } // namespace std::atomic GainController2::instance_count_(0); -GainController2::GainController2(const Agc2Config& config, - int sample_rate_hz, - int num_channels, - bool use_internal_vad) +GainController2::GainController2( + const Agc2Config& config, + const InputVolumeControllerConfig& input_volume_controller_config, + int sample_rate_hz, + int num_channels, + bool use_internal_vad) : cpu_features_(GetAllowedCpuFeatures()), data_dumper_(instance_count_.fetch_add(1) + 1), fixed_gain_applier_( /*hard_clip_samples=*/false, /*initial_gain_factor=*/DbToRatio(config.fixed_digital.gain_db)), - adaptive_digital_controller_( - CreateAdaptiveDigitalController(config.adaptive_digital, - sample_rate_hz, - num_channels, - &data_dumper_)), limiter_(sample_rate_hz, &data_dumper_, /*histogram_name_prefix=*/"Agc2"), calls_since_last_limiter_log_(0) { RTC_DCHECK(Validate(config)); data_dumper_.InitiateNewSetOfRecordings(); - const bool use_vad = config.adaptive_digital.enabled; - if (use_vad && use_internal_vad) { - // TODO(bugs.webrtc.org/7494): Move `vad_reset_period_ms` from adaptive - // digital to gain controller 2 config. - vad_ = std::make_unique( - config.adaptive_digital.vad_reset_period_ms, cpu_features_, - sample_rate_hz); + + if (config.input_volume_controller.enabled || + config.adaptive_digital.enabled) { + // Create dependencies. 
+ speech_level_estimator_ = std::make_unique( + &data_dumper_, config.adaptive_digital, kAdjacentSpeechFramesThreshold); + if (use_internal_vad) + vad_ = std::make_unique( + kVadResetPeriodMs, cpu_features_, sample_rate_hz); + } + + if (config.input_volume_controller.enabled) { + // Create controller. + input_volume_controller_ = std::make_unique( + num_channels, input_volume_controller_config); + // TODO(bugs.webrtc.org/7494): Call `Initialize` in ctor and remove method. + input_volume_controller_->Initialize(); + } + + if (config.adaptive_digital.enabled) { + // Create dependencies. + noise_level_estimator_ = CreateNoiseFloorEstimator(&data_dumper_); + saturation_protector_ = CreateSaturationProtector( + kSaturationProtectorInitialHeadroomDb, kAdjacentSpeechFramesThreshold, + &data_dumper_); + // Create controller. + adaptive_digital_controller_ = + std::make_unique( + &data_dumper_, config.adaptive_digital, + kAdjacentSpeechFramesThreshold); } } GainController2::~GainController2() = default; -void GainController2::Initialize(int sample_rate_hz, int num_channels) { - RTC_DCHECK(sample_rate_hz == AudioProcessing::kSampleRate8kHz || - sample_rate_hz == AudioProcessing::kSampleRate16kHz || - sample_rate_hz == AudioProcessing::kSampleRate32kHz || - sample_rate_hz == AudioProcessing::kSampleRate48kHz); - // TODO(bugs.webrtc.org/7494): Initialize `fixed_gain_applier_`. - limiter_.SetSampleRate(sample_rate_hz); - if (vad_) { - vad_->Initialize(sample_rate_hz); +// TODO(webrtc:7494): Pass the flag also to the other components. 
+void GainController2::SetCaptureOutputUsed(bool capture_output_used) { + if (input_volume_controller_) { + input_volume_controller_->HandleCaptureOutputUsedChange( + capture_output_used); } - if (adaptive_digital_controller_) { - adaptive_digital_controller_->Initialize(sample_rate_hz, num_channels); - } - data_dumper_.InitiateNewSetOfRecordings(); - calls_since_last_limiter_log_ = 0; } void GainController2::SetFixedGainDb(float gain_db) { @@ -122,39 +151,112 @@ void GainController2::SetFixedGainDb(float gain_db) { fixed_gain_applier_.SetGainFactor(gain_factor); } +void GainController2::Analyze(int applied_input_volume, + const AudioBuffer& audio_buffer) { + recommended_input_volume_ = absl::nullopt; + + RTC_DCHECK_GE(applied_input_volume, 0); + RTC_DCHECK_LE(applied_input_volume, 255); + + if (input_volume_controller_) { + input_volume_controller_->AnalyzeInputAudio(applied_input_volume, + audio_buffer); + } +} + void GainController2::Process(absl::optional speech_probability, bool input_volume_changed, AudioBuffer* audio) { + recommended_input_volume_ = absl::nullopt; + data_dumper_.DumpRaw("agc2_applied_input_volume_changed", input_volume_changed); - if (input_volume_changed && !!adaptive_digital_controller_) { - adaptive_digital_controller_->HandleInputGainChange(); + if (input_volume_changed) { + // Handle input volume changes. + if (speech_level_estimator_) + speech_level_estimator_->Reset(); + if (saturation_protector_) + saturation_protector_->Reset(); } AudioFrameView float_frame(audio->channels(), audio->num_channels(), audio->num_frames()); + // Compute speech probability. 
if (vad_) { speech_probability = vad_->Analyze(float_frame); } else if (speech_probability.has_value()) { - RTC_DCHECK_GE(speech_probability.value(), 0.0f); - RTC_DCHECK_LE(speech_probability.value(), 1.0f); + RTC_DCHECK_GE(*speech_probability, 0.0f); + RTC_DCHECK_LE(*speech_probability, 1.0f); } - if (speech_probability.has_value()) { - data_dumper_.DumpRaw("agc2_speech_probability", speech_probability.value()); + // The speech probability may not be defined at this step (e.g., when the + // fixed digital controller alone is enabled). + if (speech_probability.has_value()) + data_dumper_.DumpRaw("agc2_speech_probability", *speech_probability); + + // Compute audio, noise and speech levels. + AudioLevels audio_levels = ComputeAudioLevels(float_frame, data_dumper_); + absl::optional noise_rms_dbfs; + if (noise_level_estimator_) { + // TODO(bugs.webrtc.org/7494): Pass `audio_levels` to remove duplicated + // computation in `noise_level_estimator_`. + noise_rms_dbfs = noise_level_estimator_->Analyze(float_frame); } - fixed_gain_applier_.ApplyGain(float_frame); - if (adaptive_digital_controller_) { + absl::optional speech_level; + if (speech_level_estimator_) { RTC_DCHECK(speech_probability.has_value()); - adaptive_digital_controller_->Process( - float_frame, speech_probability.value(), limiter_.LastAudioLevel()); + speech_level_estimator_->Update( + audio_levels.rms_dbfs, audio_levels.peak_dbfs, *speech_probability); + speech_level = + SpeechLevel{.is_confident = speech_level_estimator_->is_confident(), + .rms_dbfs = speech_level_estimator_->level_dbfs()}; } + + // Update the recommended input volume. + if (input_volume_controller_) { + RTC_DCHECK(speech_level.has_value()); + RTC_DCHECK(speech_probability.has_value()); + if (speech_probability.has_value()) { + recommended_input_volume_ = + input_volume_controller_->RecommendInputVolume( + *speech_probability, + speech_level->is_confident + ? 
absl::optional(speech_level->rms_dbfs) + : absl::nullopt); + } + } + + if (adaptive_digital_controller_) { + RTC_DCHECK(saturation_protector_); + RTC_DCHECK(speech_probability.has_value()); + RTC_DCHECK(speech_level.has_value()); + saturation_protector_->Analyze(*speech_probability, audio_levels.peak_dbfs, + speech_level->rms_dbfs); + float headroom_db = saturation_protector_->HeadroomDb(); + data_dumper_.DumpRaw("agc2_headroom_db", headroom_db); + float limiter_envelope_dbfs = FloatS16ToDbfs(limiter_.LastAudioLevel()); + data_dumper_.DumpRaw("agc2_limiter_envelope_dbfs", limiter_envelope_dbfs); + RTC_DCHECK(noise_rms_dbfs.has_value()); + adaptive_digital_controller_->Process( + /*info=*/{.speech_probability = *speech_probability, + .speech_level_dbfs = speech_level->rms_dbfs, + .speech_level_reliable = speech_level->is_confident, + .noise_rms_dbfs = *noise_rms_dbfs, + .headroom_db = headroom_db, + .limiter_envelope_dbfs = limiter_envelope_dbfs}, + float_frame); + } + + // TODO(bugs.webrtc.org/7494): Pass `audio_levels` to remove duplicated + // computation in `limiter_`. + fixed_gain_applier_.ApplyGain(float_frame); + limiter_.Process(float_frame); // Periodically log limiter stats. 
if (++calls_since_last_limiter_log_ == kLogLimiterStatsPeriodNumFrames) { calls_since_last_limiter_log_ = 0; InterpolatedGainCurve::Stats stats = limiter_.GetGainCurveStats(); - RTC_LOG(LS_INFO) << "AGC2 limiter stats" + RTC_LOG(LS_INFO) << "[AGC2] limiter stats" << " | identity: " << stats.look_ups_identity_region << " | knee: " << stats.look_ups_knee_region << " | limiter: " << stats.look_ups_limiter_region @@ -166,7 +268,7 @@ bool GainController2::Validate( const AudioProcessing::Config::GainController2& config) { const auto& fixed = config.fixed_digital; const auto& adaptive = config.adaptive_digital; - return fixed.gain_db >= 0.0f && fixed.gain_db < 50.f && + return fixed.gain_db >= 0.0f && fixed.gain_db < 50.0f && adaptive.headroom_db >= 0.0f && adaptive.max_gain_db > 0.0f && adaptive.initial_gain_db >= 0.0f && adaptive.max_gain_change_db_per_second > 0.0f && diff --git a/modules/audio_processing/gain_controller2.h b/modules/audio_processing/gain_controller2.h index ec3816fee1..43b5828d35 100644 --- a/modules/audio_processing/gain_controller2.h +++ b/modules/audio_processing/gain_controller2.h @@ -18,7 +18,11 @@ #include "modules/audio_processing/agc2/adaptive_digital_gain_controller.h" #include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/gain_applier.h" +#include "modules/audio_processing/agc2/input_volume_controller.h" #include "modules/audio_processing/agc2/limiter.h" +#include "modules/audio_processing/agc2/noise_level_estimator.h" +#include "modules/audio_processing/agc2/saturation_protector.h" +#include "modules/audio_processing/agc2/speech_level_estimator.h" #include "modules/audio_processing/agc2/vad_wrapper.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" @@ -33,26 +37,37 @@ class GainController2 { public: // Ctor. If `use_internal_vad` is true, an internal voice activity // detector is used for digital adaptive gain. 
- GainController2(const AudioProcessing::Config::GainController2& config, - int sample_rate_hz, - int num_channels, - bool use_internal_vad); + GainController2( + const AudioProcessing::Config::GainController2& config, + const InputVolumeController::Config& input_volume_controller_config, + int sample_rate_hz, + int num_channels, + bool use_internal_vad); GainController2(const GainController2&) = delete; GainController2& operator=(const GainController2&) = delete; ~GainController2(); - // Detects and handles changes of sample rate and/or number of channels. - void Initialize(int sample_rate_hz, int num_channels); - // Sets the fixed digital gain. void SetFixedGainDb(float gain_db); - // Applies fixed and adaptive digital gains to `audio` and runs a limiter. - // If the internal VAD is used, `speech_probability` is ignored. Otherwise - // `speech_probability` is used for digital adaptive gain if it's available - // (limited to values [0.0, 1.0]). Handles input volume changes; if the caller - // cannot determine whether an input volume change occurred, set - // `input_volume_changed` to false. + // Updates the input volume controller about whether the capture output is + // used or not. + void SetCaptureOutputUsed(bool capture_output_used); + + // Analyzes `audio_buffer` before `Process()` is called so that the analysis + // can be performed before digital processing operations take place (e.g., + // echo cancellation). The analysis consists of input clipping detection and + // prediction (if enabled). The value of `applied_input_volume` is limited to + // [0, 255]. + void Analyze(int applied_input_volume, const AudioBuffer& audio_buffer); + + // Updates the recommended input volume, applies the adaptive digital and the + // fixed digital gains and runs a limiter on `audio`. + // When the internal VAD is not used, `speech_probability` should be specified + // and in the [0, 1] range. 
Otherwise ignores `speech_probability` and + // computes the speech probability via `vad_`. + // Handles input volume changes; if the caller cannot determine whether an + // input volume change occurred, set `input_volume_changed` to false. void Process(absl::optional speech_probability, bool input_volume_changed, AudioBuffer* audio); @@ -61,15 +76,33 @@ class GainController2 { AvailableCpuFeatures GetCpuFeatures() const { return cpu_features_; } + absl::optional recommended_input_volume() const { + return recommended_input_volume_; + } + private: static std::atomic instance_count_; const AvailableCpuFeatures cpu_features_; ApmDataDumper data_dumper_; + GainApplier fixed_gain_applier_; + std::unique_ptr noise_level_estimator_; std::unique_ptr vad_; + std::unique_ptr speech_level_estimator_; + std::unique_ptr input_volume_controller_; + // TODO(bugs.webrtc.org/7494): Rename to `CrestFactorEstimator`. + std::unique_ptr saturation_protector_; std::unique_ptr adaptive_digital_controller_; Limiter limiter_; + int calls_since_last_limiter_log_; + + // TODO(bugs.webrtc.org/7494): Remove intermediate storing at this level once + // APM refactoring is completed. + // Recommended input volume from `InputVolumecontroller`. Non-empty after + // `Process()` if input volume controller is enabled and + // `InputVolumeController::Process()` has returned a non-empty value. 
+ absl::optional recommended_input_volume_; }; } // namespace webrtc diff --git a/modules/audio_processing/gain_controller2_unittest.cc b/modules/audio_processing/gain_controller2_unittest.cc index 83ea5f1343..c3d0e5947a 100644 --- a/modules/audio_processing/gain_controller2_unittest.cc +++ b/modules/audio_processing/gain_controller2_unittest.cc @@ -22,13 +22,18 @@ #include "modules/audio_processing/test/audio_buffer_tools.h" #include "modules/audio_processing/test/bitexactness_tools.h" #include "rtc_base/checks.h" +#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace test { namespace { +using ::testing::Eq; +using ::testing::Optional; + using Agc2Config = AudioProcessing::Config::GainController2; +using InputVolumeControllerConfig = InputVolumeController::Config; // Sets all the samples in `ab` to `value`. void SetAudioBufferSamples(float value, AudioBuffer& ab) { @@ -40,13 +45,19 @@ void SetAudioBufferSamples(float value, AudioBuffer& ab) { float RunAgc2WithConstantInput(GainController2& agc2, float input_level, int num_frames, - int sample_rate_hz) { + int sample_rate_hz, + int num_channels = 1, + int applied_initial_volume = 0) { const int num_samples = rtc::CheckedDivExact(sample_rate_hz, 100); - AudioBuffer ab(sample_rate_hz, 1, sample_rate_hz, 1, sample_rate_hz, 1); + AudioBuffer ab(sample_rate_hz, num_channels, sample_rate_hz, num_channels, + sample_rate_hz, num_channels); // Give time to the level estimator to converge. 
for (int i = 0; i < num_frames + 1; ++i) { SetAudioBufferSamples(input_level, ab); + const auto applied_volume = agc2.recommended_input_volume(); + agc2.Analyze(applied_volume.value_or(applied_initial_volume), ab); + agc2.Process(/*speech_probability=*/absl::nullopt, /*input_volume_changed=*/false, &ab); } @@ -62,11 +73,25 @@ std::unique_ptr CreateAgc2FixedDigitalMode( config.adaptive_digital.enabled = false; config.fixed_digital.gain_db = fixed_gain_db; EXPECT_TRUE(GainController2::Validate(config)); - return std::make_unique(config, sample_rate_hz, - /*num_channels=*/1, - /*use_internal_vad=*/true); + return std::make_unique( + config, InputVolumeControllerConfig{}, sample_rate_hz, + /*num_channels=*/1, + /*use_internal_vad=*/true); } +constexpr InputVolumeControllerConfig kTestInputVolumeControllerConfig{ + .clipped_level_min = 20, + .clipped_level_step = 30, + .clipped_ratio_threshold = 0.4, + .clipped_wait_frames = 50, + .enable_clipping_predictor = true, + .target_range_max_dbfs = -6, + .target_range_min_dbfs = -70, + .update_input_volume_wait_frames = 100, + .speech_probability_threshold = 0.9, + .speech_ratio_threshold = 1, +}; + } // namespace TEST(GainController2, CheckDefaultConfig) { @@ -137,10 +162,139 @@ TEST(GainController2, CheckAdaptiveDigitalMaxOutputNoiseLevelConfig) { EXPECT_TRUE(GainController2::Validate(config)); } +TEST(GainController2, + CheckGetRecommendedInputVolumeWhenInputVolumeControllerNotEnabled) { + constexpr float kHighInputLevel = 32767.0f; + constexpr float kLowInputLevel = 1000.0f; + constexpr int kInitialInputVolume = 100; + constexpr int kNumChannels = 2; + constexpr int kNumFrames = 5; + constexpr int kSampleRateHz = 16000; + + Agc2Config config; + config.input_volume_controller.enabled = false; + + auto gain_controller = std::make_unique( + config, InputVolumeControllerConfig{}, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC 
for a signal with no clipping or detected speech. + RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with clipping. + RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); +} + +TEST( + GainController2, + CheckGetRecommendedInputVolumeWhenInputVolumeControllerNotEnabledAndSpecificConfigUsed) { + constexpr float kHighInputLevel = 32767.0f; + constexpr float kLowInputLevel = 1000.0f; + constexpr int kInitialInputVolume = 100; + constexpr int kNumChannels = 2; + constexpr int kNumFrames = 5; + constexpr int kSampleRateHz = 16000; + + Agc2Config config; + config.input_volume_controller.enabled = false; + + auto gain_controller = std::make_unique( + config, kTestInputVolumeControllerConfig, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with no clipping or detected speech. + RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with clipping. 
+ RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); +} + +TEST(GainController2, + CheckGetRecommendedInputVolumeWhenInputVolumeControllerEnabled) { + constexpr float kHighInputLevel = 32767.0f; + constexpr float kLowInputLevel = 1000.0f; + constexpr int kInitialInputVolume = 100; + constexpr int kNumChannels = 2; + constexpr int kNumFrames = 5; + constexpr int kSampleRateHz = 16000; + + Agc2Config config; + config.input_volume_controller.enabled = true; + config.adaptive_digital.enabled = true; + + auto gain_controller = std::make_unique( + config, InputVolumeControllerConfig{}, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with no clipping or detected speech. + RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with clipping. 
+ RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); +} + +TEST( + GainController2, + CheckGetRecommendedInputVolumeWhenInputVolumeControllerEnabledAndSpecificConfigUsed) { + constexpr float kHighInputLevel = 32767.0f; + constexpr float kLowInputLevel = 1000.0f; + constexpr int kInitialInputVolume = 100; + constexpr int kNumChannels = 2; + constexpr int kNumFrames = 5; + constexpr int kSampleRateHz = 16000; + + Agc2Config config; + config.input_volume_controller.enabled = true; + config.adaptive_digital.enabled = true; + + auto gain_controller = std::make_unique( + config, kTestInputVolumeControllerConfig, kSampleRateHz, kNumChannels, + /*use_internal_vad=*/true); + + EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with no clipping or detected speech. + RunAgc2WithConstantInput(*gain_controller, kLowInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); + + // Run AGC for a signal with clipping. + RunAgc2WithConstantInput(*gain_controller, kHighInputLevel, kNumFrames, + kSampleRateHz, kNumChannels, kInitialInputVolume); + + EXPECT_TRUE(gain_controller->recommended_input_volume().has_value()); +} + // Checks that the default config is applied. 
TEST(GainController2, ApplyDefaultConfig) { auto gain_controller2 = std::make_unique( - Agc2Config{}, /*sample_rate_hz=*/16000, /*num_channels=*/2, + Agc2Config{}, InputVolumeControllerConfig{}, + /*sample_rate_hz=*/16000, /*num_channels=*/2, /*use_internal_vad=*/true); EXPECT_TRUE(gain_controller2.get()); } @@ -256,7 +410,8 @@ TEST(GainController2, CheckFinalGainWithAdaptiveDigitalController) { Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); test::InputAudioFile input_file( @@ -311,9 +466,11 @@ TEST(GainController2, Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); - GainController2 agc2_reference(config, kSampleRateHz, kStereo, + GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); test::InputAudioFile input_file( @@ -378,9 +535,11 @@ TEST(GainController2, Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/false); - GainController2 agc2_reference(config, kSampleRateHz, kStereo, + GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); test::InputAudioFile input_file( @@ -447,12 +606,13 @@ TEST(GainController2, Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, kSampleRateHz, 
kStereo, + GainController2 agc2(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/false); - GainController2 agc2_reference(config, kSampleRateHz, kStereo, + GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + kSampleRateHz, kStereo, /*use_internal_vad=*/true); - VoiceActivityDetectorWrapper vad(config.adaptive_digital.vad_reset_period_ms, - GetAvailableCpuFeatures(), kSampleRateHz); + VoiceActivityDetectorWrapper vad(GetAvailableCpuFeatures(), kSampleRateHz); test::InputAudioFile input_file( test::GetApmCaptureTestVectorFileName(kSampleRateHz), /*loop_at_end=*/true); diff --git a/modules/audio_processing/include/audio_processing.cc b/modules/audio_processing/include/audio_processing.cc index 86edaee087..13ddcc588a 100644 --- a/modules/audio_processing/include/audio_processing.cc +++ b/modules/audio_processing/include/audio_processing.cc @@ -87,20 +87,23 @@ bool Agc1Config::operator==(const Agc1Config& rhs) const { bool Agc2Config::AdaptiveDigital::operator==( const Agc2Config::AdaptiveDigital& rhs) const { - return enabled == rhs.enabled && dry_run == rhs.dry_run && - headroom_db == rhs.headroom_db && max_gain_db == rhs.max_gain_db && + return enabled == rhs.enabled && headroom_db == rhs.headroom_db && + max_gain_db == rhs.max_gain_db && initial_gain_db == rhs.initial_gain_db && - vad_reset_period_ms == rhs.vad_reset_period_ms && - adjacent_speech_frames_threshold == - rhs.adjacent_speech_frames_threshold && max_gain_change_db_per_second == rhs.max_gain_change_db_per_second && max_output_noise_level_dbfs == rhs.max_output_noise_level_dbfs; } +bool Agc2Config::InputVolumeController::operator==( + const Agc2Config::InputVolumeController& rhs) const { + return enabled == rhs.enabled; +} + bool Agc2Config::operator==(const Agc2Config& rhs) const { return enabled == rhs.enabled && fixed_digital.gain_db == rhs.fixed_digital.gain_db && - adaptive_digital == rhs.adaptive_digital; + adaptive_digital == 
rhs.adaptive_digital && + input_volume_controller == rhs.input_volume_controller; } bool AudioProcessing::Config::CaptureLevelAdjustment::operator==( @@ -191,20 +194,16 @@ std::string AudioProcessing::Config::ToString() const { << gain_controller2.fixed_digital.gain_db << " }, adaptive_digital: { enabled: " << gain_controller2.adaptive_digital.enabled - << ", dry_run: " << gain_controller2.adaptive_digital.dry_run << ", headroom_db: " << gain_controller2.adaptive_digital.headroom_db << ", max_gain_db: " << gain_controller2.adaptive_digital.max_gain_db << ", initial_gain_db: " << gain_controller2.adaptive_digital.initial_gain_db - << ", vad_reset_period_ms: " - << gain_controller2.adaptive_digital.vad_reset_period_ms - << ", adjacent_speech_frames_threshold: " - << gain_controller2.adaptive_digital.adjacent_speech_frames_threshold << ", max_gain_change_db_per_second: " << gain_controller2.adaptive_digital.max_gain_change_db_per_second << ", max_output_noise_level_dbfs: " << gain_controller2.adaptive_digital.max_output_noise_level_dbfs - << "}}"; + << " }, input_volume_control : { enabled " + << gain_controller2.input_volume_controller.enabled << "}}"; return builder.str(); } diff --git a/modules/audio_processing/include/audio_processing.h b/modules/audio_processing/include/audio_processing.h index 5b9c54f22f..4904bd5d8e 100644 --- a/modules/audio_processing/include/audio_processing.h +++ b/modules/audio_processing/include/audio_processing.h @@ -51,19 +51,6 @@ class EchoDetector; class CustomAudioAnalyzer; class CustomProcessing; -// Use to enable experimental gain control (AGC). At startup the experimental -// AGC moves the microphone volume up to `startup_min_volume` if the current -// microphone volume is set too low. The value is clamped to its operating range -// [12, 255]. Here, 255 maps to 100%. -// -// Must be provided through AudioProcessingBuilder().Create(config). 
-#if defined(WEBRTC_CHROMIUM_BUILD) -static constexpr int kAgcStartupMinVolume = 85; -#else -static constexpr int kAgcStartupMinVolume = 0; -#endif // defined(WEBRTC_CHROMIUM_BUILD) -static constexpr int kClippedLevelMin = 70; - // The Audio Processing Module (APM) provides a collection of voice processing // components designed for real-time communications software. // @@ -94,11 +81,12 @@ static constexpr int kClippedLevelMin = 70; // setter. // // APM accepts only linear PCM audio data in chunks of ~10 ms (see -// AudioProcessing::GetFrameSize() for details). The int16 interfaces use -// interleaved data, while the float interfaces use deinterleaved data. +// AudioProcessing::GetFrameSize() for details) and sample rates ranging from +// 8000 Hz to 384000 Hz. The int16 interfaces use interleaved data, while the +// float interfaces use deinterleaved data. // // Usage example, omitting error checking: -// AudioProcessing* apm = AudioProcessingBuilder().Create(); +// rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); // // AudioProcessing::Config config; // config.echo_canceller.enabled = true; @@ -116,9 +104,6 @@ static constexpr int kClippedLevelMin = 70; // // apm->ApplyConfig(config) // -// apm->noise_reduction()->set_level(kHighSuppression); -// apm->noise_reduction()->Enable(true); -// // // Start a voice call... // // // ... Render frame arrives bound for the audio HAL ... @@ -140,7 +125,7 @@ static constexpr int kClippedLevelMin = 70; // apm->Initialize(); // // // Close the application... -// delete apm; +// apm.reset(); // class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { public: @@ -161,6 +146,12 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { struct RTC_EXPORT Config { // Sets the properties of the audio processing pipeline. struct RTC_EXPORT Pipeline { + // Ways to downmix a multi-channel track to mono. + enum class DownmixMethod { + kAverageChannels, // Average across channels. 
+ kUseFirstChannel // Use the first channel. + }; + // Maximum allowed processing rate used internally. May only be set to // 32000 or 48000 and any differing values will be treated as 48000. int maximum_internal_processing_rate = 48000; @@ -169,6 +160,9 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Allow multi-channel processing of capture audio when AEC3 is active // or a custom AEC is injected.. bool multi_channel_capture = false; + // Indicates how to downmix multi-channel capture audio to mono (when + // needed). + DownmixMethod capture_downmix_method = DownmixMethod::kAverageChannels; } pipeline; // Enabled the pre-amplifier. It amplifies the capture signal @@ -287,11 +281,11 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Enables the analog gain controller functionality. struct AnalogGainController { bool enabled = true; - // TODO(bugs.webrtc.org/1275566): Describe `startup_min_volume`. - int startup_min_volume = kAgcStartupMinVolume; + // TODO(bugs.webrtc.org/7494): Deprecated. Stop using and remove. + int startup_min_volume = 0; // Lowest analog microphone level that will be applied in response to // clipping. - int clipped_level_min = kClippedLevelMin; + int clipped_level_min = 70; // If true, an adaptive digital gain is applied. bool enable_digital_adaptive = true; // Amount the microphone level is lowered with every clipping event. @@ -334,42 +328,56 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { } analog_gain_controller; } gain_controller1; - // Enables the next generation AGC functionality. This feature replaces the - // standard methods of gain control in the previous AGC. Enabling this - // submodule enables an adaptive digital AGC followed by a limiter. By - // setting `fixed_gain_db`, the limiter can be turned into a compressor that - // first applies a fixed gain. The adaptive digital AGC can be turned off by - // setting |adaptive_digital_mode=false|. 
+ // Parameters for AGC2, an Automatic Gain Control (AGC) sub-module which + // replaces the AGC sub-module parametrized by `gain_controller1`. + // AGC2 brings the captured audio signal to the desired level by combining + // three different controllers (namely, input volume controller, adapative + // digital controller and fixed digital controller) and a limiter. + // TODO(bugs.webrtc.org:7494): Name `GainController` when AGC1 removed. struct RTC_EXPORT GainController2 { bool operator==(const GainController2& rhs) const; bool operator!=(const GainController2& rhs) const { return !(*this == rhs); } + // AGC2 must be created if and only if `enabled` is true. bool enabled = false; - struct FixedDigital { - float gain_db = 0.0f; - } fixed_digital; + + // Parameters for the input volume controller, which adjusts the input + // volume applied when the audio is captured (e.g., microphone volume on + // a soundcard, input volume on HAL). + struct InputVolumeController { + bool operator==(const InputVolumeController& rhs) const; + bool operator!=(const InputVolumeController& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + } input_volume_controller; + + // Parameters for the adaptive digital controller, which adjusts and + // applies a digital gain after echo cancellation and after noise + // suppression. struct RTC_EXPORT AdaptiveDigital { bool operator==(const AdaptiveDigital& rhs) const; bool operator!=(const AdaptiveDigital& rhs) const { return !(*this == rhs); } - bool enabled = false; - // When true, the adaptive digital controller runs but the signal is not - // modified. - bool dry_run = false; float headroom_db = 6.0f; - // TODO(bugs.webrtc.org/7494): Consider removing and inferring from - // `max_output_noise_level_dbfs`. 
float max_gain_db = 30.0f; float initial_gain_db = 8.0f; - int vad_reset_period_ms = 1500; - int adjacent_speech_frames_threshold = 12; float max_gain_change_db_per_second = 3.0f; float max_output_noise_level_dbfs = -50.0f; } adaptive_digital; + + // Parameters for the fixed digital controller, which applies a fixed + // digital gain after the adaptive digital controller and before the + // limiter. + struct FixedDigital { + // By setting `gain_db` to a value greater than zero, the limiter can be + // turned into a compressor that first applies a fixed gain. + float gain_db = 0.0f; + } fixed_digital; } gain_controller2; std::string ToString() const; diff --git a/modules/audio_processing/test/debug_dump_test.cc b/modules/audio_processing/test/debug_dump_test.cc index d69d3a4eea..cded5de217 100644 --- a/modules/audio_processing/test/debug_dump_test.cc +++ b/modules/audio_processing/test/debug_dump_test.cc @@ -352,8 +352,6 @@ TEST_F(DebugDumpTest, VerifyCombinedExperimentalStringInclusive) { apm_config.echo_canceller.enabled = true; apm_config.gain_controller1.analog_gain_controller.enabled = true; apm_config.gain_controller1.analog_gain_controller.startup_min_volume = 0; - // Arbitrarily set clipping gain to 17, which will never be the default. 
- apm_config.gain_controller1.analog_gain_controller.clipped_level_min = 17; DebugDumpGenerator generator(apm_config); generator.StartRecording(); generator.Process(100); @@ -371,8 +369,6 @@ TEST_F(DebugDumpTest, VerifyCombinedExperimentalStringInclusive) { ASSERT_TRUE(msg->has_experiments_description()); EXPECT_PRED_FORMAT2(::testing::IsSubstring, "EchoController", msg->experiments_description().c_str()); - EXPECT_PRED_FORMAT2(::testing::IsSubstring, "AgcClippingLevelExperiment", - msg->experiments_description().c_str()); } } } @@ -426,33 +422,6 @@ TEST_F(DebugDumpTest, VerifyAec3ExperimentalString) { } } -TEST_F(DebugDumpTest, VerifyAgcClippingLevelExperimentalString) { - AudioProcessing::Config apm_config; - apm_config.gain_controller1.analog_gain_controller.enabled = true; - apm_config.gain_controller1.analog_gain_controller.startup_min_volume = 0; - // Arbitrarily set clipping gain to 17, which will never be the default. - apm_config.gain_controller1.analog_gain_controller.clipped_level_min = 17; - DebugDumpGenerator generator(apm_config); - generator.StartRecording(); - generator.Process(100); - generator.StopRecording(); - - DebugDumpReplayer debug_dump_replayer_; - - ASSERT_TRUE(debug_dump_replayer_.SetDumpFile(generator.dump_file_name())); - - while (const absl::optional event = - debug_dump_replayer_.GetNextEvent()) { - debug_dump_replayer_.RunNextEvent(); - if (event->type() == audioproc::Event::CONFIG) { - const audioproc::Config* msg = &event->config(); - ASSERT_TRUE(msg->has_experiments_description()); - EXPECT_PRED_FORMAT2(::testing::IsSubstring, "AgcClippingLevelExperiment", - msg->experiments_description().c_str()); - } - } -} - TEST_F(DebugDumpTest, VerifyEmptyExperimentalString) { DebugDumpGenerator generator(/*apm_config=*/{}); generator.StartRecording(); diff --git a/modules/congestion_controller/BUILD.gn b/modules/congestion_controller/BUILD.gn index 774fc84d67..13b0d539b8 100644 --- a/modules/congestion_controller/BUILD.gn +++ 
b/modules/congestion_controller/BUILD.gn @@ -27,7 +27,6 @@ rtc_library("congestion_controller") { ] deps = [ - "../../api/transport:field_trial_based_config", "../../api/transport:network_control", "../../api/units:data_rate", "../../api/units:time_delta", diff --git a/modules/congestion_controller/goog_cc/BUILD.gn b/modules/congestion_controller/goog_cc/BUILD.gn index 1335a1ebc7..150201e1bd 100644 --- a/modules/congestion_controller/goog_cc/BUILD.gn +++ b/modules/congestion_controller/goog_cc/BUILD.gn @@ -27,6 +27,7 @@ rtc_library("goog_cc") { ":alr_detector", ":delay_based_bwe", ":estimators", + ":loss_based_bwe_v2", ":probe_controller", ":pushback_controller", ":send_side_bwe", diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.h b/modules/congestion_controller/goog_cc/delay_based_bwe.h index 21dff35735..e91a1dff54 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -82,6 +82,7 @@ class DelayBasedBwe { DataRate TriggerOveruse(Timestamp at_time, absl::optional link_capacity); DataRate last_estimate() const { return prev_bitrate_; } + BandwidthUsage last_state() const { return prev_state_; } private: friend class GoogCcStatePrinter; diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index 4bd3780cba..29a0e9b1cb 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -22,11 +22,13 @@ #include #include "absl/strings/match.h" +#include "api/network_state_predictor.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" +#include "modules/congestion_controller/goog_cc/loss_based_bwe_v2.h" #include 
"modules/congestion_controller/goog_cc/probe_controller.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" @@ -59,6 +61,26 @@ bool IsEnabled(const FieldTrialsView* config, absl::string_view key) { bool IsNotDisabled(const FieldTrialsView* config, absl::string_view key) { return !absl::StartsWith(config->Lookup(key), "Disabled"); } + +BandwidthLimitedCause GetBandwidthLimitedCause( + LossBasedState loss_based_state, + BandwidthUsage bandwidth_usage, + bool not_probe_if_delay_increased) { + if (not_probe_if_delay_increased && + (bandwidth_usage == BandwidthUsage::kBwOverusing || + bandwidth_usage == BandwidthUsage::kBwUnderusing)) { + return BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased; + } + switch (loss_based_state) { + case LossBasedState::kDecreasing: + return BandwidthLimitedCause::kLossLimitedBweDecreasing; + case LossBasedState::kIncreasing: + return BandwidthLimitedCause::kLossLimitedBweIncreasing; + default: + return BandwidthLimitedCause::kDelayBasedLimited; + } +} + } // namespace GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, @@ -79,13 +101,9 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, "WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate")), rate_control_settings_( RateControlSettings::ParseFromKeyValueConfig(key_value_config_)), - loss_based_stable_rate_( - IsEnabled(key_value_config_, "WebRTC-Bwe-LossBasedStableRate")), pace_at_max_of_bwe_and_lower_link_capacity_( IsEnabled(key_value_config_, "WebRTC-Bwe-PaceAtMaxOfBweAndLowerLinkCapacity")), - pace_at_loss_based_bwe_when_loss_( - IsEnabled(key_value_config_, "WebRTC-Bwe-PaceAtLossBaseBweWhenLoss")), probe_controller_( new ProbeController(key_value_config_, config.event_log)), congestion_window_pushback_controller_( @@ -117,8 +135,7 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, 
config.stream_based_config.min_total_allocated_bitrate.value_or( DataRate::Zero())), max_padding_rate_(config.stream_based_config.max_padding_rate.value_or( - DataRate::Zero())), - max_total_allocated_bitrate_(DataRate::Zero()) { + DataRate::Zero())) { RTC_DCHECK(config.constraints.at_time.IsFinite()); ParseFieldTrial( {&safe_reset_on_route_change_, &safe_reset_acknowledged_rate_}, @@ -191,8 +208,6 @@ NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( *total_bitrate, msg.at_time); update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), probes.begin(), probes.end()); - - max_total_allocated_bitrate_ = *total_bitrate; } initial_config_.reset(); } @@ -286,17 +301,12 @@ NetworkControlUpdate GoogCcNetworkController::OnStreamsConfig( if (msg.requests_alr_probing) { probe_controller_->EnablePeriodicAlrProbing(*msg.requests_alr_probing); } - if (msg.max_total_allocated_bitrate && - *msg.max_total_allocated_bitrate != max_total_allocated_bitrate_) { - if (rate_control_settings_.TriggerProbeOnMaxAllocatedBitrateChange()) { - update.probe_cluster_configs = - probe_controller_->OnMaxTotalAllocatedBitrate( - *msg.max_total_allocated_bitrate, msg.at_time); - } else { - probe_controller_->SetMaxBitrate(*msg.max_total_allocated_bitrate); - } - max_total_allocated_bitrate_ = *msg.max_total_allocated_bitrate; + if (msg.max_total_allocated_bitrate) { + update.probe_cluster_configs = + probe_controller_->OnMaxTotalAllocatedBitrate( + *msg.max_total_allocated_bitrate, msg.at_time); } + bool pacing_changed = false; if (msg.pacing_factor && *msg.pacing_factor != pacing_factor_) { pacing_factor_ = *msg.pacing_factor; @@ -550,7 +560,8 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( result.target_bitrate); } bandwidth_estimation_->UpdateLossBasedEstimator( - report, result.delay_detector_state, probe_bitrate); + report, result.delay_detector_state, probe_bitrate, + estimate_ ? 
estimate_->link_capacity_upper : DataRate::PlusInfinity()); if (result.updated) { // Update the estimate in the ProbeController, in case we want to probe. MaybeTriggerOnNetworkChanged(&update, report.feedback_time); @@ -614,10 +625,6 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( uint8_t fraction_loss = bandwidth_estimation_->fraction_loss(); TimeDelta round_trip_time = bandwidth_estimation_->round_trip_time(); DataRate loss_based_target_rate = bandwidth_estimation_->target_rate(); - bool bwe_limited_due_to_packet_loss = - loss_based_target_rate.IsFinite() && - bandwidth_estimation_->delay_based_limit().IsFinite() && - loss_based_target_rate < bandwidth_estimation_->delay_based_limit(); DataRate pushback_target_rate = loss_based_target_rate; BWE_TEST_LOGGING_PLOT(1, "fraction_loss_%", at_time.ms(), @@ -642,11 +649,7 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( } DataRate stable_target_rate = bandwidth_estimation_->GetEstimatedLinkCapacity(); - if (loss_based_stable_rate_) { - stable_target_rate = std::min(stable_target_rate, loss_based_target_rate); - } else { - stable_target_rate = std::min(stable_target_rate, pushback_target_rate); - } + stable_target_rate = std::min(stable_target_rate, pushback_target_rate); if ((loss_based_target_rate != last_loss_based_target_rate_) || (fraction_loss != last_estimated_fraction_loss_) || @@ -680,7 +683,12 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( update->target_rate = target_rate_msg; auto probes = probe_controller_->SetEstimatedBitrate( - loss_based_target_rate, bwe_limited_due_to_packet_loss, at_time); + loss_based_target_rate, + GetBandwidthLimitedCause( + bandwidth_estimation_->loss_based_state(), + delay_based_bwe_->last_state(), + probe_controller_->DontProbeIfDelayIncreased()), + at_time); update->probe_cluster_configs.insert(update->probe_cluster_configs.end(), probes.begin(), probes.end()); update->pacer_config = GetPacingRates(at_time); @@ -694,10 +702,7 @@ 
PacerConfig GoogCcNetworkController::GetPacingRates(Timestamp at_time) const { // Pacing rate is based on target rate before congestion window pushback, // because we don't want to build queues in the pacer when pushback occurs. DataRate pacing_rate = DataRate::Zero(); - if ((pace_at_max_of_bwe_and_lower_link_capacity_ || - (pace_at_loss_based_bwe_when_loss_ && - last_loss_based_target_rate_ >= delay_based_bwe_->last_estimate())) && - estimate_) { + if (pace_at_max_of_bwe_and_lower_link_capacity_ && estimate_) { pacing_rate = std::max({min_total_allocated_bitrate_, estimate_->link_capacity_lower, last_loss_based_target_rate_}) * diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/modules/congestion_controller/goog_cc/goog_cc_network_control.h index 884b572740..37a064e37c 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control.h +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -93,9 +93,7 @@ class GoogCcNetworkController : public NetworkControllerInterface { const bool ignore_probes_lower_than_network_estimate_; const bool limit_probes_lower_than_throughput_estimate_; const RateControlSettings rate_control_settings_; - const bool loss_based_stable_rate_; const bool pace_at_max_of_bwe_and_lower_link_capacity_; - const bool pace_at_loss_based_bwe_when_loss_; const std::unique_ptr probe_controller_; const std::unique_ptr @@ -138,7 +136,6 @@ class GoogCcNetworkController : public NetworkControllerInterface { double pacing_factor_; DataRate min_total_allocated_bitrate_; DataRate max_padding_rate_; - DataRate max_total_allocated_bitrate_; bool previously_in_alr_ = false; diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc index 8ba556c20e..7e051f505b 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc +++ 
b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc @@ -140,7 +140,6 @@ absl::optional PacketTransmissionAndFeedbackBlock( // Scenarios: void UpdatesTargetRateBasedOnLinkCapacity(absl::string_view test_name = "") { - ScopedFieldTrials trial("WebRTC-SendSideBwe-WithOverhead/Enabled/"); auto factory = CreateFeedbackOnlyFactory(); Scenario s("googcc_unit/target_capacity" + std::string(test_name), false); CallClientConfig config; @@ -665,55 +664,6 @@ DataRate AverageBitrateAfterCrossInducedLoss(absl::string_view name) { s.TimeSinceStart(); } -TEST(GoogCcScenario, LossBasedRecoversFasterAfterCrossInducedLoss) { - // This test acts as a reference for the test below, showing that without the - // trial, we have worse behavior. - DataRate average_bitrate_without_loss_based = - AverageBitrateAfterCrossInducedLoss("googcc_unit/no_cross_loss_based"); - - // We recover bitrate better when subject to loss spikes from cross traffic - // when loss based controller is used. - ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); - DataRate average_bitrate_with_loss_based = - AverageBitrateAfterCrossInducedLoss("googcc_unit/cross_loss_based"); - - EXPECT_GE(average_bitrate_with_loss_based, - average_bitrate_without_loss_based * 1.05); -} - -TEST(GoogCcScenario, LossBasedEstimatorCapsRateAtModerateLoss) { - ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); - Scenario s("googcc_unit/moderate_loss_channel", false); - CallClientConfig config; - config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); - config.transport.rates.max_rate = DataRate::KilobitsPerSec(5000); - config.transport.rates.start_rate = DataRate::KilobitsPerSec(1000); - - NetworkSimulationConfig network; - network.bandwidth = DataRate::KilobitsPerSec(2000); - network.delay = TimeDelta::Millis(100); - // 3% loss rate is in the moderate loss rate region at 2000 kbps, limiting the - // bitrate increase. 
- network.loss_rate = 0.03; - auto send_net = s.CreateMutableSimulationNode(network); - auto* client = s.CreateClient("send", std::move(config)); - auto* route = s.CreateRoutes(client, {send_net->node()}, - s.CreateClient("return", CallClientConfig()), - {s.CreateSimulationNode(network)}); - s.CreateVideoStream(route->forward(), VideoStreamConfig()); - // Allow the controller to stabilize at the lower bitrate. - s.RunFor(TimeDelta::Seconds(1)); - // This increase in capacity would cause the target bitrate to increase to - // over 4000 kbps without LossBasedControl. - send_net->UpdateConfig([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::KilobitsPerSec(5000); - }); - s.RunFor(TimeDelta::Seconds(20)); - // Using LossBasedControl, the bitrate will not increase over 2500 kbps since - // we have detected moderate loss. - EXPECT_LT(client->target_rate().kbps(), 2500); -} - TEST(GoogCcScenario, MaintainsLowRateInSafeResetTrial) { const DataRate kLinkCapacity = DataRate::KilobitsPerSec(200); const DataRate kStartRate = DataRate::KilobitsPerSec(300); @@ -769,9 +719,7 @@ TEST(GoogCcScenario, CutsHighRateInSafeResetTrial) { } TEST(GoogCcScenario, DetectsHighRateInSafeResetTrial) { - ScopedFieldTrials trial( - "WebRTC-Bwe-SafeResetOnRouteChange/Enabled,ack/" - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); + ScopedFieldTrials trial("WebRTC-Bwe-SafeResetOnRouteChange/Enabled,ack/"); const DataRate kInitialLinkCapacity = DataRate::KilobitsPerSec(200); const DataRate kNewLinkCapacity = DataRate::KilobitsPerSec(800); const DataRate kStartRate = DataRate::KilobitsPerSec(300); @@ -947,11 +895,39 @@ TEST(GoogCcScenario, FastRampupOnRembCapLifted) { EXPECT_GT(final_estimate.kbps(), 1500); } -TEST(GoogCcScenario, SlowRampupOnRembCapLiftedWithFieldTrial) { - ScopedFieldTrials trial("WebRTC-Bwe-ReceiverLimitCapsOnly/Disabled/"); - DataRate final_estimate = - RunRembDipScenario("googcc_unit/legacy_slow_rampup_on_remb_cap_lifted"); - EXPECT_LT(final_estimate.kbps(), 1000); 
+TEST(GoogCcScenario, FallbackToLossBasedBweWithoutPacketFeedback) { + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); + const DataRate kStartRate = DataRate::KilobitsPerSec(1000); + + Scenario s("googcc_unit/high_loss_channel", false); + auto* net = s.CreateMutableSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kLinkCapacity; + c->delay = TimeDelta::Millis(100); + }); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = kStartRate; + }); + auto* route = s.CreateRoutes( + client, {net->node()}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + + // Create a config without packet feedback. + VideoStreamConfig video_config; + video_config.stream.packet_feedback = false; + s.CreateVideoStream(route->forward(), video_config); + + s.RunFor(TimeDelta::Seconds(20)); + // Bandwith does not backoff because network is normal. + EXPECT_GE(client->target_rate().kbps(), 500); + + // Update the network to create high loss ratio + net->UpdateConfig([](NetworkSimulationConfig* c) { + c->loss_rate = 0.15; + }); + s.RunFor(TimeDelta::Seconds(20)); + + // Bandwidth decreases thanks to loss based bwe v0. 
+ EXPECT_LE(client->target_rate().kbps(), 300); } } // namespace test diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc index 7c54218895..b4d3ae8c1f 100644 --- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc +++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc @@ -138,8 +138,7 @@ bool LossBasedBweV2::IsReady() const { num_observations_ > 0; } -LossBasedBweV2::Result LossBasedBweV2::GetLossBasedResult( - DataRate delay_based_limit) const { +LossBasedBweV2::Result LossBasedBweV2::GetLossBasedResult() const { Result result; result.state = current_state_; if (!IsReady()) { @@ -156,16 +155,16 @@ LossBasedBweV2::Result LossBasedBweV2::GetLossBasedResult( "statistics before it can be used."; } } - result.bandwidth_estimate = IsValid(delay_based_limit) - ? delay_based_limit + result.bandwidth_estimate = IsValid(delay_based_estimate_) + ? delay_based_estimate_ : DataRate::PlusInfinity(); return result; } - if (IsValid(delay_based_limit)) { + if (IsValid(delay_based_estimate_)) { result.bandwidth_estimate = std::min({current_estimate_.loss_limited_bandwidth, - GetInstantUpperBound(), delay_based_limit}); + GetInstantUpperBound(), delay_based_estimate_}); } else { result.bandwidth_estimate = std::min( current_estimate_.loss_limited_bandwidth, GetInstantUpperBound()); @@ -191,13 +190,21 @@ void LossBasedBweV2::SetBandwidthEstimate(DataRate bandwidth_estimate) { } } -void LossBasedBweV2::SetMinBitrate(DataRate min_bitrate) { +void LossBasedBweV2::SetMinMaxBitrate(DataRate min_bitrate, + DataRate max_bitrate) { if (IsValid(min_bitrate)) { min_bitrate_ = min_bitrate; } else { RTC_LOG(LS_WARNING) << "The min bitrate must be finite: " << ToString(min_bitrate); } + + if (IsValid(max_bitrate)) { + max_bitrate_ = max_bitrate; + } else { + RTC_LOG(LS_WARNING) << "The max bitrate must be finite: " + << ToString(max_bitrate); + } } void 
LossBasedBweV2::SetProbeBitrate(absl::optional probe_bitrate) { @@ -212,7 +219,10 @@ void LossBasedBweV2::UpdateBandwidthEstimate( rtc::ArrayView packet_results, DataRate delay_based_estimate, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate) { + absl::optional probe_bitrate, + DataRate upper_link_capacity) { + delay_based_estimate_ = delay_based_estimate; + upper_link_capacity_ = upper_link_capacity; if (!IsEnabled()) { RTC_LOG(LS_WARNING) << "The estimator must be enabled before it can be used."; @@ -237,7 +247,7 @@ void LossBasedBweV2::UpdateBandwidthEstimate( ChannelParameters best_candidate = current_estimate_; double objective_max = std::numeric_limits::lowest(); - for (ChannelParameters candidate : GetCandidates(delay_based_estimate)) { + for (ChannelParameters candidate : GetCandidates()) { NewtonsMethodUpdate(candidate); const double candidate_objective = GetObjective(candidate); @@ -299,13 +309,12 @@ void LossBasedBweV2::UpdateBandwidthEstimate( } } - if (IsEstimateIncreasingWhenLossLimited(best_candidate)) { + if (IsEstimateIncreasingWhenLossLimited(best_candidate) && + best_candidate.loss_limited_bandwidth < delay_based_estimate) { current_state_ = LossBasedState::kIncreasing; - } else if (IsValid(delay_based_estimate) && - best_candidate.loss_limited_bandwidth < delay_based_estimate) { + } else if (best_candidate.loss_limited_bandwidth < delay_based_estimate_) { current_state_ = LossBasedState::kDecreasing; - } else if (IsValid(delay_based_estimate) && - best_candidate.loss_limited_bandwidth == delay_based_estimate) { + } else if (best_candidate.loss_limited_bandwidth >= delay_based_estimate_) { current_state_ = LossBasedState::kDelayBasedEstimate; } current_estimate_ = best_candidate; @@ -336,64 +345,64 @@ bool LossBasedBweV2::IsEstimateIncreasingWhenLossLimited( // configuration for the `LossBasedBweV2` which is explicitly enabled. 
absl::optional LossBasedBweV2::CreateConfig( const FieldTrialsView* key_value_config) { - FieldTrialParameter enabled("Enabled", false); + FieldTrialParameter enabled("Enabled", true); FieldTrialParameter bandwidth_rampup_upper_bound_factor( - "BwRampupUpperBoundFactor", 1.1); + "BwRampupUpperBoundFactor", 1000000.0); FieldTrialParameter rampup_acceleration_max_factor( "BwRampupAccelMaxFactor", 0.0); FieldTrialParameter rampup_acceleration_maxout_time( "BwRampupAccelMaxoutTime", TimeDelta::Seconds(60)); FieldTrialList candidate_factors("CandidateFactors", - {1.05, 1.0, 0.95}); + {1.02, 1.0, 0.95}); FieldTrialParameter higher_bandwidth_bias_factor("HigherBwBiasFactor", - 0.00001); + 0.0002); FieldTrialParameter higher_log_bandwidth_bias_factor( - "HigherLogBwBiasFactor", 0.001); + "HigherLogBwBiasFactor", 0.02); FieldTrialParameter inherent_loss_lower_bound( "InherentLossLowerBound", 1.0e-3); FieldTrialParameter loss_threshold_of_high_bandwidth_preference( - "LossThresholdOfHighBandwidthPreference", 0.99); + "LossThresholdOfHighBandwidthPreference", 0.15); FieldTrialParameter bandwidth_preference_smoothing_factor( "BandwidthPreferenceSmoothingFactor", 0.002); FieldTrialParameter inherent_loss_upper_bound_bandwidth_balance( - "InherentLossUpperBoundBwBalance", DataRate::KilobitsPerSec(15.0)); + "InherentLossUpperBoundBwBalance", DataRate::KilobitsPerSec(75.0)); FieldTrialParameter inherent_loss_upper_bound_offset( "InherentLossUpperBoundOffset", 0.05); FieldTrialParameter initial_inherent_loss_estimate( "InitialInherentLossEstimate", 0.01); FieldTrialParameter newton_iterations("NewtonIterations", 1); - FieldTrialParameter newton_step_size("NewtonStepSize", 0.5); + FieldTrialParameter newton_step_size("NewtonStepSize", 0.75); FieldTrialParameter append_acknowledged_rate_candidate( "AckedRateCandidate", true); FieldTrialParameter append_delay_based_estimate_candidate( - "DelayBasedCandidate", false); + "DelayBasedCandidate", true); FieldTrialParameter 
observation_duration_lower_bound( - "ObservationDurationLowerBound", TimeDelta::Seconds(1)); + "ObservationDurationLowerBound", TimeDelta::Millis(250)); FieldTrialParameter observation_window_size("ObservationWindowSize", 20); FieldTrialParameter sending_rate_smoothing_factor( "SendingRateSmoothingFactor", 0.0); FieldTrialParameter instant_upper_bound_temporal_weight_factor( - "InstantUpperBoundTemporalWeightFactor", 0.99); + "InstantUpperBoundTemporalWeightFactor", 0.9); FieldTrialParameter instant_upper_bound_bandwidth_balance( - "InstantUpperBoundBwBalance", DataRate::KilobitsPerSec(15.0)); + "InstantUpperBoundBwBalance", DataRate::KilobitsPerSec(75.0)); FieldTrialParameter instant_upper_bound_loss_offset( "InstantUpperBoundLossOffset", 0.05); FieldTrialParameter temporal_weight_factor("TemporalWeightFactor", - 0.99); + 0.9); FieldTrialParameter bandwidth_backoff_lower_bound_factor( "BwBackoffLowerBoundFactor", 1.0); FieldTrialParameter trendline_integration_enabled( "TrendlineIntegrationEnabled", false); FieldTrialParameter trendline_observations_window_size( "TrendlineObservationsWindowSize", 20); - FieldTrialParameter max_increase_factor("MaxIncreaseFactor", 1000.0); + FieldTrialParameter max_increase_factor("MaxIncreaseFactor", 1.3); FieldTrialParameter delayed_increase_window( "DelayedIncreaseWindow", TimeDelta::Millis(300)); FieldTrialParameter use_acked_bitrate_only_when_overusing( "UseAckedBitrateOnlyWhenOverusing", false); FieldTrialParameter not_increase_if_inherent_loss_less_than_average_loss( - "NotIncreaseIfInherentLossLessThanAverageLoss", false); + "NotIncreaseIfInherentLossLessThanAverageLoss", true); FieldTrialParameter high_loss_rate_threshold("HighLossRateThreshold", 1.0); FieldTrialParameter bandwidth_cap_at_high_loss_rate( @@ -402,6 +411,8 @@ absl::optional LossBasedBweV2::CreateConfig( "SlopeOfBweHighLossFunc", 1000); FieldTrialParameter probe_integration_enabled("ProbeIntegrationEnabled", false); + FieldTrialParameter 
bound_by_upper_link_capacity_when_loss_limited( + "BoundByUpperLinkCapacityWhenLossLimited", true); if (key_value_config) { ParseFieldTrial({&enabled, &bandwidth_rampup_upper_bound_factor, @@ -437,7 +448,8 @@ absl::optional LossBasedBweV2::CreateConfig( &probe_integration_enabled, &high_loss_rate_threshold, &bandwidth_cap_at_high_loss_rate, - &slope_of_bwe_high_loss_func}, + &slope_of_bwe_high_loss_func, + &bound_by_upper_link_capacity_when_loss_limited}, key_value_config->Lookup("WebRTC-Bwe-LossBasedBweV2")); } @@ -498,6 +510,8 @@ absl::optional LossBasedBweV2::CreateConfig( bandwidth_cap_at_high_loss_rate.Get(); config->slope_of_bwe_high_loss_func = slope_of_bwe_high_loss_func.Get(); config->probe_integration_enabled = probe_integration_enabled.Get(); + config->bound_by_upper_link_capacity_when_loss_limited = + bound_by_upper_link_capacity_when_loss_limited.Get(); return config; } @@ -708,19 +722,19 @@ double LossBasedBweV2::GetAverageReportedLossRatio() const { return num_lost_packets / num_packets; } -DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound( - DataRate delay_based_estimate) const { - DataRate candidate_bandwidth_upper_bound = DataRate::PlusInfinity(); - if (IsBandwidthLimitedDueToLoss()) { +DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound() const { + DataRate candidate_bandwidth_upper_bound = max_bitrate_; + if (IsBandwidthLimitedDueToLoss() && + IsValid(bandwidth_limit_in_current_window_)) { candidate_bandwidth_upper_bound = bandwidth_limit_in_current_window_; } if (config_->trendline_integration_enabled) { candidate_bandwidth_upper_bound = std::min(GetInstantUpperBound(), candidate_bandwidth_upper_bound); - if (IsValid(delay_based_estimate)) { + if (IsValid(delay_based_estimate_)) { candidate_bandwidth_upper_bound = - std::min(delay_based_estimate, candidate_bandwidth_upper_bound); + std::min(delay_based_estimate_, candidate_bandwidth_upper_bound); } } @@ -742,8 +756,8 @@ DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound( 
return candidate_bandwidth_upper_bound; } -std::vector LossBasedBweV2::GetCandidates( - DataRate delay_based_estimate) const { +std::vector LossBasedBweV2::GetCandidates() + const { std::vector bandwidths; bool can_increase_bitrate = TrendlineEsimateAllowBitrateIncrease(); for (double candidate_factor : config_->candidate_factors) { @@ -761,16 +775,16 @@ std::vector LossBasedBweV2::GetCandidates( config_->bandwidth_backoff_lower_bound_factor); } - if (IsValid(delay_based_estimate) && + if (IsValid(delay_based_estimate_) && config_->append_delay_based_estimate_candidate) { if (can_increase_bitrate && - delay_based_estimate > current_estimate_.loss_limited_bandwidth) { - bandwidths.push_back(delay_based_estimate); + delay_based_estimate_ > current_estimate_.loss_limited_bandwidth) { + bandwidths.push_back(delay_based_estimate_); } } const DataRate candidate_bandwidth_upper_bound = - GetCandidateBandwidthUpperBound(delay_based_estimate); + GetCandidateBandwidthUpperBound(); std::vector candidates; candidates.resize(bandwidths.size()); @@ -918,11 +932,11 @@ DataRate LossBasedBweV2::GetSendingRate( } DataRate LossBasedBweV2::GetInstantUpperBound() const { - return cached_instant_upper_bound_.value_or(DataRate::PlusInfinity()); + return cached_instant_upper_bound_.value_or(max_bitrate_); } void LossBasedBweV2::CalculateInstantUpperBound() { - DataRate instant_limit = DataRate::PlusInfinity(); + DataRate instant_limit = max_bitrate_; const double average_reported_loss_ratio = GetAverageReportedLossRatio(); if (average_reported_loss_ratio > config_->instant_upper_bound_loss_offset) { instant_limit = config_->instant_upper_bound_bandwidth_balance / @@ -938,6 +952,12 @@ void LossBasedBweV2::CalculateInstantUpperBound() { } } + if (IsBandwidthLimitedDueToLoss()) { + if (IsValid(upper_link_capacity_) && + config_->bound_by_upper_link_capacity_when_loss_limited) { + instant_limit = std::min(instant_limit, upper_link_capacity_); + } + } cached_instant_upper_bound_ = 
instant_limit; } diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h index 6d3b409957..9ff9cb74c6 100644 --- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h +++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h @@ -57,16 +57,17 @@ class LossBasedBweV2 { bool IsReady() const; // Returns `DataRate::PlusInfinity` if no BWE can be calculated. - Result GetLossBasedResult(DataRate delay_based_limit) const; + Result GetLossBasedResult() const; void SetAcknowledgedBitrate(DataRate acknowledged_bitrate); void SetBandwidthEstimate(DataRate bandwidth_estimate); - void SetMinBitrate(DataRate min_bitrate); + void SetMinMaxBitrate(DataRate min_bitrate, DataRate max_bitrate); void UpdateBandwidthEstimate( rtc::ArrayView packet_results, DataRate delay_based_estimate, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate); + absl::optional probe_bitrate, + DataRate upper_link_capacity); private: struct ChannelParameters { @@ -110,6 +111,7 @@ class LossBasedBweV2 { DataRate bandwidth_cap_at_high_loss_rate = DataRate::MinusInfinity(); double slope_of_bwe_high_loss_func = 1000.0; bool probe_integration_enabled = false; + bool bound_by_upper_link_capacity_when_loss_limited = false; }; struct Derivatives { @@ -139,9 +141,8 @@ class LossBasedBweV2 { // Returns `0.0` if not enough loss statistics have been received. 
double GetAverageReportedLossRatio() const; - std::vector GetCandidates( - DataRate delay_based_estimate) const; - DataRate GetCandidateBandwidthUpperBound(DataRate delay_based_estimate) const; + std::vector GetCandidates() const; + DataRate GetCandidateBandwidthUpperBound() const; Derivatives GetDerivatives(const ChannelParameters& channel_parameters) const; double GetFeasibleInherentLoss( const ChannelParameters& channel_parameters) const; @@ -190,8 +191,11 @@ class LossBasedBweV2 { Timestamp recovering_after_loss_timestamp_ = Timestamp::MinusInfinity(); DataRate bandwidth_limit_in_current_window_ = DataRate::PlusInfinity(); DataRate min_bitrate_ = DataRate::KilobitsPerSec(1); + DataRate max_bitrate_ = DataRate::PlusInfinity(); LossBasedState current_state_ = LossBasedState::kDelayBasedEstimate; DataRate probe_bitrate_ = DataRate::PlusInfinity(); + DataRate delay_based_estimate_ = DataRate::PlusInfinity(); + DataRate upper_link_capacity_ = DataRate::PlusInfinity(); }; } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc b/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc index 10b408ac1c..c303c29d68 100644 --- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc +++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc @@ -201,12 +201,15 @@ TEST_P(LossBasedBweV2Test, ReturnsDelayBasedEstimateWhenDisabled) { Config(/*enabled=*/false, /*valid=*/true, /*trendline_integration_enabled=*/GetParam())); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - - EXPECT_EQ(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::KilobitsPerSec(100)) - .bandwidth_estimate, - DataRate::KilobitsPerSec(100)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + /*packet_results=*/{}, + /*delay_based_estimate=*/DataRate::KilobitsPerSec(100), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + 
/*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(100)); } TEST_P(LossBasedBweV2Test, @@ -215,12 +218,15 @@ TEST_P(LossBasedBweV2Test, Config(/*enabled=*/true, /*valid=*/false, /*trendline_integration_enabled=*/GetParam())); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - - EXPECT_EQ(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::KilobitsPerSec(100)) - .bandwidth_estimate, - DataRate::KilobitsPerSec(100)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + /*packet_results=*/{}, + /*delay_based_estimate=*/DataRate::KilobitsPerSec(100), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(100)); } TEST_P(LossBasedBweV2Test, @@ -237,14 +243,14 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_TRUE(loss_based_bandwidth_estimator.IsReady()); - EXPECT_TRUE( - loss_based_bandwidth_estimator - .GetLossBasedResult(/*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate.IsFinite()); + EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() + .bandwidth_estimate.IsFinite()); } TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNoInitialization) { @@ -257,14 +263,14 @@ TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNoInitialization) { LossBasedBweV2 
loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_FALSE(loss_based_bandwidth_estimator.IsReady()); - EXPECT_TRUE( - loss_based_bandwidth_estimator - .GetLossBasedResult(/*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate.IsPlusInfinity()); + EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() + .bandwidth_estimate.IsPlusInfinity()); } TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNotEnoughFeedback) { @@ -290,20 +296,18 @@ TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNotEnoughFeedback) { DataRate::KilobitsPerSec(600)); EXPECT_FALSE(loss_based_bandwidth_estimator.IsReady()); - EXPECT_TRUE( - loss_based_bandwidth_estimator - .GetLossBasedResult(/*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate.IsPlusInfinity()); + EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() + .bandwidth_estimate.IsPlusInfinity()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - not_enough_feedback, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + not_enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_FALSE(loss_based_bandwidth_estimator.IsReady()); - EXPECT_TRUE( - loss_based_bandwidth_estimator - .GetLossBasedResult(/*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate.IsPlusInfinity()); + EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() + .bandwidth_estimate.IsPlusInfinity()); } TEST_P(LossBasedBweV2Test, @@ -324,33 +328,31 @@ 
TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); - EXPECT_NE(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(600)); + EXPECT_NE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); - EXPECT_EQ(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(600)); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); - EXPECT_NE(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(600)); + EXPECT_NE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(600)); } TEST_P(LossBasedBweV2Test, @@ -374,42 +376,41 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator_2.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); 
loss_based_bandwidth_estimator_1.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); loss_based_bandwidth_estimator_2.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); - EXPECT_EQ(loss_based_bandwidth_estimator_1 - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(660)); + EXPECT_EQ( + loss_based_bandwidth_estimator_1.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(660)); loss_based_bandwidth_estimator_1.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(900)); - EXPECT_EQ(loss_based_bandwidth_estimator_1 - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(660)); + EXPECT_EQ( + loss_based_bandwidth_estimator_1.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(660)); loss_based_bandwidth_estimator_1.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); loss_based_bandwidth_estimator_2.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, 
/*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); - EXPECT_NE(loss_based_bandwidth_estimator_1 - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - loss_based_bandwidth_estimator_2 - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate); + EXPECT_NE( + loss_based_bandwidth_estimator_1.GetLossBasedResult().bandwidth_estimate, + loss_based_bandwidth_estimator_2.GetLossBasedResult().bandwidth_estimate); } TEST_P(LossBasedBweV2Test, @@ -426,14 +427,14 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_no_received_packets, DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + enough_feedback_no_received_packets, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); - EXPECT_EQ(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(100)); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(100)); } TEST_P(LossBasedBweV2Test, BandwidthEstimateNotIncreaseWhenNetworkUnderusing) { @@ -457,21 +458,20 @@ TEST_P(LossBasedBweV2Test, BandwidthEstimateNotIncreaseWhenNetworkUnderusing) { loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), - BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt); - EXPECT_LE(loss_based_bandwidth_estimator - .GetLossBasedResult( - 
/*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(600)); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_LE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); - EXPECT_LE(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(600)); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_LE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(600)); } // When network is normal, estimate can increase but never be higher than @@ -495,25 +495,25 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // If the delay based estimate is infinity, then loss based estimate increases // and not bounded by delay based estimate. 
- EXPECT_GT(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - DataRate::KilobitsPerSec(600)); + EXPECT_GT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::KilobitsPerSec(500), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // If the delay based estimate is not infinity, then loss based estimate is // bounded by delay based estimate. - EXPECT_EQ(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::KilobitsPerSec(500)) - .bandwidth_estimate, - DataRate::KilobitsPerSec(500)); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(500)); } // When loss based bwe receives a strong signal of overusing and an increase in @@ -540,19 +540,21 @@ TEST_P(LossBasedBweV2Test, UseAckedBitrateForEmegencyBackOff) { loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_bitrate); // Update estimate when network is overusing, and 50% loss rate. loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt); + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwOverusing, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Update estimate again when network is continuously overusing, and 100% // loss rate. 
loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt); + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwOverusing, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate bitrate now is backed off based on acked bitrate. - EXPECT_LE(loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate, - acked_bitrate); + EXPECT_LE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + acked_bitrate); } // When receiving the same packet feedback, loss based bwe ignores the feedback @@ -571,21 +573,21 @@ TEST_P(LossBasedBweV2Test, NoBweChangeIfObservationDurationUnchanged) { DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); - DataRate estimate_1 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_1 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; // Use the same feedback and check if the estimate is unchanged. 
loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); - DataRate estimate_2 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_2 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_EQ(estimate_2, estimate_1); } @@ -608,20 +610,20 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); - DataRate estimate_1 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_1 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); - DataRate estimate_2 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_2 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; 
EXPECT_EQ(estimate_2, estimate_1); } @@ -644,20 +646,19 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); - DataRate estimate_1 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_1 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), - BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt); - DataRate estimate_2 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_2 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_LE(estimate_2, estimate_1); } @@ -687,20 +688,20 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, DataRate::PlusInfinity(), BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); - DataRate estimate_1 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, + 
/*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_1 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, DataRate::PlusInfinity(), BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt); - DataRate estimate_2 = loss_based_bandwidth_estimator - .GetLossBasedResult( - /*delay_based_limit=*/DataRate::PlusInfinity()) - .bandwidth_estimate; + enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwOverusing, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + DataRate estimate_2 = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_LT(estimate_2, estimate_1); } @@ -723,17 +724,17 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, delay_based_estimate); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, delay_based_estimate); } @@ -745,7 +746,7 @@ TEST_P(LossBasedBweV2Test, "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," "InstantUpperBoundBwBalance:10000kbps," 
"DelayBasedCandidate:true,MaxIncreaseFactor:1.5,BwRampupUpperBoundFactor:" - "2.0/"); + "2.0,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); DataRate acked_rate = DataRate::KilobitsPerSec(300); @@ -759,9 +760,10 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); LossBasedBweV2::Result result_at_loss = - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate); + loss_based_bandwidth_estimator.GetLossBasedResult(); // Network recovers after loss. std::vector enough_feedback_2 = @@ -772,18 +774,124 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); LossBasedBweV2::Result result_after_recovery = - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate); + loss_based_bandwidth_estimator.GetLossBasedResult(); EXPECT_EQ(result_after_recovery.bandwidth_estimate, result_at_loss.bandwidth_estimate * 1.5); } +TEST_P(LossBasedBweV2Test, + LossBasedStateIsDelayBasedEstimateAfterNetworkRecovering) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:100|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:100," + "BwRampupUpperBoundFactor:" + "2.0,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); 
+ LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(600); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. + std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + + // Network recovers after loss. + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); + + // Network recovers continuing. 
+ std::vector enough_feedback_3 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * 2); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); +} + +TEST_P(LossBasedBweV2Test, + LossBasedStateIsNotDelayBasedEstimateIfDelayBasedEsimtateInfinite) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:100|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:100," + "BwRampupUpperBoundFactor:" + "2.0/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::PlusInfinity(); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. + std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + + // Network recovers after loss. 
+ std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + EXPECT_NE(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); +} + // After loss based bwe backs off, the next estimate is capped by // a factor of acked bitrate. TEST_P(LossBasedBweV2Test, IncreaseByFactorOfAckedBitrateAfterLossBasedBweBacksOff) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,LossThresholdOfHighBandwidthPreference:0.99," + "BwRampupUpperBoundFactor:1.2," + "InherentLossUpperBoundOffset:0.9,ObservationDurationLowerBound:200ms/"); std::vector enough_feedback_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); @@ -791,9 +899,6 @@ TEST_P(LossBasedBweV2Test, CreatePacketResultsWith10pLossRate( /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound); - ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -803,7 +908,8 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Change the acked bitrate to make sure that the estimate is bounded by a // factor of acked bitrate. 
@@ -811,12 +917,12 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_bitrate); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate is capped by acked_bitrate * BwRampupUpperBoundFactor. DataRate estimate_2 = - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate; + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_EQ(estimate_2, acked_bitrate * 1.2); } @@ -847,29 +953,30 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Increase the acknowledged bitrate to make sure that the estimate is not // capped too low. loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(5000)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate is capped by current_estimate * kMaxIncreaseFactor because // it recently backed off. 
DataRate estimate_2 = - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate; + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The latest estimate is the same as the previous estimate since the sent // packets were sent within the DelayedIncreaseWindow. EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, estimate_2); } @@ -898,28 +1005,29 @@ TEST_P(LossBasedBweV2Test, KeepIncreasingEstimateAfterDelayedIncreaseWindow) { DataRate::KilobitsPerSec(300)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Increase the acknowledged bitrate to make sure that the estimate is not // capped too low. loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(5000)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate is capped by current_estimate * kMaxIncreaseFactor because it // recently backed off. 
DataRate estimate_2 = - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate; + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate can continue increasing after the DelayedIncreaseWindow. EXPECT_GE( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, estimate_2); } @@ -942,7 +1050,8 @@ TEST_P(LossBasedBweV2Test, NotIncreaseIfInherentLossLessThanAverageLoss) { /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -950,12 +1059,12 @@ TEST_P(LossBasedBweV2Test, NotIncreaseIfInherentLossLessThanAverageLoss) { kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Do not increase the bitrate because inherent loss is less than average loss EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(600)); } @@ -968,7 +1077,7 @@ TEST_P(LossBasedBweV2Test, 
"DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." - "20/"); + "20,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -980,7 +1089,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -988,13 +1098,13 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Because LossThresholdOfHighBandwidthPreference is 20%, the average loss is // 10%, bandwidth estimate should increase. 
EXPECT_GT( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(600)); } @@ -1019,7 +1129,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -1027,13 +1138,13 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Because LossThresholdOfHighBandwidthPreference is 5%, the average loss is // 10%, bandwidth estimate should decrease. EXPECT_LT( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(600)); } @@ -1058,7 +1169,8 @@ TEST_P(LossBasedBweV2Test, UseProbeResultWhenRecoveringFromLoss) { /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt); + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Network recovers after loss. 
DataRate probe_estimate = DataRate::KilobitsPerSec(300); @@ -1068,13 +1180,139 @@ TEST_P(LossBasedBweV2Test, UseProbeResultWhenRecoveringFromLoss) { kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - probe_estimate); + probe_estimate, /*upper_link_capacity=*/DataRate::PlusInfinity()); LossBasedBweV2::Result result_after_recovery = - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate); + loss_based_bandwidth_estimator.GetLossBasedResult(); EXPECT_EQ(result_after_recovery.bandwidth_estimate, probe_estimate); } +// If BoundByUpperLinkCapacityWhenLossLimited is enabled, the estimate is +// bounded by the upper link capacity when bandwidth is loss limited. +TEST_P(LossBasedBweV2Test, BoundEstimateByUpperLinkCapacityWhenLossLimited) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:1000," + "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" + "true/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. 
+ std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + + // Network recovers after loss. + DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, upper_link_capacity); + + LossBasedBweV2::Result result_after_recovery = + loss_based_bandwidth_estimator.GetLossBasedResult(); + EXPECT_EQ(result_after_recovery.bandwidth_estimate, upper_link_capacity); +} + +// If BoundByUpperLinkCapacityWhenLossLimited is enabled, the estimate is not +// bounded by the upper link capacity when bandwidth is not loss limited. 
+TEST_P(LossBasedBweV2Test, + NotBoundEstimateByUpperLinkCapacityWhenNotLossLimited) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:1000," + "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" + "true/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create a normal network without loss + std::vector enough_feedback_1 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + + DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, upper_link_capacity); + + LossBasedBweV2::Result loss_based_result = + loss_based_bandwidth_estimator.GetLossBasedResult(); + EXPECT_GT(loss_based_result.bandwidth_estimate, upper_link_capacity); +} + +// If BoundByUpperLinkCapacityWhenLossLimited is disabled, the estimate is not +// bounded by the upper link capacity. 
+TEST_P(LossBasedBweV2Test, NotBoundEstimateByUpperLinkCapacity) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," + "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + "InstantUpperBoundBwBalance:10000kbps," + "DelayBasedCandidate:true,MaxIncreaseFactor:1000," + "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" + "false/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); + DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + + // Create some loss to create the loss limited scenario. + std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); + + // Network recovers after loss. 
+ DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); + std::vector enough_feedback_2 = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, + /*probe_estimate=*/absl::nullopt, upper_link_capacity); + + LossBasedBweV2::Result result_after_recovery = + loss_based_bandwidth_estimator.GetLossBasedResult(); + EXPECT_GT(result_after_recovery.bandwidth_estimate, upper_link_capacity); +} + TEST_P(LossBasedBweV2Test, StricterBoundUsingHighLossRateThresholdAt10pLossRate) { ExplicitKeyValueConfig key_value_config( @@ -1086,7 +1324,9 @@ TEST_P(LossBasedBweV2Test, "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." "05,HighLossRateThreshold:0.09/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinBitrate(DataRate::KilobitsPerSec(10)); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); @@ -1096,7 +1336,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_10p_loss_2 = CreatePacketResultsWith10pLossRate( @@ -1104,13 +1345,13 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, 
/*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // At 10% loss rate and high loss rate threshold to be 10%, cap the estimate // to be 500 * 1000-0.1 = 400kbps. EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(400)); } @@ -1125,7 +1366,9 @@ TEST_P(LossBasedBweV2Test, "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." "05,HighLossRateThreshold:0.3/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinBitrate(DataRate::KilobitsPerSec(10)); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); @@ -1135,7 +1378,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_50p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_50p_loss_2 = CreatePacketResultsWith50pLossRate( @@ -1143,13 +1387,13 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_50p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // At 50% loss rate and high loss rate threshold to be 30%, cap the estimate // to be the 
min bitrate. EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(10)); } @@ -1164,7 +1408,9 @@ TEST_P(LossBasedBweV2Test, "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." "05,HighLossRateThreshold:0.3/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinBitrate(DataRate::KilobitsPerSec(10)); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); @@ -1174,7 +1420,8 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_100p_loss_2 = CreatePacketResultsWith100pLossRate( @@ -1182,13 +1429,13 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_100p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // At 100% loss rate and high loss rate threshold to be 30%, cap the estimate // to be the min bitrate. 
EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(10)); } @@ -1202,7 +1449,9 @@ TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." "05,HighLossRateThreshold:0.3/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinBitrate(DataRate::KilobitsPerSec(10)); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); @@ -1212,13 +1461,13 @@ TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // Make sure that the estimate is set to min bitrate because of 100% loss // rate. EXPECT_EQ( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(10)); // Create some feedbacks with 0 loss rate to simulate network recovering. 
@@ -1228,7 +1477,8 @@ TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_0p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); std::vector enough_feedback_0p_loss_2 = CreatePacketResultsWithReceivedPackets( @@ -1236,15 +1486,38 @@ TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { kObservationDurationLowerBound * 2); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_0p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt); + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + /*upper_link_capacity=*/DataRate::PlusInfinity()); // The estimate increases as network recovers. EXPECT_GT( - loss_based_bandwidth_estimator.GetLossBasedResult(delay_based_estimate) - .bandwidth_estimate, + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(10)); } +TEST_P(LossBasedBweV2Test, EstimateIsNotHigherThanMaxBitrate) { + ExplicitKeyValueConfig key_value_config( + Config(/*enabled=*/true, /*valid=*/true, + /*trendline_integration_enabled=*/GetParam())); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000)); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(1000)); + std::vector enough_feedback = + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), + BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, + 
/*upper_link_capacity=*/DataRate::PlusInfinity()); + + EXPECT_LE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(1000)); +} + INSTANTIATE_TEST_SUITE_P(LossBasedBweV2Tests, LossBasedBweV2Test, ::testing::Bool()); diff --git a/modules/congestion_controller/goog_cc/probe_controller.cc b/modules/congestion_controller/goog_cc/probe_controller.cc index 501f14b874..1af943c4cb 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/modules/congestion_controller/goog_cc/probe_controller.cc @@ -87,13 +87,17 @@ ProbeControllerConfig::ProbeControllerConfig( alr_probe_scale("alr_scale", 2), network_state_estimate_probing_interval("network_state_interval", TimeDelta::PlusInfinity()), - network_state_estimate_fast_rampup_rate("network_state_fast_rampup_rate", - 0), - network_state_estimate_drop_down_rate("network_state_drop_down_rate", 0), + probe_if_estimate_lower_than_network_state_estimate_ratio( + "est_lower_than_network_ratio", + 0), + estimate_lower_than_network_state_estimate_probing_interval( + "est_lower_than_network_interval", + TimeDelta::Seconds(3)), network_state_probe_scale("network_state_scale", 1.0), network_state_probe_duration("network_state_probe_duration", TimeDelta::Millis(15)), + probe_on_max_allocated_bitrate_change("probe_max_allocation", true), first_allocation_probe_scale("alloc_p1", 1), second_allocation_probe_scale("alloc_p2", 2), allocation_allow_further_probing("alloc_probe_further", false), @@ -102,21 +106,33 @@ ProbeControllerConfig::ProbeControllerConfig( min_probe_duration("min_probe_duration", TimeDelta::Millis(15)), limit_probe_target_rate_to_loss_bwe("limit_probe_target_rate_to_loss_bwe", false), + loss_limited_probe_scale("loss_limited_scale", 1.5), skip_if_estimate_larger_than_fraction_of_max( "skip_if_est_larger_than_fraction_of_max", - 0.0) { - ParseFieldTrial( - {&first_exponential_probe_scale, &second_exponential_probe_scale, - &further_exponential_probe_scale, 
&further_probe_threshold, - &alr_probing_interval, &alr_probe_scale, &first_allocation_probe_scale, - &second_allocation_probe_scale, &allocation_allow_further_probing, - &min_probe_duration, &network_state_estimate_probing_interval, - &network_state_estimate_fast_rampup_rate, - &network_state_estimate_drop_down_rate, &network_state_probe_scale, - &network_state_probe_duration, &min_probe_packets_sent, - &limit_probe_target_rate_to_loss_bwe, - &skip_if_estimate_larger_than_fraction_of_max}, - key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); + 0.0), + not_probe_if_delay_increased("not_probe_if_delay_increased", false) { + ParseFieldTrial({&first_exponential_probe_scale, + &second_exponential_probe_scale, + &further_exponential_probe_scale, + &further_probe_threshold, + &alr_probing_interval, + &alr_probe_scale, + &probe_on_max_allocated_bitrate_change, + &first_allocation_probe_scale, + &second_allocation_probe_scale, + &allocation_allow_further_probing, + &min_probe_duration, + &network_state_estimate_probing_interval, + &probe_if_estimate_lower_than_network_state_estimate_ratio, + &estimate_lower_than_network_state_estimate_probing_interval, + &network_state_probe_scale, + &network_state_probe_duration, + &min_probe_packets_sent, + &limit_probe_target_rate_to_loss_bwe, + &loss_limited_probe_scale, + &skip_if_estimate_larger_than_fraction_of_max, + ¬_probe_if_delay_increased}, + key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); // Specialized keys overriding subsets of WebRTC-Bwe-ProbingConfiguration ParseFieldTrial( @@ -124,8 +140,9 @@ ProbeControllerConfig::ProbeControllerConfig( key_value_config->Lookup("WebRTC-Bwe-InitialProbing")); ParseFieldTrial({&further_exponential_probe_scale, &further_probe_threshold}, key_value_config->Lookup("WebRTC-Bwe-ExponentialProbing")); - ParseFieldTrial({&alr_probing_interval, &alr_probe_scale}, - key_value_config->Lookup("WebRTC-Bwe-AlrProbing")); + ParseFieldTrial( + {&alr_probing_interval, 
&alr_probe_scale, &loss_limited_probe_scale}, + key_value_config->Lookup("WebRTC-Bwe-AlrProbing")); ParseFieldTrial( {&first_allocation_probe_scale, &second_allocation_probe_scale, &allocation_allow_further_probing, &allocation_probe_max}, @@ -183,17 +200,6 @@ std::vector ProbeController::SetBitrates( // estimate then initiate probing. if (!estimated_bitrate_.IsZero() && old_max_bitrate < max_bitrate_ && estimated_bitrate_ < max_bitrate_) { - // The assumption is that if we jump more than 20% in the bandwidth - // estimate or if the bandwidth estimate is within 90% of the new - // max bitrate then the probing attempt was successful. - mid_call_probing_succcess_threshold_ = - std::min(estimated_bitrate_ * 1.2, max_bitrate_ * 0.9); - mid_call_probing_waiting_for_result_ = true; - mid_call_probing_bitrate_ = max_bitrate_; - - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Initiated", - max_bitrate_.kbps()); - return InitiateProbing(at_time, {max_bitrate_}, false); } break; @@ -207,7 +213,8 @@ std::vector ProbeController::OnMaxTotalAllocatedBitrate( const bool in_alr = alr_start_time_.has_value(); const bool allow_allocation_probe = in_alr; - if (state_ == State::kProbingComplete && + if (config_.probe_on_max_allocated_bitrate_change && + state_ == State::kProbingComplete && max_total_allocated_bitrate != max_total_allocated_bitrate_ && estimated_bitrate_ < max_bitrate_ && estimated_bitrate_ < max_total_allocated_bitrate && @@ -271,42 +278,36 @@ std::vector ProbeController::InitiateExponentialProbing( std::vector ProbeController::SetEstimatedBitrate( DataRate bitrate, - bool bwe_limited_due_to_packet_loss, + BandwidthLimitedCause bandwidth_limited_cause, Timestamp at_time) { - if (bwe_limited_due_to_packet_loss != bwe_limited_due_to_packet_loss_ && - config_.limit_probe_target_rate_to_loss_bwe) { - state_ = State::kProbingComplete; - } - bwe_limited_due_to_packet_loss_ = bwe_limited_due_to_packet_loss; + bandwidth_limited_cause_ = bandwidth_limited_cause; if 
(bitrate < kBitrateDropThreshold * estimated_bitrate_) { time_of_last_large_drop_ = at_time; bitrate_before_last_large_drop_ = estimated_bitrate_; } estimated_bitrate_ = bitrate; - if (mid_call_probing_waiting_for_result_ && - bitrate >= mid_call_probing_succcess_threshold_) { - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Success", - mid_call_probing_bitrate_.kbps()); - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.ProbedKbps", - bitrate.kbps()); - mid_call_probing_waiting_for_result_ = false; - } - std::vector pending_probes; if (state_ == State::kWaitingForProbingResult) { // Continue probing if probing results indicate channel has greater // capacity. + DataRate network_state_estimate_probe_further_limit = + config_.network_state_estimate_probing_interval->IsFinite() && + network_estimate_ + ? network_estimate_->link_capacity_upper * + config_.further_probe_threshold + : DataRate::PlusInfinity(); RTC_LOG(LS_INFO) << "Measured bitrate: " << bitrate << " Minimum to probe further: " - << min_bitrate_to_probe_further_; + << min_bitrate_to_probe_further_ << " upper limit: " + << network_state_estimate_probe_further_limit; - if (bitrate > min_bitrate_to_probe_further_) { - pending_probes = InitiateProbing( + if (bitrate > min_bitrate_to_probe_further_ && + bitrate <= network_state_estimate_probe_further_limit) { + return InitiateProbing( at_time, {config_.further_exponential_probe_scale * bitrate}, true); } } - - return pending_probes; + return {}; } void ProbeController::EnablePeriodicAlrProbing(bool enable) { @@ -361,36 +362,14 @@ std::vector ProbeController::RequestProbe( return std::vector(); } -void ProbeController::SetMaxBitrate(DataRate max_bitrate) { - max_bitrate_ = max_bitrate; -} - void ProbeController::SetNetworkStateEstimate( webrtc::NetworkStateEstimate estimate) { - if (config_.network_state_estimate_fast_rampup_rate > 0 && - estimated_bitrate_ < estimate.link_capacity_upper && - (!network_estimate_ || - estimate.link_capacity_upper >= 
- config_.network_state_estimate_fast_rampup_rate * - network_estimate_->link_capacity_upper)) { - send_probe_on_next_process_interval_ = true; - } - if (config_.network_state_estimate_drop_down_rate > 0 && network_estimate_ && - !estimate.link_capacity_upper.IsZero() && - (estimated_bitrate_ > estimate.link_capacity_upper || - bwe_limited_due_to_packet_loss_) && - estimate.link_capacity_upper <= - config_.network_state_estimate_drop_down_rate * - network_estimate_->link_capacity_upper) { - send_probe_on_next_process_interval_ = true; - } - network_estimate_ = estimate; } void ProbeController::Reset(Timestamp at_time) { network_available_ = true; - bwe_limited_due_to_packet_loss_ = false; + bandwidth_limited_cause_ = BandwidthLimitedCause::kDelayBasedLimited; state_ = State::kInit; min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); time_last_probing_initiated_ = Timestamp::Zero(); @@ -401,11 +380,9 @@ void ProbeController::Reset(Timestamp at_time) { Timestamp now = at_time; last_bwe_drop_probing_time_ = now; alr_end_time_.reset(); - mid_call_probing_waiting_for_result_ = false; time_of_last_large_drop_ = now; bitrate_before_last_large_drop_ = DataRate::Zero(); max_total_allocated_bitrate_ = DataRate::Zero(); - send_probe_on_next_process_interval_ = false; } bool ProbeController::TimeForAlrProbe(Timestamp at_time) const { @@ -419,21 +396,40 @@ bool ProbeController::TimeForAlrProbe(Timestamp at_time) const { } bool ProbeController::TimeForNetworkStateProbe(Timestamp at_time) const { - if (config_.network_state_estimate_probing_interval->IsFinite() && - network_estimate_ && network_estimate_->link_capacity_upper.IsFinite() && - estimated_bitrate_ < network_estimate_->link_capacity_upper) { + if (!network_estimate_ || + network_estimate_->link_capacity_upper.IsInfinite()) { + return false; + } + + bool probe_due_to_low_estimate = + bandwidth_limited_cause_ == BandwidthLimitedCause::kDelayBasedLimited && + estimated_bitrate_ < + 
config_.probe_if_estimate_lower_than_network_state_estimate_ratio * + network_estimate_->link_capacity_upper; + if (probe_due_to_low_estimate && + config_.estimate_lower_than_network_state_estimate_probing_interval + ->IsFinite()) { + Timestamp next_probe_time = + time_last_probing_initiated_ + + config_.estimate_lower_than_network_state_estimate_probing_interval; + return at_time >= next_probe_time; + } + + bool periodic_probe = + estimated_bitrate_ < network_estimate_->link_capacity_upper; + if (periodic_probe && + config_.network_state_estimate_probing_interval->IsFinite()) { Timestamp next_probe_time = time_last_probing_initiated_ + config_.network_state_estimate_probing_interval; return at_time >= next_probe_time; } + return false; } std::vector ProbeController::Process(Timestamp at_time) { if (at_time - time_last_probing_initiated_ > kMaxWaitingTimeForProbingResult) { - mid_call_probing_waiting_for_result_ = false; - if (state_ == State::kWaitingForProbingResult) { RTC_LOG(LS_INFO) << "kWaitingForProbingResult: timeout"; state_ = State::kProbingComplete; @@ -443,8 +439,7 @@ std::vector ProbeController::Process(Timestamp at_time) { if (estimated_bitrate_.IsZero() || state_ != State::kProbingComplete) { return {}; } - if (send_probe_on_next_process_interval_ || TimeForAlrProbe(at_time) || - TimeForNetworkStateProbe(at_time)) { + if (TimeForAlrProbe(at_time) || TimeForNetworkStateProbe(at_time)) { return InitiateProbing( at_time, {estimated_bitrate_ * config_.alr_probe_scale}, true); } @@ -459,27 +454,19 @@ std::vector ProbeController::InitiateProbing( DataRate network_estimate = network_estimate_ ? network_estimate_->link_capacity_upper : DataRate::PlusInfinity(); + DataRate max_probe_rate = + max_total_allocated_bitrate_.IsZero() + ? 
max_bitrate_ + : std::min(max_total_allocated_bitrate_, max_bitrate_); if (std::min(network_estimate, estimated_bitrate_) > - config_.skip_if_estimate_larger_than_fraction_of_max * max_bitrate_) { + config_.skip_if_estimate_larger_than_fraction_of_max * max_probe_rate) { + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); return {}; } } DataRate max_probe_bitrate = max_bitrate_; - if (bwe_limited_due_to_packet_loss_ && - config_.limit_probe_target_rate_to_loss_bwe) { - max_probe_bitrate = std::min(estimated_bitrate_, max_bitrate_); - } - if (config_.network_state_estimate_probing_interval->IsFinite() && - network_estimate_ && network_estimate_->link_capacity_upper.IsFinite()) { - if (network_estimate_->link_capacity_upper.IsZero()) { - RTC_LOG(LS_INFO) << "Not sending probe, Network state estimate is zero"; - return {}; - } - max_probe_bitrate = - std::min(max_probe_bitrate, network_estimate_->link_capacity_upper * - config_.network_state_probe_scale); - } if (max_total_allocated_bitrate_ > DataRate::Zero()) { // If a max allocated bitrate has been configured, allow probing up to 2x // that rate. This allows some overhead to account for bursty streams, @@ -491,12 +478,47 @@ std::vector ProbeController::InitiateProbing( std::min(max_probe_bitrate, max_total_allocated_bitrate_ * 2); } - send_probe_on_next_process_interval_ = false; + DataRate estimate_capped_bitrate = DataRate::PlusInfinity(); + if (config_.limit_probe_target_rate_to_loss_bwe) { + switch (bandwidth_limited_cause_) { + case BandwidthLimitedCause::kLossLimitedBweDecreasing: + // If bandwidth estimate is decreasing because of packet loss, do not + // send probes. 
+ return {}; + case BandwidthLimitedCause::kLossLimitedBweIncreasing: + estimate_capped_bitrate = + std::min(max_probe_bitrate, + estimated_bitrate_ * config_.loss_limited_probe_scale); + break; + case BandwidthLimitedCause::kDelayBasedLimited: + break; + default: + break; + } + } + if (config_.not_probe_if_delay_increased && + bandwidth_limited_cause_ == + BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased) { + return {}; + } + + if (config_.network_state_estimate_probing_interval->IsFinite() && + network_estimate_ && network_estimate_->link_capacity_upper.IsFinite()) { + if (network_estimate_->link_capacity_upper.IsZero()) { + RTC_LOG(LS_INFO) << "Not sending probe, Network state estimate is zero"; + return {}; + } + estimate_capped_bitrate = + std::min({estimate_capped_bitrate, max_probe_bitrate, + network_estimate_->link_capacity_upper * + config_.network_state_probe_scale}); + } std::vector pending_probes; for (DataRate bitrate : bitrates_to_probe) { RTC_DCHECK(!bitrate.IsZero()); + bitrate = std::min(bitrate, estimate_capped_bitrate); if (bitrate > max_probe_bitrate) { bitrate = max_probe_bitrate; probe_further = false; @@ -521,8 +543,11 @@ std::vector ProbeController::InitiateProbing( time_last_probing_initiated_ = now; if (probe_further) { state_ = State::kWaitingForProbingResult; + // Dont expect probe results to be larger than a fraction of the actual + // probe rate. 
min_bitrate_to_probe_further_ = - (*(bitrates_to_probe.end() - 1)) * config_.further_probe_threshold; + std::min(estimate_capped_bitrate, (*(bitrates_to_probe.end() - 1))) * + config_.further_probe_threshold; } else { state_ = State::kProbingComplete; min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); diff --git a/modules/congestion_controller/goog_cc/probe_controller.h b/modules/congestion_controller/goog_cc/probe_controller.h index e1ee08fc99..aa8b526ab0 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.h +++ b/modules/congestion_controller/goog_cc/probe_controller.h @@ -50,18 +50,19 @@ struct ProbeControllerConfig { // Configures how often we send probes if NetworkStateEstimate is available. FieldTrialParameter network_state_estimate_probing_interval; - // If the network state estimate increase more than this rate, a probe is sent - // the next process interval. - FieldTrialParameter network_state_estimate_fast_rampup_rate; - // If the network state estimate decreases more than this rate, a probe is - // sent the next process interval. - FieldTrialParameter network_state_estimate_drop_down_rate; + // Periodically probe as long as the the ratio beteeen current estimate and + // NetworkStateEstimate is lower then this. + FieldTrialParameter + probe_if_estimate_lower_than_network_state_estimate_ratio; + FieldTrialParameter + estimate_lower_than_network_state_estimate_probing_interval; FieldTrialParameter network_state_probe_scale; // Overrides min_probe_duration if network_state_estimate_probing_interval // is set and a network state estimate is known. FieldTrialParameter network_state_probe_duration; // Configures the probes emitted by changed to the allocated bitrate. 
+ FieldTrialParameter probe_on_max_allocated_bitrate_change; FieldTrialOptional first_allocation_probe_scale; FieldTrialOptional second_allocation_probe_scale; FieldTrialFlag allocation_allow_further_probing; @@ -71,12 +72,24 @@ struct ProbeControllerConfig { FieldTrialParameter min_probe_packets_sent; // The minimum probing duration. FieldTrialParameter min_probe_duration; - // Max limit the target rate of a probe to current estimate if BWE is loss - // limited. + // Periodically probe when bandwidth estimate is loss limited. FieldTrialParameter limit_probe_target_rate_to_loss_bwe; + FieldTrialParameter loss_limited_probe_scale; // Dont send a probe if min(estimate, network state estimate) is larger than // this fraction of the set max bitrate. FieldTrialParameter skip_if_estimate_larger_than_fraction_of_max; + // Do not send probes if network is either overusing or underusing. + FieldTrialParameter not_probe_if_delay_increased; +}; + +// Reason that bandwidth estimate is limited. Bandwidth estimate can be limited +// by either delay based bwe, or loss based bwe when it increases/decreases the +// estimate. +enum class BandwidthLimitedCause { + kLossLimitedBweIncreasing = 0, + kLossLimitedBweDecreasing = 1, + kDelayBasedLimited = 2, + kDelayBasedLimitedDelayIncreased = 3, }; // This class controls initiation of probing to estimate initial channel @@ -108,7 +121,7 @@ class ProbeController { ABSL_MUST_USE_RESULT std::vector SetEstimatedBitrate( DataRate bitrate, - bool bwe_limited_due_to_packet_loss, + BandwidthLimitedCause bandwidth_limited_cause, Timestamp at_time); void EnablePeriodicAlrProbing(bool enable); @@ -119,8 +132,6 @@ class ProbeController { ABSL_MUST_USE_RESULT std::vector RequestProbe( Timestamp at_time); - // Sets a new maximum probing bitrate, without generating a new probe cluster. 
- void SetMaxBitrate(DataRate max_bitrate); void SetNetworkStateEstimate(webrtc::NetworkStateEstimate estimate); // Resets the ProbeController to a state equivalent to as if it was just @@ -130,6 +141,11 @@ class ProbeController { ABSL_MUST_USE_RESULT std::vector Process( Timestamp at_time); + // Gets the value of field trial not_probe_if_delay_increased. + bool DontProbeIfDelayIncreased() { + return config_.not_probe_if_delay_increased; + } + private: enum class State { // Initial state where no probing has been triggered yet. @@ -150,12 +166,12 @@ class ProbeController { bool TimeForNetworkStateProbe(Timestamp at_time) const; bool network_available_; - bool bwe_limited_due_to_packet_loss_; + BandwidthLimitedCause bandwidth_limited_cause_ = + BandwidthLimitedCause::kDelayBasedLimited; State state_; DataRate min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); Timestamp time_last_probing_initiated_ = Timestamp::MinusInfinity(); DataRate estimated_bitrate_ = DataRate::Zero(); - bool send_probe_on_next_process_interval_; absl::optional network_estimate_; DataRate start_bitrate_ = DataRate::Zero(); DataRate max_bitrate_ = DataRate::PlusInfinity(); @@ -168,10 +184,6 @@ class ProbeController { DataRate max_total_allocated_bitrate_ = DataRate::Zero(); const bool in_rapid_recovery_experiment_; - // For WebRTC.BWE.MidCallProbing.* metric. 
- bool mid_call_probing_waiting_for_result_; - DataRate mid_call_probing_bitrate_ = DataRate::Zero(); - DataRate mid_call_probing_succcess_threshold_ = DataRate::Zero(); RtcEventLog* event_log_; int32_t next_probe_cluster_id_ = 1; diff --git a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc index 2b2d71205e..e6a5c8ceef 100644 --- a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc +++ b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc @@ -102,7 +102,7 @@ TEST(ProbeControllerTest, ProbeOnlyWhenNetworkIsUp) { {.at_time = fixture.CurrentTime(), .network_available = false}); probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); probes = probe_controller->OnNetworkAvailability( {.at_time = fixture.CurrentTime(), .network_available = true}); EXPECT_GE(probes.size(), 2u); @@ -138,7 +138,7 @@ TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncrease) { // Long enough to time out exponential probing. 
fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetBitrates( @@ -148,7 +148,7 @@ TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncrease) { EXPECT_EQ(probes[0].target_data_rate.bps(), kMaxBitrate.bps() + 100); } -TEST(ProbeControllerTest, ProbesOnMaxBitrateIncreaseOnlyWhenInAlr) { +TEST(ProbeControllerTest, ProbesOnMaxAllocatedBitrateIncreaseOnlyWhenInAlr) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); @@ -156,18 +156,19 @@ TEST(ProbeControllerTest, ProbesOnMaxBitrateIncreaseOnlyWhenInAlr) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( kMaxBitrate - DataRate::BitsPerSec(1), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Wait long enough to time out exponential probing. fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); // Probe when in alr. probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); probes = probe_controller->OnMaxTotalAllocatedBitrate( kMaxBitrate + DataRate::BitsPerSec(1), fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); + EXPECT_EQ(probes.at(0).target_data_rate, kMaxBitrate); // Do not probe when not in alr. 
probe_controller->SetAlrStartTimeMs(absl::nullopt); @@ -176,6 +177,30 @@ TEST(ProbeControllerTest, ProbesOnMaxBitrateIncreaseOnlyWhenInAlr) { EXPECT_TRUE(probes.empty()); } +TEST(ProbeControllerTest, CanDisableProbingOnMaxTotalAllocatedBitrateIncrease) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "probe_max_allocation:false/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probes = probe_controller->SetEstimatedBitrate( + kMaxBitrate - DataRate::BitsPerSec(1), + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + + // Do no probe, since probe_max_allocation:false. + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + probes = probe_controller->OnMaxTotalAllocatedBitrate( + kMaxBitrate + DataRate::BitsPerSec(1), fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} + TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncreaseAtMaxBitrate) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = @@ -185,11 +210,11 @@ TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncreaseAtMaxBitrate) { // Long enough to time out exponential probing. 
fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate + DataRate::BitsPerSec(100), @@ -209,13 +234,13 @@ TEST(ProbeControllerTest, TestExponentialProbing) { // Repeated probe should only be sent when estimated bitrate climbs above // 0.7 * 6 * kStartBitrate = 1260. probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1000), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(1000), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1800), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * 1800); } @@ -231,9 +256,9 @@ TEST(ProbeControllerTest, TestExponentialProbingTimeout) { probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1800), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, RequestProbeInAlr) { @@ -244,15 +269,15 @@ TEST(ProbeControllerTest, RequestProbeInAlr) { kMinBitrate, 
kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_GE(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probes = probe_controller->RequestProbe(fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); @@ -267,15 +292,15 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrEndedRecently) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(absl::nullopt); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrEndedTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrEndedTimeout - TimeDelta::Millis(1)); probes = probe_controller->RequestProbe(fixture.CurrentTime()); @@ -292,19 +317,19 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrNotEndedRecently) { kMinBitrate, kStartBitrate, 
kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(absl::nullopt); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrEndedTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrEndedTimeout + TimeDelta::Millis(1)); probes = probe_controller->RequestProbe(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, RequestProbeWhenBweDropNotRecent) { @@ -315,18 +340,18 @@ TEST(ProbeControllerTest, RequestProbeWhenBweDropNotRecent) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); fixture.AdvanceTime(kBitrateDropTimeout + TimeDelta::Millis(1)); probes = 
probe_controller->RequestProbe(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, PeriodicProbing) { @@ -338,8 +363,8 @@ TEST(ProbeControllerTest, PeriodicProbing) { kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); Timestamp start_time = fixture.CurrentTime(); @@ -351,26 +376,26 @@ TEST(ProbeControllerTest, PeriodicProbing) { EXPECT_EQ(probes[0].target_data_rate.bps(), 1000); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); // The following probe should be sent at 10s into ALR. 
probe_controller->SetAlrStartTimeMs(start_time.ms()); fixture.AdvanceTime(TimeDelta::Seconds(4)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); probe_controller->SetAlrStartTimeMs(start_time.ms()); fixture.AdvanceTime(TimeDelta::Seconds(1)); probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, PeriodicProbingAfterReset) { @@ -389,7 +414,7 @@ TEST(ProbeControllerTest, PeriodicProbingAfterReset) { probes = probe_controller->Process(fixture.CurrentTime()); // Since bitrates are not yet set, no probe is sent event though we are in ALR // mode. - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -413,15 +438,15 @@ TEST(ProbeControllerTest, TestExponentialProbingOverflow) { fixture.CurrentTime()); // Verify that probe bitrate is capped at the specified max bitrate. probes = probe_controller->SetEstimatedBitrate( - 60 * kMbpsMultiplier, /*bwe_limited_due_to_packet_loss=*/false, + 60 * kMbpsMultiplier, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); EXPECT_EQ(probes[0].target_data_rate, 100 * kMbpsMultiplier); // Verify that repeated probes aren't sent. 
probes = probe_controller->SetEstimatedBitrate( - 100 * kMbpsMultiplier, /*bwe_limited_due_to_packet_loss=*/false, + 100 * kMbpsMultiplier, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 0u); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, TestAllocatedBitrateCap) { @@ -440,7 +465,7 @@ TEST(ProbeControllerTest, TestAllocatedBitrateCap) { DataRate estimated_bitrate = kMaxBitrate / 10; probes = probe_controller->SetEstimatedBitrate( - estimated_bitrate, /*bwe_limited_due_to_packet_loss=*/false, + estimated_bitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Set a max allocated bitrate below the current estimate. @@ -486,13 +511,13 @@ TEST(ProbeControllerTest, ConfigurableProbingFieldTrial) { // Repeated probe should only be sent when estimated bitrate climbs above // 0.8 * 5 * kStartBitrateBps = 1200. probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1100), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(1100), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); EXPECT_EQ(probes.size(), 0u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1250), - /*bwe_limited_due_to_packet_loss=*/false, fixture.CurrentTime()); + DataRate::BitsPerSec(1250), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); EXPECT_EQ(probes[0].target_data_rate.bps(), 3 * 1250); @@ -516,7 +541,7 @@ TEST(ProbeControllerTest, LimitAlrProbeWhenLossBasedBweLimited) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. 
probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); @@ -525,20 +550,20 @@ TEST(ProbeControllerTest, LimitAlrProbeWhenLossBasedBweLimited) { ASSERT_EQ(probes.size(), 1u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss*/ true, - fixture.CurrentTime()); + DataRate::BitsPerSec(500), + BandwidthLimitedCause::kLossLimitedBweIncreasing, fixture.CurrentTime()); fixture.AdvanceTime(TimeDelta::Seconds(6)); probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_EQ(probes.size(), 1u); - EXPECT_EQ(probes[0].target_data_rate, DataRate::BitsPerSec(500)); + EXPECT_EQ(probes[0].target_data_rate, 1.5 * DataRate::BitsPerSec(500)); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, - fixture.CurrentTime()); + 1.5 * DataRate::BitsPerSec(500), + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); fixture.AdvanceTime(TimeDelta::Seconds(6)); probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_TRUE(!probes.empty()); - EXPECT_GT(probes[0].target_data_rate, DataRate::BitsPerSec(500)); + ASSERT_FALSE(probes.empty()); + EXPECT_GT(probes[0].target_data_rate, 1.5 * 1.5 * DataRate::BitsPerSec(500)); } TEST(ProbeControllerTest, PeriodicProbeAtUpperNetworkStateEstimate) { @@ -550,7 +575,7 @@ TEST(ProbeControllerTest, PeriodicProbeAtUpperNetworkStateEstimate) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(5000), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::BitsPerSec(5000), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. 
NetworkStateEstimate state_estimate; @@ -578,32 +603,24 @@ TEST(ProbeControllerTest, auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. NetworkStateEstimate state_estimate; - state_estimate.link_capacity_upper = DataRate::KilobitsPerSec(600); + state_estimate.link_capacity_upper = DataRate::BitsPerSec(700); probe_controller->SetNetworkStateEstimate(state_estimate); fixture.AdvanceTime(TimeDelta::Seconds(5)); probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_EQ(probes.size(), 1u); probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/true, - fixture.CurrentTime()); + DataRate::BitsPerSec(500), + BandwidthLimitedCause::kLossLimitedBweIncreasing, fixture.CurrentTime()); // Expect the controller to send a new probe after 5s has passed. 
fixture.AdvanceTime(TimeDelta::Seconds(5)); probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_TRUE(!probes.empty()); - EXPECT_EQ(probes[0].target_data_rate, DataRate::BitsPerSec(500)); - - probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(500), /*bwe_limited_due_to_packet_loss=*/false, - fixture.CurrentTime()); - fixture.AdvanceTime(TimeDelta::Seconds(5)); - probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_TRUE(!probes.empty()); - EXPECT_GT(probes[0].target_data_rate, DataRate::BitsPerSec(500)); + ASSERT_FALSE(probes.empty()); + EXPECT_EQ(probes[0].target_data_rate, DataRate::BitsPerSec(700)); } TEST(ProbeControllerTest, AlrProbesLimitedByNetworkStateEstimate) { @@ -615,7 +632,7 @@ TEST(ProbeControllerTest, AlrProbesLimitedByNetworkStateEstimate) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::KilobitsPerSec(6), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::KilobitsPerSec(6), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); @@ -643,7 +660,7 @@ TEST(ProbeControllerTest, CanSetLongerProbeDurationAfterNetworkStateEstimate) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - DataRate::KilobitsPerSec(5), /*bwe_limited_due_to_packet_loss=*/false, + DataRate::KilobitsPerSec(5), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); EXPECT_LT(probes[0].target_duration, TimeDelta::Millis(100)); @@ -657,88 +674,153 @@ TEST(ProbeControllerTest, CanSetLongerProbeDurationAfterNetworkStateEstimate) { EXPECT_EQ(probes[0].target_duration, TimeDelta::Millis(100)); } -TEST(ProbeControllerTest, ProbeAfterLargeNetworkStateIncrease) { +TEST(ProbeControllerTest, 
ProbeInAlrIfLossBasedIncreasing) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,network_state_fast_rampup_rate:2.0/"); + "limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + // Probe when in alr. + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_EQ(probes.size(), 1u); + EXPECT_EQ(probes.at(0).target_data_rate, 1.5 * kStartBitrate); +} + +TEST(ProbeControllerTest, ProbeFurtherInAlrIfLossBasedIncreasing) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + // Probe when in alr. 
+ probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_EQ(probes.size(), 1u); + ASSERT_EQ(probes.at(0).target_data_rate, 1.5 * kStartBitrate); + + probes = probe_controller->SetEstimatedBitrate( + 1.5 * kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + ASSERT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate, 1.5 * 1.5 * kStartBitrate); +} + +TEST(ProbeControllerTest, NotProbeWhenInAlrIfLossBasedDecreases) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweDecreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + // Not probe in alr when loss based estimate decreases. 
+ probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} + +TEST(ProbeControllerTest, NotProbeIfLossBasedIncreasingOutsideAlr) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probe_controller->EnablePeriodicAlrProbing(true); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + probe_controller->SetAlrStartTimeMs(absl::nullopt); + fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} + +TEST(ProbeControllerTest, ProbeFurtherWhenLossBasedIsSameAsDelayBasedEstimate) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); std::unique_ptr probe_controller = fixture.CreateController(); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); - probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, - fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + // Need to wait at least one second before process can trigger a new probe. 
fixture.AdvanceTime(TimeDelta::Millis(1100)); probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); + ASSERT_TRUE(probes.empty()); NetworkStateEstimate state_estimate; - state_estimate.link_capacity_upper = kStartBitrate; + state_estimate.link_capacity_upper = 5 * kStartBitrate; probe_controller->SetNetworkStateEstimate(state_estimate); - // No probe since NetworkStateEstimate is not higher than the set - // estimated bitrate. + fixture.AdvanceTime(TimeDelta::Seconds(5)); probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); + ASSERT_FALSE(probes.empty()); - // If NetworkState increase just a bit, dont expect the probe to be sent - // immediately. - state_estimate.link_capacity_upper = kStartBitrate * 1.4; - probe_controller->SetNetworkStateEstimate(state_estimate); - probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); - - // If NetworkState increase dramatically, expect a probe to be sent. - state_estimate.link_capacity_upper = kStartBitrate * 1.4 * 2; - probe_controller->SetNetworkStateEstimate(state_estimate); - probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 1u); -} - -TEST(ProbeControllerTest, ProbeAfterLargeNetworkStateDrop) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,network_state_drop_down_rate:0.5/"); - std::unique_ptr probe_controller = - fixture.CreateController(); - - auto probes = probe_controller->SetBitrates( - kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + DataRate probe_target_rate = probes[0].target_data_rate; + EXPECT_LT(probe_target_rate, state_estimate.link_capacity_upper); + // Expect that more probes are sent if BWE is the same as delay based + // estimate. 
probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + probe_target_rate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); - // Need to wait at least one second before process can trigger a new probe. - fixture.AdvanceTime(TimeDelta::Millis(1100)); - probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); - - NetworkStateEstimate state_estimate; - state_estimate.link_capacity_upper = kStartBitrate; - probe_controller->SetNetworkStateEstimate(state_estimate); - // No probe since NetworkStateEstimate is not lower than the set - // estimated bitrate. - probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); - - // If NetworkState decrease just a bit, dont expect the probe to be sent - // immediately. - state_estimate.link_capacity_upper = kStartBitrate * 0.9; - probe_controller->SetNetworkStateEstimate(state_estimate); - probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); - - // If NetworkState decrease dramatically, expect a probe to be sent. - state_estimate.link_capacity_upper = kStartBitrate * 0.9 * 0.5; - probe_controller->SetNetworkStateEstimate(state_estimate); - probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 1u); + ASSERT_FALSE(probes.empty()); + EXPECT_EQ(probes[0].target_data_rate, 2 * probe_target_rate); } -TEST(ProbeControllerTest, ProbeAfterLargeNetworkStateDropLossLimited) { +TEST(ProbeControllerTest, ProbeIfEstimateLowerThanNetworkStateEstimate) { + // Periodic probe every 1 second if estimate is lower than 50% of the + // NetworkStateEstimate. 
ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,network_state_drop_down_rate:0.5,limit_probe_" + "WebRTC-Bwe-ProbingConfiguration/est_lower_than_network_interval:1s," + "est_lower_than_network_ratio:0.5,limit_probe_" "target_rate_to_loss_bwe:true/"); std::unique_ptr probe_controller = fixture.CreateController(); @@ -746,7 +828,7 @@ TEST(ProbeControllerTest, ProbeAfterLargeNetworkStateDropLossLimited) { auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); // Need to wait at least one second before process can trigger a new probe. fixture.AdvanceTime(TimeDelta::Millis(1100)); @@ -759,17 +841,25 @@ TEST(ProbeControllerTest, ProbeAfterLargeNetworkStateDropLossLimited) { probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); - // Loss limited. - probes = probe_controller->SetEstimatedBitrate( - kStartBitrate / 3, /*bwe_limited_due_to_packet_loss=*/true, - fixture.CurrentTime()); - // If NetworkState decrease dramatically, expect a probe to be sent. - // But limited to loss based estimate. - state_estimate.link_capacity_upper = kStartBitrate / 2; + state_estimate.link_capacity_upper = kStartBitrate * 3; probe_controller->SetNetworkStateEstimate(state_estimate); probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_EQ(probes.size(), 1u); - EXPECT_EQ(probes[0].target_data_rate, kStartBitrate / 3); + EXPECT_GT(probes[0].target_data_rate, kStartBitrate); + + // If network state not increased, send another probe. + fixture.AdvanceTime(TimeDelta::Millis(1100)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_FALSE(probes.empty()); + + // Stop probing if estimate increase. We might probe further here though. 
+ probes = probe_controller->SetEstimatedBitrate( + 2 * kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + // No more periodic probes. + fixture.AdvanceTime(TimeDelta::Millis(1100)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, DontProbeFurtherWhenLossLimited) { @@ -797,8 +887,8 @@ TEST(ProbeControllerTest, DontProbeFurtherWhenLossLimited) { EXPECT_LT(probes[0].target_data_rate, state_estimate.link_capacity_upper); // Expect that no more probes are sent immediately if BWE is loss limited. probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate, /*bwe_limited_due_to_packet_loss=*/true, - fixture.CurrentTime()); + probes[0].target_data_rate, + BandwidthLimitedCause::kLossLimitedBweDecreasing, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); } @@ -827,12 +917,51 @@ TEST(ProbeControllerTest, ProbeFurtherWhenDelayBasedLimited) { EXPECT_LT(probes[0].target_data_rate, state_estimate.link_capacity_upper); // Since the probe was successfull, expect to continue probing. 
probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate, /*bwe_limited_due_to_packet_loss=*/false, + probes[0].target_data_rate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_FALSE(probes.empty()); EXPECT_EQ(probes[0].target_data_rate, state_estimate.link_capacity_upper); } +TEST(ProbeControllerTest, + ProbeFurtherIfNetworkStateEstimateIncreaseAfterProbeSent) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + NetworkStateEstimate state_estimate; + state_estimate.link_capacity_upper = 1.2 * probes[0].target_data_rate / 2; + probe_controller->SetNetworkStateEstimate(state_estimate); + // No immediate further probing since probe result is low. + probes = probe_controller->SetEstimatedBitrate( + probes[0].target_data_rate / 2, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + fixture.AdvanceTime(TimeDelta::Seconds(5)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + EXPECT_LE(probes[0].target_data_rate, state_estimate.link_capacity_upper); + // If the network state estimate increase above the threshold to probe + // further, and the probe suceeed, expect a new probe. + state_estimate.link_capacity_upper = 3 * kStartBitrate; + probe_controller->SetNetworkStateEstimate(state_estimate); + probes = probe_controller->SetEstimatedBitrate( + probes[0].target_data_rate, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_FALSE(probes.empty()); + + // But no more probes if estimate is close to the link capacity. 
+ probes = probe_controller->SetEstimatedBitrate( + state_estimate.link_capacity_upper * 0.9, + BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} + TEST(ProbeControllerTest, SkipAlrProbeIfEstimateLargerThanMaxProbe) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" @@ -845,7 +974,7 @@ TEST(ProbeControllerTest, SkipAlrProbeIfEstimateLargerThanMaxProbe) { ASSERT_FALSE(probes.empty()); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); @@ -855,8 +984,39 @@ TEST(ProbeControllerTest, SkipAlrProbeIfEstimateLargerThanMaxProbe) { EXPECT_TRUE(probes.empty()); // But if the max rate increase, A new probe is sent. - probe_controller->SetMaxBitrate(2 * kMaxBitrate); + probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, 2 * kMaxBitrate, fixture.CurrentTime()); + EXPECT_FALSE(probes.empty()); +} + +TEST(ProbeControllerTest, + SkipAlrProbeIfEstimateLargerThanFractionOfMaxAllocated) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "skip_if_est_larger_than_fraction_of_max:1.0/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + probe_controller->EnablePeriodicAlrProbing(true); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + probes = probe_controller->SetEstimatedBitrate( + kMaxBitrate / 2, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + + fixture.AdvanceTime(TimeDelta::Seconds(10)); + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + probes = probe_controller->OnMaxTotalAllocatedBitrate(kMaxBitrate / 2, + fixture.CurrentTime()); + // No probes since total allocated is not higher than the current estimate. 
+ EXPECT_TRUE(probes.empty()); + fixture.AdvanceTime(TimeDelta::Seconds(2)); probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); + + // But if the max allocated increase, A new probe is sent. + probes = probe_controller->OnMaxTotalAllocatedBitrate( + kMaxBitrate / 2 + DataRate::BitsPerSec(1), fixture.CurrentTime()); EXPECT_FALSE(probes.empty()); } @@ -873,7 +1033,7 @@ TEST(ProbeControllerTest, SkipNetworkStateProbeIfEstimateLargerThanMaxProbe) { probe_controller->SetNetworkStateEstimate( {.link_capacity_upper = 2 * kMaxBitrate}); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); @@ -886,7 +1046,7 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" "network_state_interval:2s,skip_if_est_larger_than_fraction_of_max:0.9," - "network_state_drop_down_rate:0.5/"); + "/"); std::unique_ptr probe_controller = fixture.CreateController(); auto probes = probe_controller->SetBitrates( @@ -895,16 +1055,19 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { probe_controller->SetNetworkStateEstimate( {.link_capacity_upper = 2 * kMaxBitrate}); probes = probe_controller->SetEstimatedBitrate( - kMaxBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kMaxBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); - // Need to wait at least one second before process can trigger a new probe. - fixture.AdvanceTime(TimeDelta::Millis(1100)); + // Need to wait at least two seconds before process can trigger a new probe. + fixture.AdvanceTime(TimeDelta::Millis(2100)); - // Sends a probe immediately if NetworkState estimate decrease. 
+ probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); probe_controller->SetNetworkStateEstimate( - {.link_capacity_upper = kStartBitrate}); + {.link_capacity_upper = 2 * kStartBitrate}); probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_FALSE(probes.empty()); } @@ -912,14 +1075,13 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { TEST(ProbeControllerTest, DontSendProbeIfNetworkStateEstimateIsZero) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,network_state_drop_down_rate:0.5,limit_probe_" - "target_rate_to_loss_bwe:true/"); + "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); std::unique_ptr probe_controller = fixture.CreateController(); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, /*bwe_limited_due_to_packet_loss=*/false, + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); probe_controller->SetNetworkStateEstimate( {.link_capacity_upper = kStartBitrate}); @@ -936,5 +1098,34 @@ TEST(ProbeControllerTest, DontSendProbeIfNetworkStateEstimateIsZero) { probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); } + +TEST(ProbeControllerTest, DontProbeIfDelayIncreased) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:5s,not_probe_if_delay_increased:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + + // Need to wait at least one second before process can trigger a new probe. 
+ fixture.AdvanceTime(TimeDelta::Millis(1100)); + probes = probe_controller->Process(fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + NetworkStateEstimate state_estimate; + state_estimate.link_capacity_upper = 3 * kStartBitrate; + probe_controller->SetNetworkStateEstimate(state_estimate); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased, + fixture.CurrentTime()); + ASSERT_TRUE(probes.empty()); + + fixture.AdvanceTime(TimeDelta::Seconds(5)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); +} } // namespace test } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc index 0a88e3d079..1e4db1ffaf 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -246,7 +246,8 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation( ParseFieldTrial({&disable_receiver_limit_caps_only_}, key_value_config->Lookup("WebRTC-Bwe-ReceiverLimitCapsOnly")); if (LossBasedBandwidthEstimatorV2Enabled()) { - loss_based_bandwidth_estimator_v2_.SetMinBitrate(min_bitrate_configured_); + loss_based_bandwidth_estimator_v2_.SetMinMaxBitrate( + min_bitrate_configured_, max_bitrate_configured_); } } @@ -308,6 +309,8 @@ void SendSideBandwidthEstimation::SetMinMaxBitrate(DataRate min_bitrate, } else { max_bitrate_configured_ = kDefaultMaxBitrate; } + loss_based_bandwidth_estimator_v2_.SetMinMaxBitrate(min_bitrate_configured_, + max_bitrate_configured_); } int SendSideBandwidthEstimation::GetMinBitrate() const { @@ -325,10 +328,6 @@ LossBasedState SendSideBandwidthEstimation::loss_based_state() const { return loss_based_state_; } -DataRate SendSideBandwidthEstimation::delay_based_limit() const { - return delay_based_limit_; -} - DataRate 
SendSideBandwidthEstimation::GetEstimatedLinkCapacity() const { return link_capacity_.estimate(); } @@ -370,7 +369,8 @@ void SendSideBandwidthEstimation::SetAcknowledgedRate( void SendSideBandwidthEstimation::UpdateLossBasedEstimator( const TransportPacketsFeedback& report, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate) { + absl::optional probe_bitrate, + DataRate upper_link_capacity) { if (LossBasedBandwidthEstimatorV1Enabled()) { loss_based_bandwidth_estimator_v1_.UpdateLossStatistics( report.packet_feedbacks, report.feedback_time); @@ -378,7 +378,7 @@ void SendSideBandwidthEstimation::UpdateLossBasedEstimator( if (LossBasedBandwidthEstimatorV2Enabled()) { loss_based_bandwidth_estimator_v2_.UpdateBandwidthEstimate( report.packet_feedbacks, delay_based_limit_, delay_detector_state, - probe_bitrate); + probe_bitrate, upper_link_capacity); UpdateEstimate(report.feedback_time); } } @@ -527,8 +527,7 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { if (LossBasedBandwidthEstimatorV2ReadyForUse()) { LossBasedBweV2::Result result = - loss_based_bandwidth_estimator_v2_.GetLossBasedResult( - delay_based_limit_); + loss_based_bandwidth_estimator_v2_.GetLossBasedResult(); loss_based_state_ = result.state; UpdateTargetBitrate(result.bandwidth_estimate, at_time); return; diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h index fc8b750552..77510236d3 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -86,7 +86,6 @@ class SendSideBandwidthEstimation { DataRate target_rate() const; LossBasedState loss_based_state() const; - DataRate delay_based_limit() const; uint8_t fraction_loss() const { return last_fraction_loss_; } TimeDelta round_trip_time() const { return last_round_trip_time_; } @@ -121,7 +120,8 @@ class 
SendSideBandwidthEstimation { Timestamp at_time); void UpdateLossBasedEstimator(const TransportPacketsFeedback& report, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate); + absl::optional probe_bitrate, + DataRate upper_link_capacity); private: friend class GoogCcStatePrinter; diff --git a/modules/congestion_controller/include/receive_side_congestion_controller.h b/modules/congestion_controller/include/receive_side_congestion_controller.h index 96ee8a6e3d..7696396016 100644 --- a/modules/congestion_controller/include/receive_side_congestion_controller.h +++ b/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -14,7 +14,6 @@ #include #include -#include "api/transport/field_trial_based_config.h" #include "api/transport/network_control.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" @@ -78,7 +77,6 @@ class ReceiveSideCongestionController : public CallStatsObserver { void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock& clock_; - const FieldTrialBasedConfig field_trial_config_; RembThrottler remb_throttler_; RemoteEstimatorProxy remote_estimator_proxy_; diff --git a/modules/congestion_controller/receive_side_congestion_controller.cc b/modules/congestion_controller/receive_side_congestion_controller.cc index 4f238835e4..e43b020f6e 100644 --- a/modules/congestion_controller/receive_side_congestion_controller.cc +++ b/modules/congestion_controller/receive_side_congestion_controller.cc @@ -84,7 +84,6 @@ ReceiveSideCongestionController::ReceiveSideCongestionController( : clock_(*clock), remb_throttler_(std::move(remb_sender), clock), remote_estimator_proxy_(std::move(feedback_sender), - &field_trial_config_, network_state_estimator), rbe_(new RemoteBitrateEstimatorSingleStream(&remb_throttler_, clock)), using_absolute_send_time_(false), diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn index a124a074ee..76b95b7efd 100644 --- a/modules/desktop_capture/BUILD.gn +++ 
b/modules/desktop_capture/BUILD.gn @@ -7,9 +7,7 @@ # be found in the AUTHORS file in the root of the source tree. import("//build/config/linux/gtk/gtk.gni") -import("//build/config/linux/pkg_config.gni") import("//build/config/ui.gni") -import("//tools/generate_stubs/rules.gni") import("../../webrtc.gni") use_desktop_capture_differ_sse2 = current_cpu == "x86" || current_cpu == "x64" @@ -84,7 +82,7 @@ if (rtc_include_tests) { ] if ((is_linux || is_chromeos) && rtc_use_pipewire) { - configs += [ ":gio" ] + configs += [ "../portal:gio" ] } public_configs = [ ":x11_config" ] @@ -95,6 +93,45 @@ if (rtc_include_tests) { } } + if ((is_linux || is_chromeos) && rtc_use_pipewire) { + rtc_test("shared_screencast_stream_test") { + testonly = true + + sources = [ + "linux/wayland/shared_screencast_stream_unittest.cc", + "linux/wayland/test/test_screencast_stream_provider.cc", + "linux/wayland/test/test_screencast_stream_provider.h", + ] + + configs += [ "../portal:pipewire_all" ] + + deps = [ + ":desktop_capture", + ":desktop_capture_mock", + ":primitives", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:random", + "../../rtc_base:timeutils", + "../portal", + + # TODO(bugs.webrtc.org/9987): Remove this dep on rtc_base:rtc_base once + # rtc_base:threading is fully defined. 
+ "../../rtc_base:rtc_base", + "../../rtc_base:task_queue_for_test", + "../../rtc_base:threading", + "../../system_wrappers", + "../../test:test_main", + "../../test:test_support", + "//api/units:time_delta", + "//rtc_base:rtc_event", + ] + + data = [ "../../third_party/pipewire" ] + public_configs = [ "../portal:pipewire_config" ] + } + } + rtc_library("desktop_capture_unittests") { testonly = true @@ -118,7 +155,7 @@ if (rtc_include_tests) { ] if ((is_linux || is_chromeos) && rtc_use_pipewire) { - configs += [ ":gio" ] + configs += [ "../portal:gio" ] } deps = [ @@ -228,7 +265,7 @@ if (rtc_include_tests) { ] if ((is_linux || is_chromeos) && rtc_use_pipewire) { - configs += [ ":gio" ] + configs += [ "../portal:gio" ] } deps = [ @@ -239,78 +276,10 @@ if (rtc_include_tests) { } } -if (is_linux || is_chromeos) { - if (rtc_use_pipewire) { - pkg_config("gio") { - packages = [ - "gio-2.0", - "gio-unix-2.0", - ] - } - - pkg_config("pipewire") { - packages = [ "libpipewire-0.3" ] - if (!rtc_link_pipewire) { - ignore_libs = true - } - } - - pkg_config("gbm") { - packages = [ "gbm" ] - } - pkg_config("egl") { - packages = [ "egl" ] - } - pkg_config("epoxy") { - packages = [ "epoxy" ] - ignore_libs = true - } - pkg_config("libdrm") { - packages = [ "libdrm" ] - if (!rtc_link_pipewire) { - ignore_libs = true - } - } - - if (!rtc_link_pipewire) { - # When libpipewire is not directly linked, use stubs to allow for dlopening of - # the binary. 
- generate_stubs("pipewire_stubs") { - configs = [ - "../../:common_config", - ":pipewire", - ":libdrm", - ] - deps = [ "../../rtc_base" ] - extra_header = "linux/wayland/pipewire_stub_header.fragment" - logging_function = "RTC_LOG(LS_VERBOSE)" - logging_include = "rtc_base/logging.h" - output_name = "linux/wayland/pipewire_stubs" - path_from_source = "modules/desktop_capture/linux/wayland" - sigs = [ - "linux/wayland/pipewire.sigs", - "linux/wayland/drm.sigs", - ] - if (!build_with_chromium) { - macro_include = "rtc_base/system/no_cfi_icall.h" - macro_deps = [ "../../rtc_base/system:no_cfi_icall" ] - } - } - } - - config("pipewire_config") { - defines = [ "WEBRTC_USE_PIPEWIRE" ] - if (!rtc_link_pipewire) { - defines += [ "WEBRTC_DLOPEN_PIPEWIRE" ] - } - - # Chromecast build config overrides `WEBRTC_USE_PIPEWIRE` even when - # `rtc_use_pipewire` is not set, which causes pipewire_config to not be - # included in targets. More details in: webrtc:13898 - if (is_linux && !is_castos) { - defines += [ "WEBRTC_USE_GIO" ] - } - } +# TODO(bugs.webrtc.org/14187): remove when all users are gone +if ((is_linux || is_chromeos) && rtc_use_pipewire) { + config("pipewire_config") { + configs = [ "../portal:pipewire_config" ] } } @@ -594,7 +563,6 @@ rtc_library("desktop_capture") { "linux/wayland/portal_request_response.h", "linux/wayland/restore_token_manager.cc", "linux/wayland/restore_token_manager.h", - "linux/wayland/scoped_glib.cc", "linux/wayland/scoped_glib.h", "linux/wayland/screen_capture_portal_interface.cc", "linux/wayland/screen_capture_portal_interface.h", @@ -604,27 +572,18 @@ rtc_library("desktop_capture") { "linux/wayland/screencast_stream_utils.h", "linux/wayland/shared_screencast_stream.cc", "linux/wayland/shared_screencast_stream.h", - "linux/wayland/xdg_desktop_portal_utils.cc", "linux/wayland/xdg_desktop_portal_utils.h", "linux/wayland/xdg_session_details.h", ] - configs += [ - ":gio", - ":pipewire", - ":gbm", - ":egl", - ":epoxy", - ":libdrm", + configs += [ 
"../portal:pipewire_all" ] + + public_configs += [ "../portal:pipewire_config" ] + + deps += [ + "../../rtc_base:sanitizer", + "../portal", ] - - if (!rtc_link_pipewire) { - deps += [ ":pipewire_stubs" ] - } - - public_configs += [ ":pipewire_config" ] - - deps += [ "../../rtc_base:sanitizer" ] } if (rtc_enable_win_wgc) { diff --git a/modules/desktop_capture/desktop_capture_metadata.h b/modules/desktop_capture/desktop_capture_metadata.h index faca156e33..49a20e729c 100644 --- a/modules/desktop_capture/desktop_capture_metadata.h +++ b/modules/desktop_capture/desktop_capture_metadata.h @@ -12,7 +12,7 @@ #define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_METADATA_H_ #if defined(WEBRTC_USE_GIO) -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/xdg_session_details.h" #endif // defined(WEBRTC_USE_GIO) namespace webrtc { diff --git a/modules/desktop_capture/desktop_capture_options.h b/modules/desktop_capture/desktop_capture_options.h index 4ee5259bb4..67dffee08a 100644 --- a/modules/desktop_capture/desktop_capture_options.h +++ b/modules/desktop_capture/desktop_capture_options.h @@ -105,6 +105,17 @@ class RTC_EXPORT DesktopCaptureOptions { detect_updated_region_ = detect_updated_region; } + // Indicates that the capturer should try to include the cursor in the frame. + // If it is able to do so it will set `DesktopFrame::may_contain_cursor()`. + // Not all capturers will support including the cursor. If this value is false + // or the cursor otherwise cannot be included in the frame, then cursor + // metadata will be sent, though the capturer may choose to always send cursor + // metadata. 
+ bool prefer_cursor_embedded() const { return prefer_cursor_embedded_; } + void set_prefer_cursor_embedded(bool prefer_cursor_embedded) { + prefer_cursor_embedded_ = prefer_cursor_embedded; + } + #if defined(WEBRTC_WIN) // Enumerating windows owned by the current process on Windows has some // complications due to |GetWindowText*()| APIs potentially causing a @@ -194,6 +205,13 @@ class RTC_EXPORT DesktopCaptureOptions { void set_height(uint32_t height) { height_ = height; } uint32_t get_height() const { return height_; } + + void set_pipewire_use_damage_region(bool use_damage_regions) { + pipewire_use_damage_region_ = use_damage_regions; + } + bool pipewire_use_damage_region() const { + return pipewire_use_damage_region_; + } #endif private: @@ -230,8 +248,10 @@ class RTC_EXPORT DesktopCaptureOptions { #endif bool disable_effects_ = true; bool detect_updated_region_ = false; + bool prefer_cursor_embedded_ = false; #if defined(WEBRTC_USE_PIPEWIRE) bool allow_pipewire_ = false; + bool pipewire_use_damage_region_ = true; uint32_t width_ = 0; uint32_t height_ = 0; #endif diff --git a/modules/desktop_capture/desktop_capture_types.h b/modules/desktop_capture/desktop_capture_types.h index fd5f881122..9627076eea 100644 --- a/modules/desktop_capture/desktop_capture_types.h +++ b/modules/desktop_capture/desktop_capture_types.h @@ -25,6 +25,8 @@ typedef intptr_t WindowId; const WindowId kNullWindowId = 0; +const int64_t kInvalidDisplayId = -1; + // Type used to identify screens on the desktop. Values are platform-specific: // - On Windows: integer display device index. // - On OSX: CGDirectDisplayID cast to intptr_t. @@ -33,9 +35,9 @@ const WindowId kNullWindowId = 0; // On Windows, ScreenId is implementation dependent: sending a ScreenId from one // implementation to another usually won't work correctly. 
#if defined(CHROMEOS) - typedef int64_t ScreenId; +typedef int64_t ScreenId; #else - typedef intptr_t ScreenId; +typedef intptr_t ScreenId; #endif // The screen id corresponds to all screen combined together. diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h index 513bf50c57..3e8f0dcac5 100644 --- a/modules/desktop_capture/desktop_capturer.h +++ b/modules/desktop_capture/desktop_capturer.h @@ -86,6 +86,10 @@ class RTC_EXPORT DesktopCapturer { // TODO(https://crbug.com/1369162): Remove or refactor this value. WindowId in_process_id = kNullWindowId; #endif + + // The display's unique ID. If no ID is defined, it will hold the value + // kInvalidDisplayId. + int64_t display_id = kInvalidDisplayId; }; typedef std::vector SourceList; diff --git a/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc b/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc index 2a63fc7a3c..fce023782f 100644 --- a/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc +++ b/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc @@ -13,10 +13,10 @@ #include "modules/desktop_capture/desktop_capture_options.h" #include "modules/desktop_capture/desktop_capturer.h" #include "modules/desktop_capture/linux/wayland/restore_token_manager.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" +#include "modules/portal/pipewire_utils.h" +#include "modules/portal/xdg_desktop_portal_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "screencast_portal.h" namespace webrtc { @@ -28,6 +28,18 @@ using xdg_portal::SessionDetails; } // namespace +// static +bool BaseCapturerPipeWire::IsSupported() { + // Unfortunately, the best way we have to check if PipeWire is available is + // to try to initialize it. 
+ // InitializePipeWire should prevent us from repeatedly initializing PipeWire, + // but we also don't really expect support to change without the application + // restarting. + static bool supported = + DesktopCapturer::IsRunningUnderWayland() && InitializePipeWire(); + return supported; +} + BaseCapturerPipeWire::BaseCapturerPipeWire(const DesktopCaptureOptions& options, CaptureType type) : BaseCapturerPipeWire(options, @@ -42,6 +54,8 @@ BaseCapturerPipeWire::BaseCapturerPipeWire( is_screencast_portal_(false), portal_(std::move(portal)) { source_id_ = RestoreTokenManager::GetInstance().GetUnusedId(); + options_.screencast_stream()->SetUseDamageRegion( + options_.pipewire_use_damage_region()); } BaseCapturerPipeWire::~BaseCapturerPipeWire() { @@ -58,7 +72,8 @@ void BaseCapturerPipeWire::OnScreenCastRequestResult(RequestResponse result, capturer_failed_ = false; if (result != RequestResponse::kSuccess || !options_.screencast_stream()->StartScreenCastStream( - stream_node_id, fd, options_.get_width(), options_.get_height())) { + stream_node_id, fd, options_.get_width(), options_.get_height(), + options_.prefer_cursor_embedded())) { capturer_failed_ = true; RTC_LOG(LS_ERROR) << "ScreenCastPortal failed: " << static_cast(result); diff --git a/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h b/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h index a852f44ade..c5c122c14c 100644 --- a/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h +++ b/modules/desktop_capture/linux/wayland/base_capturer_pipewire.h @@ -14,12 +14,12 @@ #include "modules/desktop_capture/delegated_source_list_controller.h" #include "modules/desktop_capture/desktop_capture_options.h" #include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" #include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h" #include "modules/desktop_capture/linux/wayland/screencast_portal.h" 
#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_session_details.h" namespace webrtc { @@ -27,6 +27,12 @@ class BaseCapturerPipeWire : public DesktopCapturer, public DelegatedSourceListController, public ScreenCastPortal::PortalNotifier { public: + // Returns whether or not the current system can support capture via PipeWire. + // This will only be true on Wayland systems that also have PipeWire + // available, and thus may require dlopening PipeWire to determine if it is + // available. + static bool IsSupported(); + BaseCapturerPipeWire(const DesktopCaptureOptions& options, CaptureType type); BaseCapturerPipeWire( const DesktopCaptureOptions& options, diff --git a/modules/desktop_capture/linux/wayland/drm.sigs b/modules/desktop_capture/linux/wayland/drm.sigs deleted file mode 100644 index 226979fe16..0000000000 --- a/modules/desktop_capture/linux/wayland/drm.sigs +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2021 The WebRTC project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -//------------------------------------------------ -// Functions from DRM used in capturer code. 
-//-------- - -// xf86drm.h -int drmGetDevices2(uint32_t flags, drmDevicePtr devices[], int max_devices); -void drmFreeDevices(drmDevicePtr devices[], int count); diff --git a/modules/desktop_capture/linux/wayland/portal_request_response.h b/modules/desktop_capture/linux/wayland/portal_request_response.h index dde9ac5eff..2589479347 100644 --- a/modules/desktop_capture/linux/wayland/portal_request_response.h +++ b/modules/desktop_capture/linux/wayland/portal_request_response.h @@ -11,24 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_ -namespace webrtc { -namespace xdg_portal { +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/portal_request_response.h" -// Contains type of responses that can be observed when making a request to -// a desktop portal interface. -enum class RequestResponse { - // Unknown, the initialized status. - kUnknown, - // Success, the request is carried out. - kSuccess, - // The user cancelled the interaction. - kUserCancelled, - // The user interaction was ended in some other way. 
- kError, - - kMaxValue = kError, -}; - -} // namespace xdg_portal -} // namespace webrtc #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_ diff --git a/modules/desktop_capture/linux/wayland/scoped_glib.h b/modules/desktop_capture/linux/wayland/scoped_glib.h index 908bd6f77d..1361f84328 100644 --- a/modules/desktop_capture/linux/wayland/scoped_glib.h +++ b/modules/desktop_capture/linux/wayland/scoped_glib.h @@ -11,55 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_ -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -template -class Scoped { - public: - Scoped() {} - explicit Scoped(T* val) { ptr_ = val; } - ~Scoped() { RTC_DCHECK_NOTREACHED(); } - - T* operator->() const { return ptr_; } - - explicit operator bool() const { return ptr_ != nullptr; } - - bool operator!() const { return ptr_ == nullptr; } - - T* get() const { return ptr_; } - - T** receive() { - RTC_CHECK(!ptr_); - return &ptr_; - } - - Scoped& operator=(T* val) { - RTC_DCHECK(val); - ptr_ = val; - return *this; - } - - protected: - T* ptr_ = nullptr; -}; - -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); -template <> -Scoped::~Scoped(); - -} // namespace webrtc +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/scoped_glib.h" #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_ diff --git a/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc b/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc index 02d9d2e806..1c7cc379df 100644 --- a/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc +++ b/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.cc @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ #include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" #include +#include "modules/portal/xdg_desktop_portal_utils.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h b/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h index 59aaf134e7..deb57a4707 100644 --- a/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h +++ b/modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h @@ -15,10 +15,10 @@ #include -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/scoped_glib.h" +#include "modules/portal/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_session_details.h" namespace webrtc { namespace xdg_portal { diff --git a/modules/desktop_capture/linux/wayland/screencast_portal.cc b/modules/desktop_capture/linux/wayland/screencast_portal.cc index f9cdb08533..abfade56e7 100644 --- a/modules/desktop_capture/linux/wayland/screencast_portal.cc +++ b/modules/desktop_capture/linux/wayland/screencast_portal.cc @@ -13,8 +13,8 @@ #include #include -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" +#include "modules/portal/scoped_glib.h" +#include "modules/portal/xdg_desktop_portal_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -56,9 +56,12 @@ ScreenCastPortal::ScreenCastPortal( PortalNotifier* notifier, ProxyRequestResponseHandler proxy_request_response_handler, SourcesRequestResponseSignalHandler 
sources_request_response_signal_handler, - gpointer user_data) + gpointer user_data, + bool prefer_cursor_embedded) : notifier_(notifier), capture_source_type_(ToCaptureSourceType(type)), + cursor_mode_(prefer_cursor_embedded ? CursorMode::kEmbedded + : CursorMode::kMetadata), proxy_request_response_handler_(proxy_request_response_handler), sources_request_response_signal_handler_( sources_request_response_signal_handler), diff --git a/modules/desktop_capture/linux/wayland/screencast_portal.h b/modules/desktop_capture/linux/wayland/screencast_portal.h index 96e4ba3ac9..ffb198f387 100644 --- a/modules/desktop_capture/linux/wayland/screencast_portal.h +++ b/modules/desktop_capture/linux/wayland/screencast_portal.h @@ -16,10 +16,10 @@ #include #include "modules/desktop_capture/desktop_capture_types.h" -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" #include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h" -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_session_details.h" namespace webrtc { @@ -84,7 +84,10 @@ class ScreenCastPortal : public xdg_portal::ScreenCapturePortalInterface { ProxyRequestResponseHandler proxy_request_response_handler, SourcesRequestResponseSignalHandler sources_request_response_signal_handler, - gpointer user_data); + gpointer user_data, + // TODO(chromium:1291247): Remove the default option once + // downstream has been adjusted. 
+ bool prefer_cursor_embedded = false); ~ScreenCastPortal(); @@ -140,7 +143,7 @@ class ScreenCastPortal : public xdg_portal::ScreenCapturePortalInterface { CaptureSourceType capture_source_type_ = ScreenCastPortal::CaptureSourceType::kScreen; - CursorMode cursor_mode_ = ScreenCastPortal::CursorMode::kMetadata; + CursorMode cursor_mode_ = CursorMode::kMetadata; PersistMode persist_mode_ = ScreenCastPortal::PersistMode::kDoNotPersist; diff --git a/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc b/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc index dc0784791d..0c4900d1cd 100644 --- a/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc +++ b/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc @@ -27,15 +27,6 @@ namespace webrtc { -PipeWireThreadLoopLock::PipeWireThreadLoopLock(pw_thread_loop* loop) - : loop_(loop) { - pw_thread_loop_lock(loop_); -} - -PipeWireThreadLoopLock::~PipeWireThreadLoopLock() { - pw_thread_loop_unlock(loop_); -} - PipeWireVersion PipeWireVersion::Parse(const absl::string_view& version) { std::vector parsed_version = rtc::split(version, '.'); diff --git a/modules/desktop_capture/linux/wayland/screencast_stream_utils.h b/modules/desktop_capture/linux/wayland/screencast_stream_utils.h index 70262c2e39..e04d7db931 100644 --- a/modules/desktop_capture/linux/wayland/screencast_stream_utils.h +++ b/modules/desktop_capture/linux/wayland/screencast_stream_utils.h @@ -18,23 +18,12 @@ #include "rtc_base/string_encode.h" -struct pw_thread_loop; struct spa_pod; struct spa_pod_builder; struct spa_rectangle; namespace webrtc { -// Locks pw_thread_loop in the current scope -class PipeWireThreadLoopLock { - public: - explicit PipeWireThreadLoopLock(pw_thread_loop* loop); - ~PipeWireThreadLoopLock(); - - private: - pw_thread_loop* const loop_; -}; - struct PipeWireVersion { static PipeWireVersion Parse(const absl::string_view& version); diff --git 
a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc index 10950d7727..71bde9b212 100644 --- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc +++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc @@ -21,29 +21,16 @@ #include "absl/memory/memory.h" #include "modules/desktop_capture/linux/wayland/egl_dmabuf.h" #include "modules/desktop_capture/linux/wayland/screencast_stream_utils.h" -#include "modules/desktop_capture/screen_capture_frame_queue.h" -#include "modules/desktop_capture/shared_desktop_frame.h" +#include "modules/portal/pipewire_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/sanitizer.h" #include "rtc_base/synchronization/mutex.h" -#if defined(WEBRTC_DLOPEN_PIPEWIRE) -#include "modules/desktop_capture/linux/wayland/pipewire_stubs.h" -using modules_desktop_capture_linux_wayland::InitializeStubs; -using modules_desktop_capture_linux_wayland::kModuleDrm; -using modules_desktop_capture_linux_wayland::kModulePipewire; -using modules_desktop_capture_linux_wayland::StubPathMap; -#endif // defined(WEBRTC_DLOPEN_PIPEWIRE) - namespace webrtc { const int kBytesPerPixel = 4; - -#if defined(WEBRTC_DLOPEN_PIPEWIRE) -const char kPipeWireLib[] = "libpipewire-0.3.so.0"; -const char kDrmLib[] = "libdrm.so.2"; -#endif +const int kVideoDamageRegionCount = 16; constexpr int kCursorBpp = 4; constexpr int CursorMetaSize(int w, int h) { @@ -90,10 +77,17 @@ class SharedScreenCastStreamPrivate { bool StartScreenCastStream(uint32_t stream_node_id, int fd, uint32_t width = 0, - uint32_t height = 0); + uint32_t height = 0, + bool is_cursor_embedded = false); void UpdateScreenCastStreamResolution(uint32_t width, uint32_t height); + void SetUseDamageRegion(bool use_damage_region) { + use_damage_region_ = use_damage_region; + } + void SetObserver(SharedScreenCastStream::Observer* observer) { + observer_ = observer; + } void 
StopScreenCastStream(); - std::unique_ptr CaptureFrame(); + std::unique_ptr CaptureFrame(); std::unique_ptr CaptureCursor(); DesktopVector CaptureCursorPosition(); @@ -101,6 +95,12 @@ class SharedScreenCastStreamPrivate { // Stops the streams and cleans up any in-use elements. void StopAndCleanupStream(); + SharedScreenCastStream::Observer* observer_ = nullptr; + + // Track damage region updates that were reported since the last time + // frame was captured + DesktopRegion damage_region_; + uint32_t pw_stream_node_id_ = 0; DesktopSize stream_size_ = {}; @@ -142,6 +142,12 @@ class SharedScreenCastStreamPrivate { // Resolution changes are processed during buffer processing. bool pending_resolution_change_ RTC_GUARDED_BY(&resolution_lock_) = false; + bool use_damage_region_ = true; + + // Specifies whether the pipewire stream has been initialized with a request + // to embed cursor into the captured frames. + bool is_cursor_embedded_ = false; + // event handlers pw_core_events pw_core_events_ = {}; pw_stream_events pw_stream_events_ = {}; @@ -223,6 +229,10 @@ void SharedScreenCastStreamPrivate::OnStreamStateChanged( RTC_LOG(LS_ERROR) << "PipeWire stream state error: " << error_message; break; case PW_STREAM_STATE_PAUSED: + if (that->observer_ && old_state != PW_STREAM_STATE_STREAMING) { + that->observer_->OnStreamConfigured(); + } + break; case PW_STREAM_STATE_STREAMING: case PW_STREAM_STATE_UNCONNECTED: case PW_STREAM_STATE_CONNECTING: @@ -294,9 +304,10 @@ void SharedScreenCastStreamPrivate::OnStreamParamChanged( params.push_back(reinterpret_cast(spa_pod_builder_add_object( &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoDamage), SPA_PARAM_META_size, - SPA_POD_CHOICE_RANGE_Int(sizeof(struct spa_meta_region) * 16, - sizeof(struct spa_meta_region) * 1, - sizeof(struct spa_meta_region) * 16)))); + SPA_POD_CHOICE_RANGE_Int( + sizeof(struct spa_meta_region) * kVideoDamageRegionCount, + sizeof(struct spa_meta_region) * 1, 
+ sizeof(struct spa_meta_region) * kVideoDamageRegionCount)))); pw_stream_update_params(that->pw_stream_, params.data(), params.size()); } @@ -374,24 +385,15 @@ bool SharedScreenCastStreamPrivate::StartScreenCastStream( uint32_t stream_node_id, int fd, uint32_t width, - uint32_t height) { + uint32_t height, + bool is_cursor_embedded) { width_ = width; height_ = height; -#if defined(WEBRTC_DLOPEN_PIPEWIRE) - StubPathMap paths; - - // Check if the PipeWire and DRM libraries are available. - paths[kModulePipewire].push_back(kPipeWireLib); - paths[kModuleDrm].push_back(kDrmLib); - - if (!InitializeStubs(paths)) { - RTC_LOG(LS_ERROR) - << "One of following libraries is missing on your system:\n" - << " - PipeWire (" << kPipeWireLib << ")\n" - << " - drm (" << kDrmLib << ")"; + is_cursor_embedded_ = is_cursor_embedded; + if (!InitializePipeWire()) { + RTC_LOG(LS_ERROR) << "Unable to open PipeWire library"; return false; } -#endif // defined(WEBRTC_DLOPEN_PIPEWIRE) egl_dmabuf_ = std::make_unique(); pw_stream_node_id_ = stream_node_id; @@ -579,15 +581,21 @@ void SharedScreenCastStreamPrivate::StopAndCleanupStream() { pw_main_loop_ = nullptr; } -std::unique_ptr SharedScreenCastStreamPrivate::CaptureFrame() { +std::unique_ptr +SharedScreenCastStreamPrivate::CaptureFrame() { webrtc::MutexLock lock(&queue_lock_); if (!pw_stream_ || !queue_.current_frame()) { - return std::unique_ptr{}; + return std::unique_ptr{}; } std::unique_ptr frame = queue_.current_frame()->Share(); - return std::move(frame); + if (use_damage_region_) { + frame->mutable_updated_region()->Swap(&damage_region_); + damage_region_.Clear(); + } + + return frame; } std::unique_ptr SharedScreenCastStreamPrivate::CaptureCursor() { @@ -632,8 +640,16 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { DesktopRect::MakeWH(bitmap->size.width, bitmap->size.height)); mouse_cursor_ = std::make_unique( mouse_frame, DesktopVector(cursor->hotspot.x, cursor->hotspot.y)); + + if (observer_) { + 
observer_->OnCursorShapeChanged(); + } } mouse_cursor_position_.set(cursor->position.x, cursor->position.y); + + if (observer_) { + observer_->OnCursorPositionChanged(); + } } } @@ -708,6 +724,9 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { } if (!src) { + if (observer_) { + observer_->OnFailedToProcessBuffer(); + } return; } @@ -734,6 +753,11 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { videocrop_metadata->region.size.height > static_cast(stream_size_.height()))) { RTC_LOG(LS_ERROR) << "Stream metadata sizes are wrong!"; + + if (observer_) { + observer_->OnFailedToProcessBuffer(); + } + return; } @@ -799,6 +823,10 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { queue_.MoveToNextFrame(); if (queue_.current_frame() && queue_.current_frame()->IsShared()) { RTC_DLOG(LS_WARNING) << "Overwriting frame that is still shared"; + + if (observer_) { + observer_->OnFailedToProcessBuffer(); + } } if (!queue_.current_frame() || @@ -823,8 +851,38 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { } } - queue_.current_frame()->mutable_updated_region()->SetRect( - DesktopRect::MakeSize(queue_.current_frame()->size())); + if (observer_) { + observer_->OnDesktopFrameChanged(); + } + + if (use_damage_region_) { + const struct spa_meta* video_damage = static_cast( + spa_buffer_find_meta(spa_buffer, SPA_META_VideoDamage)); + if (video_damage) { + spa_meta_region* meta_region; + + queue_.current_frame()->mutable_updated_region()->Clear(); + + spa_meta_for_each(meta_region, video_damage) { + // Skip empty regions + if (meta_region->region.size.width == 0 || + meta_region->region.size.height == 0) { + continue; + } + + damage_region_.AddRect(DesktopRect::MakeXYWH( + meta_region->region.position.x, meta_region->region.position.y, + meta_region->region.size.width, meta_region->region.size.height)); + } + } else { + damage_region_.SetRect( + 
DesktopRect::MakeSize(queue_.current_frame()->size())); + } + } else { + queue_.current_frame()->mutable_updated_region()->SetRect( + DesktopRect::MakeSize(queue_.current_frame()->size())); + } + queue_.current_frame()->set_may_contain_cursor(is_cursor_embedded_); } void SharedScreenCastStreamPrivate::ConvertRGBxToBGRx(uint8_t* frame, @@ -856,8 +914,10 @@ bool SharedScreenCastStream::StartScreenCastStream(uint32_t stream_node_id) { bool SharedScreenCastStream::StartScreenCastStream(uint32_t stream_node_id, int fd, uint32_t width, - uint32_t height) { - return private_->StartScreenCastStream(stream_node_id, fd, width, height); + uint32_t height, + bool is_cursor_embedded) { + return private_->StartScreenCastStream(stream_node_id, fd, width, height, + is_cursor_embedded); } void SharedScreenCastStream::UpdateScreenCastStreamResolution(uint32_t width, @@ -865,11 +925,20 @@ void SharedScreenCastStream::UpdateScreenCastStreamResolution(uint32_t width, private_->UpdateScreenCastStreamResolution(width, height); } +void SharedScreenCastStream::SetUseDamageRegion(bool use_damage_region) { + private_->SetUseDamageRegion(use_damage_region); +} + +void SharedScreenCastStream::SetObserver( + SharedScreenCastStream::Observer* observer) { + private_->SetObserver(observer); +} + void SharedScreenCastStream::StopScreenCastStream() { private_->StopScreenCastStream(); } -std::unique_ptr SharedScreenCastStream::CaptureFrame() { +std::unique_ptr SharedScreenCastStream::CaptureFrame() { return private_->CaptureFrame(); } diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.h b/modules/desktop_capture/linux/wayland/shared_screencast_stream.h index 66a3f45bdb..9cdd3d89be 100644 --- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.h +++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.h @@ -16,8 +16,9 @@ #include "absl/types/optional.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" -#include 
"modules/desktop_capture/desktop_frame.h" #include "modules/desktop_capture/mouse_cursor.h" +#include "modules/desktop_capture/screen_capture_frame_queue.h" +#include "modules/desktop_capture/shared_desktop_frame.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -27,14 +28,30 @@ class SharedScreenCastStreamPrivate; class RTC_EXPORT SharedScreenCastStream : public rtc::RefCountedNonVirtual { public: + class Observer { + public: + virtual void OnCursorPositionChanged() = 0; + virtual void OnCursorShapeChanged() = 0; + virtual void OnDesktopFrameChanged() = 0; + virtual void OnFailedToProcessBuffer() = 0; + virtual void OnStreamConfigured() = 0; + + protected: + Observer() = default; + virtual ~Observer() = default; + }; + static rtc::scoped_refptr CreateDefault(); bool StartScreenCastStream(uint32_t stream_node_id); bool StartScreenCastStream(uint32_t stream_node_id, int fd, uint32_t width = 0, - uint32_t height = 0); + uint32_t height = 0, + bool is_cursor_embedded = false); void UpdateScreenCastStreamResolution(uint32_t width, uint32_t height); + void SetUseDamageRegion(bool use_damage_region); + void SetObserver(SharedScreenCastStream::Observer* observer); void StopScreenCastStream(); // Below functions return the most recent information we get from a @@ -47,7 +64,7 @@ class RTC_EXPORT SharedScreenCastStream // Returns the most recent screen/window frame we obtained from PipeWire // buffer. Will return an empty frame in case we didn't manage to get a frame // from PipeWire buffer. - std::unique_ptr CaptureFrame(); + std::unique_ptr CaptureFrame(); // Returns the most recent mouse cursor image. Will return an nullptr cursor // in case we didn't manage to get a cursor from PipeWire buffer. 
NOTE: the @@ -65,6 +82,8 @@ class RTC_EXPORT SharedScreenCastStream SharedScreenCastStream(); private: + friend class SharedScreenCastStreamPrivate; + SharedScreenCastStream(const SharedScreenCastStream&) = delete; SharedScreenCastStream& operator=(const SharedScreenCastStream&) = delete; diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream_unittest.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream_unittest.cc new file mode 100644 index 0000000000..1de5f19013 --- /dev/null +++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream_unittest.cc @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h" + +#include +#include + +#include "api/units/time_delta.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h" +#include "modules/desktop_capture/rgba_color.h" +#include "rtc_base/event.h" +#include "test/gmock.h" +#include "test/gtest.h" + +using ::testing::_; +using ::testing::AtLeast; +using ::testing::Ge; +using ::testing::Invoke; + +namespace webrtc { + +constexpr TimeDelta kShortWait = TimeDelta::Seconds(5); +constexpr TimeDelta kLongWait = TimeDelta::Seconds(15); + +constexpr int kBytesPerPixel = 4; +constexpr int32_t kWidth = 800; +constexpr int32_t kHeight = 640; + +class PipeWireStreamTest : public ::testing::Test, + public TestScreenCastStreamProvider::Observer, + public SharedScreenCastStream::Observer { + public: + PipeWireStreamTest() = default; + ~PipeWireStreamTest() = default; + + // FakeScreenCastPortal::Observer + MOCK_METHOD(void, OnBufferAdded, (), (override)); + MOCK_METHOD(void, OnFrameRecorded, (), (override)); + MOCK_METHOD(void, OnStreamReady, (uint32_t stream_node_id), (override)); + MOCK_METHOD(void, OnStartStreaming, (), (override)); + MOCK_METHOD(void, OnStopStreaming, (), (override)); + + // SharedScreenCastStream::Observer + MOCK_METHOD(void, OnCursorPositionChanged, (), (override)); + MOCK_METHOD(void, OnCursorShapeChanged, (), (override)); + MOCK_METHOD(void, OnDesktopFrameChanged, (), (override)); + MOCK_METHOD(void, OnFailedToProcessBuffer, (), (override)); + MOCK_METHOD(void, OnStreamConfigured, (), (override)); + + void SetUp() override { + shared_screencast_stream_ = SharedScreenCastStream::CreateDefault(); + shared_screencast_stream_->SetObserver(this); + test_screencast_stream_provider_ = + std::make_unique(this, kWidth, kHeight); + } + + void StartScreenCastStream(uint32_t stream_node_id) { + 
shared_screencast_stream_->StartScreenCastStream(stream_node_id); + } + + protected: + uint recorded_frames_ = 0; + bool streaming_ = false; + std::unique_ptr + test_screencast_stream_provider_; + rtc::scoped_refptr shared_screencast_stream_; +}; + +TEST_F(PipeWireStreamTest, TestPipeWire) { + // Set expectations for PipeWire to successfully connect both streams + rtc::Event waitConnectEvent; + rtc::Event waitStartStreamingEvent; + + EXPECT_CALL(*this, OnStreamReady(_)) + .WillOnce(Invoke(this, &PipeWireStreamTest::StartScreenCastStream)); + EXPECT_CALL(*this, OnStreamConfigured).WillOnce([&waitConnectEvent] { + waitConnectEvent.Set(); + }); + EXPECT_CALL(*this, OnBufferAdded).Times(AtLeast(3)); + EXPECT_CALL(*this, OnStartStreaming).WillOnce([&waitStartStreamingEvent] { + waitStartStreamingEvent.Set(); + }); + + // Give it some time to connect, the order between these shouldn't matter, but + // we need to be sure we are connected before we proceed to work with frames. + waitConnectEvent.Wait(kLongWait); + + // Wait until we start streaming + waitStartStreamingEvent.Wait(kShortWait); + + rtc::Event frameRetrievedEvent; + EXPECT_CALL(*this, OnFrameRecorded).Times(3); + EXPECT_CALL(*this, OnDesktopFrameChanged) + .WillRepeatedly([&frameRetrievedEvent] { frameRetrievedEvent.Set(); }); + + // Record a frame in FakePipeWireStream + RgbaColor red_color(0, 0, 255); + test_screencast_stream_provider_->RecordFrame(red_color); + + // Retrieve a frame from SharedScreenCastStream + frameRetrievedEvent.Wait(kShortWait); + std::unique_ptr frame = + shared_screencast_stream_->CaptureFrame(); + + // Check frame parameters + ASSERT_NE(frame, nullptr); + ASSERT_NE(frame->data(), nullptr); + EXPECT_EQ(frame->rect().width(), kWidth); + EXPECT_EQ(frame->rect().height(), kHeight); + EXPECT_EQ(frame->stride(), frame->rect().width() * kBytesPerPixel); + EXPECT_EQ(RgbaColor(frame->data()), red_color); + + // Test DesktopFrameQueue + RgbaColor green_color(0, 255, 0); + 
test_screencast_stream_provider_->RecordFrame(green_color); + frameRetrievedEvent.Wait(kShortWait); + std::unique_ptr frame2 = + shared_screencast_stream_->CaptureFrame(); + ASSERT_NE(frame2, nullptr); + ASSERT_NE(frame2->data(), nullptr); + EXPECT_EQ(frame2->rect().width(), kWidth); + EXPECT_EQ(frame2->rect().height(), kHeight); + EXPECT_EQ(frame2->stride(), frame->rect().width() * kBytesPerPixel); + EXPECT_EQ(RgbaColor(frame2->data()), green_color); + + // Thanks to DesktopFrameQueue we should be able to have two frames shared + EXPECT_EQ(frame->IsShared(), true); + EXPECT_EQ(frame2->IsShared(), true); + EXPECT_NE(frame->data(), frame2->data()); + + // This should result into overwriting a frame in use + rtc::Event frameRecordedEvent; + RgbaColor blue_color(255, 0, 0); + EXPECT_CALL(*this, OnFailedToProcessBuffer).WillOnce([&frameRecordedEvent] { + frameRecordedEvent.Set(); + }); + + test_screencast_stream_provider_->RecordFrame(blue_color); + frameRecordedEvent.Wait(kShortWait); + + // First frame should be now overwritten with blue color + frameRetrievedEvent.Wait(kShortWait); + EXPECT_EQ(RgbaColor(frame->data()), blue_color); + + // Test disconnection from stream + EXPECT_CALL(*this, OnStopStreaming); + shared_screencast_stream_->StopScreenCastStream(); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc new file mode 100644 index 0000000000..3b829959ac --- /dev/null +++ b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc @@ -0,0 +1,361 @@ + +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h" + +#include +#include +#include +#include + +#include +#include +#include + +#include "modules/portal/pipewire_utils.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +constexpr int kBytesPerPixel = 4; + +TestScreenCastStreamProvider::TestScreenCastStreamProvider(Observer* observer, + uint32_t width, + uint32_t height) + : observer_(observer), width_(width), height_(height) { + if (!InitializePipeWire()) { + RTC_LOG(LS_ERROR) << "Unable to open PipeWire"; + return; + } + + pw_init(/*argc=*/nullptr, /*argc=*/nullptr); + + pw_main_loop_ = pw_thread_loop_new("pipewire-test-main-loop", nullptr); + + pw_context_ = + pw_context_new(pw_thread_loop_get_loop(pw_main_loop_), nullptr, 0); + if (!pw_context_) { + RTC_LOG(LS_ERROR) << "PipeWire test: Failed to create PipeWire context"; + return; + } + + if (pw_thread_loop_start(pw_main_loop_) < 0) { + RTC_LOG(LS_ERROR) << "PipeWire test: Failed to start main PipeWire loop"; + return; + } + + // Initialize event handlers, remote end and stream-related. 
+ pw_core_events_.version = PW_VERSION_CORE_EVENTS; + pw_core_events_.error = &OnCoreError; + + pw_stream_events_.version = PW_VERSION_STREAM_EVENTS; + pw_stream_events_.add_buffer = &OnStreamAddBuffer; + pw_stream_events_.remove_buffer = &OnStreamRemoveBuffer; + pw_stream_events_.state_changed = &OnStreamStateChanged; + pw_stream_events_.param_changed = &OnStreamParamChanged; + + { + PipeWireThreadLoopLock thread_loop_lock(pw_main_loop_); + + pw_core_ = pw_context_connect(pw_context_, nullptr, 0); + if (!pw_core_) { + RTC_LOG(LS_ERROR) << "PipeWire test: Failed to connect PipeWire context"; + return; + } + + pw_core_add_listener(pw_core_, &spa_core_listener_, &pw_core_events_, this); + + pw_stream_ = pw_stream_new(pw_core_, "webrtc-test-stream", nullptr); + + if (!pw_stream_) { + RTC_LOG(LS_ERROR) << "PipeWire test: Failed to create PipeWire stream"; + return; + } + + pw_stream_add_listener(pw_stream_, &spa_stream_listener_, + &pw_stream_events_, this); + uint8_t buffer[2048] = {}; + + spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)}; + + std::vector params; + + spa_rectangle resolution = + SPA_RECTANGLE(uint32_t(width_), uint32_t(height_)); + params.push_back(BuildFormat(&builder, SPA_VIDEO_FORMAT_BGRx, + /*modifiers=*/{}, &resolution)); + + auto flags = + pw_stream_flags(PW_STREAM_FLAG_DRIVER | PW_STREAM_FLAG_ALLOC_BUFFERS); + if (pw_stream_connect(pw_stream_, PW_DIRECTION_OUTPUT, SPA_ID_INVALID, + flags, params.data(), params.size()) != 0) { + RTC_LOG(LS_ERROR) << "PipeWire test: Could not connect receiving stream."; + pw_stream_destroy(pw_stream_); + pw_stream_ = nullptr; + return; + } + } + + return; +} + +TestScreenCastStreamProvider::~TestScreenCastStreamProvider() { + if (pw_main_loop_) { + pw_thread_loop_stop(pw_main_loop_); + } + + if (pw_stream_) { + pw_stream_destroy(pw_stream_); + } + + if (pw_core_) { + pw_core_disconnect(pw_core_); + } + + if (pw_context_) { + pw_context_destroy(pw_context_); + } + + if (pw_main_loop_) { + 
pw_thread_loop_destroy(pw_main_loop_); + } +} + +void TestScreenCastStreamProvider::RecordFrame(RgbaColor rgba_color) { + const char* error; + if (pw_stream_get_state(pw_stream_, &error) != PW_STREAM_STATE_STREAMING) { + if (error) { + RTC_LOG(LS_ERROR) + << "PipeWire test: Failed to record frame: stream is not active: " + << error; + } + } + + struct pw_buffer* buffer = pw_stream_dequeue_buffer(pw_stream_); + if (!buffer) { + RTC_LOG(LS_ERROR) << "PipeWire test: No available buffer"; + return; + } + + struct spa_buffer* spa_buffer = buffer->buffer; + struct spa_data* spa_data = spa_buffer->datas; + uint8_t* data = static_cast(spa_data->data); + if (!data) { + RTC_LOG(LS_ERROR) + << "PipeWire test: Failed to record frame: invalid buffer data"; + pw_stream_queue_buffer(pw_stream_, buffer); + return; + } + + const int stride = SPA_ROUND_UP_N(width_ * kBytesPerPixel, 4); + + spa_data->chunk->offset = 0; + spa_data->chunk->size = height_ * stride; + spa_data->chunk->stride = stride; + + uint32_t color = rgba_color.ToUInt32(); + for (uint32_t i = 0; i < height_; i++) { + uint32_t* column = reinterpret_cast(data); + for (uint32_t j = 0; j < width_; j++) { + column[j] = color; + } + data += stride; + } + + pw_stream_queue_buffer(pw_stream_, buffer); + if (observer_) { + observer_->OnFrameRecorded(); + } +} + +void TestScreenCastStreamProvider::StartStreaming() { + if (pw_stream_ && pw_node_id_ != 0) { + pw_stream_set_active(pw_stream_, true); + } +} + +void TestScreenCastStreamProvider::StopStreaming() { + if (pw_stream_ && pw_node_id_ != 0) { + pw_stream_set_active(pw_stream_, false); + } +} + +// static +void TestScreenCastStreamProvider::OnCoreError(void* data, + uint32_t id, + int seq, + int res, + const char* message) { + TestScreenCastStreamProvider* that = + static_cast(data); + RTC_DCHECK(that); + + RTC_LOG(LS_ERROR) << "PipeWire test: PipeWire remote error: " << message; +} + +// static +void TestScreenCastStreamProvider::OnStreamStateChanged( + void* data, + 
pw_stream_state old_state, + pw_stream_state state, + const char* error_message) { + TestScreenCastStreamProvider* that = + static_cast(data); + RTC_DCHECK(that); + + switch (state) { + case PW_STREAM_STATE_ERROR: + RTC_LOG(LS_ERROR) << "PipeWire test: PipeWire stream state error: " + << error_message; + break; + case PW_STREAM_STATE_PAUSED: + if (that->pw_node_id_ == 0 && that->pw_stream_) { + that->pw_node_id_ = pw_stream_get_node_id(that->pw_stream_); + that->observer_->OnStreamReady(that->pw_node_id_); + } else { + // Stop streaming + that->is_streaming_ = false; + that->observer_->OnStopStreaming(); + } + break; + case PW_STREAM_STATE_STREAMING: + // Start streaming + that->is_streaming_ = true; + that->observer_->OnStartStreaming(); + break; + case PW_STREAM_STATE_CONNECTING: + break; + case PW_STREAM_STATE_UNCONNECTED: + if (that->is_streaming_) { + // Stop streaming + that->is_streaming_ = false; + that->observer_->OnStopStreaming(); + } + break; + } +} + +// static +void TestScreenCastStreamProvider::OnStreamParamChanged( + void* data, + uint32_t id, + const struct spa_pod* format) { + TestScreenCastStreamProvider* that = + static_cast(data); + RTC_DCHECK(that); + + RTC_LOG(LS_INFO) << "PipeWire test: PipeWire stream format changed."; + if (!format || id != SPA_PARAM_Format) { + return; + } + + spa_format_video_raw_parse(format, &that->spa_video_format_); + + auto stride = SPA_ROUND_UP_N(that->width_ * kBytesPerPixel, 4); + + uint8_t buffer[1024] = {}; + auto builder = spa_pod_builder{buffer, sizeof(buffer)}; + + // Setup buffers and meta header for new format. 
+ + std::vector params; + const int buffer_types = (1 << SPA_DATA_MemFd); + spa_rectangle resolution = SPA_RECTANGLE(that->width_, that->height_); + + params.push_back(reinterpret_cast(spa_pod_builder_add_object( + &builder, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers, + SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(&resolution), + SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(16, 2, 16), + SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(1), SPA_PARAM_BUFFERS_stride, + SPA_POD_Int(stride), SPA_PARAM_BUFFERS_size, + SPA_POD_Int(stride * that->height_), SPA_PARAM_BUFFERS_align, + SPA_POD_Int(16), SPA_PARAM_BUFFERS_dataType, + SPA_POD_CHOICE_FLAGS_Int(buffer_types)))); + params.push_back(reinterpret_cast(spa_pod_builder_add_object( + &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, + SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size, + SPA_POD_Int(sizeof(struct spa_meta_header))))); + + pw_stream_update_params(that->pw_stream_, params.data(), params.size()); +} + +// static +void TestScreenCastStreamProvider::OnStreamAddBuffer(void* data, + pw_buffer* buffer) { + TestScreenCastStreamProvider* that = + static_cast(data); + RTC_DCHECK(that); + + struct spa_data* spa_data = buffer->buffer->datas; + + spa_data->mapoffset = 0; + spa_data->flags = SPA_DATA_FLAG_READWRITE; + + if (!(spa_data[0].type & (1 << SPA_DATA_MemFd))) { + RTC_LOG(LS_ERROR) + << "PipeWire test: Client doesn't support memfd buffer data type"; + return; + } + + const int stride = SPA_ROUND_UP_N(that->width_ * kBytesPerPixel, 4); + spa_data->maxsize = stride * that->height_; + spa_data->type = SPA_DATA_MemFd; + spa_data->fd = + memfd_create("pipewire-test-memfd", MFD_CLOEXEC | MFD_ALLOW_SEALING); + if (spa_data->fd == -1) { + RTC_LOG(LS_ERROR) << "PipeWire test: Can't create memfd"; + return; + } + + spa_data->mapoffset = 0; + + if (ftruncate(spa_data->fd, spa_data->maxsize) < 0) { + RTC_LOG(LS_ERROR) << "PipeWire test: Can't truncate to" + << spa_data->maxsize; + return; + } + + 
unsigned int seals = F_SEAL_GROW | F_SEAL_SHRINK | F_SEAL_SEAL; + if (fcntl(spa_data->fd, F_ADD_SEALS, seals) == -1) { + RTC_LOG(LS_ERROR) << "PipeWire test: Failed to add seals"; + } + + spa_data->data = mmap(nullptr, spa_data->maxsize, PROT_READ | PROT_WRITE, + MAP_SHARED, spa_data->fd, spa_data->mapoffset); + if (spa_data->data == MAP_FAILED) { + RTC_LOG(LS_ERROR) << "PipeWire test: Failed to mmap memory"; + } else { + that->observer_->OnBufferAdded(); + RTC_LOG(LS_INFO) << "PipeWire test: Memfd created successfully: " + << spa_data->data << spa_data->maxsize; + } +} + +// static +void TestScreenCastStreamProvider::OnStreamRemoveBuffer(void* data, + pw_buffer* buffer) { + TestScreenCastStreamProvider* that = + static_cast(data); + RTC_DCHECK(that); + + struct spa_buffer* spa_buffer = buffer->buffer; + struct spa_data* spa_data = spa_buffer->datas; + if (spa_data && spa_data->type == SPA_DATA_MemFd) { + munmap(spa_data->data, spa_data->maxsize); + close(spa_data->fd); + } +} + +uint32_t TestScreenCastStreamProvider::PipeWireNodeId() { + return pw_node_id_; +} + +} // namespace webrtc diff --git a/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h new file mode 100644 index 0000000000..d893aa63ab --- /dev/null +++ b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h @@ -0,0 +1,93 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_TEST_TEST_SCREENCAST_STREAM_PROVIDER_H_ +#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_TEST_TEST_SCREENCAST_STREAM_PROVIDER_H_ + +#include +#include + +#include "modules/desktop_capture/linux/wayland/screencast_stream_utils.h" +#include "modules/desktop_capture/rgba_color.h" +#include "rtc_base/random.h" + +namespace webrtc { + +class TestScreenCastStreamProvider { + public: + class Observer { + public: + virtual void OnBufferAdded() = 0; + virtual void OnFrameRecorded() = 0; + virtual void OnStreamReady(uint32_t stream_node_id) = 0; + virtual void OnStartStreaming() = 0; + virtual void OnStopStreaming() = 0; + + protected: + Observer() = default; + virtual ~Observer() = default; + }; + + explicit TestScreenCastStreamProvider(Observer* observer, + uint32_t width, + uint32_t height); + ~TestScreenCastStreamProvider(); + + uint32_t PipeWireNodeId(); + + void RecordFrame(RgbaColor rgba_color); + void StartStreaming(); + void StopStreaming(); + + private: + Observer* observer_; + + // Resolution parameters. 
+ uint32_t width_ = 0; + uint32_t height_ = 0; + + bool is_streaming_ = false; + uint32_t pw_node_id_ = 0; + + // PipeWire types + struct pw_context* pw_context_ = nullptr; + struct pw_core* pw_core_ = nullptr; + struct pw_stream* pw_stream_ = nullptr; + struct pw_thread_loop* pw_main_loop_ = nullptr; + + spa_hook spa_core_listener_; + spa_hook spa_stream_listener_; + + // event handlers + pw_core_events pw_core_events_ = {}; + pw_stream_events pw_stream_events_ = {}; + + struct spa_video_info_raw spa_video_format_; + + // PipeWire callbacks + static void OnCoreError(void* data, + uint32_t id, + int seq, + int res, + const char* message); + static void OnStreamAddBuffer(void* data, pw_buffer* buffer); + static void OnStreamRemoveBuffer(void* data, pw_buffer* buffer); + static void OnStreamParamChanged(void* data, + uint32_t id, + const struct spa_pod* format); + static void OnStreamStateChanged(void* data, + pw_stream_state old_state, + pw_stream_state state, + const char* error_message); +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_TEST_TEST_SCREENCAST_STREAM_PROVIDER_H_ diff --git a/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h b/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h index f6ac92b5d1..b213e50308 100644 --- a/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h +++ b/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h @@ -11,101 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_ -#include -#include - -#include -#include - -#include "absl/strings/string_view.h" -#include "modules/desktop_capture/linux/wayland/portal_request_response.h" -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" -#include "modules/desktop_capture/linux/wayland/xdg_session_details.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { 
-namespace xdg_portal { - -constexpr char kDesktopBusName[] = "org.freedesktop.portal.Desktop"; -constexpr char kDesktopObjectPath[] = "/org/freedesktop/portal/desktop"; -constexpr char kDesktopRequestObjectPath[] = - "/org/freedesktop/portal/desktop/request"; -constexpr char kSessionInterfaceName[] = "org.freedesktop.portal.Session"; -constexpr char kRequestInterfaceName[] = "org.freedesktop.portal.Request"; -constexpr char kScreenCastInterfaceName[] = "org.freedesktop.portal.ScreenCast"; - -using ProxyRequestCallback = void (*)(GObject*, GAsyncResult*, gpointer); -using SessionRequestCallback = void (*)(GDBusProxy*, GAsyncResult*, gpointer); -using SessionRequestResponseSignalHandler = void (*)(GDBusConnection*, - const char*, - const char*, - const char*, - const char*, - GVariant*, - gpointer); -using StartRequestResponseSignalHandler = void (*)(GDBusConnection*, - const char*, - const char*, - const char*, - const char*, - GVariant*, - gpointer); -using SessionStartRequestedHandler = void (*)(GDBusProxy*, - GAsyncResult*, - gpointer); - -std::string RequestResponseToString(RequestResponse request); - -RequestResponse RequestResponseFromPortalResponse(uint32_t portal_response); - -// Returns a string path for signal handle based on the provided connection and -// token. -std::string PrepareSignalHandle(absl::string_view token, - GDBusConnection* connection); - -// Sets up the callback to execute when a response signal is received for the -// given object. 
-uint32_t SetupRequestResponseSignal(absl::string_view object_path, - const GDBusSignalCallback callback, - gpointer user_data, - GDBusConnection* connection); - -void RequestSessionProxy(absl::string_view interface_name, - const ProxyRequestCallback proxy_request_callback, - GCancellable* cancellable, - gpointer user_data); - -void SetupSessionRequestHandlers( - absl::string_view portal_prefix, - const SessionRequestCallback session_request_callback, - const SessionRequestResponseSignalHandler request_response_signale_handler, - GDBusConnection* connection, - GDBusProxy* proxy, - GCancellable* cancellable, - std::string& portal_handle, - guint& session_request_signal_id, - gpointer user_data); - -void StartSessionRequest( - absl::string_view prefix, - absl::string_view session_handle, - const StartRequestResponseSignalHandler signal_handler, - const SessionStartRequestedHandler session_started_handler, - GDBusProxy* proxy, - GDBusConnection* connection, - GCancellable* cancellable, - guint& start_request_signal_id, - std::string& start_handle, - gpointer user_data); - -// Tears down the portal session and cleans up related objects. 
-void TearDownSession(absl::string_view session_handle, - GDBusProxy* proxy, - GCancellable* cancellable, - GDBusConnection* connection); - -} // namespace xdg_portal -} // namespace webrtc +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/xdg_desktop_portal_utils.h" #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_ diff --git a/modules/desktop_capture/linux/wayland/xdg_session_details.h b/modules/desktop_capture/linux/wayland/xdg_session_details.h index b70ac4aa59..9feff5bdf7 100644 --- a/modules/desktop_capture/linux/wayland/xdg_session_details.h +++ b/modules/desktop_capture/linux/wayland/xdg_session_details.h @@ -11,23 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_ -#include - -#include - -namespace webrtc { -namespace xdg_portal { - -// Details of the session associated with XDG desktop portal session. Portal API -// calls can be invoked by utilizing the information here. 
-struct SessionDetails { - GDBusProxy* proxy = nullptr; - GCancellable* cancellable = nullptr; - std::string session_handle; - uint32_t pipewire_stream_node_id = 0; -}; - -} // namespace xdg_portal -} // namespace webrtc +// TODO(bugs.webrtc.org/14187): remove when all users are gone +#include "modules/portal/xdg_session_details.h" #endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_ diff --git a/modules/desktop_capture/screen_capturer_fuchsia.cc b/modules/desktop_capture/screen_capturer_fuchsia.cc index c0ad841c05..19c4c735ad 100644 --- a/modules/desktop_capture/screen_capturer_fuchsia.cc +++ b/modules/desktop_capture/screen_capturer_fuchsia.cc @@ -65,8 +65,7 @@ std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( } ScreenCapturerFuchsia::ScreenCapturerFuchsia() - : component_context_( - sys::ComponentContext::CreateAndServeOutgoingDirectory()) { + : component_context_(sys::ComponentContext::Create()) { RTC_DCHECK(CheckRequirements()); } @@ -86,7 +85,7 @@ ScreenCapturerFuchsia::~ScreenCapturerFuchsia() { // TODO(fxbug.dev/100303): Remove this function when Flatland is the only API. bool ScreenCapturerFuchsia::CheckRequirements() { std::unique_ptr component_context = - sys::ComponentContext::CreateAndServeOutgoingDirectory(); + sys::ComponentContext::Create(); fuchsia::ui::scenic::ScenicSyncPtr scenic; zx_status_t status = component_context->svc()->Connect(scenic.NewRequest()); if (status != ZX_OK) { @@ -163,6 +162,9 @@ void ScreenCapturerFuchsia::CaptureFrame() { uint32_t stride = kFuchsiaBytesPerPixel * pixels_per_row; frame->CopyPixelsFrom(virtual_memory_mapped_addrs_[buffer_index], stride, DesktopRect::MakeWH(width_, height_)); + // Mark the whole screen as having been updated. 
+ frame->mutable_updated_region()->SetRect( + DesktopRect::MakeWH(width_, height_)); fuchsia::ui::composition::ScreenCapture_ReleaseFrame_Result release_result; screen_capture_->ReleaseFrame(buffer_index, &release_result); diff --git a/modules/desktop_capture/screen_capturer_linux.cc b/modules/desktop_capture/screen_capturer_linux.cc index 80f6da679a..44993837e8 100644 --- a/modules/desktop_capture/screen_capturer_linux.cc +++ b/modules/desktop_capture/screen_capturer_linux.cc @@ -27,17 +27,18 @@ namespace webrtc { std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( const DesktopCaptureOptions& options) { #if defined(WEBRTC_USE_PIPEWIRE) - if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) { + if (options.allow_pipewire() && BaseCapturerPipeWire::IsSupported()) { return std::make_unique(options, CaptureType::kScreen); } #endif // defined(WEBRTC_USE_PIPEWIRE) #if defined(WEBRTC_USE_X11) - return ScreenCapturerX11::CreateRawScreenCapturer(options); -#else - return nullptr; + if (!DesktopCapturer::IsRunningUnderWayland()) + return ScreenCapturerX11::CreateRawScreenCapturer(options); #endif // defined(WEBRTC_USE_X11) + + return nullptr; } } // namespace webrtc diff --git a/modules/desktop_capture/win/wgc_capture_session.cc b/modules/desktop_capture/win/wgc_capture_session.cc index 831257b4d4..ea5565c89c 100644 --- a/modules/desktop_capture/win/wgc_capture_session.cc +++ b/modules/desktop_capture/win/wgc_capture_session.cc @@ -105,7 +105,7 @@ WgcCaptureSession::~WgcCaptureSession() { RemoveEventHandlers(); } -HRESULT WgcCaptureSession::StartCapture() { +HRESULT WgcCaptureSession::StartCapture(const DesktopCaptureOptions& options) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!is_capture_started_); @@ -187,6 +187,15 @@ HRESULT WgcCaptureSession::StartCapture() { return hr; } + if (!options.prefer_cursor_embedded()) { + ComPtr session2; + if (SUCCEEDED(session_->QueryInterface( + 
ABI::Windows::Graphics::Capture::IID_IGraphicsCaptureSession2, + &session2))) { + session2->put_IsCursorCaptureEnabled(false); + } + } + hr = session_->StartCapture(); if (FAILED(hr)) { RTC_LOG(LS_ERROR) << "Failed to start CaptureSession: " << hr; @@ -388,17 +397,14 @@ HRESULT WgcCaptureSession::OnItemClosed(WGC::IGraphicsCaptureItem* sender, RTC_LOG(LS_INFO) << "Capture target has been closed."; item_closed_ = true; - is_capture_started_ = false; RemoveEventHandlers(); - mapped_texture_ = nullptr; - session_ = nullptr; - frame_pool_ = nullptr; - direct3d_device_ = nullptr; - item_ = nullptr; - d3d11_device_ = nullptr; - + // Do not attempt to free resources in the OnItemClosed handler, as this + // causes a race where we try to delete the item that is calling us. Removing + // the event handlers and setting `item_closed_` above is sufficient to ensure + // that the resources are no longer used, and the next time the capturer tries + // to get a frame, we will report a permanent failure and be destroyed. return S_OK; } diff --git a/modules/desktop_capture/win/wgc_capture_session.h b/modules/desktop_capture/win/wgc_capture_session.h index 27d412baf9..dfe1fa60bb 100644 --- a/modules/desktop_capture/win/wgc_capture_session.h +++ b/modules/desktop_capture/win/wgc_capture_session.h @@ -39,7 +39,7 @@ class WgcCaptureSession final { ~WgcCaptureSession(); - HRESULT StartCapture(); + HRESULT StartCapture(const DesktopCaptureOptions& options); // Returns a frame from the frame pool, if any are present. 
HRESULT GetFrame(std::unique_ptr* output_frame); diff --git a/modules/desktop_capture/win/wgc_capturer_win.cc b/modules/desktop_capture/win/wgc_capturer_win.cc index ce5eb6b31f..8ec6a29f23 100644 --- a/modules/desktop_capture/win/wgc_capturer_win.cc +++ b/modules/desktop_capture/win/wgc_capturer_win.cc @@ -140,10 +140,12 @@ bool IsWgcSupported(CaptureType capture_type) { } WgcCapturerWin::WgcCapturerWin( + const DesktopCaptureOptions& options, std::unique_ptr source_factory, std::unique_ptr source_enumerator, bool allow_delayed_capturable_check) - : source_factory_(std::move(source_factory)), + : options_(options), + source_factory_(std::move(source_factory)), source_enumerator_(std::move(source_enumerator)), allow_delayed_capturable_check_(allow_delayed_capturable_check) { if (!core_messaging_library_) @@ -166,7 +168,7 @@ std::unique_ptr WgcCapturerWin::CreateRawWindowCapturer( const DesktopCaptureOptions& options, bool allow_delayed_capturable_check) { return std::make_unique( - std::make_unique(), + options, std::make_unique(), std::make_unique( options.enumerate_current_process_windows()), allow_delayed_capturable_check); @@ -176,7 +178,7 @@ std::unique_ptr WgcCapturerWin::CreateRawWindowCapturer( std::unique_ptr WgcCapturerWin::CreateRawScreenCapturer( const DesktopCaptureOptions& options) { return std::make_unique( - std::make_unique(), + options, std::make_unique(), std::make_unique(), false); } @@ -309,7 +311,7 @@ void WgcCapturerWin::CaptureFrame() { } if (!capture_session->IsCaptureStarted()) { - hr = capture_session->StartCapture(); + hr = capture_session->StartCapture(options_); if (FAILED(hr)) { RTC_LOG(LS_ERROR) << "Failed to start capture: " << hr; ongoing_captures_.erase(capture_source_->GetSourceId()); @@ -344,7 +346,7 @@ void WgcCapturerWin::CaptureFrame() { capture_time_ms); frame->set_capture_time_ms(capture_time_ms); frame->set_capturer_id(DesktopCapturerId::kWgcCapturerWin); - frame->set_may_contain_cursor(true); + 
frame->set_may_contain_cursor(options_.prefer_cursor_embedded()); frame->set_top_left(capture_source_->GetTopLeft()); RecordWgcCapturerResult(WgcCapturerResult::kSuccess); callback_->OnCaptureResult(DesktopCapturer::Result::SUCCESS, diff --git a/modules/desktop_capture/win/wgc_capturer_win.h b/modules/desktop_capture/win/wgc_capturer_win.h index d9ee9d3fc6..30253d9db6 100644 --- a/modules/desktop_capture/win/wgc_capturer_win.h +++ b/modules/desktop_capture/win/wgc_capturer_win.h @@ -83,7 +83,8 @@ class ScreenEnumerator final : public SourceEnumerator { // capturer appropriate for the type of source they want to capture. class WgcCapturerWin : public DesktopCapturer { public: - WgcCapturerWin(std::unique_ptr source_factory, + WgcCapturerWin(const DesktopCaptureOptions& options, + std::unique_ptr source_factory, std::unique_ptr source_enumerator, bool allow_delayed_capturable_check); @@ -114,6 +115,8 @@ class WgcCapturerWin : public DesktopCapturer { DispatcherQueueOptions, ABI::Windows::System::IDispatcherQueueController**); + DesktopCaptureOptions options_; + // We need to either create or ensure that someone else created a // `DispatcherQueue` on the current thread so that events will be delivered // on the current thread rather than an arbitrary thread. 
A diff --git a/modules/desktop_capture/window_capturer_linux.cc b/modules/desktop_capture/window_capturer_linux.cc index 20d93d0b33..4205bf9bc0 100644 --- a/modules/desktop_capture/window_capturer_linux.cc +++ b/modules/desktop_capture/window_capturer_linux.cc @@ -27,17 +27,18 @@ namespace webrtc { std::unique_ptr DesktopCapturer::CreateRawWindowCapturer( const DesktopCaptureOptions& options) { #if defined(WEBRTC_USE_PIPEWIRE) - if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) { + if (options.allow_pipewire() && BaseCapturerPipeWire::IsSupported()) { return std::make_unique(options, CaptureType::kWindow); } #endif // defined(WEBRTC_USE_PIPEWIRE) #if defined(WEBRTC_USE_X11) - return WindowCapturerX11::CreateRawWindowCapturer(options); -#else - return nullptr; + if (!DesktopCapturer::IsRunningUnderWayland()) + return WindowCapturerX11::CreateRawWindowCapturer(options); #endif // defined(WEBRTC_USE_X11) + + return nullptr; } } // namespace webrtc diff --git a/modules/pacing/bitrate_prober.cc b/modules/pacing/bitrate_prober.cc index e01c3ae5a9..e8ebf54f32 100644 --- a/modules/pacing/bitrate_prober.cc +++ b/modules/pacing/bitrate_prober.cc @@ -18,12 +18,12 @@ #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/metrics.h" namespace webrtc { namespace { constexpr TimeDelta kProbeClusterTimeout = TimeDelta::Seconds(5); +constexpr size_t kMaxPendingProbeClusters = 5; } // namespace @@ -36,18 +36,9 @@ BitrateProberConfig::BitrateProberConfig( key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); } -BitrateProber::~BitrateProber() { - RTC_HISTOGRAM_COUNTS_1000("WebRTC.BWE.Probing.TotalProbeClustersRequested", - total_probe_count_); - RTC_HISTOGRAM_COUNTS_1000("WebRTC.BWE.Probing.TotalFailedProbeClusters", - total_failed_probe_count_); -} - BitrateProber::BitrateProber(const FieldTrialsView& field_trials) : 
probing_state_(ProbingState::kDisabled), next_probe_time_(Timestamp::PlusInfinity()), - total_probe_count_(0), - total_failed_probe_count_(0), config_(&field_trials) { SetEnabled(true); } @@ -82,12 +73,11 @@ void BitrateProber::CreateProbeCluster( const ProbeClusterConfig& cluster_config) { RTC_DCHECK(probing_state_ != ProbingState::kDisabled); - total_probe_count_++; while (!clusters_.empty() && - cluster_config.at_time - clusters_.front().requested_at > - kProbeClusterTimeout) { + (cluster_config.at_time - clusters_.front().requested_at > + kProbeClusterTimeout || + clusters_.size() > kMaxPendingProbeClusters)) { clusters_.pop(); - total_failed_probe_count_++; } ProbeCluster cluster; @@ -169,13 +159,6 @@ void BitrateProber::ProbeSent(Timestamp now, DataSize size) { next_probe_time_ = CalculateNextProbeTime(*cluster); if (cluster->sent_bytes >= cluster->pace_info.probe_cluster_min_bytes && cluster->sent_probes >= cluster->pace_info.probe_cluster_min_probes) { - RTC_HISTOGRAM_COUNTS_100000("WebRTC.BWE.Probing.ProbeClusterSizeInBytes", - cluster->sent_bytes); - RTC_HISTOGRAM_COUNTS_100("WebRTC.BWE.Probing.ProbesPerCluster", - cluster->sent_probes); - RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.Probing.TimePerProbeCluster", - (now - cluster->started_at).ms()); - clusters_.pop(); } if (clusters_.empty()) { diff --git a/modules/pacing/bitrate_prober.h b/modules/pacing/bitrate_prober.h index d2f1394262..4d8ec68c4f 100644 --- a/modules/pacing/bitrate_prober.h +++ b/modules/pacing/bitrate_prober.h @@ -45,7 +45,7 @@ struct BitrateProberConfig { class BitrateProber { public: explicit BitrateProber(const FieldTrialsView& field_trials); - ~BitrateProber(); + ~BitrateProber() = default; void SetEnabled(bool enable); @@ -118,9 +118,6 @@ class BitrateProber { // Time the next probe should be sent when in kActive state. 
Timestamp next_probe_time_; - int total_probe_count_; - int total_failed_probe_count_; - BitrateProberConfig config_; }; diff --git a/modules/pacing/bitrate_prober_unittest.cc b/modules/pacing/bitrate_prober_unittest.cc index 00f84e69f1..3be7d2d99e 100644 --- a/modules/pacing/bitrate_prober_unittest.cc +++ b/modules/pacing/bitrate_prober_unittest.cc @@ -13,6 +13,8 @@ #include #include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/explicit_key_value_config.h" #include "test/gtest.h" @@ -143,6 +145,41 @@ TEST(BitrateProberTest, DiscardsDelayedProbes) { EXPECT_FALSE(prober.CurrentCluster(now).has_value()); } +TEST(BitrateProberTest, LimitsNumberOfPendingProbeClusters) { + const FieldTrialBasedConfig config; + BitrateProber prober(config); + const DataSize kProbeSize = DataSize::Bytes(1000); + Timestamp now = Timestamp::Zero(); + prober.CreateProbeCluster({.at_time = now, + .target_data_rate = DataRate::KilobitsPerSec(900), + .target_duration = TimeDelta::Millis(15), + .target_probe_count = 5, + .id = 0}); + prober.OnIncomingPacket(kProbeSize); + ASSERT_TRUE(prober.is_probing()); + ASSERT_EQ(prober.CurrentCluster(now)->probe_cluster_id, 0); + + for (int i = 1; i < 11; ++i) { + prober.CreateProbeCluster( + {.at_time = now, + .target_data_rate = DataRate::KilobitsPerSec(900), + .target_duration = TimeDelta::Millis(15), + .target_probe_count = 5, + .id = i}); + prober.OnIncomingPacket(kProbeSize); + } + // Expect some clusters have been dropped. 
+ EXPECT_TRUE(prober.is_probing()); + EXPECT_GE(prober.CurrentCluster(now)->probe_cluster_id, 5); + + Timestamp max_expected_probe_time = now + TimeDelta::Seconds(1); + while (prober.is_probing() && now < max_expected_probe_time) { + now = std::max(now, prober.NextProbeTime(now)); + prober.ProbeSent(now, kProbeSize); + } + EXPECT_FALSE(prober.is_probing()); +} + TEST(BitrateProberTest, DoesntInitializeProbingForSmallPackets) { const FieldTrialBasedConfig config; BitrateProber prober(config); diff --git a/modules/pacing/pacing_controller.cc b/modules/pacing/pacing_controller.cc index 7b6c416730..8c1ddcb184 100644 --- a/modules/pacing/pacing_controller.cc +++ b/modules/pacing/pacing_controller.cc @@ -33,7 +33,6 @@ constexpr TimeDelta kCongestedPacketInterval = TimeDelta::Millis(500); // The maximum debt level, in terms of time, capped when sending packets. constexpr TimeDelta kMaxDebtInTime = TimeDelta::Millis(500); constexpr TimeDelta kMaxElapsedTime = TimeDelta::Seconds(2); -constexpr TimeDelta kTargetPaddingDuration = TimeDelta::Millis(5); bool IsDisabled(const FieldTrialsView& field_trials, absl::string_view key) { return absl::StartsWith(field_trials.Lookup(key), "Disabled"); @@ -50,6 +49,9 @@ const TimeDelta PacingController::kMaxExpectedQueueLength = const TimeDelta PacingController::kPausedProcessInterval = kCongestedPacketInterval; const TimeDelta PacingController::kMinSleepTime = TimeDelta::Millis(1); +const TimeDelta PacingController::kTargetPaddingDuration = TimeDelta::Millis(5); +const TimeDelta PacingController::kMaxPaddingReplayDuration = + TimeDelta::Millis(50); const TimeDelta PacingController::kMaxEarlyProbeProcessing = TimeDelta::Millis(1); @@ -87,7 +89,8 @@ PacingController::PacingController(Clock* clock, congested_(false), queue_time_limit_(kMaxExpectedQueueLength), account_for_audio_(false), - include_overhead_(false) { + include_overhead_(false), + circuit_breaker_threshold_(1 << 16) { if (!drain_large_queues_) { RTC_LOG(LS_WARNING) << "Pacer 
queues will not be drained," "pushback experiment must be enabled."; @@ -141,6 +144,14 @@ void PacingController::SetCongested(bool congested) { congested_ = congested; } +void PacingController::SetCircuitBreakerThreshold(int num_iterations) { + circuit_breaker_threshold_ = num_iterations; +} + +void PacingController::RemovePacketsForSsrc(uint32_t ssrc) { + packet_queue_.RemovePacketsForSsrc(ssrc); +} + bool PacingController::IsProbing() const { return prober_.is_probing(); } @@ -423,18 +434,24 @@ void PacingController::ProcessPackets() { } DataSize data_sent = DataSize::Zero(); - // Circuit breaker, making sure main loop isn't forever. - static constexpr int kMaxIterations = 1 << 16; int iteration = 0; int packets_sent = 0; int padding_packets_generated = 0; - for (; iteration < kMaxIterations; ++iteration) { + for (; iteration < circuit_breaker_threshold_; ++iteration) { // Fetch packet, so long as queue is not empty or budget is not // exhausted. std::unique_ptr rtp_packet = GetPendingPacket(pacing_info, target_send_time, now); if (rtp_packet == nullptr) { // No packet available to send, check if we should send padding. + if (now - target_send_time > kMaxPaddingReplayDuration) { + // The target send time is more than `kMaxPaddingReplayDuration` behind + // the real-time clock. This can happen if the clock is adjusted forward + // without `ProcessPackets()` having been called at the expected times. + target_send_time = now - kMaxPaddingReplayDuration; + last_process_time_ = std::max(last_process_time_, target_send_time); + } + DataSize padding_to_add = PaddingToAdd(recommended_probe_size, data_sent); if (padding_to_add > DataSize::Zero()) { std::vector> padding_packets = @@ -499,14 +516,30 @@ void PacingController::ProcessPackets() { } } - if (iteration >= kMaxIterations) { + if (iteration >= circuit_breaker_threshold_) { // Circuit break activated. Log warning, adjust send time and return. // TODO(sprang): Consider completely clearing state. 
- RTC_LOG(LS_ERROR) << "PacingController exceeded max iterations in " - "send-loop: packets sent = " - << packets_sent << ", padding packets generated = " - << padding_packets_generated - << ", bytes sent = " << data_sent.bytes(); + RTC_LOG(LS_ERROR) + << "PacingController exceeded max iterations in " + "send-loop. Debug info: " + << " packets sent = " << packets_sent + << ", padding packets generated = " << padding_packets_generated + << ", bytes sent = " << data_sent.bytes() + << ", probing = " << (is_probing ? "true" : "false") + << ", recommended_probe_size = " << recommended_probe_size.bytes() + << ", now = " << now.us() + << ", target_send_time = " << target_send_time.us() + << ", last_process_time = " << last_process_time_.us() + << ", last_send_time = " << last_send_time_.us() + << ", paused = " << (paused_ ? "true" : "false") + << ", media_debt = " << media_debt_.bytes() + << ", padding_debt = " << padding_debt_.bytes() + << ", pacing_rate = " << pacing_rate_.bps() + << ", adjusted_media_rate = " << adjusted_media_rate_.bps() + << ", padding_rate = " << padding_rate_.bps() + << ", queue size (packets) = " << packet_queue_.SizeInPackets() + << ", queue size (payload bytes) = " + << packet_queue_.SizeInPayloadBytes(); last_send_time_ = now; last_process_time_ = now; return; diff --git a/modules/pacing/pacing_controller.h b/modules/pacing/pacing_controller.h index 94d2402de6..91c0548568 100644 --- a/modules/pacing/pacing_controller.h +++ b/modules/pacing/pacing_controller.h @@ -72,9 +72,14 @@ class PacingController { // order to send a keep-alive packet so we don't get stuck in a bad state due // to lack of feedback. static const TimeDelta kPausedProcessInterval; - + // The default minimum time that should elapse calls to `ProcessPackets()`. static const TimeDelta kMinSleepTime; - + // When padding should be generated, add packets to the buffer with a size + // corresponding to this duration times the current padding rate. 
+ static const TimeDelta kTargetPaddingDuration; + // The maximum time that the pacer can use when "replaying" passed time where + // padding should have been generated. + static const TimeDelta kMaxPaddingReplayDuration; // Allow probes to be processed slightly ahead of inteded send time. Currently // set to 1ms as this is intended to allow times be rounded down to the // nearest millisecond. @@ -156,6 +161,14 @@ class PacingController { bool IsProbing() const; + // Note: Intended for debugging purposes only, will be removed. + // Sets the number of iterations of the main loop in `ProcessPackets()` that + // is considered erroneous to exceed. + void SetCircuitBreakerThreshold(int num_iterations); + + // Remove any pending packets matching this SSRC from the packet queue. + void RemovePacketsForSsrc(uint32_t ssrc); + private: TimeDelta UpdateTimeAndGetElapsed(Timestamp now); bool ShouldSendKeepalive(Timestamp now) const; @@ -232,6 +245,8 @@ class PacingController { TimeDelta queue_time_limit_; bool account_for_audio_; bool include_overhead_; + + int circuit_breaker_threshold_; }; } // namespace webrtc diff --git a/modules/pacing/pacing_controller_unittest.cc b/modules/pacing/pacing_controller_unittest.cc index 37b8605e2e..3b3c3eb761 100644 --- a/modules/pacing/pacing_controller_unittest.cc +++ b/modules/pacing/pacing_controller_unittest.cc @@ -27,6 +27,7 @@ #include "test/gtest.h" using ::testing::_; +using ::testing::AnyNumber; using ::testing::Field; using ::testing::Pointee; using ::testing::Property; @@ -1520,7 +1521,7 @@ TEST_F(PacingControllerTest, SmallFirstProbePacket) { size_t packets_sent = 0; bool media_seen = false; EXPECT_CALL(callback, SendPacket) - .Times(::testing::AnyNumber()) + .Times(AnyNumber()) .WillRepeatedly([&](std::unique_ptr packet, const PacedPacketInfo& cluster_info) { if (packets_sent == 0) { @@ -1674,7 +1675,7 @@ TEST_F(PacingControllerTest, for (bool account_for_audio : {false, true}) { uint16_t sequence_number = 1234; 
MockPacketSender callback; - EXPECT_CALL(callback, SendPacket).Times(::testing::AnyNumber()); + EXPECT_CALL(callback, SendPacket).Times(AnyNumber()); auto pacer = std::make_unique(&clock_, &callback, trials_); pacer->SetAccountForAudioPackets(account_for_audio); @@ -2115,5 +2116,60 @@ TEST_F(PacingControllerTest, BudgetDoesNotAffectRetransmissionInsTrial) { pacer.ProcessPackets(); } +TEST_F(PacingControllerTest, AbortsAfterReachingCircuitBreakLimit) { + const DataSize kPacketSize = DataSize::Bytes(1000); + + EXPECT_CALL(callback_, SendPadding).Times(0); + PacingController pacer(&clock_, &callback_, trials_); + pacer.SetPacingRates(kTargetRate, /*padding_rate=*/DataRate::Zero()); + + // Set the circuit breaker to abort after one iteration of the main + // sending loop. + pacer.SetCircuitBreakerThreshold(1); + EXPECT_CALL(callback_, SendPacket).Times(1); + + // Send two packets. + pacer.EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, kVideoSsrc, + /*sequence_number=*/1, + /*capture_time=*/1, kPacketSize.bytes())); + pacer.EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, kVideoSsrc, + /*sequence_number=*/2, + /*capture_time=*/2, kPacketSize.bytes())); + + // Advance time to way past where both should be eligible for sending. + clock_.AdvanceTime(TimeDelta::Seconds(1)); + + pacer.ProcessPackets(); +} + +TEST_F(PacingControllerTest, DoesNotPadIfProcessThreadIsBorked) { + PacingControllerPadding callback; + PacingController pacer(&clock_, &callback, trials_); + + // Set both pacing and padding rate to be non-zero. + pacer.SetPacingRates(kTargetRate, /*padding_rate=*/kTargetRate); + + // Add one packet to the queue, but do not send it yet. + pacer.EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, kVideoSsrc, + /*sequence_number=*/1, + /*capture_time=*/1, + /*size=*/1000)); + + // Advance time to waaay after the packet should have been sent. 
+ clock_.AdvanceTime(TimeDelta::Seconds(42)); + + // `ProcessPackets()` should send the delayed packet, followed by a small + // amount of missed padding. + pacer.ProcessPackets(); + + // The max padding window is the max replay duration + the target padding + // duration. + const DataSize kMaxPadding = (PacingController::kMaxPaddingReplayDuration + + PacingController::kTargetPaddingDuration) * + kTargetRate; + + EXPECT_LE(callback.padding_sent(), kMaxPadding.bytes()); +} + } // namespace } // namespace webrtc diff --git a/modules/pacing/prioritized_packet_queue.cc b/modules/pacing/prioritized_packet_queue.cc index b3874a2324..0c285c463a 100644 --- a/modules/pacing/prioritized_packet_queue.cc +++ b/modules/pacing/prioritized_packet_queue.cc @@ -60,7 +60,7 @@ bool PrioritizedPacketQueue::StreamQueue::EnqueuePacket(QueuedPacket packet, } PrioritizedPacketQueue::QueuedPacket -PrioritizedPacketQueue::StreamQueue::DequePacket(int priority_level) { +PrioritizedPacketQueue::StreamQueue::DequeuePacket(int priority_level) { RTC_DCHECK(!packets_[priority_level].empty()); QueuedPacket packet = std::move(packets_[priority_level].front()); packets_[priority_level].pop_front(); @@ -91,6 +91,16 @@ Timestamp PrioritizedPacketQueue::StreamQueue::LastEnqueueTime() const { return last_enqueue_time_; } +std::array, + PrioritizedPacketQueue::kNumPriorityLevels> +PrioritizedPacketQueue::StreamQueue::DequeueAll() { + std::array, kNumPriorityLevels> packets_by_prio; + for (int i = 0; i < kNumPriorityLevels; ++i) { + packets_by_prio[i].swap(packets_[i]); + } + return packets_by_prio; +} + PrioritizedPacketQueue::PrioritizedPacketQueue(Timestamp creation_time) : queue_time_sum_(TimeDelta::Zero()), pause_time_sum_(TimeDelta::Zero()), @@ -162,54 +172,16 @@ std::unique_ptr PrioritizedPacketQueue::Pop() { RTC_DCHECK_GE(top_active_prio_level_, 0); StreamQueue& stream_queue = *streams_by_prio_[top_active_prio_level_].front(); - QueuedPacket packet = 
stream_queue.DequePacket(top_active_prio_level_); - --size_packets_; - RTC_DCHECK(packet.packet->packet_type().has_value()); - RtpPacketMediaType packet_type = packet.packet->packet_type().value(); - --size_packets_per_media_type_[static_cast(packet_type)]; - RTC_DCHECK_GE(size_packets_per_media_type_[static_cast(packet_type)], - 0); - size_payload_ -= packet.PacketSize(); - - // Calculate the total amount of time spent by this packet in the queue - // while in a non-paused state. Note that the `pause_time_sum_ms_` was - // subtracted from `packet.enqueue_time_ms` when the packet was pushed, and - // by subtracting it now we effectively remove the time spent in in the - // queue while in a paused state. - TimeDelta time_in_non_paused_state = - last_update_time_ - packet.enqueue_time - pause_time_sum_; - queue_time_sum_ -= time_in_non_paused_state; - - // Set the time spent in the send queue, which is the per-packet equivalent of - // totalPacketSendDelay. The notion of being paused is an implementation - // detail that we do not want to expose, so it makes sense to report the - // metric excluding the pause time. This also avoids spikes in the metric. - // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay - packet.packet->set_time_in_send_queue(time_in_non_paused_state); - - RTC_DCHECK(size_packets_ > 0 || queue_time_sum_ == TimeDelta::Zero()); - - RTC_CHECK(packet.enqueue_time_iterator != enqueue_times_.end()); - enqueue_times_.erase(packet.enqueue_time_iterator); + QueuedPacket packet = stream_queue.DequeuePacket(top_active_prio_level_); + DequeuePacketInternal(packet); // Remove StreamQueue from head of fifo-queue for this prio level, and // and add it to the end if it still has packets. 
streams_by_prio_[top_active_prio_level_].pop_front(); if (stream_queue.HasPacketsAtPrio(top_active_prio_level_)) { streams_by_prio_[top_active_prio_level_].push_back(&stream_queue); - } else if (streams_by_prio_[top_active_prio_level_].empty()) { - // No stream queues have packets at this prio level, find top priority - // that is not empty. - if (size_packets_ == 0) { - top_active_prio_level_ = -1; - } else { - for (int i = 0; i < kNumPriorityLevels; ++i) { - if (!streams_by_prio_[i].empty()) { - top_active_prio_level_ = i; - break; - } - } - } + } else { + MaybeUpdateTopPrioLevel(); } return std::move(packet.packet); @@ -276,4 +248,96 @@ void PrioritizedPacketQueue::SetPauseState(bool paused, Timestamp now) { paused_ = paused; } +void PrioritizedPacketQueue::RemovePacketsForSsrc(uint32_t ssrc) { + auto kv = streams_.find(ssrc); + if (kv != streams_.end()) { + // Dequeue all packets from the queue for this SSRC. + StreamQueue& queue = *kv->second; + std::array, kNumPriorityLevels> packets_by_prio = + queue.DequeueAll(); + for (int i = 0; i < kNumPriorityLevels; ++i) { + std::deque& packet_queue = packets_by_prio[i]; + if (packet_queue.empty()) { + continue; + } + + // First erase all packets at this prio level. + while (!packet_queue.empty()) { + QueuedPacket packet = std::move(packet_queue.front()); + packet_queue.pop_front(); + DequeuePacketInternal(packet); + } + + // Next, deregister this `StreamQueue` from the round-robin tables. + RTC_DCHECK(!streams_by_prio_[i].empty()); + if (streams_by_prio_[i].size() == 1) { + // This is the last and only queue that had packets for this prio level. + // Update the global top prio level if necessary. + RTC_DCHECK(streams_by_prio_[i].front() == &queue); + streams_by_prio_[i].pop_front(); + if (i == top_active_prio_level_) { + MaybeUpdateTopPrioLevel(); + } + } else { + // More than one stream had packets at this prio level, filter this one out. 
+ std::deque filtered_queue; + for (StreamQueue* queue_ptr : streams_by_prio_[i]) { + if (queue_ptr != &queue) { + filtered_queue.push_back(queue_ptr); + } + } + streams_by_prio_[i].swap(filtered_queue); + } + } + } +} + +void PrioritizedPacketQueue::DequeuePacketInternal(QueuedPacket& packet) { + --size_packets_; + RTC_DCHECK(packet.packet->packet_type().has_value()); + RtpPacketMediaType packet_type = packet.packet->packet_type().value(); + --size_packets_per_media_type_[static_cast(packet_type)]; + RTC_DCHECK_GE(size_packets_per_media_type_[static_cast(packet_type)], + 0); + size_payload_ -= packet.PacketSize(); + + // Calculate the total amount of time spent by this packet in the queue + // while in a non-paused state. Note that the `pause_time_sum_ms_` was + // subtracted from `packet.enqueue_time_ms` when the packet was pushed, and + // by subtracting it now we effectively remove the time spent in in the + // queue while in a paused state. + TimeDelta time_in_non_paused_state = + last_update_time_ - packet.enqueue_time - pause_time_sum_; + queue_time_sum_ -= time_in_non_paused_state; + + // Set the time spent in the send queue, which is the per-packet equivalent of + // totalPacketSendDelay. The notion of being paused is an implementation + // detail that we do not want to expose, so it makes sense to report the + // metric excluding the pause time. This also avoids spikes in the metric. + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + packet.packet->set_time_in_send_queue(time_in_non_paused_state); + + RTC_DCHECK(size_packets_ > 0 || queue_time_sum_ == TimeDelta::Zero()); + + RTC_CHECK(packet.enqueue_time_iterator != enqueue_times_.end()); + enqueue_times_.erase(packet.enqueue_time_iterator); +} + +void PrioritizedPacketQueue::MaybeUpdateTopPrioLevel() { + if (streams_by_prio_[top_active_prio_level_].empty()) { + // No stream queues have packets at this prio level, find top priority + // that is not empty. 
+ if (size_packets_ == 0) { + top_active_prio_level_ = -1; + } else { + for (int i = 0; i < kNumPriorityLevels; ++i) { + if (!streams_by_prio_[i].empty()) { + top_active_prio_level_ = i; + break; + } + } + } + } +} + } // namespace webrtc diff --git a/modules/pacing/prioritized_packet_queue.h b/modules/pacing/prioritized_packet_queue.h index 3b5748f12a..364b53af11 100644 --- a/modules/pacing/prioritized_packet_queue.h +++ b/modules/pacing/prioritized_packet_queue.h @@ -13,10 +13,12 @@ #include +#include #include #include #include #include +#include #include "api/units/data_size.h" #include "api/units/time_delta.h" @@ -80,6 +82,9 @@ class PrioritizedPacketQueue { // Set the pause state, while `paused` is true queuing time is not counted. void SetPauseState(bool paused, Timestamp now); + // Remove any packets matching the given SSRC. + void RemovePacketsForSsrc(uint32_t ssrc); + private: static constexpr int kNumPriorityLevels = 4; @@ -107,18 +112,27 @@ class PrioritizedPacketQueue { // count for that priority level went from zero to non-zero. bool EnqueuePacket(QueuedPacket packet, int priority_level); - QueuedPacket DequePacket(int priority_level); + QueuedPacket DequeuePacket(int priority_level); bool HasPacketsAtPrio(int priority_level) const; bool IsEmpty() const; Timestamp LeadingPacketEnqueueTime(int priority_level) const; Timestamp LastEnqueueTime() const; + std::array, kNumPriorityLevels> DequeueAll(); + private: std::deque packets_[kNumPriorityLevels]; Timestamp last_enqueue_time_; }; + // Remove the packet from the internal state, e.g. queue time / size etc. + void DequeuePacketInternal(QueuedPacket& packet); + + // Check if the queue pointed to by `top_active_prio_level_` is empty and + // if so move it to the lowest non-empty index. + void MaybeUpdateTopPrioLevel(); + // Cumulative sum, over all packets, of time spent in the queue. TimeDelta queue_time_sum_; // Cumulative sum of time the queue has spent in a paused state. 
diff --git a/modules/pacing/prioritized_packet_queue_unittest.cc b/modules/pacing/prioritized_packet_queue_unittest.cc index 5e79e7b68e..964051c0c7 100644 --- a/modules/pacing/prioritized_packet_queue_unittest.cc +++ b/modules/pacing/prioritized_packet_queue_unittest.cc @@ -306,4 +306,58 @@ TEST(PrioritizedPacketQueue, } } +TEST(PrioritizedPacketQueue, ClearsPackets) { + Timestamp now = Timestamp::Zero(); + PrioritizedPacketQueue queue(now); + const uint32_t kSsrc = 1; + + // Add two packets of each type, all using the same SSRC. + int sequence_number = 0; + for (size_t i = 0; i < kNumMediaTypes; ++i) { + queue.Push(now, CreatePacket(static_cast(i), + sequence_number++, kSsrc)); + queue.Push(now, CreatePacket(static_cast(i), + sequence_number++, kSsrc)); + } + EXPECT_EQ(queue.SizeInPackets(), 2 * int{kNumMediaTypes}); + + // Remove all of them. + queue.RemovePacketsForSsrc(kSsrc); + EXPECT_TRUE(queue.Empty()); +} + +TEST(PrioritizedPacketQueue, ClearPacketsAffectsOnlySpecifiedSsrc) { + Timestamp now = Timestamp::Zero(); + PrioritizedPacketQueue queue(now); + const uint32_t kRemovingSsrc = 1; + const uint32_t kStayingSsrc = 2; + + // Add an audio packet and a retransmission for the SSRC we will remove, + // ensuring they are first in line. + queue.Push( + now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/1, kRemovingSsrc)); + queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, /*seq=*/2, + kRemovingSsrc)); + + // Add a video packet and a retransmission for the SSRC that will remain. + // The retransmission packets now both have pointers to their respective queues + // from the same prio level. + queue.Push(now, + CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/3, kStayingSsrc)); + queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, /*seq=*/4, + kStayingSsrc)); + + EXPECT_EQ(queue.SizeInPackets(), 4); + + // Clear the first two packets.
+ queue.RemovePacketsForSsrc(kRemovingSsrc); + EXPECT_EQ(queue.SizeInPackets(), 2); + + // We should get the single remaining retransmission first, then the video + // packet. + EXPECT_EQ(queue.Pop()->SequenceNumber(), 4); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 3); + EXPECT_TRUE(queue.Empty()); +} + } // namespace webrtc diff --git a/modules/pacing/task_queue_paced_sender.cc b/modules/pacing/task_queue_paced_sender.cc index a42220b834..4ba249582c 100644 --- a/modules/pacing/task_queue_paced_sender.cc +++ b/modules/pacing/task_queue_paced_sender.cc @@ -57,7 +57,8 @@ TaskQueuePacedSender::TaskQueuePacedSender( const FieldTrialsView& field_trials, TaskQueueFactory* task_queue_factory, TimeDelta max_hold_back_window, - int max_hold_back_window_in_packets) + int max_hold_back_window_in_packets, + absl::optional burst_interval) : clock_(clock), bursty_pacer_flags_(field_trials), slacked_pacer_flags_(field_trials), @@ -85,6 +86,11 @@ TaskQueuePacedSender::TaskQueuePacedSender( burst = slacked_burst; } } + // If not overridden by an experiment, the burst is specified by the + // `burst_interval` argument.
+ if (!burst.has_value()) { + burst = burst_interval; + } if (burst.has_value()) { pacing_controller_.SetSendBurstInterval(burst.value()); } @@ -175,6 +181,14 @@ void TaskQueuePacedSender::EnqueuePackets( })); } +void TaskQueuePacedSender::RemovePacketsForSsrc(uint32_t ssrc) { + task_queue_.RunOrPost([this, ssrc]() { + RTC_DCHECK_RUN_ON(&task_queue_); + pacing_controller_.RemovePacketsForSsrc(ssrc); + MaybeProcessPackets(Timestamp::MinusInfinity()); + }); +} + void TaskQueuePacedSender::SetAccountForAudioPackets(bool account_for_audio) { task_queue_.RunOrPost([this, account_for_audio]() { RTC_DCHECK_RUN_ON(&task_queue_); diff --git a/modules/pacing/task_queue_paced_sender.h b/modules/pacing/task_queue_paced_sender.h index 18be6acef0..ea335fd8e3 100644 --- a/modules/pacing/task_queue_paced_sender.h +++ b/modules/pacing/task_queue_paced_sender.h @@ -39,16 +39,25 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { public: static const int kNoPacketHoldback; + // The pacer can be configured using `field_trials` or specified parameters. + // + // The `hold_back_window` parameter sets a lower bound on time to sleep if + // there is currently a pacer queue and packets can't immediately be + // processed. Increasing this reduces thread wakeups at the expense of higher + // latency. - TaskQueuePacedSender(Clock* clock, - PacingController::PacketSender* packet_sender, - const FieldTrialsView& field_trials, - TaskQueueFactory* task_queue_factory, - TimeDelta max_hold_back_window, - int max_hold_back_window_in_packets); + // + // If the `burst_interval` parameter is set, the pacer is allowed to build up + // a packet "debt" that corresponds to approximately the send rate during the + // specified interval. This greatly reduces wake ups by not pacing packets + // within the allowed burst budget.
+ TaskQueuePacedSender( + Clock* clock, + PacingController::PacketSender* packet_sender, + const FieldTrialsView& field_trials, + TaskQueueFactory* task_queue_factory, + TimeDelta max_hold_back_window, + int max_hold_back_window_in_packets, + absl::optional burst_interval = absl::nullopt); ~TaskQueuePacedSender() override; @@ -61,6 +70,8 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // PacingController::PacketSender::SendPacket() when it's time to send. void EnqueuePackets( std::vector> packets) override; + // Remove any pending packets matching this SSRC from the packet queue. + void RemovePacketsForSsrc(uint32_t ssrc) override; // Methods implementing RtpPacketPacer. diff --git a/modules/pacing/task_queue_paced_sender_unittest.cc b/modules/pacing/task_queue_paced_sender_unittest.cc index 59790d00dd..69c7b9b7ef 100644 --- a/modules/pacing/task_queue_paced_sender_unittest.cc +++ b/modules/pacing/task_queue_paced_sender_unittest.cc @@ -253,6 +253,53 @@ TEST_P(TaskQueuePacedSenderTest, PacesPackets) { EXPECT_NEAR((end_time - start_time).ms(), 1000.0, 50.0); } +// Same test as above, but with 0.5s of burst applied. +TEST_P(TaskQueuePacedSenderTest, PacesPacketsWithBurst) { + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + ScopedKeyValueConfig trials(GetParam()); + TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, + time_controller.GetTaskQueueFactory(), + PacingController::kMinSleepTime, + TaskQueuePacedSender::kNoPacketHoldback, + // Half a second of bursting. + TimeDelta::Seconds(0.5)); + + // Insert a number of packets, covering one second. 
+ static constexpr size_t kPacketsToSend = 42; + SequenceChecker sequence_checker; + pacer.SetPacingRates( + DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend), + DataRate::Zero()); + pacer.EnsureStarted(); + pacer.EnqueuePackets( + GeneratePackets(RtpPacketMediaType::kVideo, kPacketsToSend)); + + // Expect all of them to be sent. + size_t packets_sent = 0; + Timestamp end_time = Timestamp::PlusInfinity(); + EXPECT_CALL(packet_router, SendPacket) + .WillRepeatedly([&](std::unique_ptr packet, + const PacedPacketInfo& cluster_info) { + ++packets_sent; + if (packets_sent == kPacketsToSend) { + end_time = time_controller.GetClock()->CurrentTime(); + } + EXPECT_EQ(sequence_checker.IsCurrent(), UsingWorkerThread(GetParam())); + }); + + const Timestamp start_time = time_controller.GetClock()->CurrentTime(); + + // Packets should be sent over a period of close to 1s. Expect a little + // lower than this since initial probing is a bit quicker. + time_controller.AdvanceTime(TimeDelta::Seconds(1)); + EXPECT_EQ(packets_sent, kPacketsToSend); + ASSERT_TRUE(end_time.IsFinite()); + // Because of half a second of burst, what would normally have been paced over + // ~1 second now takes ~0.5 seconds. + EXPECT_NEAR((end_time - start_time).ms(), 500.0, 50.0); +} + TEST_P(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) { GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); MockPacketRouter packet_router; diff --git a/modules/portal/BUILD.gn b/modules/portal/BUILD.gn new file mode 100644 index 0000000000..36bcb53e8e --- /dev/null +++ b/modules/portal/BUILD.gn @@ -0,0 +1,123 @@ +# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. 
All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("//build/config/linux/pkg_config.gni") +import("//tools/generate_stubs/rules.gni") +import("../../webrtc.gni") + +if ((is_linux || is_chromeos) && rtc_use_pipewire) { + pkg_config("gio") { + packages = [ + "gio-2.0", + "gio-unix-2.0", + ] + } + + pkg_config("pipewire") { + packages = [ "libpipewire-0.3" ] + if (!rtc_link_pipewire) { + ignore_libs = true + } + } + + pkg_config("gbm") { + packages = [ "gbm" ] + } + pkg_config("egl") { + packages = [ "egl" ] + } + pkg_config("epoxy") { + packages = [ "epoxy" ] + ignore_libs = true + } + pkg_config("libdrm") { + packages = [ "libdrm" ] + } + + if (!rtc_link_pipewire) { + # When libpipewire is not directly linked, use stubs to allow for dlopening of + # the binary. + generate_stubs("pipewire_stubs") { + configs = [ + "../../:common_config", + ":pipewire", + ] + deps = [ "../../rtc_base" ] + extra_header = "pipewire_stub_header.fragment" + logging_function = "RTC_LOG(LS_VERBOSE)" + logging_include = "rtc_base/logging.h" + output_name = "pipewire_stubs" + path_from_source = "modules/portal" + sigs = [ "pipewire.sigs" ] + if (!build_with_chromium) { + macro_include = "rtc_base/system/no_cfi_icall.h" + macro_deps = [ "../../rtc_base/system:no_cfi_icall" ] + } + } + } + + config("pipewire_base") { + configs = [ + ":gio", + ":pipewire", + ] + } + + config("pipewire_all") { + configs = [ + ":pipewire_base", + ":gbm", + ":egl", + ":epoxy", + ":libdrm", + ] + } + + config("pipewire_config") { + defines = [ "WEBRTC_USE_PIPEWIRE" ] + + # Chromecast build config overrides `WEBRTC_USE_PIPEWIRE` even when + # `rtc_use_pipewire` is not set, which causes pipewire_config to not be + # included in targets. 
More details in: webrtc:13898 + if (is_linux && !is_castos) { + defines += [ "WEBRTC_USE_GIO" ] + } + } + + rtc_library("portal") { + sources = [ + "pipewire_utils.cc", + "pipewire_utils.h", + "portal_request_response.h", + "scoped_glib.cc", + "scoped_glib.h", + "xdg_desktop_portal_utils.cc", + "xdg_desktop_portal_utils.h", + "xdg_session_details.h", + ] + + configs += [ + ":gio", + ":pipewire", + ":pipewire_config", + ] + + deps = [ + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:sanitizer", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + + if (!rtc_link_pipewire) { + defines = [ "WEBRTC_DLOPEN_PIPEWIRE" ] + + deps += [ ":pipewire_stubs" ] + } + } +} diff --git a/modules/portal/OWNERS b/modules/portal/OWNERS new file mode 100644 index 0000000000..e3bc32ee5c --- /dev/null +++ b/modules/portal/OWNERS @@ -0,0 +1,2 @@ +alcooper@chromium.org +mfoltz@chromium.org diff --git a/modules/desktop_capture/linux/wayland/pipewire.sigs b/modules/portal/pipewire.sigs similarity index 94% rename from modules/desktop_capture/linux/wayland/pipewire.sigs rename to modules/portal/pipewire.sigs index 06a97b8f29..139a8c37a5 100644 --- a/modules/desktop_capture/linux/wayland/pipewire.sigs +++ b/modules/portal/pipewire.sigs @@ -31,6 +31,8 @@ pw_stream * pw_stream_new(pw_core *core, const char *name, pw_properties *props) int pw_stream_queue_buffer(pw_stream *stream, pw_buffer *buffer); int pw_stream_set_active(pw_stream *stream, bool active); int pw_stream_update_params(pw_stream *stream, const spa_pod **params, uint32_t n_params); +uint32_t pw_stream_get_node_id(pw_stream *stream); +pw_stream_state pw_stream_get_state(pw_stream *stream, const char **error); // thread-loop.h void pw_thread_loop_destroy(pw_thread_loop *loop); diff --git a/modules/desktop_capture/linux/wayland/pipewire_stub_header.fragment b/modules/portal/pipewire_stub_header.fragment similarity index 87% rename from 
modules/desktop_capture/linux/wayland/pipewire_stub_header.fragment rename to modules/portal/pipewire_stub_header.fragment index 06ae18dfd4..9d7dbd27c5 100644 --- a/modules/desktop_capture/linux/wayland/pipewire_stub_header.fragment +++ b/modules/portal/pipewire_stub_header.fragment @@ -5,5 +5,4 @@ extern "C" { #include -#include } diff --git a/modules/portal/pipewire_utils.cc b/modules/portal/pipewire_utils.cc new file mode 100644 index 0000000000..fd96b4a4db --- /dev/null +++ b/modules/portal/pipewire_utils.cc @@ -0,0 +1,53 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/portal/pipewire_utils.h" + +#include + +#include "rtc_base/sanitizer.h" + +#if defined(WEBRTC_DLOPEN_PIPEWIRE) +#include "modules/portal/pipewire_stubs.h" +#endif // defined(WEBRTC_DLOPEN_PIPEWIRE) + +namespace webrtc { + +RTC_NO_SANITIZE("cfi-icall") +bool InitializePipeWire() { +#if defined(WEBRTC_DLOPEN_PIPEWIRE) + static constexpr char kPipeWireLib[] = "libpipewire-0.3.so.0"; + + using modules_portal::InitializeStubs; + using modules_portal::kModulePipewire; + + modules_portal::StubPathMap paths; + + // Check if the PipeWire library is available. 
+ paths[kModulePipewire].push_back(kPipeWireLib); + + static bool result = InitializeStubs(paths); + + return result; +#else + return true; +#endif // defined(WEBRTC_DLOPEN_PIPEWIRE) +} + +PipeWireThreadLoopLock::PipeWireThreadLoopLock(pw_thread_loop* loop) + : loop_(loop) { + pw_thread_loop_lock(loop_); +} + +PipeWireThreadLoopLock::~PipeWireThreadLoopLock() { + pw_thread_loop_unlock(loop_); +} + +} // namespace webrtc diff --git a/modules/portal/pipewire_utils.h b/modules/portal/pipewire_utils.h new file mode 100644 index 0000000000..0f5ccf3292 --- /dev/null +++ b/modules/portal/pipewire_utils.h @@ -0,0 +1,35 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PORTAL_PIPEWIRE_UTILS_H_ +#define MODULES_PORTAL_PIPEWIRE_UTILS_H_ + +struct pw_thread_loop; + +namespace webrtc { + +// Prepare PipeWire so that it is ready to be used. If it needs to be dlopen'd +// this will do so. Note that this does not guarantee a PipeWire server is +// running nor does it establish a connection to one. +bool InitializePipeWire(); + +// Locks pw_thread_loop in the current scope +class PipeWireThreadLoopLock { + public: + explicit PipeWireThreadLoopLock(pw_thread_loop* loop); + ~PipeWireThreadLoopLock(); + + private: + pw_thread_loop* const loop_; +}; + +} // namespace webrtc + +#endif // MODULES_PORTAL_PIPEWIRE_UTILS_H_ diff --git a/modules/portal/portal_request_response.h b/modules/portal/portal_request_response.h new file mode 100644 index 0000000000..5fac4eb137 --- /dev/null +++ b/modules/portal/portal_request_response.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PORTAL_PORTAL_REQUEST_RESPONSE_H_ +#define MODULES_PORTAL_PORTAL_REQUEST_RESPONSE_H_ + +namespace webrtc { +namespace xdg_portal { + +// Contains type of responses that can be observed when making a request to +// a desktop portal interface. +enum class RequestResponse { + // Unknown, the initialized status. + kUnknown, + // Success, the request is carried out. + kSuccess, + // The user cancelled the interaction. + kUserCancelled, + // The user interaction was ended in some other way. + kError, + + kMaxValue = kError, +}; + +} // namespace xdg_portal +} // namespace webrtc +#endif // MODULES_PORTAL_PORTAL_REQUEST_RESPONSE_H_ diff --git a/modules/desktop_capture/linux/wayland/scoped_glib.cc b/modules/portal/scoped_glib.cc similarity index 94% rename from modules/desktop_capture/linux/wayland/scoped_glib.cc rename to modules/portal/scoped_glib.cc index 0d9a87d7fd..cb4c80526e 100644 --- a/modules/desktop_capture/linux/wayland/scoped_glib.cc +++ b/modules/portal/scoped_glib.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" +#include "modules/portal/scoped_glib.h" namespace webrtc { diff --git a/modules/portal/scoped_glib.h b/modules/portal/scoped_glib.h new file mode 100644 index 0000000000..b2aaa2eb3a --- /dev/null +++ b/modules/portal/scoped_glib.h @@ -0,0 +1,65 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PORTAL_SCOPED_GLIB_H_ +#define MODULES_PORTAL_SCOPED_GLIB_H_ + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +template +class Scoped { + public: + Scoped() {} + explicit Scoped(T* val) { ptr_ = val; } + ~Scoped() { RTC_DCHECK_NOTREACHED(); } + + T* operator->() const { return ptr_; } + + explicit operator bool() const { return ptr_ != nullptr; } + + bool operator!() const { return ptr_ == nullptr; } + + T* get() const { return ptr_; } + + T** receive() { + RTC_CHECK(!ptr_); + return &ptr_; + } + + Scoped& operator=(T* val) { + RTC_DCHECK(val); + ptr_ = val; + return *this; + } + + protected: + T* ptr_ = nullptr; +}; + +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); +template <> +Scoped::~Scoped(); + +} // namespace webrtc + +#endif // MODULES_PORTAL_SCOPED_GLIB_H_ diff --git a/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc b/modules/portal/xdg_desktop_portal_utils.cc similarity index 98% rename from modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc rename to modules/portal/xdg_desktop_portal_utils.cc index 75dbf2bdf3..271e084463 100644 --- a/modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.cc +++ b/modules/portal/xdg_desktop_portal_utils.cc @@ -7,12 +7,12 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/desktop_capture/linux/wayland/xdg_desktop_portal_utils.h" +#include "modules/portal/xdg_desktop_portal_utils.h" #include #include "absl/strings/string_view.h" -#include "modules/desktop_capture/linux/wayland/scoped_glib.h" +#include "modules/portal/scoped_glib.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/portal/xdg_desktop_portal_utils.h b/modules/portal/xdg_desktop_portal_utils.h new file mode 100644 index 0000000000..8571c64a28 --- /dev/null +++ b/modules/portal/xdg_desktop_portal_utils.h @@ -0,0 +1,111 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PORTAL_XDG_DESKTOP_PORTAL_UTILS_H_ +#define MODULES_PORTAL_XDG_DESKTOP_PORTAL_UTILS_H_ + +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "modules/portal/portal_request_response.h" +#include "modules/portal/scoped_glib.h" +#include "modules/portal/xdg_session_details.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace xdg_portal { + +constexpr char kDesktopBusName[] = "org.freedesktop.portal.Desktop"; +constexpr char kDesktopObjectPath[] = "/org/freedesktop/portal/desktop"; +constexpr char kDesktopRequestObjectPath[] = + "/org/freedesktop/portal/desktop/request"; +constexpr char kSessionInterfaceName[] = "org.freedesktop.portal.Session"; +constexpr char kRequestInterfaceName[] = "org.freedesktop.portal.Request"; +constexpr char kScreenCastInterfaceName[] = "org.freedesktop.portal.ScreenCast"; + +using ProxyRequestCallback = void (*)(GObject*, GAsyncResult*, gpointer); +using SessionRequestCallback = 
void (*)(GDBusProxy*, GAsyncResult*, gpointer); +using SessionRequestResponseSignalHandler = void (*)(GDBusConnection*, + const char*, + const char*, + const char*, + const char*, + GVariant*, + gpointer); +using StartRequestResponseSignalHandler = void (*)(GDBusConnection*, + const char*, + const char*, + const char*, + const char*, + GVariant*, + gpointer); +using SessionStartRequestedHandler = void (*)(GDBusProxy*, + GAsyncResult*, + gpointer); + +std::string RequestResponseToString(RequestResponse request); + +RequestResponse RequestResponseFromPortalResponse(uint32_t portal_response); + +// Returns a string path for signal handle based on the provided connection and +// token. +std::string PrepareSignalHandle(absl::string_view token, + GDBusConnection* connection); + +// Sets up the callback to execute when a response signal is received for the +// given object. +uint32_t SetupRequestResponseSignal(absl::string_view object_path, + const GDBusSignalCallback callback, + gpointer user_data, + GDBusConnection* connection); + +void RequestSessionProxy(absl::string_view interface_name, + const ProxyRequestCallback proxy_request_callback, + GCancellable* cancellable, + gpointer user_data); + +void SetupSessionRequestHandlers( + absl::string_view portal_prefix, + const SessionRequestCallback session_request_callback, + const SessionRequestResponseSignalHandler request_response_signale_handler, + GDBusConnection* connection, + GDBusProxy* proxy, + GCancellable* cancellable, + std::string& portal_handle, + guint& session_request_signal_id, + gpointer user_data); + +void StartSessionRequest( + absl::string_view prefix, + absl::string_view session_handle, + const StartRequestResponseSignalHandler signal_handler, + const SessionStartRequestedHandler session_started_handler, + GDBusProxy* proxy, + GDBusConnection* connection, + GCancellable* cancellable, + guint& start_request_signal_id, + std::string& start_handle, + gpointer user_data); + +// Tears down the portal session 
and cleans up related objects. +void TearDownSession(absl::string_view session_handle, + GDBusProxy* proxy, + GCancellable* cancellable, + GDBusConnection* connection); + +} // namespace xdg_portal +} // namespace webrtc + +#endif // MODULES_PORTAL_XDG_DESKTOP_PORTAL_UTILS_H_ diff --git a/modules/portal/xdg_session_details.h b/modules/portal/xdg_session_details.h new file mode 100644 index 0000000000..ab52508c2f --- /dev/null +++ b/modules/portal/xdg_session_details.h @@ -0,0 +1,33 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PORTAL_XDG_SESSION_DETAILS_H_ +#define MODULES_PORTAL_XDG_SESSION_DETAILS_H_ + +#include + +#include + +namespace webrtc { +namespace xdg_portal { + +// Details of the session associated with XDG desktop portal session. Portal API +// calls can be invoked by utilizing the information here. 
+struct SessionDetails { + GDBusProxy* proxy = nullptr; + GCancellable* cancellable = nullptr; + std::string session_handle; + uint32_t pipewire_stream_node_id = 0; +}; + +} // namespace xdg_portal +} // namespace webrtc + +#endif // MODULES_PORTAL_XDG_SESSION_DETAILS_H_ diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.cc b/modules/remote_bitrate_estimator/aimd_rate_control.cc index b625a745df..6c3638b59f 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control.cc +++ b/modules/remote_bitrate_estimator/aimd_rate_control.cc @@ -39,10 +39,6 @@ bool IsEnabled(const FieldTrialsView& field_trials, absl::string_view key) { return absl::StartsWith(field_trials.Lookup(key), "Enabled"); } -bool IsNotDisabled(const FieldTrialsView& field_trials, absl::string_view key) { - return !absl::StartsWith(field_trials.Lookup(key), "Disabled"); -} - double ReadBackoffFactor(const FieldTrialsView& key_value_config) { std::string experiment_string = key_value_config.Lookup(kBweBackOffFactorExperiment); @@ -86,13 +82,9 @@ AimdRateControl::AimdRateControl(const FieldTrialsView* key_value_config, in_alr_(false), rtt_(kDefaultRtt), send_side_(send_side), - in_experiment_(!AdaptiveThresholdExperimentIsDisabled(*key_value_config)), no_bitrate_increase_in_alr_( IsEnabled(*key_value_config, "WebRTC-DontIncreaseDelayBasedBweInAlr")), - estimate_bounded_backoff_( - IsNotDisabled(*key_value_config, - "WebRTC-Bwe-EstimateBoundedBackoff")), initial_backoff_interval_("initial_backoff_interval"), link_capacity_fix_("link_capacity_fix") { ParseFieldTrial( @@ -233,7 +225,7 @@ double AimdRateControl::GetNearMaxIncreaseRateBpsPerSecond() const { // Approximate the over-use estimator delay to 100 ms. 
TimeDelta response_time = rtt_ + TimeDelta::Millis(100); - if (in_experiment_) + response_time = response_time * 2; double increase_rate_bps_per_second = (avg_packet_size / response_time).bps(); @@ -382,8 +374,7 @@ DataRate AimdRateControl::ClampBitrate(DataRate new_bitrate) const { } new_bitrate = std::min(upper_bound, new_bitrate); } - if (estimate_bounded_backoff_ && network_estimate_ && - network_estimate_->link_capacity_lower.IsFinite() && + if (network_estimate_ && network_estimate_->link_capacity_lower.IsFinite() && new_bitrate < current_bitrate_) { new_bitrate = std::min( current_bitrate_, diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.h b/modules/remote_bitrate_estimator/aimd_rate_control.h index 6c770cdc45..8321fd5239 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control.h +++ b/modules/remote_bitrate_estimator/aimd_rate_control.h @@ -100,13 +100,9 @@ class AimdRateControl { bool in_alr_; TimeDelta rtt_; const bool send_side_; - const bool in_experiment_; // Allow the delay based estimate to only increase as long as application // limited region (alr) is not detected. const bool no_bitrate_increase_in_alr_; - // Use estimated link capacity lower bound if it is higher than the - // acknowledged rate when backing off due to overuse. - const bool estimate_bounded_backoff_; // If false, uses estimated link capacity upper bound * // `estimate_bounded_increase_ratio_` as upper limit for the estimate. 
FieldTrialFlag disable_estimate_bounded_increase_{"Disabled"}; diff --git a/modules/remote_bitrate_estimator/overuse_detector.cc b/modules/remote_bitrate_estimator/overuse_detector.cc index 672822bbcd..bd2d756876 100644 --- a/modules/remote_bitrate_estimator/overuse_detector.cc +++ b/modules/remote_bitrate_estimator/overuse_detector.cc @@ -22,57 +22,22 @@ namespace webrtc { -const char kAdaptiveThresholdExperiment[] = "WebRTC-AdaptiveBweThreshold"; -const char kEnabledPrefix[] = "Enabled"; -const size_t kEnabledPrefixLength = sizeof(kEnabledPrefix) - 1; -const char kDisabledPrefix[] = "Disabled"; -const size_t kDisabledPrefixLength = sizeof(kDisabledPrefix) - 1; - const double kMaxAdaptOffsetMs = 15.0; const double kOverUsingTimeThreshold = 10; const int kMaxNumDeltas = 60; -bool AdaptiveThresholdExperimentIsDisabled( - const FieldTrialsView& key_value_config) { - std::string experiment_string = - key_value_config.Lookup(kAdaptiveThresholdExperiment); - const size_t kMinExperimentLength = kDisabledPrefixLength; - if (experiment_string.length() < kMinExperimentLength) - return false; - return experiment_string.substr(0, kDisabledPrefixLength) == kDisabledPrefix; -} - -// Gets thresholds from the experiment name following the format -// "WebRTC-AdaptiveBweThreshold/Enabled-0.5,0.002/". 
-bool ReadExperimentConstants(const FieldTrialsView& key_value_config, - double* k_up, - double* k_down) { - std::string experiment_string = - key_value_config.Lookup(kAdaptiveThresholdExperiment); - const size_t kMinExperimentLength = kEnabledPrefixLength + 3; - if (experiment_string.length() < kMinExperimentLength || - experiment_string.substr(0, kEnabledPrefixLength) != kEnabledPrefix) - return false; - return sscanf(experiment_string.substr(kEnabledPrefixLength + 1).c_str(), - "%lf,%lf", k_up, k_down) == 2; -} - OveruseDetector::OveruseDetector(const FieldTrialsView* key_value_config) // Experiment is on by default, but can be disabled with finch by setting // the field trial string to "WebRTC-AdaptiveBweThreshold/Disabled/". - : in_experiment_(!AdaptiveThresholdExperimentIsDisabled(*key_value_config)), - k_up_(0.0087), + : k_up_(0.0087), k_down_(0.039), - overusing_time_threshold_(100), + overusing_time_threshold_(kOverUsingTimeThreshold), threshold_(12.5), last_update_ms_(-1), prev_offset_(0.0), time_over_using_(-1), overuse_counter_(0), - hypothesis_(BandwidthUsage::kBwNormal) { - if (!AdaptiveThresholdExperimentIsDisabled(*key_value_config)) - InitializeExperiment(*key_value_config); -} + hypothesis_(BandwidthUsage::kBwNormal) {} OveruseDetector::~OveruseDetector() {} @@ -125,9 +90,6 @@ BandwidthUsage OveruseDetector::Detect(double offset, } void OveruseDetector::UpdateThreshold(double modified_offset, int64_t now_ms) { - if (!in_experiment_) - return; - if (last_update_ms_ == -1) last_update_ms_ = now_ms; @@ -146,15 +108,4 @@ void OveruseDetector::UpdateThreshold(double modified_offset, int64_t now_ms) { last_update_ms_ = now_ms; } -void OveruseDetector::InitializeExperiment( - const FieldTrialsView& key_value_config) { - RTC_DCHECK(in_experiment_); - double k_up = 0.0; - double k_down = 0.0; - overusing_time_threshold_ = kOverUsingTimeThreshold; - if (ReadExperimentConstants(key_value_config, &k_up, &k_down)) { - k_up_ = k_up; - k_down_ = k_down; - } -} } 
// namespace webrtc diff --git a/modules/remote_bitrate_estimator/overuse_detector.h b/modules/remote_bitrate_estimator/overuse_detector.h index dfaea9187a..07ae8734c4 100644 --- a/modules/remote_bitrate_estimator/overuse_detector.h +++ b/modules/remote_bitrate_estimator/overuse_detector.h @@ -17,9 +17,6 @@ namespace webrtc { -bool AdaptiveThresholdExperimentIsDisabled( - const FieldTrialsView& key_value_config); - class OveruseDetector { public: explicit OveruseDetector(const FieldTrialsView* key_value_config); @@ -46,10 +43,9 @@ class OveruseDetector { void UpdateThreshold(double modified_offset, int64_t now_ms); void InitializeExperiment(const FieldTrialsView& key_value_config); - bool in_experiment_; - double k_up_; - double k_down_; - double overusing_time_threshold_; + const double k_up_; + const double k_down_; + const double overusing_time_threshold_; double threshold_; int64_t last_update_ms_; double prev_offset_; diff --git a/modules/remote_bitrate_estimator/overuse_detector_unittest.cc b/modules/remote_bitrate_estimator/overuse_detector_unittest.cc index 8420af96a1..e91d4f0d22 100644 --- a/modules/remote_bitrate_estimator/overuse_detector_unittest.cc +++ b/modules/remote_bitrate_estimator/overuse_detector_unittest.cc @@ -21,7 +21,6 @@ #include "modules/remote_bitrate_estimator/inter_arrival.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" #include "rtc_base/random.h" -#include "test/field_trial.h" #include "test/gtest.h" namespace webrtc { @@ -218,69 +217,6 @@ TEST_F(OveruseDetectorTest, SimpleOveruse100kbit10fps) { EXPECT_EQ(7, frames_until_overuse); } -TEST_F(OveruseDetectorTest, DISABLED_OveruseWithHighVariance100Kbit10fps) { - uint32_t frame_duration_ms = 100; - uint32_t drift_per_frame_ms = 10; - uint32_t rtp_timestamp = frame_duration_ms * 90; - size_t packet_size = 1200; - int offset = 10; - - // Run 1000 samples to reach steady state. 
- for (int i = 0; i < 1000; ++i) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - rtp_timestamp += frame_duration_ms * 90; - if (i % 2) { - offset = random_.Rand(0, 49); - now_ms_ += frame_duration_ms - offset; - } else { - now_ms_ += frame_duration_ms + offset; - } - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - // Simulate a higher send pace, that is too high. - // Above noise generate a standard deviation of approximately 28 ms. - // Total build up of 150 ms. - for (int j = 0; j < 15; ++j) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - now_ms_ += frame_duration_ms + drift_per_frame_ms; - rtp_timestamp += frame_duration_ms * 90; - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - EXPECT_EQ(BandwidthUsage::kBwOverusing, overuse_detector_->State()); -} - -TEST_F(OveruseDetectorTest, DISABLED_OveruseWithLowVariance100Kbit10fps) { - uint32_t frame_duration_ms = 100; - uint32_t drift_per_frame_ms = 1; - uint32_t rtp_timestamp = frame_duration_ms * 90; - size_t packet_size = 1200; - int offset = 10; - - // Run 1000 samples to reach steady state. - for (int i = 0; i < 1000; ++i) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - rtp_timestamp += frame_duration_ms * 90; - if (i % 2) { - offset = random_.Rand(0, 1); - now_ms_ += frame_duration_ms - offset; - } else { - now_ms_ += frame_duration_ms + offset; - } - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - // Simulate a higher send pace, that is too high. - // Total build up of 6 ms. 
- for (int j = 0; j < 6; ++j) { - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - now_ms_ += frame_duration_ms + drift_per_frame_ms; - rtp_timestamp += frame_duration_ms * 90; - EXPECT_EQ(BandwidthUsage::kBwNormal, overuse_detector_->State()); - } - UpdateDetector(rtp_timestamp, now_ms_, packet_size); - EXPECT_EQ(BandwidthUsage::kBwOverusing, overuse_detector_->State()); -} - TEST_F(OveruseDetectorTest, OveruseWithLowVariance2000Kbit30fps) { uint32_t frame_duration_ms = 33; uint32_t drift_per_frame_ms = 1; @@ -322,13 +258,7 @@ TEST_F(OveruseDetectorTest, OveruseWithLowVariance2000Kbit30fps) { EXPECT_EQ(BandwidthUsage::kBwOverusing, overuse_detector_->State()); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance30Kbit3fps \ - DISABLED_LowGaussianVariance30Kbit3fps -#else -#define MAYBE_LowGaussianVariance30Kbit3fps LowGaussianVariance30Kbit3fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance30Kbit3fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance30Kbit3fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 333; @@ -388,13 +318,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift30Kbit3fps) { EXPECT_EQ(4, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance100Kbit5fps \ - DISABLED_LowGaussianVariance100Kbit5fps -#else -#define MAYBE_LowGaussianVariance100Kbit5fps LowGaussianVariance100Kbit5fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit5fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance100Kbit5fps) { size_t packet_size = 1200; int packets_per_frame = 2; int frame_duration_ms = 200; @@ -409,13 +333,7 @@ TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit5fps) { EXPECT_EQ(20, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_HighGaussianVariance100Kbit5fps \ - DISABLED_HighGaussianVariance100Kbit5fps -#else -#define MAYBE_HighGaussianVariance100Kbit5fps HighGaussianVariance100Kbit5fps -#endif 
-TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit5fps) { +TEST_F(OveruseDetectorTest, HighGaussianVariance100Kbit5fps) { size_t packet_size = 1200; int packets_per_frame = 2; int frame_duration_ms = 200; @@ -430,13 +348,7 @@ TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit5fps) { EXPECT_EQ(44, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance100Kbit10fps \ - DISABLED_LowGaussianVariance100Kbit10fps -#else -#define MAYBE_LowGaussianVariance100Kbit10fps LowGaussianVariance100Kbit10fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit10fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance100Kbit10fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 100; @@ -451,13 +363,7 @@ TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance100Kbit10fps) { EXPECT_EQ(20, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_HighGaussianVariance100Kbit10fps \ - DISABLED_HighGaussianVariance100Kbit10fps -#else -#define MAYBE_HighGaussianVariance100Kbit10fps HighGaussianVariance100Kbit10fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit10fps) { +TEST_F(OveruseDetectorTest, HighGaussianVariance100Kbit10fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 100; @@ -472,13 +378,7 @@ TEST_F(OveruseDetectorTest, MAYBE_HighGaussianVariance100Kbit10fps) { EXPECT_EQ(44, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance300Kbit30fps \ - DISABLED_LowGaussianVariance300Kbit30fps -#else -#define MAYBE_LowGaussianVariance300Kbit30fps LowGaussianVariance300Kbit30fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance300Kbit30fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance300Kbit30fps) { size_t packet_size = 1200; int packets_per_frame = 1; int frame_duration_ms = 33; @@ -538,13 +438,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift300Kbit30fps) { 
EXPECT_EQ(10, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance1000Kbit30fps \ - DISABLED_LowGaussianVariance1000Kbit30fps -#else -#define MAYBE_LowGaussianVariance1000Kbit30fps LowGaussianVariance1000Kbit30fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance1000Kbit30fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance1000Kbit30fps) { size_t packet_size = 1200; int packets_per_frame = 3; int frame_duration_ms = 33; @@ -604,13 +498,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift1000Kbit30fps) { EXPECT_EQ(10, frames_until_overuse); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_LowGaussianVariance2000Kbit30fps \ - DISABLED_LowGaussianVariance2000Kbit30fps -#else -#define MAYBE_LowGaussianVariance2000Kbit30fps LowGaussianVariance2000Kbit30fps -#endif -TEST_F(OveruseDetectorTest, MAYBE_LowGaussianVariance2000Kbit30fps) { +TEST_F(OveruseDetectorTest, LowGaussianVariance2000Kbit30fps) { size_t packet_size = 1200; int packets_per_frame = 6; int frame_duration_ms = 33; @@ -670,22 +558,7 @@ TEST_F(OveruseDetectorTest, HighGaussianVarianceFastDrift2000Kbit30fps) { EXPECT_EQ(10, frames_until_overuse); } -class OveruseDetectorExperimentTest : public OveruseDetectorTest { - public: - OveruseDetectorExperimentTest() - : override_field_trials_( - "WebRTC-AdaptiveBweThreshold/Enabled-0.01,0.00018/") {} - - protected: - void SetUp() override { - overuse_detector_.reset(new OveruseDetector(&field_trials_)); - } - - test::ScopedFieldTrials override_field_trials_; - const FieldTrialBasedConfig field_trials_; -}; - -TEST_F(OveruseDetectorExperimentTest, ThresholdAdapts) { +TEST_F(OveruseDetectorTest, ThresholdAdapts) { const double kOffset = 0.21; double kTsDelta = 3000.0; int64_t now_ms = 0; @@ -756,7 +629,7 @@ TEST_F(OveruseDetectorExperimentTest, ThresholdAdapts) { EXPECT_TRUE(overuse_detected); } -TEST_F(OveruseDetectorExperimentTest, DoesntAdaptToSpikes) { +TEST_F(OveruseDetectorTest, DoesntAdaptToSpikes) { 
const double kOffset = 1.0; const double kLargeOffset = 20.0; double kTsDelta = 3000.0; diff --git a/modules/remote_bitrate_estimator/packet_arrival_map.cc b/modules/remote_bitrate_estimator/packet_arrival_map.cc index 16d400e227..71888dfaf2 100644 --- a/modules/remote_bitrate_estimator/packet_arrival_map.cc +++ b/modules/remote_bitrate_estimator/packet_arrival_map.cc @@ -151,7 +151,6 @@ void PacketArrivalTimeMap::EraseTo(int64_t sequence_number) { } // Remove some. begin_sequence_number_ = sequence_number; - RTC_DCHECK(has_received(begin_sequence_number_)); AdjustToSize(end_sequence_number_ - begin_sequence_number_); } diff --git a/modules/remote_bitrate_estimator/packet_arrival_map.h b/modules/remote_bitrate_estimator/packet_arrival_map.h index d489a0c53d..e7086d0de4 100644 --- a/modules/remote_bitrate_estimator/packet_arrival_map.h +++ b/modules/remote_bitrate_estimator/packet_arrival_map.h @@ -31,6 +31,10 @@ namespace webrtc { // packets out-of-order. class PacketArrivalTimeMap { public: + struct PacketArrivalTime { + Timestamp arrival_time; + int64_t sequence_number; + }; // Impossible to request feedback older than what can be represented by 15 // bits. static constexpr int kMaxNumberOfPackets = (1 << 15); @@ -63,6 +67,21 @@ class PacketArrivalTimeMap { return arrival_times_[Index(sequence_number)]; } + // Returns timestamp and sequence number of the received packet with sequence + // number equal or larger than `sequence_number`. `sequence_number` must be in + // range [begin_sequence_number, end_sequence_number). 
+ PacketArrivalTime FindNextAtOrAfter(int64_t sequence_number) const { + RTC_DCHECK_GE(sequence_number, begin_sequence_number()); + RTC_DCHECK_LT(sequence_number, end_sequence_number()); + while (true) { + Timestamp t = arrival_times_[Index(sequence_number)]; + if (t >= Timestamp::Zero()) { + return {.arrival_time = t, .sequence_number = sequence_number}; + } + ++sequence_number; + } + } + // Clamps `sequence_number` between [begin_sequence_number, // end_sequence_number]. int64_t clamp(int64_t sequence_number) const { diff --git a/modules/remote_bitrate_estimator/packet_arrival_map_test.cc b/modules/remote_bitrate_estimator/packet_arrival_map_test.cc index 00c927ffd7..f11f5a147b 100644 --- a/modules/remote_bitrate_estimator/packet_arrival_map_test.cc +++ b/modules/remote_bitrate_estimator/packet_arrival_map_test.cc @@ -65,6 +65,23 @@ TEST(PacketArrivalMapTest, InsertsWithGaps) { EXPECT_EQ(map.clamp(100), 46); } +TEST(PacketArrivalMapTest, FindNextAtOrAfterWithGaps) { + PacketArrivalTimeMap map; + + map.AddPacket(42, Timestamp::Zero()); + map.AddPacket(45, Timestamp::Millis(11)); + EXPECT_EQ(map.begin_sequence_number(), 42); + EXPECT_EQ(map.end_sequence_number(), 46); + + PacketArrivalTimeMap::PacketArrivalTime packet = map.FindNextAtOrAfter(42); + EXPECT_EQ(packet.arrival_time, Timestamp::Zero()); + EXPECT_EQ(packet.sequence_number, 42); + + packet = map.FindNextAtOrAfter(43); + EXPECT_EQ(packet.arrival_time, Timestamp::Millis(11)); + EXPECT_EQ(packet.sequence_number, 45); +} + TEST(PacketArrivalMapTest, InsertsWithinBuffer) { PacketArrivalTimeMap map; @@ -247,5 +264,28 @@ TEST(PacketArrivalMapTest, EraseAllRemembersBeginningSeqNbr) { EXPECT_FALSE(map.has_received(51)); } +TEST(PacketArrivalMapTest, EraseToMissingSequenceNumber) { + PacketArrivalTimeMap map; + + map.AddPacket(37, Timestamp::Millis(10)); + map.AddPacket(39, Timestamp::Millis(11)); + map.AddPacket(40, Timestamp::Millis(12)); + map.AddPacket(41, Timestamp::Millis(13)); + + map.EraseTo(38); + + 
map.AddPacket(42, Timestamp::Millis(40)); + + EXPECT_EQ(map.begin_sequence_number(), 38); + EXPECT_EQ(map.end_sequence_number(), 43); + + EXPECT_FALSE(map.has_received(37)); + EXPECT_FALSE(map.has_received(38)); + EXPECT_TRUE(map.has_received(39)); + EXPECT_TRUE(map.has_received(40)); + EXPECT_TRUE(map.has_received(41)); + EXPECT_TRUE(map.has_received(42)); +} + } // namespace } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc index 6f442e5e2c..8f15912a49 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc @@ -63,7 +63,7 @@ RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream( : clock_(clock), incoming_bitrate_(kBitrateWindowMs, 8000), last_valid_incoming_bitrate_(0), - remote_rate_(new AimdRateControl(&field_trials_)), + remote_rate_(&field_trials_), observer_(observer), last_process_time_(-1), process_interval_ms_(kProcessIntervalMs), @@ -144,7 +144,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( incoming_bitrate_.Rate(now_ms); if (incoming_bitrate_bps && (prior_state != BandwidthUsage::kBwOverusing || - GetRemoteRate()->TimeToReduceFurther( + remote_rate_.TimeToReduceFurther( Timestamp::Millis(now_ms), DataRate::BitsPerSec(*incoming_bitrate_bps)))) { // The first overuse should immediately trigger a new estimate. 
@@ -193,14 +193,13 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { if (overuse_detectors_.empty()) { return; } - AimdRateControl* remote_rate = GetRemoteRate(); const RateControlInput input( bw_state, OptionalRateFromOptionalBps(incoming_bitrate_.Rate(now_ms))); uint32_t target_bitrate = - remote_rate->Update(&input, Timestamp::Millis(now_ms)).bps(); - if (remote_rate->ValidEstimate()) { - process_interval_ms_ = remote_rate->GetFeedbackInterval().ms(); + remote_rate_.Update(&input, Timestamp::Millis(now_ms)).bps(); + if (remote_rate_.ValidEstimate()) { + process_interval_ms_ = remote_rate_.GetFeedbackInterval().ms(); RTC_DCHECK_GT(process_interval_ms_, 0); std::vector ssrcs; GetSsrcs(&ssrcs); @@ -212,7 +211,7 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { void RemoteBitrateEstimatorSingleStream::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { MutexLock lock(&mutex_); - GetRemoteRate()->SetRtt(TimeDelta::Millis(avg_rtt_ms)); + remote_rate_.SetRtt(TimeDelta::Millis(avg_rtt_ms)); } void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { @@ -226,10 +225,10 @@ void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { DataRate RemoteBitrateEstimatorSingleStream::LatestEstimate() const { MutexLock lock(&mutex_); - if (!remote_rate_->ValidEstimate() || overuse_detectors_.empty()) { + if (!remote_rate_.ValidEstimate() || overuse_detectors_.empty()) { return DataRate::Zero(); } - return remote_rate_->LatestEstimate(); + return remote_rate_.LatestEstimate(); } void RemoteBitrateEstimatorSingleStream::GetSsrcs( @@ -243,10 +242,4 @@ void RemoteBitrateEstimatorSingleStream::GetSsrcs( } } -AimdRateControl* RemoteBitrateEstimatorSingleStream::GetRemoteRate() { - if (!remote_rate_) - remote_rate_.reset(new AimdRateControl(&field_trials_)); - return remote_rate_.get(); -} - } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h 
b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h index d62f922e02..699f259d48 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h @@ -15,7 +15,6 @@ #include #include -#include #include #include "api/transport/field_trial_based_config.h" @@ -65,16 +64,12 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { void GetSsrcs(std::vector* ssrcs) const RTC_SHARED_LOCKS_REQUIRED(mutex_); - // Returns `remote_rate_` if the pointed to object exists, - // otherwise creates it. - AimdRateControl* GetRemoteRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - Clock* const clock_; const FieldTrialBasedConfig field_trials_; SsrcOveruseEstimatorMap overuse_detectors_ RTC_GUARDED_BY(mutex_); RateStatistics incoming_bitrate_ RTC_GUARDED_BY(mutex_); uint32_t last_valid_incoming_bitrate_ RTC_GUARDED_BY(mutex_); - std::unique_ptr remote_rate_ RTC_GUARDED_BY(mutex_); + AimdRateControl remote_rate_ RTC_GUARDED_BY(mutex_); RemoteBitrateObserver* const observer_ RTC_GUARDED_BY(mutex_); mutable Mutex mutex_; int64_t last_process_time_; diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc index b83720d1a8..598279e0af 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc +++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc @@ -27,8 +27,11 @@ namespace webrtc { namespace { // The maximum allowed value for a timestamp in milliseconds. This is lower // than the numerical limit since we often convert to microseconds. 
-static constexpr int64_t kMaxTimeMs = - std::numeric_limits::max() / 1000; +constexpr int64_t kMaxTimeMs = std::numeric_limits::max() / 1000; +constexpr TimeDelta kBackWindow = TimeDelta::Millis(500); +constexpr TimeDelta kMinInterval = TimeDelta::Millis(50); +constexpr TimeDelta kMaxInterval = TimeDelta::Millis(250); +constexpr TimeDelta kDefaultInterval = TimeDelta::Millis(100); TimeDelta GetAbsoluteSendTimeDelta(uint32_t new_sendtime, uint32_t previous_sendtime) { @@ -48,22 +51,20 @@ TimeDelta GetAbsoluteSendTimeDelta(uint32_t new_sendtime, RemoteEstimatorProxy::RemoteEstimatorProxy( TransportFeedbackSender feedback_sender, - const FieldTrialsView* key_value_config, NetworkStateEstimator* network_state_estimator) : feedback_sender_(std::move(feedback_sender)), - send_config_(key_value_config), last_process_time_(Timestamp::MinusInfinity()), network_state_estimator_(network_state_estimator), media_ssrc_(0), feedback_packet_count_(0), packet_overhead_(DataSize::Zero()), - send_interval_(send_config_.default_interval.Get()), + send_interval_(kDefaultInterval), send_periodic_feedback_(true), previous_abs_send_time_(0), abs_send_timestamp_(Timestamp::Zero()) { RTC_LOG(LS_INFO) - << "Maximum interval between transport feedback RTCP messages (ms): " - << send_config_.max_interval->ms(); + << "Maximum interval between transport feedback RTCP messages: " + << kMaxInterval; } RemoteEstimatorProxy::~RemoteEstimatorProxy() {} @@ -72,10 +73,10 @@ void RemoteEstimatorProxy::MaybeCullOldPackets(int64_t sequence_number, Timestamp arrival_time) { if (periodic_window_start_seq_ >= packet_arrival_times_.end_sequence_number() && - arrival_time - Timestamp::Zero() >= send_config_.back_window.Get()) { + arrival_time - Timestamp::Zero() >= kBackWindow) { // Start new feedback packet, cull old packets. 
- packet_arrival_times_.RemoveOldPackets( - sequence_number, arrival_time - send_config_.back_window.Get()); + packet_arrival_times_.RemoveOldPackets(sequence_number, + arrival_time - kBackWindow); } } @@ -172,19 +173,17 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { // TwccReport size at 250ms interval is 36 byte. // AverageTwccReport = (TwccReport(50ms) + TwccReport(250ms)) / 2 constexpr DataSize kTwccReportSize = DataSize::Bytes(20 + 8 + 10 + 30); - const DataRate kMinTwccRate = - kTwccReportSize / send_config_.max_interval.Get(); + constexpr DataRate kMinTwccRate = kTwccReportSize / kMaxInterval; // Let TWCC reports occupy 5% of total bandwidth. - DataRate twcc_bitrate = - DataRate::BitsPerSec(send_config_.bandwidth_fraction * bitrate_bps); + DataRate twcc_bitrate = DataRate::BitsPerSec(0.05 * bitrate_bps); // Check upper send_interval bound by checking bitrate to avoid overflow when // dividing by small bitrate, in particular avoid dividing by zero bitrate. - TimeDelta send_interval = twcc_bitrate <= kMinTwccRate - ? send_config_.max_interval.Get() - : std::max(kTwccReportSize / twcc_bitrate, - send_config_.min_interval.Get()); + TimeDelta send_interval = + twcc_bitrate <= kMinTwccRate + ? kMaxInterval + : std::max(kTwccReportSize / twcc_bitrate, kMinInterval); MutexLock lock(&lock_); send_interval_ = send_interval; @@ -291,10 +290,11 @@ RemoteEstimatorProxy::MaybeBuildFeedbackPacket( int64_t next_sequence_number = begin_sequence_number_inclusive; for (int64_t seq = start_seq; seq < end_seq; ++seq) { - Timestamp arrival_time = packet_arrival_times_.get(seq); - if (arrival_time < Timestamp::Zero()) { - // Packet not received. 
- continue; + PacketArrivalTimeMap::PacketArrivalTime packet = + packet_arrival_times_.FindNextAtOrAfter(seq); + seq = packet.sequence_number; + if (seq >= end_seq) { + break; } if (feedback_packet == nullptr) { @@ -306,12 +306,12 @@ RemoteEstimatorProxy::MaybeBuildFeedbackPacket( // shall be the time of the first received packet in the feedback. feedback_packet->SetBase( static_cast(begin_sequence_number_inclusive & 0xFFFF), - arrival_time); + packet.arrival_time); feedback_packet->SetFeedbackSequenceNumber(feedback_packet_count_++); } if (!feedback_packet->AddReceivedPacket(static_cast(seq & 0xFFFF), - arrival_time)) { + packet.arrival_time)) { // Could not add timestamp, feedback packet might be full. Return and // try again with a fresh packet. break; diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/modules/remote_bitrate_estimator/remote_estimator_proxy.h index 509ad0ba02..7b0a8a6c49 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy.h +++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.h @@ -26,7 +26,6 @@ #include "modules/remote_bitrate_estimator/packet_arrival_map.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" @@ -42,7 +41,6 @@ class RemoteEstimatorProxy { using TransportFeedbackSender = std::function> packets)>; RemoteEstimatorProxy(TransportFeedbackSender feedback_sender, - const FieldTrialsView* key_value_config, NetworkStateEstimator* network_state_estimator); ~RemoteEstimatorProxy(); @@ -69,22 +67,6 @@ class RemoteEstimatorProxy { void SetTransportOverhead(DataSize overhead_per_packet); private: - struct TransportWideFeedbackConfig { - FieldTrialParameter back_window{"wind", TimeDelta::Millis(500)}; - FieldTrialParameter min_interval{"min", TimeDelta::Millis(50)}; - 
FieldTrialParameter max_interval{"max", TimeDelta::Millis(250)}; - FieldTrialParameter default_interval{"def", - TimeDelta::Millis(100)}; - FieldTrialParameter bandwidth_fraction{"frac", 0.05}; - explicit TransportWideFeedbackConfig( - const FieldTrialsView* key_value_config) { - ParseFieldTrial({&back_window, &min_interval, &max_interval, - &default_interval, &bandwidth_fraction}, - key_value_config->Lookup( - "WebRTC-Bwe-TransportWideFeedbackIntervals")); - } - }; - void MaybeCullOldPackets(int64_t sequence_number, Timestamp arrival_time) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); void SendPeriodicFeedbacks() RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); @@ -111,7 +93,6 @@ class RemoteEstimatorProxy { bool is_periodic_update) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); const TransportFeedbackSender feedback_sender_; - const TransportWideFeedbackConfig send_config_; Timestamp last_process_time_; Mutex lock_; diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc index 10bc1e80a0..16455b44a4 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc +++ b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc @@ -13,7 +13,6 @@ #include #include -#include "api/transport/field_trial_based_config.h" #include "api/transport/network_types.h" #include "api/transport/test/mock_network_control.h" #include "api/units/data_size.h" @@ -79,9 +78,7 @@ class RemoteEstimatorProxyTest : public ::testing::Test { public: RemoteEstimatorProxyTest() : clock_(0), - proxy_(feedback_sender_.AsStdFunction(), - &field_trial_config_, - &network_state_estimator_) {} + proxy_(feedback_sender_.AsStdFunction(), &network_state_estimator_) {} protected: void IncomingPacket( @@ -100,7 +97,6 @@ class RemoteEstimatorProxyTest : public ::testing::Test { proxy_.Process(clock_.CurrentTime()); } - FieldTrialBasedConfig field_trial_config_; SimulatedClock clock_; MockFunction>)> 
feedback_sender_; diff --git a/modules/rtp_rtcp/BUILD.gn b/modules/rtp_rtcp/BUILD.gn index 736ca58fd1..abcdb619f4 100644 --- a/modules/rtp_rtcp/BUILD.gn +++ b/modules/rtp_rtcp/BUILD.gn @@ -425,6 +425,7 @@ rtc_library("rtp_video_header") { "../../api:rtp_headers", "../../api/transport/rtp:dependency_descriptor", "../../api/video:video_frame", + "../../api/video:video_frame_metadata", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../modules/video_coding:codec_globals_headers", @@ -436,6 +437,17 @@ rtc_library("rtp_video_header") { ] } +rtc_source_set("rtp_video_header_unittest") { + testonly = true + sources = [ "source/rtp_video_header_unittest.cc" ] + deps = [ + ":rtp_video_header", + "../../api/video:video_frame_metadata", + "../../api/video:video_frame_type", + "../../test:test_support", + ] +} + rtc_library("fec_test_helper") { testonly = true sources = [ @@ -596,14 +608,18 @@ if (rtc_include_tests) { ] deps = [ ":fec_test_helper", + ":frame_transformer_factory_unittest", ":mock_rtp_rtcp", ":rtcp_transceiver", ":rtp_packetizer_av1_test_helper", ":rtp_rtcp", ":rtp_rtcp_format", ":rtp_rtcp_legacy", + ":rtp_video_header_unittest", "../../api:array_view", "../../api:create_time_controller", + "../../api:field_trials_registry", + "../../api:frame_transformer_factory", "../../api:libjingle_peerconnection_api", "../../api:mock_frame_encryptor", "../../api:rtp_headers", @@ -649,6 +665,7 @@ if (rtc_include_tests) { "../../rtc_base:threading", "../../rtc_base:timeutils", "../../system_wrappers", + "../../test:explicit_key_value_config", "../../test:field_trial", "../../test:mock_frame_transformer", "../../test:mock_transport", @@ -668,3 +685,19 @@ if (rtc_include_tests) { ] } } + +rtc_source_set("frame_transformer_factory_unittest") { + testonly = true + sources = [ "source/frame_transformer_factory_unittest.cc" ] + deps = [ + "../../api:frame_transformer_factory", + "../../api:transport_api", + "../../call:video_stream_api", + 
"../../modules/rtp_rtcp", + "../../rtc_base:rtc_event", + "../../test:mock_frame_transformer", + "../../test:test_support", + "../../video", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +} diff --git a/modules/rtp_rtcp/include/rtp_packet_sender.h b/modules/rtp_rtcp/include/rtp_packet_sender.h index ae221b09d3..ebc65298a5 100644 --- a/modules/rtp_rtcp/include/rtp_packet_sender.h +++ b/modules/rtp_rtcp/include/rtp_packet_sender.h @@ -28,6 +28,11 @@ class RtpPacketSender { // packets and the current target send rate. virtual void EnqueuePackets( std::vector> packets) = 0; + + // Clear any pending packets with the given SSRC from the queue. + // TODO(crbug.com/1395081): Make pure virtual when downstream code has been + // updated. + virtual void RemovePacketsForSsrc(uint32_t ssrc) {} }; } // namespace webrtc diff --git a/modules/rtp_rtcp/include/rtp_rtcp.h b/modules/rtp_rtcp/include/rtp_rtcp.h index c71d7f0c3d..e56d5ef637 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp.h +++ b/modules/rtp_rtcp/include/rtp_rtcp.h @@ -18,8 +18,7 @@ namespace webrtc { -// DEPRECATED. Do not use. -class RtpRtcp : public RtpRtcpInterface { +class ABSL_DEPRECATED("") RtpRtcp : public RtpRtcpInterface { public: // Instantiates a deprecated version of the RtpRtcp module. static std::unique_ptr ABSL_DEPRECATED("") @@ -30,12 +29,6 @@ class RtpRtcp : public RtpRtcpInterface { static std::unique_ptr DEPRECATED_Create( const Configuration& configuration); - // Requests new key frame. - // using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1 - void SendPictureLossIndication() { SendRTCP(kRtcpPli); } - // using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2 - void SendFullIntraRequest() { SendRTCP(kRtcpFir); } - // Process any pending tasks such as timeouts. 
virtual void Process() = 0; }; diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h index 22dcf34046..c482e0c7dc 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -25,6 +25,7 @@ #include "api/audio_codecs/audio_format.h" #include "api/rtp_headers.h" #include "api/transport/network_types.h" +#include "api/units/time_delta.h" #include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" #include "system_wrappers/include/clock.h" @@ -230,9 +231,6 @@ enum class RtpPacketMediaType : size_t { }; struct RtpPacketSendInfo { - public: - RtpPacketSendInfo() = default; - uint16_t transport_sequence_number = 0; absl::optional media_ssrc; uint16_t rtp_sequence_number = 0; // Only valid if `media_ssrc` is set. @@ -451,7 +449,10 @@ struct RtpReceiveStats { // RTCReceivedRtpStreamStats dictionary, see // https://w3c.github.io/webrtc-stats/#receivedrtpstats-dict* int32_t packets_lost = 0; + // Interarrival jitter in samples. uint32_t jitter = 0; + // Interarrival jitter in time. 
+ webrtc::TimeDelta interarrival_jitter = webrtc::TimeDelta::Zero(); // Timestamp and counters exposed in RTCInboundRtpStreamStats, see // https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict* diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index 5c93e0bfed..75c30742df 100644 --- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -67,7 +67,6 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { MOCK_METHOD(RtpState, GetRtxState, (), (const, override)); MOCK_METHOD(uint32_t, SSRC, (), (const, override)); MOCK_METHOD(void, SetMid, (absl::string_view mid), (override)); - MOCK_METHOD(void, SetCsrcs, (const std::vector& csrcs), (override)); MOCK_METHOD(void, SetRtxSendStatus, (int modes), (override)); MOCK_METHOD(int, RtxSendStatus, (), (const, override)); MOCK_METHOD(absl::optional, RtxSsrc, (), (const, override)); diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index 3687669b2f..2e7e219f94 100644 --- a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -28,11 +28,6 @@ constexpr int kSendSideDelayWindowMs = 1000; constexpr int kBitrateStatisticsWindowMs = 1000; constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; -bool IsDisabled(absl::string_view name, const FieldTrialsView* field_trials) { - FieldTrialBasedConfig default_trials; - auto& trials = field_trials ? *field_trials : default_trials; - return absl::StartsWith(trials.Lookup(name), "Disabled"); -} } // namespace DEPRECATED_RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender( @@ -72,8 +67,6 @@ DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress( flexfec_ssrc_(config.fec_generator ? 
config.fec_generator->FecSsrc() : absl::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), - send_side_bwe_with_overhead_( - !IsDisabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)), clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), @@ -316,16 +309,11 @@ void DEPRECATED_RtpSenderEgress::AddPacketToTransportFeedback( const RtpPacketToSend& packet, const PacedPacketInfo& pacing_info) { if (transport_feedback_observer_) { - size_t packet_size = packet.payload_size() + packet.padding_size(); - if (send_side_bwe_with_overhead_) { - packet_size = packet.size(); - } - RtpPacketSendInfo packet_info; packet_info.media_ssrc = ssrc_; packet_info.transport_sequence_number = packet_id; packet_info.rtp_sequence_number = packet.SequenceNumber(); - packet_info.length = packet_size; + packet_info.length = packet.size(); packet_info.pacing_info = pacing_info; packet_info.packet_type = packet.packet_type(); transport_feedback_observer_->OnAddPacket(packet_info); diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h index fd5dfddc02..609a90d4fe 100644 --- a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h +++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h @@ -43,6 +43,7 @@ class DEPRECATED_RtpSenderEgress { void EnqueuePackets( std::vector> packets) override; + void RemovePacketsForSsrc(uint32_t ssrc) override {} private: uint16_t transport_sequence_number_; @@ -110,7 +111,6 @@ class DEPRECATED_RtpSenderEgress { const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; const bool populate_network2_timestamp_; - const bool send_side_bwe_with_overhead_; Clock* const clock_; RtpPacketHistory* const packet_history_; Transport* const transport_; diff --git a/modules/rtp_rtcp/source/forward_error_correction.cc b/modules/rtp_rtcp/source/forward_error_correction.cc index 
903d3e7d45..1462c2f481 100644 --- a/modules/rtp_rtcp/source/forward_error_correction.cc +++ b/modules/rtp_rtcp/source/forward_error_correction.cc @@ -225,10 +225,10 @@ void ForwardErrorCorrection::GenerateFecPayloads( size_t fec_packet_length = fec_header_size + media_payload_length; if (fec_packet_length > fec_packet->data.size()) { - // Recall that XORing with zero (which the FEC packets are prefilled - // with) is the identity operator, thus all prior XORs are - // still correct even though we expand the packet length here. + size_t old_size = fec_packet->data.size(); fec_packet->data.SetSize(fec_packet_length); + memset(fec_packet->data.MutableData() + old_size, 0, + fec_packet_length - old_size); } XorHeaders(*media_packet, fec_packet); XorPayloads(*media_packet, media_payload_length, fec_header_size, @@ -573,7 +573,13 @@ bool ForwardErrorCorrection::FinishPacketRecovery( "typical IP packet, and is thus dropped."; return false; } + size_t old_size = recovered_packet->pkt->data.size(); recovered_packet->pkt->data.SetSize(new_size); + data = recovered_packet->pkt->data.MutableData(); + if (new_size > old_size) { + memset(data + old_size, 0, new_size - old_size); + } + // Set the SN field. ByteWriter::WriteBigEndian(&data[2], recovered_packet->seq_num); // Set the SSRC field. 
@@ -613,7 +619,10 @@ void ForwardErrorCorrection::XorPayloads(const Packet& src, RTC_DCHECK_LE(kRtpHeaderSize + payload_length, src.data.size()); RTC_DCHECK_LE(dst_offset + payload_length, dst->data.capacity()); if (dst_offset + payload_length > dst->data.size()) { - dst->data.SetSize(dst_offset + payload_length); + size_t old_size = dst->data.size(); + size_t new_size = dst_offset + payload_length; + dst->data.SetSize(new_size); + memset(dst->data.MutableData() + old_size, 0, new_size - old_size); } uint8_t* dst_data = dst->data.MutableData(); const uint8_t* src_data = src.data.cdata(); diff --git a/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc b/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc new file mode 100644 index 0000000000..e011a76ed5 --- /dev/null +++ b/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/frame_transformer_factory.h" + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/call/transport.h" +#include "call/video_receive_stream.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "rtc_base/event.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_frame_transformer.h" + +namespace webrtc { +namespace { + +using testing::NiceMock; +using testing::Return; + +class MockTransformableVideoFrame + : public webrtc::TransformableVideoFrameInterface { + public: + MOCK_METHOD(rtc::ArrayView, GetData, (), (const override)); + MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); + MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override)); + MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); + MOCK_METHOD(TransformableFrameInterface::Direction, + GetDirection, + (), + (const, override)); + MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); + MOCK_METHOD(std::vector, GetAdditionalData, (), (const, override)); + MOCK_METHOD(const webrtc::VideoFrameMetadata&, + GetMetadata, + (), + (const, override)); +}; + +TEST(FrameTransformerFactory, CloneVideoFrame) { + NiceMock original_frame; + uint8_t data[10]; + std::fill_n(data, 10, 5); + rtc::ArrayView data_view(data); + EXPECT_CALL(original_frame, GetData()).WillRepeatedly(Return(data_view)); + auto cloned_frame = CloneVideoFrame(&original_frame); + EXPECT_EQ(cloned_frame->GetData().size(), 10u); + EXPECT_THAT(cloned_frame->GetData(), testing::Each(5u)); +} + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.cc b/modules/rtp_rtcp/source/receive_statistics_impl.cc index 1e8e399f4d..deae14e8c5 100644 --- a/modules/rtp_rtcp/source/receive_statistics_impl.cc +++ b/modules/rtp_rtcp/source/receive_statistics_impl.cc @@ -16,6 +16,7 @@ #include #include +#include "api/units/time_delta.h" #include 
"modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -33,8 +34,7 @@ constexpr int64_t kStatisticsProcessIntervalMs = 1000; StreamStatistician::~StreamStatistician() {} -StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, - Clock* clock, +StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, Clock* clock, int max_reordering_threshold) : ssrc_(ssrc), clock_(clock), @@ -54,7 +54,8 @@ StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, received_seq_first_(-1), received_seq_max_(-1), last_report_cumulative_loss_(0), - last_report_seq_max_(-1) {} + last_report_seq_max_(-1), + last_payload_type_frequency_(0) {} StreamStatisticianImpl::~StreamStatisticianImpl() = default; @@ -154,6 +155,8 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet, time_diff_samples = std::abs(time_diff_samples); + ReviseFrequencyAndJitter(packet.payload_type_frequency()); + // lib_jingle sometimes deliver crazy jumps in TS for the same stream. // If this happens, don't update jitter value. Use 5 secs video frequency // as the threshold. @@ -164,6 +167,38 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet, } } +void StreamStatisticianImpl::ReviseFrequencyAndJitter( + int payload_type_frequency) { + if (payload_type_frequency == last_payload_type_frequency_) { + return; + } + + if (payload_type_frequency != 0) { + if (last_payload_type_frequency_ != 0) { + // Value in "jitter_q4_" variable is a number of samples. + // I.e. jitter = timestamp (ms) * frequency (kHz). + // Since the frequency has changed we have to update the number of samples + // accordingly. The new value should rely on a new frequency. + + // If we don't do such procedure we end up with the number of samples that + // cannot be converted into milliseconds correctly + // (i.e. 
jitter_ms = jitter_q4_ >> 4 / (payload_type_frequency / 1000)). + // In such case, the number of samples has a "mix". + + // Doing so we pretend that everything prior and including the current + // packet were computed on packet's frequency. + jitter_q4_ = static_cast(static_cast(jitter_q4_) * + payload_type_frequency / + last_payload_type_frequency_); + } + // If last_payload_type_frequency_ is not present, the jitter_q4_ + // variable has its initial value. + + // Keep last_payload_type_frequency_ up to date and non-zero (set). + last_payload_type_frequency_ = payload_type_frequency; + } +} + void StreamStatisticianImpl::SetMaxReorderingThreshold( int max_reordering_threshold) { max_reordering_threshold_ = max_reordering_threshold; @@ -178,6 +213,12 @@ RtpReceiveStats StreamStatisticianImpl::GetStats() const { stats.packets_lost = cumulative_loss_; // Note: internal jitter value is in Q4 and needs to be scaled by 1/16. stats.jitter = jitter_q4_ >> 4; + if (last_payload_type_frequency_ > 0) { + // Divide value in fractional seconds by frequency to get jitter in + // fractional seconds. 
+ stats.interarrival_jitter = + webrtc::TimeDelta::Seconds(stats.jitter) / last_payload_type_frequency_; + } if (receive_counters_.last_packet_received_timestamp_ms.has_value()) { stats.last_packet_received_timestamp_ms = *receive_counters_.last_packet_received_timestamp_ms + diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.h b/modules/rtp_rtcp/source/receive_statistics_impl.h index 1a70fe4ad7..4aac20a74b 100644 --- a/modules/rtp_rtcp/source/receive_statistics_impl.h +++ b/modules/rtp_rtcp/source/receive_statistics_impl.h @@ -65,6 +65,7 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface { bool IsRetransmitOfOldPacket(const RtpPacketReceived& packet, int64_t now_ms) const; void UpdateJitter(const RtpPacketReceived& packet, int64_t receive_time_ms); + void ReviseFrequencyAndJitter(int payload_type_frequency); // Updates StreamStatistician for out of order packets. // Returns true if packet considered to be out of order. bool UpdateOutOfOrder(const RtpPacketReceived& packet, @@ -108,6 +109,9 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface { // Counter values when we sent the last report. int32_t last_report_cumulative_loss_; int64_t last_report_seq_max_; + + // The sample frequency of the last received packet. + int last_payload_type_frequency_; }; // Thread-safe implementation of StreamStatisticianImplInterface. 
diff --git a/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/modules/rtp_rtcp/source/receive_statistics_unittest.cc index d40a743469..92c8f34196 100644 --- a/modules/rtp_rtcp/source/receive_statistics_unittest.cc +++ b/modules/rtp_rtcp/source/receive_statistics_unittest.cc @@ -10,9 +10,11 @@ #include "modules/rtp_rtcp/include/receive_statistics.h" +#include #include #include +#include "api/units/time_delta.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/random.h" #include "system_wrappers/include/clock.h" @@ -53,6 +55,25 @@ RtpPacketReceived CreateRtpPacket(uint32_t ssrc, return packet; } +RtpPacketReceived MakeRtpPacket(int payload_type_frequency, + uint32_t timestamp) { + RtpPacketReceived packet = + CreateRtpPacket(kSsrc1, + /*header_size=*/12, kPacketSize1 - 12, + /*padding_size=*/0); + packet.SetTimestamp(timestamp); + packet.set_payload_type_frequency(payload_type_frequency); + return packet; +} + +RtpPacketReceived MakeNextRtpPacket(const RtpPacketReceived& previous_packet, + int payload_type_frequency, + uint32_t timestamp) { + RtpPacketReceived packet = MakeRtpPacket(payload_type_frequency, timestamp); + packet.SetSequenceNumber(previous_packet.SequenceNumber() + 1); + return packet; +} + RtpPacketReceived CreateRtpPacket(uint32_t ssrc, size_t packet_size) { return CreateRtpPacket(ssrc, 12, packet_size - 12, 0); } @@ -65,6 +86,10 @@ void IncrementSequenceNumber(RtpPacketReceived* packet) { IncrementSequenceNumber(packet, 1); } +uint32_t GetJitter(const ReceiveStatistics& stats) { + return stats.GetStatistician(kSsrc1)->GetStats().jitter; +} + class ReceiveStatisticsTest : public ::testing::TestWithParam { public: ReceiveStatisticsTest() @@ -578,5 +603,299 @@ TEST_P(ReceiveStatisticsTest, LastPacketReceivedTimestamp) { EXPECT_EQ(45, counters.last_packet_received_timestamp_ms); } +TEST_P(ReceiveStatisticsTest, SimpleJitterComputation) { + const int kMsPerPacket = 20; + const int kCodecSampleRate = 48'000; + const int 
kSamplesPerPacket = kMsPerPacket * kCodecSampleRate / 1'000; + const int kLateArrivalDeltaMs = 100; + const int kLateArrivalDeltaSamples = + kLateArrivalDeltaMs * kCodecSampleRate / 1'000; + + packet1_.set_payload_type_frequency(kCodecSampleRate); + packet1_.SetSequenceNumber(1); + packet1_.SetTimestamp(0); + receive_statistics_->OnRtpPacket(packet1_); + packet1_.SetSequenceNumber(2); + packet1_.SetTimestamp(kSamplesPerPacket); + // Arrives 100 ms late. + clock_.AdvanceTimeMilliseconds(kMsPerPacket + kLateArrivalDeltaMs); + receive_statistics_->OnRtpPacket(packet1_); + + StreamStatistician* statistician = + receive_statistics_->GetStatistician(kSsrc1); + // See jitter caluculation in https://www.rfc-editor.org/rfc/rfc3550 6.4.1. + const uint32_t expected_jitter = (kLateArrivalDeltaSamples) / 16; + EXPECT_EQ(expected_jitter, statistician->GetStats().jitter); + EXPECT_EQ(webrtc::TimeDelta::Seconds(expected_jitter) / kCodecSampleRate, + statistician->GetStats().interarrival_jitter); +} + +TEST(ReviseJitterTest, AllPacketsHaveSamePayloadTypeFrequency) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/8'000, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/8'000, /*timestamp=*/1 + 160); + + RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/8'000, /*timestamp=*/1 + 2 * 160); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + + // packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 240 + // packet3: jitter = 240[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 240[jitter] + 
8) + // / 16 = 465 + // final jitter: 465 / 16 = 29 + EXPECT_EQ(GetJitter(*statistics), 29U); +} + +TEST(ReviseJitterTest, AllPacketsHaveDifferentPayloadTypeFrequency) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/8'000, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/8'000, /*timestamp=*/1 + 160); + RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/48'000, /*timestamp=*/1 + 160 + 960); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + + // packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 240 + // packet3: revised jitter: 240 * 48[frequency KHz] / 8[frequency KHz] = 1'440 + // jitter = 1'440[jitter] + (abs(50[receive time ms] * + // 48[frequency KHz] - 960[timestamp diff]) * 16 - 1'440[jitter] + 8) + // / 16 = 2'790 + // final jitter: 2'790 / 16 = 174 + EXPECT_EQ(GetJitter(*statistics), 174U); +} + +TEST(ReviseJitterTest, + FirstPacketPayloadTypeFrequencyIsZeroAndFrequencyChanged) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/0, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/8'000, /*timestamp=*/1 + 160); + RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/48'000, /*timestamp=*/1 + 160 + 960); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + + // 
packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 240 + // packet3: revised jitter: 240 * 48[frequency KHz] / 8[frequency KHz] = 1'440 + // jitter = 1'440[jitter] + (abs(50[receive time ms] * + // 48[frequency KHz] - 960[timestamp diff]) * 16 - 1'440[jitter] + 8) + // / 16 = 2'790 + // final jitter: 2'790 / 16 = 174 + EXPECT_EQ(GetJitter(*statistics), 174U); +} + +TEST(ReviseJitterTest, + FirstPacketPayloadTypeFrequencyIsZeroAndFrequencyNotChanged) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/0, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/8'000, /*timestamp=*/1 + 160); + RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/8'000, /*timestamp=*/1 + 160 + 160); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + + // packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 240 + // packet3: jitter = 240[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 240[jitter] + 8) + // / 16 = 465 + // final jitter: 465 / 16 = 29 + EXPECT_EQ(GetJitter(*statistics), 29U); +} + +TEST(ReviseJitterTest, + TwoFirstPacketPayloadTypeFrequencyIsZeroAndFrequencyChanged) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/0, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/0, /*timestamp=*/1 + 160); + 
RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/48'000, /*timestamp=*/1 + 160 + 960); + RtpPacketReceived packet4 = + MakeNextRtpPacket(packet3, /*payload_type_frequency=*/8'000, + /*timestamp=*/1 + 160 + 960 + 160); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet4); + + // packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 0[frequency KHz] - 160[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 160 + // packet3: jitter = 160[jitter] + (abs(50[receive time ms] * + // 48[frequency KHz] - 960[timestamp diff]) * 16 - 160[jitter] + 8) + // / 16 = 1'590 + // packet4: revised jitter: 1'590 * 8[frequency KHz] / 48[frequency KHz] = 265 + // packet4: jitter = 265[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 265[jitter] + 8) + // / 16 = 488 + // final jitter: 488 / 16 = 30 + EXPECT_EQ(GetJitter(*statistics), 30U); +} + +TEST(ReviseJitterTest, + TwoFirstPacketPayloadTypeFrequencyIsZeroAndFrequencyNotChanged) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/0, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/0, /*timestamp=*/1 + 160); + RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/8'000, /*timestamp=*/1 + 160 + 160); + RtpPacketReceived packet4 = + MakeNextRtpPacket(packet3, /*payload_type_frequency=*/8'000, + /*timestamp=*/1 + 160 + 160 + 160); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + 
clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet4); + + // packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 0[frequency KHz] - 160[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 160 + // packet3: jitter = 160[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 160[jitter] + 8) + // / 16 = 390 + // packet4: jitter = 390[jitter] + (abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 390[jitter] + 8) + // / 16 = 606 + // final jitter: 606 / 16 = 37 + EXPECT_EQ(GetJitter(*statistics), 37U); +} + +TEST(ReviseJitterTest, + MiddlePacketPayloadTypeFrequencyIsZeroAndFrequencyChanged) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/48'000, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/48'000, /*timestamp=*/1 + 960); + RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/0, /*timestamp=*/1 + 960 + 55); + RtpPacketReceived packet4 = + MakeNextRtpPacket(packet3, /*payload_type_frequency=*/8'000, + /*timestamp=*/1 + 960 + 55 + 160); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet4); + + // packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 48[frequency KHz] - 960[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 1'440 + // packet3: jitter = 1'440[jitter] + (abs(50[receive time ms] * + // 0[frequency KHz] - 55[timestamp diff]) * 16 - 1'440[jitter] + 8) + // / 16 = 1'405 + // packet4: revised jitter: 1'405 * 8[frequency KHz] / 48[frequency KHz] = 234 + // jitter = 234[jitter] + 
(abs(50[receive time ms] * + // 8[frequency KHz] - 160[timestamp diff]) * 16 - 234[jitter] + 8) + // / 16 = 459 + // final jitter: 459 / 16 = 28 + EXPECT_EQ(GetJitter(*statistics), 28U); +} + +TEST(ReviseJitterTest, + MiddlePacketPayloadTypeFrequencyIsZeroAndFrequencyNotChanged) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/48'000, + /*timestamp=*/1); + RtpPacketReceived packet2 = MakeNextRtpPacket( + packet1, /*payload_type_frequency=*/48'000, /*timestamp=*/1 + 960); + RtpPacketReceived packet3 = MakeNextRtpPacket( + packet2, /*payload_type_frequency=*/0, /*timestamp=*/1 + 960 + 55); + RtpPacketReceived packet4 = + MakeNextRtpPacket(packet3, /*payload_type_frequency=*/48'000, + /*timestamp=*/1 + 960 + 55 + 960); + + statistics->OnRtpPacket(packet1); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet2); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet3); + clock.AdvanceTimeMilliseconds(50); + statistics->OnRtpPacket(packet4); + + // packet1: no jitter calculation + // packet2: jitter = 0[jitter] + (abs(50[receive time ms] * + // 48[frequency KHz] - 960[timestamp diff]) * 16 - 0[jitter] + 8) + // / 16 = 1'440 + // packet3: jitter = 1'440[jitter] + (abs(50[receive time ms] * + // 0[frequency KHz] - 55[timestamp diff]) * 16 - 1'440[jitter] + 8) + // / 16 = 1'405 + // packet4: jitter = 1'405[jitter] + (abs(50[receive time ms] * + // 48[frequency KHz] - 960[timestamp diff]) * 16 - 1'405[jitter] + 8) + // / 16 = 2'757 + // final jitter: 2'757 / 16 = 172 + EXPECT_EQ(GetJitter(*statistics), 172U); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index a3662f19d9..eac023df3d 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -265,11 +265,6 @@ void 
ModuleRtpRtcpImpl::SetMid(absl::string_view mid) { // RTCP, this will need to be passed down to the RTCPSender also. } -void ModuleRtpRtcpImpl::SetCsrcs(const std::vector& csrcs) { - rtcp_sender_.SetCsrcs(csrcs); - rtp_sender_->packet_generator.SetCsrcs(csrcs); -} - // TODO(pbos): Handle media and RTX streams separately (separate RTCP // feedbacks). RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h index 0f4f00453d..f164195168 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -43,8 +43,12 @@ class Clock; struct PacedPacketInfo; struct RTPVideoHeader; -// DEPRECATED. -class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +class ABSL_DEPRECATED("") ModuleRtpRtcpImpl + : public RtpRtcp, + public RTCPReceiver::ModuleRtpRtcp { +#pragma clang diagnostic pop public: explicit ModuleRtpRtcpImpl( const RtpRtcpInterface::Configuration& configuration); @@ -99,8 +103,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { void SetMid(absl::string_view mid) override; - void SetCsrcs(const std::vector& csrcs) override; - RTCPSender::FeedbackState GetFeedbackState(); void SetRtxSendStatus(int mode) override; diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc index 4329a423cb..31dd1499d5 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -89,6 +89,8 @@ ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration) // Make sure rtcp sender use same timestamp offset as rtp sender. 
rtcp_sender_.SetTimestampOffset( rtp_sender_->packet_generator.TimestampOffset()); + rtp_sender_->packet_sender.SetTimestampOffset( + rtp_sender_->packet_generator.TimestampOffset()); } // Set default packet size limit. @@ -186,6 +188,7 @@ void ModuleRtpRtcpImpl2::SetRtpState(const RtpState& rtp_state) { rtp_sender_->packet_generator.SetRtpState(rtp_state); rtp_sender_->sequencer.SetRtpState(rtp_state); rtcp_sender_.SetTimestampOffset(rtp_state.start_timestamp); + rtp_sender_->packet_sender.SetTimestampOffset(rtp_state.start_timestamp); } void ModuleRtpRtcpImpl2::SetRtxState(const RtpState& rtp_state) { @@ -227,11 +230,6 @@ void ModuleRtpRtcpImpl2::SetMid(absl::string_view mid) { // RTCP, this will need to be passed down to the RTCPSender also. } -void ModuleRtpRtcpImpl2::SetCsrcs(const std::vector& csrcs) { - rtcp_sender_.SetCsrcs(csrcs); - rtp_sender_->packet_generator.SetCsrcs(csrcs); -} - // TODO(pbos): Handle media and RTX streams separately (separate RTCP // feedbacks). RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() { diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h index 34b6b8342d..e7a3ac03e8 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -114,8 +114,6 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, void SetMid(absl::string_view mid) override; - void SetCsrcs(const std::vector& csrcs) override; - RTCPSender::FeedbackState GetFeedbackState(); void SetRtxSendStatus(int mode) override; diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc index 4c08ce5c13..918e075be8 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc @@ -17,7 +17,7 @@ #include #include "absl/types/optional.h" -#include "api/transport/field_trial_based_config.h" +#include "api/field_trials_registry.h" #include 
"api/units/time_delta.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" @@ -28,6 +28,7 @@ #include "rtc_base/logging.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/strings/string_builder.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -43,6 +44,8 @@ using ::testing::Not; using ::testing::Optional; using ::testing::SizeIs; +using webrtc::test::ExplicitKeyValueConfig; + namespace webrtc { namespace { constexpr uint32_t kSenderSsrc = 0x12345; @@ -151,36 +154,6 @@ class SendTransport : public Transport, std::deque rtcp_packets_; }; -struct TestConfig { - explicit TestConfig(bool with_overhead) : with_overhead(with_overhead) {} - - bool with_overhead = false; -}; - -class FieldTrialConfig : public FieldTrialsView { - public: - static FieldTrialConfig GetFromTestConfig(const TestConfig& config) { - FieldTrialConfig trials; - trials.overhead_enabled_ = config.with_overhead; - return trials; - } - - FieldTrialConfig() : overhead_enabled_(false) {} - ~FieldTrialConfig() override {} - - void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; } - - std::string Lookup(absl::string_view key) const override { - if (key == "WebRTC-SendSideBwe-WithOverhead") { - return overhead_enabled_ ? 
"Enabled" : "Disabled"; - } - return ""; - } - - private: - bool overhead_enabled_; -}; - class RtpRtcpModule : public RtcpPacketTypeCounterObserver, public SendPacketObserver { public: @@ -194,7 +167,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, RtpRtcpModule(GlobalSimulatedTimeController* time_controller, bool is_sender, - const FieldTrialConfig& trials) + const FieldTrialsRegistry& trials) : time_controller_(time_controller), is_sender_(is_sender), trials_(trials), @@ -206,7 +179,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, TimeController* const time_controller_; const bool is_sender_; - const FieldTrialConfig& trials_; + const FieldTrialsRegistry& trials_; RtcpPacketTypeCounter packets_sent_; RtcpPacketTypeCounter packets_received_; std::unique_ptr receive_statistics_; @@ -289,11 +262,11 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, }; } // namespace -class RtpRtcpImpl2Test : public ::testing::TestWithParam { +class RtpRtcpImpl2Test : public ::testing::Test { protected: RtpRtcpImpl2Test() : time_controller_(Timestamp::Micros(133590000000000)), - field_trials_(FieldTrialConfig::GetFromTestConfig(GetParam())), + field_trials_(""), sender_(&time_controller_, /*is_sender=*/true, field_trials_), @@ -346,7 +319,7 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam { } GlobalSimulatedTimeController time_controller_; - FieldTrialConfig field_trials_; + test::ExplicitKeyValueConfig field_trials_; RtpRtcpModule sender_; std::unique_ptr sender_video_; RtpRtcpModule receiver_; @@ -403,7 +376,7 @@ class RtpRtcpImpl2Test : public ::testing::TestWithParam { } }; -TEST_P(RtpRtcpImpl2Test, RetransmitsAllLayers) { +TEST_F(RtpRtcpImpl2Test, RetransmitsAllLayers) { // Send frames. 
EXPECT_EQ(0, sender_.RtpSent()); EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), @@ -432,7 +405,7 @@ TEST_P(RtpRtcpImpl2Test, RetransmitsAllLayers) { EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber()); } -TEST_P(RtpRtcpImpl2Test, Rtt) { +TEST_F(RtpRtcpImpl2Test, Rtt) { RtpPacketReceived packet; packet.SetTimestamp(1); packet.SetSequenceNumber(123); @@ -476,7 +449,7 @@ TEST_P(RtpRtcpImpl2Test, Rtt) { EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), sender_.impl_->rtt_ms(), 1); } -TEST_P(RtpRtcpImpl2Test, RttForReceiverOnly) { +TEST_F(RtpRtcpImpl2Test, RttForReceiverOnly) { // Receiver module should send a Receiver time reference report (RTRR). EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport)); @@ -495,7 +468,7 @@ TEST_P(RtpRtcpImpl2Test, RttForReceiverOnly) { EXPECT_NEAR(2 * kOneWayNetworkDelay.ms(), receiver_.impl_->rtt_ms(), 1); } -TEST_P(RtpRtcpImpl2Test, NoSrBeforeMedia) { +TEST_F(RtpRtcpImpl2Test, NoSrBeforeMedia) { // Ignore fake transport delays in this test. sender_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); receiver_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); @@ -512,7 +485,7 @@ TEST_P(RtpRtcpImpl2Test, NoSrBeforeMedia) { EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u); } -TEST_P(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) { +TEST_F(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) { EXPECT_EQ(0U, sender_.RtcpReceived().nack_packets); EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets); @@ -527,7 +500,7 @@ TEST_P(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) { EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets); } -TEST_P(RtpRtcpImpl2Test, AddStreamDataCounters) { +TEST_F(RtpRtcpImpl2Test, AddStreamDataCounters) { StreamDataCounters rtp; const int64_t kStartTimeMs = 1; rtp.first_packet_time_ms = kStartTimeMs; @@ -570,7 +543,7 @@ TEST_P(RtpRtcpImpl2Test, AddStreamDataCounters) { EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms); // Holds oldest time. 
} -TEST_P(RtpRtcpImpl2Test, SendsInitialNackList) { +TEST_F(RtpRtcpImpl2Test, SendsInitialNackList) { // Send module sends a NACK. const uint16_t kNackLength = 1; uint16_t nack_list[kNackLength] = {123}; @@ -582,7 +555,7 @@ TEST_P(RtpRtcpImpl2Test, SendsInitialNackList) { EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123)); } -TEST_P(RtpRtcpImpl2Test, SendsExtendedNackList) { +TEST_F(RtpRtcpImpl2Test, SendsExtendedNackList) { // Send module sends a NACK. const uint16_t kNackLength = 1; uint16_t nack_list[kNackLength] = {123}; @@ -606,7 +579,7 @@ TEST_P(RtpRtcpImpl2Test, SendsExtendedNackList) { EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(124)); } -TEST_P(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) { +TEST_F(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) { sender_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); // Send module sends a NACK. const uint16_t kNackLength = 2; @@ -631,7 +604,7 @@ TEST_P(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) { EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125)); } -TEST_P(RtpRtcpImpl2Test, UniqueNackRequests) { +TEST_F(RtpRtcpImpl2Test, UniqueNackRequests) { receiver_.transport_.SimulateNetworkDelay(TimeDelta::Zero()); EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets); EXPECT_EQ(0U, receiver_.RtcpSent().nack_requests); @@ -671,7 +644,7 @@ TEST_P(RtpRtcpImpl2Test, UniqueNackRequests) { EXPECT_EQ(75, sender_.RtcpReceived().UniqueNackRequestsInPercent()); } -TEST_P(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) { +TEST_F(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) { const TimeDelta kVideoReportInterval = TimeDelta::Millis(3000); // Recreate sender impl with new configuration, and redo setup. 
@@ -709,7 +682,37 @@ TEST_P(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) { EXPECT_EQ(sender_.transport_.NumRtcpSent(), 2u); } -TEST_P(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { +TEST_F(RtpRtcpImpl2Test, RtpSenderEgressTimestampOffset) { + // RTP timestamp offset not explicitly set, default to random value. + uint16_t seqno = sender_.impl_->GetRtpState().sequence_number; + uint32_t media_rtp_ts = 1001; + uint32_t rtp_ts = media_rtp_ts + sender_.impl_->StartTimestamp(); + EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, rtp_ts, + /*capture_time_ms=*/0)); + AdvanceTime(kOneWayNetworkDelay); + EXPECT_THAT( + sender_.impl_->GetSentRtpPacketInfos(std::vector{seqno}), + ElementsAre(Field(&RtpSequenceNumberMap::Info::timestamp, media_rtp_ts))); + + RtpState saved_rtp_state = sender_.impl_->GetRtpState(); + + // Change RTP timestamp offset. + sender_.impl_->SetStartTimestamp(2000); + + // Restores RtpState and make sure the old timestamp offset is in place. + sender_.impl_->SetRtpState(saved_rtp_state); + seqno = sender_.impl_->GetRtpState().sequence_number; + media_rtp_ts = 1031; + rtp_ts = media_rtp_ts + sender_.impl_->StartTimestamp(); + EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid, rtp_ts, + /*capture_time_ms=*/0)); + AdvanceTime(kOneWayNetworkDelay); + EXPECT_THAT( + sender_.impl_->GetSentRtpPacketInfos(std::vector{seqno}), + ElementsAre(Field(&RtpSequenceNumberMap::Info::timestamp, media_rtp_ts))); +} + +TEST_F(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { const uint32_t kStartTimestamp = 1u; SetUp(); sender_.impl_->SetStartTimestamp(kStartTimestamp); @@ -768,12 +771,12 @@ TEST_P(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { } // Checks that the sender report stats are not available if no RTCP SR was sent. 
-TEST_P(RtpRtcpImpl2Test, SenderReportStatsNotAvailable) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsNotAvailable) { EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(absl::nullopt)); } // Checks that the sender report stats are available if an RTCP SR was sent. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsAvailable) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsAvailable) { // Send a frame in order to send an SR. EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); // Send an SR. @@ -784,7 +787,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsAvailable) { // Checks that the sender report stats are not available if an RTCP SR with an // unexpected SSRC is received. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { constexpr uint32_t kUnexpectedSenderSsrc = 0x87654321; static_assert(kUnexpectedSenderSsrc != kSenderSsrc, ""); // Forge a sender report and pass it to the receiver as if an RTCP SR were @@ -800,7 +803,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { } // Checks the stats derived from the last received RTCP SR are set correctly. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) { using SenderReportStats = RtpRtcpInterface::SenderReportStats; const NtpTime ntp(/*seconds=*/1u, /*fractions=*/1u << 31); constexpr uint32_t kPacketCount = 123u; @@ -823,7 +826,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) { } // Checks that the sender report stats count equals the number of sent RTCP SRs. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsCount) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsCount) { using SenderReportStats = RtpRtcpInterface::SenderReportStats; // Send a frame in order to send an SR. 
EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); @@ -841,7 +844,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsCount) { // Checks that the sender report stats include a valid arrival time if an RTCP // SR was sent. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) { // Send a frame in order to send an SR. EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); // Send an SR. @@ -854,7 +857,7 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) { // Checks that the packet and byte counters from an RTCP SR are not zero once // a frame is sent. -TEST_P(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) { +TEST_F(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) { using SenderReportStats = RtpRtcpInterface::SenderReportStats; // Send a frame in order to send an SR. EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); @@ -870,14 +873,14 @@ TEST_P(RtpRtcpImpl2Test, SenderReportStatsPacketByteCounters) { Field(&SenderReportStats::bytes_sent, Gt(0u))))); } -TEST_P(RtpRtcpImpl2Test, SendingVideoAdvancesSequenceNumber) { +TEST_F(RtpRtcpImpl2Test, SendingVideoAdvancesSequenceNumber) { const uint16_t sequence_number = sender_.impl_->SequenceNumber(); EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); ASSERT_THAT(sender_.transport_.rtp_packets_sent_, Gt(0)); EXPECT_EQ(sequence_number + 1, sender_.impl_->SequenceNumber()); } -TEST_P(RtpRtcpImpl2Test, SequenceNumberNotAdvancedWhenNotSending) { +TEST_F(RtpRtcpImpl2Test, SequenceNumberNotAdvancedWhenNotSending) { const uint16_t sequence_number = sender_.impl_->SequenceNumber(); sender_.impl_->SetSendingMediaStatus(false); EXPECT_FALSE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); @@ -885,7 +888,7 @@ TEST_P(RtpRtcpImpl2Test, SequenceNumberNotAdvancedWhenNotSending) { EXPECT_EQ(sequence_number, sender_.impl_->SequenceNumber()); } 
-TEST_P(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) { +TEST_F(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) { constexpr size_t kPaddingSize = 100; // Can't send padding before media. @@ -920,7 +923,7 @@ TEST_P(RtpRtcpImpl2Test, PaddingNotAllowedInMiddleOfFrame) { EXPECT_THAT(sender_.impl_->GeneratePadding(kPaddingSize), SizeIs(Gt(0u))); } -TEST_P(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) { +TEST_F(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) { constexpr size_t kPaddingSize = 100; const uint32_t kTimestamp = 123; @@ -941,7 +944,7 @@ TEST_P(RtpRtcpImpl2Test, PaddingTimestampMatchesMedia) { EXPECT_EQ(sender_.last_packet().Timestamp(), kTimestamp); } -TEST_P(RtpRtcpImpl2Test, AssignsTransportSequenceNumber) { +TEST_F(RtpRtcpImpl2Test, AssignsTransportSequenceNumber) { sender_.RegisterHeaderExtension(TransportSequenceNumber::Uri(), kTransportSequenceNumberExtensionId); @@ -958,7 +961,7 @@ TEST_P(RtpRtcpImpl2Test, AssignsTransportSequenceNumber) { EXPECT_EQ(first_transport_seq + 1, second_transport_seq); } -TEST_P(RtpRtcpImpl2Test, AssignsAbsoluteSendTime) { +TEST_F(RtpRtcpImpl2Test, AssignsAbsoluteSendTime) { sender_.RegisterHeaderExtension(AbsoluteSendTime::Uri(), kAbsoluteSendTimeExtensionId); @@ -966,7 +969,7 @@ TEST_P(RtpRtcpImpl2Test, AssignsAbsoluteSendTime) { EXPECT_NE(sender_.last_packet().GetExtension(), 0u); } -TEST_P(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) { +TEST_F(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) { sender_.RegisterHeaderExtension(TransmissionOffset::Uri(), kTransmissionOffsetExtensionId); @@ -982,7 +985,7 @@ TEST_P(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) { kOffset.ms() * kCaptureTimeMsToRtpTimestamp); } -TEST_P(RtpRtcpImpl2Test, PropagatesSentPacketInfo) { +TEST_F(RtpRtcpImpl2Test, PropagatesSentPacketInfo) { sender_.RegisterHeaderExtension(TransportSequenceNumber::Uri(), kTransportSequenceNumberExtensionId); int64_t now_ms = time_controller_.GetClock()->TimeInMilliseconds(); @@ -997,7 +1000,7 @@ 
TEST_P(RtpRtcpImpl2Test, PropagatesSentPacketInfo) { Field(&RtpRtcpModule::SentPacket::ssrc, Eq(kSenderSsrc))))); } -TEST_P(RtpRtcpImpl2Test, GeneratesFlexfec) { +TEST_F(RtpRtcpImpl2Test, GeneratesFlexfec) { constexpr int kFlexfecPayloadType = 118; constexpr uint32_t kFlexfecSsrc = 17; const char kNoMid[] = ""; @@ -1030,7 +1033,7 @@ TEST_P(RtpRtcpImpl2Test, GeneratesFlexfec) { EXPECT_EQ(fec_packet.PayloadType(), kFlexfecPayloadType); } -TEST_P(RtpRtcpImpl2Test, GeneratesUlpfec) { +TEST_F(RtpRtcpImpl2Test, GeneratesUlpfec) { constexpr int kUlpfecPayloadType = 118; constexpr int kRedPayloadType = 119; UlpfecGenerator ulpfec_sender(kRedPayloadType, kUlpfecPayloadType, @@ -1058,7 +1061,7 @@ TEST_P(RtpRtcpImpl2Test, GeneratesUlpfec) { EXPECT_EQ(fec_packet.payload()[0], kUlpfecPayloadType); } -TEST_P(RtpRtcpImpl2Test, RtpStateReflectsCurrentState) { +TEST_F(RtpRtcpImpl2Test, RtpStateReflectsCurrentState) { // Verify that that each of the field of GetRtpState actually reflects // the current state. @@ -1106,7 +1109,7 @@ TEST_P(RtpRtcpImpl2Test, RtpStateReflectsCurrentState) { EXPECT_EQ(state.ssrc_has_acked, true); } -TEST_P(RtpRtcpImpl2Test, RtxRtpStateReflectsCurrentState) { +TEST_F(RtpRtcpImpl2Test, RtxRtpStateReflectsCurrentState) { // Enable RTX. 
sender_.impl_->SetStorePacketsStatus(/*enable=*/true, /*number_to_store=*/10); sender_.impl_->SetRtxSendPayloadType(kRtxPayloadType, kPayloadType); @@ -1151,9 +1154,4 @@ TEST_P(RtpRtcpImpl2Test, RtxRtpStateReflectsCurrentState) { EXPECT_EQ(rtx_state.sequence_number, rtx_packet.SequenceNumber() + 1); } -INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, - RtpRtcpImpl2Test, - ::testing::Values(TestConfig{false}, - TestConfig{true})); - } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc index 9b9c1d8970..96bef23a3a 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc @@ -44,6 +44,9 @@ const uint8_t kPayloadType = 100; const int kWidth = 320; const int kHeight = 100; +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + class RtcpRttStatsTestImpl : public RtcpRttStats { public: RtcpRttStatsTestImpl() : rtt_ms_(0) {} @@ -697,4 +700,6 @@ TEST_F(RtpRtcpImplTest, SenderReportStatsPacketByteCounters) { Field(&SenderReportStats::bytes_sent, Gt(0u))))); } +#pragma clang diagnostic pop + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h index 8b1d11aa45..2024b308dd 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -258,10 +258,6 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Once set, this value can not be changed or removed. virtual void SetMid(absl::string_view mid) = 0; - // Sets CSRC. - // `csrcs` - vector of CSRCs - virtual void SetCsrcs(const std::vector& csrcs) = 0; - // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination // of values of the enumerator RtxMode. 
virtual void SetRtxSendStatus(int modes) = 0; diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.cc b/modules/rtp_rtcp/source/rtp_sender_egress.cc index e81ea8da19..c211b5a1ec 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -28,14 +28,6 @@ constexpr int kBitrateStatisticsWindowMs = 1000; constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(kBitrateStatisticsWindowMs); - -bool IsTrialSetTo(const FieldTrialsView* field_trials, - absl::string_view name, - absl::string_view value) { - FieldTrialBasedConfig default_trials; - auto& trials = field_trials ? *field_trials : default_trials; - return absl::StartsWith(trials.Lookup(name), value); -} } // namespace RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender( @@ -81,10 +73,6 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc() : absl::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), - send_side_bwe_with_overhead_( - !IsTrialSetTo(config.field_trials, - "WebRTC-SendSideBwe-WithOverhead", - "Disabled")), clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), @@ -422,15 +410,10 @@ void RtpSenderEgress::AddPacketToTransportFeedback( const RtpPacketToSend& packet, const PacedPacketInfo& pacing_info) { if (transport_feedback_observer_) { - size_t packet_size = packet.payload_size() + packet.padding_size(); - if (send_side_bwe_with_overhead_) { - packet_size = packet.size(); - } - RtpPacketSendInfo packet_info; packet_info.transport_sequence_number = packet_id; packet_info.rtp_timestamp = packet.Timestamp(); - packet_info.length = packet_size; + packet_info.length = packet.size(); packet_info.pacing_info = pacing_info; packet_info.packet_type = packet.packet_type(); diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.h 
b/modules/rtp_rtcp/source/rtp_sender_egress.h index c46f6aeb40..e0a8d966f5 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress.h +++ b/modules/rtp_rtcp/source/rtp_sender_egress.h @@ -49,6 +49,8 @@ class RtpSenderEgress { void EnqueuePackets( std::vector> packets) override; + // Since we don't pace packets, there's no pending packets to remove. + void RemovePacketsForSsrc(uint32_t ssrc) override {} private: void PrepareForSend(RtpPacketToSend* packet); @@ -137,7 +139,6 @@ class RtpSenderEgress { const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; const bool populate_network2_timestamp_; - const bool send_side_bwe_with_overhead_; Clock* const clock_; RtpPacketHistory* const packet_history_; Transport* const transport_; diff --git a/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc index efed66b9ff..cc1c8feb8d 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc @@ -15,6 +15,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/call/transport.h" +#include "api/field_trials_registry.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/mock/mock_rtc_event_log.h" @@ -25,6 +26,7 @@ #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/time_controller/simulated_time_controller.h" @@ -52,11 +54,6 @@ enum : int { kVideoTimingExtensionId, }; -struct TestConfig { - explicit TestConfig(bool with_overhead) : with_overhead(with_overhead) {} - bool with_overhead = false; -}; - class MockSendPacketObserver : public SendPacketObserver { public: MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override)); @@ -84,24 +81,6 @@ 
class MockSendSideDelayObserver : public SendSideDelayObserver { MOCK_METHOD(void, SendSideDelayUpdated, (int, int, uint32_t), (override)); }; -class FieldTrialConfig : public FieldTrialsView { - public: - FieldTrialConfig() : overhead_enabled_(false) {} - ~FieldTrialConfig() override {} - - void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; } - - std::string Lookup(absl::string_view key) const override { - if (key == "WebRTC-SendSideBwe-WithOverhead") { - return overhead_enabled_ ? "Enabled" : "Disabled"; - } - return ""; - } - - private: - bool overhead_enabled_; -}; - struct TransmittedPacket { TransmittedPacket(rtc::ArrayView data, const PacketOptions& packet_options, @@ -138,23 +117,22 @@ class TestTransport : public Transport { } // namespace -class RtpSenderEgressTest : public ::testing::TestWithParam { +class RtpSenderEgressTest : public ::testing::Test { protected: RtpSenderEgressTest() : time_controller_(kStartTime), clock_(time_controller_.GetClock()), transport_(&header_extensions_), packet_history_(clock_, /*enable_rtx_padding_prioritization=*/true), - sequence_number_(kStartSequenceNumber) { - trials_.SetOverHeadEnabled(GetParam().with_overhead); - } + trials_(""), + sequence_number_(kStartSequenceNumber) {} std::unique_ptr CreateRtpSenderEgress() { return std::make_unique(DefaultConfig(), &packet_history_); } - RtpRtcp::Configuration DefaultConfig() { - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration DefaultConfig() { + RtpRtcpInterface::Configuration config; config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; @@ -199,11 +177,11 @@ class RtpSenderEgressTest : public ::testing::TestWithParam { RtpHeaderExtensionMap header_extensions_; TestTransport transport_; RtpPacketHistory packet_history_; - FieldTrialConfig trials_; + test::ExplicitKeyValueConfig trials_; uint16_t sequence_number_; }; -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { 
+TEST_F(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8; constexpr size_t kPayloadSize = 1400; const uint16_t kTransportSequenceNumber = 17; @@ -211,9 +189,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); - const size_t expected_bytes = GetParam().with_overhead - ? kPayloadSize + kRtpOverheadBytesPerPacket - : kPayloadSize; + const size_t expected_bytes = kPayloadSize + kRtpOverheadBytesPerPacket; EXPECT_CALL( feedback_observer_, @@ -233,7 +209,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { sender->SendPacket(packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) { +TEST_F(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) { std::unique_ptr sender = CreateRtpSenderEgress(); std::unique_ptr media_packet = BuildRtpPacket(); @@ -249,7 +225,7 @@ TEST_P(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) { EXPECT_TRUE(transport_.last_packet()->options.is_retransmit); } -TEST_P(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) { +TEST_F(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) { std::unique_ptr sender = CreateRtpSenderEgress(); std::unique_ptr packet = BuildRtpPacket(); @@ -258,7 +234,7 @@ TEST_P(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) { EXPECT_FALSE(transport_.last_packet()->options.included_in_allocation); } -TEST_P(RtpSenderEgressTest, +TEST_F(RtpSenderEgressTest, SetsIncludedInFeedbackWhenTransportSequenceNumberExtensionIsRegistered) { std::unique_ptr sender = CreateRtpSenderEgress(); @@ -269,7 +245,7 @@ TEST_P(RtpSenderEgressTest, EXPECT_TRUE(transport_.last_packet()->options.included_in_feedback); } -TEST_P( +TEST_F( RtpSenderEgressTest, 
SetsIncludedInAllocationWhenTransportSequenceNumberExtensionIsRegistered) { std::unique_ptr sender = CreateRtpSenderEgress(); @@ -281,7 +257,7 @@ TEST_P( EXPECT_TRUE(transport_.last_packet()->options.included_in_allocation); } -TEST_P(RtpSenderEgressTest, +TEST_F(RtpSenderEgressTest, SetsIncludedInAllocationWhenForcedAsPartOfAllocation) { std::unique_ptr sender = CreateRtpSenderEgress(); sender->ForceIncludeSendPacketsInAllocation(true); @@ -292,7 +268,7 @@ TEST_P(RtpSenderEgressTest, EXPECT_TRUE(transport_.last_packet()->options.included_in_allocation); } -TEST_P(RtpSenderEgressTest, OnSendSideDelayUpdated) { +TEST_F(RtpSenderEgressTest, OnSendSideDelayUpdated) { StrictMock send_side_delay_observer; RtpRtcpInterface::Configuration config = DefaultConfig(); config.send_side_delay_observer = &send_side_delay_observer; @@ -334,7 +310,7 @@ TEST_P(RtpSenderEgressTest, OnSendSideDelayUpdated) { PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { +TEST_F(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kVideoTimingExtensionId, VideoTimingExtension::Uri()); @@ -354,7 +330,7 @@ TEST_P(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { EXPECT_EQ(video_timing.pacer_exit_delta_ms, kStoredTimeInMs); } -TEST_P(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { +TEST_F(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { RtpRtcpInterface::Configuration rtp_config = DefaultConfig(); rtp_config.populate_network2_timestamp = true; auto sender = std::make_unique(rtp_config, &packet_history_); @@ -380,7 +356,7 @@ TEST_P(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { EXPECT_EQ(video_timing.pacer_exit_delta_ms, kPacerExitMs); } -TEST_P(RtpSenderEgressTest, OnSendPacketUpdated) { +TEST_F(RtpSenderEgressTest, OnSendPacketUpdated) { std::unique_ptr sender = CreateRtpSenderEgress(); 
header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -394,7 +370,7 @@ TEST_P(RtpSenderEgressTest, OnSendPacketUpdated) { sender->SendPacket(packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { +TEST_F(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -408,7 +384,7 @@ TEST_P(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { sender->SendPacket(packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, ReportsFecRate) { +TEST_F(RtpSenderEgressTest, ReportsFecRate) { constexpr int kNumPackets = 10; constexpr TimeDelta kTimeBetweenPackets = TimeDelta::Millis(33); @@ -436,7 +412,7 @@ TEST_P(RtpSenderEgressTest, ReportsFecRate) { (total_fec_data_sent / (kTimeBetweenPackets * kNumPackets)).bps(), 500); } -TEST_P(RtpSenderEgressTest, BitrateCallbacks) { +TEST_F(RtpSenderEgressTest, BitrateCallbacks) { class MockBitrateStaticsObserver : public BitrateStatisticsObserver { public: MOCK_METHOD(void, Notify, (uint32_t, uint32_t, uint32_t), (override)); @@ -483,7 +459,7 @@ TEST_P(RtpSenderEgressTest, BitrateCallbacks) { } } -TEST_P(RtpSenderEgressTest, DoesNotPutNotRetransmittablePacketsInHistory) { +TEST_F(RtpSenderEgressTest, DoesNotPutNotRetransmittablePacketsInHistory) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -494,7 +470,7 @@ TEST_P(RtpSenderEgressTest, DoesNotPutNotRetransmittablePacketsInHistory) { EXPECT_FALSE(packet_history_.GetPacketState(packet->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, PutsRetransmittablePacketsInHistory) { +TEST_F(RtpSenderEgressTest, PutsRetransmittablePacketsInHistory) { std::unique_ptr sender = CreateRtpSenderEgress(); 
packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -505,7 +481,7 @@ TEST_P(RtpSenderEgressTest, PutsRetransmittablePacketsInHistory) { EXPECT_TRUE(packet_history_.GetPacketState(packet->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, DoesNotPutNonMediaInHistory) { +TEST_F(RtpSenderEgressTest, DoesNotPutNonMediaInHistory) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -534,7 +510,7 @@ TEST_P(RtpSenderEgressTest, DoesNotPutNonMediaInHistory) { EXPECT_FALSE(packet_history_.GetPacketState(padding->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, UpdatesSendStatusOfRetransmittedPackets) { +TEST_F(RtpSenderEgressTest, UpdatesSendStatusOfRetransmittedPackets) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -558,7 +534,7 @@ TEST_P(RtpSenderEgressTest, UpdatesSendStatusOfRetransmittedPackets) { EXPECT_TRUE(packet_history_.GetPacketState(media_packet->SequenceNumber())); } -TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacks) { +TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacks) { std::unique_ptr sender = CreateRtpSenderEgress(); const RtpPacketCounter kEmptyCounter; @@ -643,7 +619,7 @@ TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacks) { time_controller_.AdvanceTime(TimeDelta::Zero()); } -TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { +TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { std::unique_ptr sender = CreateRtpSenderEgress(); const RtpPacketCounter kEmptyCounter; @@ -693,7 +669,7 @@ TEST_P(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { time_controller_.AdvanceTime(TimeDelta::Zero()); } -TEST_P(RtpSenderEgressTest, UpdatesDataCounters) { +TEST_F(RtpSenderEgressTest, UpdatesDataCounters) { std::unique_ptr sender = CreateRtpSenderEgress(); const RtpPacketCounter kEmptyCounter; @@ 
-734,7 +710,7 @@ TEST_P(RtpSenderEgressTest, UpdatesDataCounters) { EXPECT_EQ(rtx_stats.fec, kEmptyCounter); } -TEST_P(RtpSenderEgressTest, SendPacketUpdatesExtensions) { +TEST_F(RtpSenderEgressTest, SendPacketUpdatesExtensions) { header_extensions_.RegisterByUri(kVideoTimingExtensionId, VideoTimingExtension::Uri()); header_extensions_.RegisterByUri(kAbsoluteSendTimeExtensionId, @@ -763,7 +739,7 @@ TEST_P(RtpSenderEgressTest, SendPacketUpdatesExtensions) { EXPECT_EQ(timing.pacer_exit_delta_ms, kDiffMs); } -TEST_P(RtpSenderEgressTest, SendPacketSetsPacketOptions) { +TEST_F(RtpSenderEgressTest, SendPacketSetsPacketOptions) { const uint16_t kPacketId = 42; std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, @@ -790,7 +766,7 @@ TEST_P(RtpSenderEgressTest, SendPacketSetsPacketOptions) { EXPECT_TRUE(transport_.last_packet()->options.is_retransmit); } -TEST_P(RtpSenderEgressTest, SendPacketUpdatesStats) { +TEST_F(RtpSenderEgressTest, SendPacketUpdatesStats) { const size_t kPayloadSize = 1000; StrictMock send_side_delay_observer; @@ -855,7 +831,7 @@ TEST_P(RtpSenderEgressTest, SendPacketUpdatesStats) { EXPECT_EQ(rtx_stats.retransmitted.packets, 1u); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRetransmission) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverWithRetransmission) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -877,7 +853,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRetransmission) { sender->SendPacket(retransmission.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ 
-901,7 +877,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) { sender->SendPacket(rtx_retransmission.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverPadding) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverPadding) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -919,7 +895,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverPadding) { sender->SendPacket(padding.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -939,7 +915,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) { sender->SendPacket(rtx_padding.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, TransportFeedbackObserverFec) { +TEST_F(RtpSenderEgressTest, TransportFeedbackObserverFec) { const uint16_t kTransportSequenceNumber = 17; header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); @@ -964,7 +940,7 @@ TEST_P(RtpSenderEgressTest, TransportFeedbackObserverFec) { sender->SendPacket(fec_packet.get(), PacedPacketInfo()); } -TEST_P(RtpSenderEgressTest, SupportsAbortingRetransmissions) { +TEST_F(RtpSenderEgressTest, SupportsAbortingRetransmissions) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); @@ -991,9 +967,4 @@ TEST_P(RtpSenderEgressTest, SupportsAbortingRetransmissions) { EXPECT_TRUE(packet_history_.GetPacketAndMarkAsPending(media_sequence_number)); } -INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, - RtpSenderEgressTest, - ::testing::Values(TestConfig(false), - TestConfig(true))); - } // 
namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_unittest.cc index ea9277f612..c9e98ff3c3 100644 --- a/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -102,6 +102,7 @@ class MockRtpPacketPacer : public RtpPacketSender { EnqueuePackets, (std::vector>), (override)); + MOCK_METHOD(void, RemovePacketsForSsrc, (uint32_t), (override)); }; } // namespace diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc index 3a583c684c..e1ac4e41c3 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -525,32 +525,41 @@ bool RTPSenderVideo::SendVideo( (use_fec ? FecPacketOverhead() : 0) - (rtp_sender_->RtxStatus() ? kRtxHeaderSize : 0); + absl::optional capture_time; + if (capture_time_ms > 0) { + capture_time = Timestamp::Millis(capture_time_ms); + } + std::unique_ptr single_packet = rtp_sender_->AllocatePacket(); RTC_DCHECK_LE(packet_capacity, single_packet->capacity()); single_packet->SetPayloadType(payload_type); single_packet->SetTimestamp(rtp_timestamp); - single_packet->set_capture_time(Timestamp::Millis(capture_time_ms)); + if (capture_time) + single_packet->set_capture_time(*capture_time); // Construct the absolute capture time extension if not provided. - if (!video_header.absolute_capture_time.has_value()) { + if (!video_header.absolute_capture_time.has_value() && + capture_time.has_value()) { video_header.absolute_capture_time.emplace(); video_header.absolute_capture_time->absolute_capture_timestamp = Int64MsToUQ32x32( - clock_->ConvertTimestampToNtpTimeInMilliseconds(capture_time_ms)); + clock_->ConvertTimestampToNtpTime(*capture_time).ToMs()); if (include_capture_clock_offset_) { video_header.absolute_capture_time->estimated_capture_clock_offset = 0; } } // Let `absolute_capture_time_sender_` decide if the extension should be sent. 
- video_header.absolute_capture_time = - absolute_capture_time_sender_.OnSendPacket( - AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(), - single_packet->Csrcs()), - single_packet->Timestamp(), kVideoPayloadTypeFrequency, - video_header.absolute_capture_time->absolute_capture_timestamp, - video_header.absolute_capture_time->estimated_capture_clock_offset); + if (video_header.absolute_capture_time.has_value()) { + video_header.absolute_capture_time = + absolute_capture_time_sender_.OnSendPacket( + AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(), + single_packet->Csrcs()), + single_packet->Timestamp(), kVideoPayloadTypeFrequency, + video_header.absolute_capture_time->absolute_capture_timestamp, + video_header.absolute_capture_time->estimated_capture_clock_offset); + } auto first_packet = std::make_unique(*single_packet); auto middle_packet = std::make_unique(*single_packet); diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 3d6931fe23..02194391af 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -34,7 +34,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { uint32_t ssrc) : encoded_data_(encoded_image.GetEncodedData()), header_(video_header), - metadata_(header_), + metadata_(header_.GetAsMetadata()), frame_type_(encoded_image._frameType), payload_type_(payload_type), codec_type_(codec_type), @@ -103,9 +103,9 @@ RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( : sender_(sender), frame_transformer_(std::move(frame_transformer)), ssrc_(ssrc), - task_queue_factory_(task_queue_factory) { - RTC_DCHECK(task_queue_factory_); -} + transformation_queue_(task_queue_factory->CreateTaskQueue( + "video_frame_transformer", + TaskQueueFactory::Priority::NORMAL)) {} void 
RTPSenderVideoFrameTransformerDelegate::Init() { frame_transformer_->RegisterTransformedFrameSinkCallback( @@ -119,29 +119,6 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame( const EncodedImage& encoded_image, RTPVideoHeader video_header, absl::optional expected_retransmission_time_ms) { - TaskQueueBase* current = TaskQueueBase::Current(); - if (!encoder_queue_) { - // Save the current task queue to post the transformed frame for sending - // once it is transformed. When there is no current task queue, i.e. - // encoding is done on an external thread (for example in the case of - // hardware encoders), create a new task queue. - if (current) { - encoder_queue_ = current; - } else { - owned_encoder_queue_ = task_queue_factory_->CreateTaskQueue( - "video_frame_transformer", TaskQueueFactory::Priority::NORMAL); - encoder_queue_ = owned_encoder_queue_.get(); - } - } - // DCHECK that the current queue does not change, or if does then it was due - // to a hardware encoder fallback and thus there is an owned queue. - RTC_DCHECK(!current || current == encoder_queue_ || owned_encoder_queue_) - << "Current thread must either be an external thread (nullptr) or be the " - "same as the previous encoder queue. The current thread is " - << (current ? "non-null" : "nullptr") << " and the encoder thread is " - << (current == encoder_queue_ ? "the same queue." - : "not the same queue."); - frame_transformer_->Transform(std::make_unique( encoded_image, video_header, payload_type, codec_type, rtp_timestamp, expected_retransmission_time_ms, ssrc_)); @@ -152,22 +129,20 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { MutexLock lock(&sender_lock_); - // The encoder queue normally gets destroyed after the sender; - // however, it might still be null by the time a previously queued frame - // arrives. 
- if (!sender_ || !encoder_queue_) + if (!sender_) { return; + } rtc::scoped_refptr delegate(this); - encoder_queue_->PostTask( + transformation_queue_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { - RTC_DCHECK_RUN_ON(delegate->encoder_queue_); + RTC_DCHECK_RUN_ON(delegate->transformation_queue_.get()); delegate->SendVideo(std::move(frame)); }); } void RTPSenderVideoFrameTransformerDelegate::SendVideo( std::unique_ptr transformed_frame) const { - RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK_RUN_ON(transformation_queue_.get()); RTC_CHECK_EQ(transformed_frame->GetDirection(), TransformableFrameInterface::Direction::kSender); MutexLock lock(&sender_lock_); @@ -206,4 +181,37 @@ void RTPSenderVideoFrameTransformerDelegate::Reset() { sender_ = nullptr; } } + +std::unique_ptr CloneSenderVideoFrame( + TransformableVideoFrameInterface* original) { + auto encoded_image_buffer = EncodedImageBuffer::Create( + original->GetData().data(), original->GetData().size()); + EncodedImage encoded_image; + encoded_image.SetEncodedData(encoded_image_buffer); + RTPVideoHeader new_header; + absl::optional new_codec_type; + // TODO(bugs.webrtc.org/14708): Figure out a way to get the header information + // without casting to TransformableVideoSenderFrame. + if (original->GetDirection() == + TransformableFrameInterface::Direction::kSender) { + // TODO(bugs.webrtc.org/14708): Figure out a way to bulletproof this cast. 
+ auto original_as_sender = + static_cast(original); + new_header = original_as_sender->GetHeader(); + new_codec_type = original_as_sender->GetCodecType(); + } else { + // TODO(bugs.webrtc.org/14708): Make this codec dependent + new_header.video_type_header.emplace(); + new_codec_type = kVideoCodecVP8; + // TODO(bugs.webrtc.org/14708): Fill in the new_header when it's not + // `Direction::kSender` + } + // TODO(bugs.webrtc.org/14708): Fill in other EncodedImage parameters + return std::make_unique( + encoded_image, new_header, original->GetPayloadType(), new_codec_type, + original->GetTimestamp(), + absl::nullopt, // expected_retransmission_time_ms + original->GetSsrc()); +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h index 65d6d3f6cd..55f7961e2d 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h @@ -53,7 +53,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { // Delegates the call to RTPSendVideo::SendVideo on the `encoder_queue_`. void SendVideo(std::unique_ptr frame) const - RTC_RUN_ON(encoder_queue_); + RTC_RUN_ON(transformation_queue_); // Delegates the call to RTPSendVideo::SetVideoStructureAfterTransformation // under `sender_lock_`. @@ -74,17 +74,21 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { ~RTPSenderVideoFrameTransformerDelegate() override = default; private: + void EnsureEncoderQueueCreated(); + mutable Mutex sender_lock_; RTPSenderVideo* sender_ RTC_GUARDED_BY(sender_lock_); rtc::scoped_refptr frame_transformer_; const uint32_t ssrc_; - TaskQueueBase* encoder_queue_ = nullptr; - TaskQueueFactory* task_queue_factory_; // Used when the encoded frames arrives without a current task queue. This can // happen if a hardware encoder was used. 
- std::unique_ptr owned_encoder_queue_; + std::unique_ptr transformation_queue_; }; +// Method to support cloning a Sender frame from another frame +std::unique_ptr CloneSenderVideoFrame( + TransformableVideoFrameInterface* original); + } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_FRAME_TRANSFORMER_DELEGATE_H_ diff --git a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc index a4d669edc5..72dfd0238d 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc @@ -16,6 +16,8 @@ #include #include "absl/memory/memory.h" +#include "api/field_trials_registry.h" +#include "api/frame_transformer_factory.h" #include "api/rtp_headers.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" @@ -37,6 +39,7 @@ #include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/thread.h" #include "test/gmock.h" @@ -148,35 +151,29 @@ class TestRtpSenderVideo : public RTPSenderVideo { } }; -class FieldTrials : public FieldTrialsView { +class FieldTrials : public FieldTrialsRegistry { public: - explicit FieldTrials(bool use_send_side_bwe_with_overhead) - : use_send_side_bwe_with_overhead_(use_send_side_bwe_with_overhead), - include_capture_clock_offset_(false) {} + FieldTrials() : include_capture_clock_offset_(false) {} void set_include_capture_clock_offset(bool include_capture_clock_offset) { include_capture_clock_offset_ = include_capture_clock_offset; } - std::string Lookup(absl::string_view key) const override { - if (key == "WebRTC-SendSideBwe-WithOverhead") { - return use_send_side_bwe_with_overhead_ ? 
"Enabled" : ""; - } else if (key == "WebRTC-IncludeCaptureClockOffset") { + private: + std::string GetValue(absl::string_view key) const override { + if (key == "WebRTC-IncludeCaptureClockOffset") { return include_capture_clock_offset_ ? "" : "Disabled"; } return ""; } - private: - bool use_send_side_bwe_with_overhead_; bool include_capture_clock_offset_; }; -class RtpSenderVideoTest : public ::testing::TestWithParam { +class RtpSenderVideoTest : public ::testing::Test { public: RtpSenderVideoTest() - : field_trials_(GetParam()), - fake_clock_(kStartTime), + : fake_clock_(kStartTime), retransmission_rate_limiter_(&fake_clock_, 1000), rtp_module_(ModuleRtpRtcpImpl2::Create([&] { RtpRtcpInterface::Configuration config; @@ -209,7 +206,7 @@ class RtpSenderVideoTest : public ::testing::TestWithParam { std::unique_ptr rtp_sender_video_; }; -TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) { +TEST_F(RtpSenderVideoTest, KeyFrameHasCVO) { uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), kVideoRotationExtensionId); @@ -226,7 +223,7 @@ TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) { EXPECT_EQ(kVideoRotation_0, rotation); } -TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { +TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { uint8_t kFrame[kMaxPacketLength]; const int64_t kPacketizationTimeMs = 100; const int64_t kEncodeStartDeltaMs = 10; @@ -254,7 +251,7 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { EXPECT_EQ(kEncodeFinishDeltaMs, timing.encode_finish_delta_ms); } -TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { +TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), kVideoRotationExtensionId); @@ -278,7 +275,7 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { EXPECT_EQ(kVideoRotation_0, rotation); } -TEST_P(RtpSenderVideoTest, 
DeltaFrameHasCVOWhenNonZero) { +TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), kVideoRotationExtensionId); @@ -303,7 +300,7 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { // Make sure rotation is parsed correctly when the Camera (C) and Flip (F) bits // are set in the CVO byte. -TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) { +TEST_F(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) { // Test extracting rotation when Camera (C) and Flip (F) bits are zero. EXPECT_EQ(kVideoRotation_0, ConvertCVOByteToVideoRotation(0)); EXPECT_EQ(kVideoRotation_90, ConvertCVOByteToVideoRotation(1)); @@ -322,7 +319,7 @@ TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) { ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 3)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesGeneric) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesGeneric) { RTPVideoHeader header; header.codec = kVideoCodecGeneric; @@ -337,7 +334,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesGeneric) { kDefaultExpectedRetransmissionTimeMs)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesH264) { RTPVideoHeader header; header.video_type_header.emplace().packetization_mode = H264PacketizationMode::NonInterleaved; @@ -354,7 +351,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) { kDefaultExpectedRetransmissionTimeMs)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { RTPVideoHeader header; header.codec = kVideoCodecVP8; auto& vp8_header = header.video_type_header.emplace(); @@ -377,7 +374,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { kDefaultExpectedRetransmissionTimeMs)); } -TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { 
RTPVideoHeader header; header.codec = kVideoCodecVP8; @@ -397,7 +394,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { } } -TEST_P(RtpSenderVideoTest, RetransmissionTypesVP9) { +TEST_F(RtpSenderVideoTest, RetransmissionTypesVP9) { RTPVideoHeader header; header.codec = kVideoCodecVP9; @@ -417,7 +414,7 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP9) { } } -TEST_P(RtpSenderVideoTest, ConditionalRetransmit) { +TEST_F(RtpSenderVideoTest, ConditionalRetransmit) { const int64_t kFrameIntervalMs = 33; const int64_t kRttMs = (kFrameIntervalMs * 3) / 2; const uint8_t kSettings = @@ -475,7 +472,7 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmit) { rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); } -TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) { +TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) { const int64_t kFrameIntervalMs = 200; const int64_t kRttMs = (kFrameIntervalMs * 3) / 2; const int32_t kSettings = @@ -508,7 +505,7 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) { rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); } -TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { +TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( @@ -576,7 +573,7 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { ElementsAre(1, 501)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, SkipsDependencyDescriptorOnDeltaFrameWhenFailedToAttachToKeyFrame) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; @@ -630,7 +627,7 @@ TEST_P(RtpSenderVideoTest, .HasExtension()); } -TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { +TEST_F(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( @@ 
-663,7 +660,7 @@ TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { ContainerEq(generic.chain_diffs)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, PropagatesActiveDecodeTargetsIntoDependencyDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; @@ -697,7 +694,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_EQ(descriptor_key.active_decode_targets_bitmask, 0b01u); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; @@ -771,7 +768,7 @@ TEST_P(RtpSenderVideoTest, descriptor_key2.attached_structure.get(), &descriptor_delta)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, AuthenticateVideoHeaderWhenDependencyDescriptorExtensionIsUsed) { static constexpr size_t kFrameSize = 100; uint8_t kFrame[kFrameSize] = {1, 2, 3, 4}; @@ -814,7 +811,7 @@ TEST_P(RtpSenderVideoTest, .HasExtension()); } -TEST_P(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { +TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( @@ -871,17 +868,17 @@ void RtpSenderVideoTest:: EXPECT_EQ(transport_.last_sent_packet().payload_size(), 1 + kFrameSize); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed00) { UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(0); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed01) { UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(1); } -TEST_P(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; rtp_module_->RegisterRtpHeaderExtension( @@ -917,7 +914,7 @@ 
TEST_P(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { .GetExtension(&sent_allocation)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithoutResolutionSentOnDeltaWhenUpdated) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -965,7 +962,7 @@ TEST_P(RtpSenderVideoTest, SizeIs(1)); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnDeltaWhenSpatialLayerAdded) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -1011,7 +1008,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_TRUE(sent_allocation.resolution_and_frame_rate_is_valid); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnLargeFrameRateChange) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -1053,7 +1050,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_EQ(sent_allocation.active_spatial_layers[0].frame_rate_fps, 20); } -TEST_P(RtpSenderVideoTest, +TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithoutResolutionSentOnSmallFrameRateChange) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; @@ -1094,7 +1091,7 @@ TEST_P(RtpSenderVideoTest, EXPECT_FALSE(sent_allocation.resolution_and_frame_rate_is_valid); } -TEST_P(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { +TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; rtp_module_->RegisterRtpHeaderExtension( @@ -1136,7 +1133,7 @@ TEST_P(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { .GetExtension(&sent_allocation)); } -TEST_P(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { +TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; rtp_module_->RegisterRtpHeaderExtension( @@ -1172,7 +1169,7 @@ TEST_P(RtpSenderVideoTest, 
VideoLayersAllocationNotSentOnHigherTemporalLayers) { .HasExtension()); } -TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) { +TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678; uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), @@ -1207,9 +1204,27 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) { absolute_capture_time->estimated_capture_clock_offset.has_value()); } +TEST_F(RtpSenderVideoTest, + AbsoluteCaptureTimeNotForwardedWhenImageHasNoCaptureTime) { + uint8_t kFrame[kMaxPacketLength]; + rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), + kAbsoluteCaptureTimeExtensionId); + + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, + /*capture_time_ms=*/0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + // No absolute capture time should be set as the capture_time_ms was the + // default value. + for (const RtpPacketReceived& packet : transport_.sent_packets()) { + EXPECT_FALSE(packet.HasExtension()); + } +} + // Essentially the same test as AbsoluteCaptureTime but with a field trial. // After the field trial is experimented, we will remove AbsoluteCaptureTime. 
-TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) { +TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) { field_trials_.set_include_capture_clock_offset(true); rtp_sender_video_ = std::make_unique( &fake_clock_, rtp_module_->RtpSender(), field_trials_); @@ -1247,7 +1262,7 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) { EXPECT_EQ(absolute_capture_time->estimated_capture_clock_offset, 0); } -TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { +TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { constexpr AbsoluteCaptureTime kAbsoluteCaptureTime = { 123, absl::optional(456), @@ -1280,7 +1295,7 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { EXPECT_EQ(absolute_capture_time, kAbsoluteCaptureTime); } -TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) { +TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { // Single packet frames. constexpr size_t kPacketSize = 123; uint8_t kFrame[kPacketSize]; @@ -1337,7 +1352,7 @@ TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) { EXPECT_EQ(received_delay, kExpectedDelay); } -TEST_P(RtpSenderVideoTest, SendGenericVideo) { +TEST_F(RtpSenderVideoTest, SendGenericVideo) { const uint8_t kPayloadType = 127; const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; const uint8_t kPayload[] = {47, 11, 32, 93, 89}; @@ -1370,7 +1385,7 @@ TEST_P(RtpSenderVideoTest, SendGenericVideo) { EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(kDeltaPayload)); } -TEST_P(RtpSenderVideoTest, SendRawVideo) { +TEST_F(RtpSenderVideoTest, SendRawVideo) { const uint8_t kPayloadType = 111; const uint8_t kPayload[] = {11, 22, 33, 44, 55}; @@ -1386,10 +1401,6 @@ TEST_P(RtpSenderVideoTest, SendRawVideo) { EXPECT_THAT(sent_payload, ElementsAreArray(kPayload)); } -INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, - RtpSenderVideoTest, - ::testing::Bool()); - class RtpSenderVideoWithFrameTransformerTest : public 
::testing::Test { public: RtpSenderVideoWithFrameTransformerTest() @@ -1533,75 +1544,6 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { EXPECT_EQ(transport_.packets_sent(), 2); } -// Task queue which behaves as if it was a hardware encoder thread where no -// CurrentTaskQueue is set. -class HardwareEncoderTaskQueue : public TaskQueueBase { - public: - HardwareEncoderTaskQueue() = default; - - void Delete() override {} - void PostTask(absl::AnyInvocable task) override { - CurrentTaskQueueSetter null_setter(nullptr); - std::move(task)(); - } - void PostDelayedTask(absl::AnyInvocable task, - TimeDelta delay) override { - // Not implemented. - RTC_CHECK_NOTREACHED(); - } - void PostDelayedHighPrecisionTask(absl::AnyInvocable task, - TimeDelta delay) override { - // Not implemented. - RTC_CHECK_NOTREACHED(); - } -}; - -TEST_F(RtpSenderVideoWithFrameTransformerTest, - OnTransformedFrameSendsVideoOnNewQueueForHwEncoders) { - auto mock_frame_transformer = - rtc::make_ref_counted>(); - rtc::scoped_refptr callback; - EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) - .WillOnce(SaveArg<0>(&callback)); - std::unique_ptr rtp_sender_video = - CreateSenderWithFrameTransformer(mock_frame_transformer); - ASSERT_TRUE(callback); - - auto encoded_image = CreateDefaultEncodedImage(); - RTPVideoHeader video_header; - video_header.frame_type = VideoFrameType::kVideoFrameKey; - ON_CALL(*mock_frame_transformer, Transform) - .WillByDefault( - [&callback](std::unique_ptr frame) { - callback->OnTransformedFrame(std::move(frame)); - }); - - // Hardware encoder task queue has no TaskQueue::Current() set, and so a new - // task queue should be created to handle the callback. 
- HardwareEncoderTaskQueue hw_encoder_queue; - hw_encoder_queue.PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, - *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); - }); - - // No packets sent yet since a task should be posted onto a new task queue. - EXPECT_EQ(transport_.packets_sent(), 0); - time_controller_.AdvanceTime(TimeDelta::Zero()); - EXPECT_EQ(transport_.packets_sent(), 1); - - // Check software encoder fallback. - auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( - "encoder_queue", TaskQueueFactory::Priority::NORMAL); - encoder_queue->PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, - *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); - }); - time_controller_.AdvanceTime(TimeDelta::Zero()); - EXPECT_EQ(transport_.packets_sent(), 2); -} - TEST_F(RtpSenderVideoWithFrameTransformerTest, TransformableFrameMetadataHasCorrectValue) { auto mock_frame_transformer = @@ -1644,5 +1586,45 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, kDefaultExpectedRetransmissionTimeMs); } +TEST_F(RtpSenderVideoWithFrameTransformerTest, + OnTransformedFrameSendsVideoWhenCloned) { + auto mock_frame_transformer = + rtc::make_ref_counted>(); + rtc::scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) + .WillOnce(SaveArg<0>(&callback)); + std::unique_ptr rtp_sender_video = + CreateSenderWithFrameTransformer(mock_frame_transformer); + ASSERT_TRUE(callback); + + auto encoded_image = CreateDefaultEncodedImage(); + RTPVideoHeader video_header; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&callback](std::unique_ptr frame) { + auto clone = CloneVideoFrame( + static_cast(frame.get())); + EXPECT_TRUE(clone); + callback->OnTransformedFrame(std::move(clone)); + }); + auto encoder_queue = 
time_controller_.GetTaskQueueFactory()->CreateTaskQueue( + "encoder_queue", TaskQueueFactory::Priority::NORMAL); + encoder_queue->PostTask([&] { + rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + *encoded_image, video_header, + kDefaultExpectedRetransmissionTimeMs); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(transport_.packets_sent(), 1); + encoder_queue->PostTask([&] { + rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + *encoded_image, video_header, + kDefaultExpectedRetransmissionTimeMs); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(transport_.packets_sent(), 2); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_video_header.cc b/modules/rtp_rtcp/source/rtp_video_header.cc index bb9413ddd5..1da43eec2d 100644 --- a/modules/rtp_rtcp/source/rtp_video_header.cc +++ b/modules/rtp_rtcp/source/rtp_video_header.cc @@ -12,13 +12,33 @@ namespace webrtc { -RTPVideoHeader::RTPVideoHeader() : video_timing() {} -RTPVideoHeader::RTPVideoHeader(const RTPVideoHeader& other) = default; -RTPVideoHeader::~RTPVideoHeader() = default; - RTPVideoHeader::GenericDescriptorInfo::GenericDescriptorInfo() = default; RTPVideoHeader::GenericDescriptorInfo::GenericDescriptorInfo( const GenericDescriptorInfo& other) = default; RTPVideoHeader::GenericDescriptorInfo::~GenericDescriptorInfo() = default; +RTPVideoHeader::RTPVideoHeader() : video_timing() {} +RTPVideoHeader::RTPVideoHeader(const RTPVideoHeader& other) = default; +RTPVideoHeader::~RTPVideoHeader() = default; + +VideoFrameMetadata RTPVideoHeader::GetAsMetadata() const { + VideoFrameMetadata metadata; + metadata.SetFrameType(frame_type); + metadata.SetWidth(width); + metadata.SetHeight(height); + metadata.SetRotation(rotation); + metadata.SetContentType(content_type); + if (generic) { + metadata.SetFrameId(generic->frame_id); + metadata.SetSpatialIndex(generic->spatial_index); + 
metadata.SetTemporalIndex(generic->temporal_index); + metadata.SetFrameDependencies(generic->dependencies); + metadata.SetDecodeTargetIndications(generic->decode_target_indications); + } + metadata.SetIsLastFrameInPicture(is_last_frame_in_picture); + metadata.SetSimulcastIdx(simulcastIdx); + metadata.SetCodec(codec); + return metadata; +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_video_header.h b/modules/rtp_rtcp/source/rtp_video_header.h index 115b17d36d..c65bde9fab 100644 --- a/modules/rtp_rtcp/source/rtp_video_header.h +++ b/modules/rtp_rtcp/source/rtp_video_header.h @@ -21,6 +21,7 @@ #include "api/video/color_space.h" #include "api/video/video_codec_type.h" #include "api/video/video_content_type.h" +#include "api/video/video_frame_metadata.h" #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" @@ -62,6 +63,9 @@ struct RTPVideoHeader { ~RTPVideoHeader(); + // The subset of RTPVideoHeader that is exposed in the Insertable Streams API. + VideoFrameMetadata GetAsMetadata() const; + absl::optional generic; VideoFrameType frame_type = VideoFrameType::kEmptyFrame; diff --git a/modules/rtp_rtcp/source/rtp_video_header_unittest.cc b/modules/rtp_rtcp/source/rtp_video_header_unittest.cc new file mode 100644 index 0000000000..c8439e1796 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_video_header_unittest.cc @@ -0,0 +1,166 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/rtp_video_header.h" + +#include "api/video/video_frame_metadata.h" +#include "api/video/video_frame_type.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; + +TEST(RTPVideoHeaderTest, GetAsMetadataGetFrameType) { + RTPVideoHeader video_header; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetFrameType(), VideoFrameType::kVideoFrameKey); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetWidth) { + RTPVideoHeader video_header; + video_header.width = 1280u; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetWidth(), video_header.width); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetHeight) { + RTPVideoHeader video_header; + video_header.height = 720u; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetHeight(), video_header.height); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetRotation) { + RTPVideoHeader video_header; + video_header.rotation = VideoRotation::kVideoRotation_90; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetRotation(), VideoRotation::kVideoRotation_90); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetContentType) { + RTPVideoHeader video_header; + video_header.content_type = VideoContentType::SCREENSHARE; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetContentType(), VideoContentType::SCREENSHARE); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetFrameId) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.frame_id = 10; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetFrameId().value(), 10); +} + +TEST(RTPVideoHeaderTest, 
GetAsMetadataHasNoFrameIdForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_FALSE(metadata.GetFrameId().has_value()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetSpatialIndex) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.spatial_index = 2; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetSpatialIndex(), 2); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataSpatialIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetSpatialIndex(), 0); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetTemporalIndex) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.temporal_index = 3; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetTemporalIndex(), 3); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataTemporalIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetTemporalIndex(), 0); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetFrameDependencies) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.dependencies = {5, 6, 7}; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7)); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataFrameDependencyIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + 
EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetDecodeTargetIndications) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), + ElementsAre(DecodeTargetIndication::kSwitch)); +} + +TEST(RTPVideoHeaderTest, + GetAsMetadataGetDecodeTargetIndicationsIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetIsLastFrameInPicture) { + RTPVideoHeader video_header; + video_header.is_last_frame_in_picture = false; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_FALSE(metadata.GetIsLastFrameInPicture()); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetSimulcastIdx) { + RTPVideoHeader video_header; + video_header.simulcastIdx = 123; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetSimulcastIdx(), 123); +} + +TEST(RTPVideoHeaderTest, GetAsMetadataGetCodec) { + RTPVideoHeader video_header; + video_header.codec = VideoCodecType::kVideoCodecVP9; + VideoFrameMetadata metadata = video_header.GetAsMetadata(); + EXPECT_EQ(metadata.GetCodec(), VideoCodecType::kVideoCodecVP9); +} + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc index 6816a6277f..5172ed4ce7 100644 --- a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc +++ b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc @@ -150,59 +150,8 @@ SpatialLayersBitmasks 
SpatialLayersBitmasksPerRtpStream( } // namespace -// +-+-+-+-+-+-+-+-+ -// |RID| NS| sl_bm | -// +-+-+-+-+-+-+-+-+ -// Spatial layer bitmask |sl0_bm |sl1_bm | -// up to 2 bytes |---------------| -// when sl_bm == 0 |sl2_bm |sl3_bm | -// +-+-+-+-+-+-+-+-+ -// Number of temporal |#tl|#tl|#tl|#tl| -// layers per spatial layer :---------------: -// up to 4 bytes | ... | -// +-+-+-+-+-+-+-+-+ -// Target bitrate in kpbs | | -// per temporal layer : ... : -// leb128 encoded | | -// +-+-+-+-+-+-+-+-+ -// Resolution and framerate | | -// 5 bytes per spatial layer + width-1 for + -// (optional) | rid=0, sid=0 | -// +---------------+ -// | | -// + height-1 for + -// | rid=0, sid=0 | -// +---------------+ -// | max framerate | -// +-+-+-+-+-+-+-+-+ -// : ... : -// +-+-+-+-+-+-+-+-+ -// -// RID: RTP stream index this allocation is sent on, numbered from 0. 2 bits. -// NS: Number of RTP streams - 1. 2 bits, thus allowing up-to 4 RTP streams. -// sl_bm: BitMask of the active Spatial Layers when same for all RTP streams or -// 0 otherwise. 4 bits thus allows up to 4 spatial layers per RTP streams. -// slX_bm: BitMask of the active Spatial Layers for RTP stream with index=X. -// byte-aligned. When NS < 2, takes ones byte, otherwise uses two bytes. -// #tl: 2-bit value of number of temporal layers-1, thus allowing up-to 4 -// temporal layer per spatial layer. One per spatial layer per RTP stream. -// values are stored in (RTP stream id, spatial id) ascending order. -// zero-padded to byte alignment. -// Target bitrate in kbps. Values are stored using leb128 encoding. -// one value per temporal layer. values are stored in -// (RTP stream id, spatial id, temporal id) ascending order. -// All bitrates are total required bitrate to receive the corresponding -// layer, i.e. in simulcast mode they include only corresponding spatial -// layer, in full-svc all lower spatial layers are included. All lower -// temporal layers are also included. -// Resolution and framerate. -// Optional. 
Presense is infered from the rtp header extension size. -// Encoded (width - 1), 16-bit, (height - 1), 16-bit, max frame rate 8-bit -// per spatial layer per RTP stream. -// Values are stored in (RTP stream id, spatial id) ascending order. -// -// An empty layer allocation (i.e nothing sent on ssrc) is encoded as -// special case with a single 0 byte. +// See /docs/native-code/rtp-rtpext/video-layers-allocation00/README.md +// for the description of the format. bool RtpVideoLayersAllocationExtension::Write( rtc::ArrayView data, diff --git a/modules/video_capture/device_info_impl.cc b/modules/video_capture/device_info_impl.cc index ac78cbc84a..ff32a78580 100644 --- a/modules/video_capture/device_info_impl.cc +++ b/modules/video_capture/device_info_impl.cc @@ -148,7 +148,8 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( (capability.videoType == requested.videoType || capability.videoType == VideoType::kI420 || capability.videoType == VideoType::kYUY2 || - capability.videoType == VideoType::kYV12)) { + capability.videoType == VideoType::kYV12 || + capability.videoType == VideoType::kNV12)) { bestVideoType = capability.videoType; bestformatIndex = tmp; } diff --git a/modules/video_capture/linux/device_info_v4l2.cc b/modules/video_capture/linux/device_info_v4l2.cc index c1062d4078..5af58015a7 100644 --- a/modules/video_capture/linux/device_info_v4l2.cc +++ b/modules/video_capture/linux/device_info_v4l2.cc @@ -228,9 +228,10 @@ int32_t DeviceInfoV4l2::FillCapabilities(int fd) { video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; video_fmt.fmt.pix.sizeimage = 0; - int totalFmts = 4; + int totalFmts = 5; unsigned int videoFormats[] = {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUV420, - V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY}; + V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY, + V4L2_PIX_FMT_NV12}; int sizes = 13; unsigned int size[][2] = {{128, 96}, {160, 120}, {176, 144}, {320, 240}, @@ -258,6 +259,8 @@ int32_t DeviceInfoV4l2::FillCapabilities(int fd) { cap.videoType = VideoType::kMJPEG; } else 
if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY) { cap.videoType = VideoType::kUYVY; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_NV12) { + cap.videoType = VideoType::kNV12; } // get fps of current camera mode diff --git a/modules/video_capture/linux/video_capture_v4l2.cc b/modules/video_capture/linux/video_capture_v4l2.cc index 2655fbefaa..5101a67e0c 100644 --- a/modules/video_capture/linux/video_capture_v4l2.cc +++ b/modules/video_capture/linux/video_capture_v4l2.cc @@ -115,20 +115,22 @@ int32_t VideoCaptureModuleV4L2::StartCapture( // Supported video formats in preferred order. // If the requested resolution is larger than VGA, we prefer MJPEG. Go for // I420 otherwise. - const int nFormats = 5; + const int nFormats = 6; unsigned int fmts[nFormats]; if (capability.width > 640 || capability.height > 480) { fmts[0] = V4L2_PIX_FMT_MJPEG; fmts[1] = V4L2_PIX_FMT_YUV420; fmts[2] = V4L2_PIX_FMT_YUYV; fmts[3] = V4L2_PIX_FMT_UYVY; - fmts[4] = V4L2_PIX_FMT_JPEG; + fmts[4] = V4L2_PIX_FMT_NV12; + fmts[5] = V4L2_PIX_FMT_JPEG; } else { fmts[0] = V4L2_PIX_FMT_YUV420; fmts[1] = V4L2_PIX_FMT_YUYV; fmts[2] = V4L2_PIX_FMT_UYVY; - fmts[3] = V4L2_PIX_FMT_MJPEG; - fmts[4] = V4L2_PIX_FMT_JPEG; + fmts[3] = V4L2_PIX_FMT_NV12; + fmts[4] = V4L2_PIX_FMT_MJPEG; + fmts[5] = V4L2_PIX_FMT_JPEG; } // Enumerate image formats. 
@@ -173,6 +175,8 @@ int32_t VideoCaptureModuleV4L2::StartCapture( _captureVideoType = VideoType::kI420; else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) _captureVideoType = VideoType::kUYVY; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_NV12) + _captureVideoType = VideoType::kNV12; else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) _captureVideoType = VideoType::kMJPEG; diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn index 640d6626de..b097daa922 100644 --- a/modules/video_coding/BUILD.gn +++ b/modules/video_coding/BUILD.gn @@ -331,6 +331,7 @@ rtc_library("video_codec_interface") { ":codec_globals_headers", "../../api/video:video_frame", "../../api/video:video_rtp_headers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../common_video/generic_frame_descriptor", @@ -516,6 +517,8 @@ rtc_library("webrtc_h264") { deps = [ ":video_codec_interface", ":video_coding_utility", + "../../api/transport/rtp:dependency_descriptor", + "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_frame_i010", "../../api/video:video_rtp_headers", @@ -630,6 +633,7 @@ rtc_library("webrtc_vp8") { "../../api/video:encoded_image", "../../api/video:video_frame", "../../api/video:video_rtp_headers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:vp8_temporal_layers_factory", "../../common_video", @@ -777,6 +781,7 @@ rtc_library("webrtc_vp9") { "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] @@ -872,6 +877,8 @@ if (rtc_include_tests) { rtc_library("video_codecs_test_framework") { testonly = true sources = [ + 
"codecs/test/video_codec_analyzer.cc", + "codecs/test/video_codec_analyzer.h", "codecs/test/video_codec_unittest.cc", "codecs/test/video_codec_unittest.h", "codecs/test/videoprocessor.cc", @@ -890,13 +897,17 @@ if (rtc_include_tests) { "../../api:frame_generator_api", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", "../../api/task_queue", + "../../api/task_queue:default_task_queue_factory", "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:encoded_image", + "../../api/video:resolution", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_bitrate_allocator_factory", + "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", @@ -905,6 +916,7 @@ if (rtc_include_tests) { "../../rtc_base:checks", "../../rtc_base:macromagic", "../../rtc_base:rtc_event", + "../../rtc_base:task_queue_for_test", "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:no_unique_address", @@ -953,6 +965,8 @@ if (rtc_include_tests) { rtc_library("videocodec_test_impl") { testonly = true sources = [ + "codecs/test/video_codec_tester_impl.cc", + "codecs/test/video_codec_tester_impl.h", "codecs/test/videocodec_test_fixture_impl.cc", "codecs/test/videocodec_test_fixture_impl.h", ] @@ -964,12 +978,20 @@ if (rtc_include_tests) { ":videocodec_test_stats_impl", ":webrtc_vp9_helpers", "../../api:array_view", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", + "../../api/task_queue:default_task_queue_factory", + "../../api/task_queue:task_queue", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/test/metrics:metric", "../../api/test/video:function_video_factory", "../../api/transport:field_trial_based_config", + "../../api/units:frequency", + 
"../../api/units:time_delta", + "../../api/units:timestamp", + "../../api/video:encoded_image", "../../api/video:video_bitrate_allocation", + "../../api/video:video_frame", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:video_decoder_factory_template", "../../api/video_codecs:video_decoder_factory_template_dav1d_adapter", @@ -988,6 +1010,7 @@ if (rtc_include_tests) { "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:rtc_base_tests_utils", + "../../rtc_base:rtc_event", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", "../../rtc_base:timeutils", @@ -1012,7 +1035,7 @@ if (rtc_include_tests) { "codecs/test/videocodec_test_stats_impl.h", ] deps = [ - "../../api:videocodec_test_fixture_api", + "../../api:videocodec_test_stats_api", "../../api/numerics", "../../rtc_base:checks", "../../rtc_base:rtc_numerics", @@ -1029,6 +1052,7 @@ if (rtc_include_tests) { sources = [ "codecs/h264/test/h264_impl_unittest.cc", "codecs/multiplex/test/multiplex_adapter_unittest.cc", + "codecs/test/video_codec_test.cc", "codecs/test/video_encoder_decoder_instantiation_tests.cc", "codecs/test/videocodec_test_av1.cc", "codecs/test/videocodec_test_libvpx.cc", @@ -1057,18 +1081,27 @@ if (rtc_include_tests) { ":webrtc_vp9", ":webrtc_vp9_helpers", "../../api:create_frame_generator", + "../../api:create_video_codec_tester_api", "../../api:create_videocodec_test_fixture_api", "../../api:frame_generator_api", "../../api:mock_video_codec_factory", "../../api:mock_video_decoder", "../../api:mock_video_encoder", "../../api:scoped_refptr", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", + "../../api:videocodec_test_stats_api", "../../api/test/video:function_video_factory", + "../../api/units:data_rate", + "../../api/units:frequency", "../../api/video:encoded_image", + "../../api/video:resolution", "../../api/video:video_frame", "../../api/video:video_rtp_headers", + "../../api/video_codecs:builtin_video_decoder_factory", 
+ "../../api/video_codecs:builtin_video_encoder_factory", "../../api/video_codecs:rtc_software_fallback_wrappers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../common_video/test:utilities", @@ -1084,11 +1117,14 @@ if (rtc_include_tests) { "../../test:fileutils", "../../test:test_support", "../../test:video_test_common", + "../../test:video_test_support", "../rtp_rtcp:rtp_rtcp_format", "codecs/av1:dav1d_decoder", + "svc:scalability_mode_util", "//third_party/libyuv", ] absl_deps = [ + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", ] @@ -1124,6 +1160,8 @@ if (rtc_include_tests) { sources = [ "chain_diff_calculator_unittest.cc", + "codecs/test/video_codec_analyzer_unittest.cc", + "codecs/test/video_codec_tester_impl_unittest.cc", "codecs/test/videocodec_test_fixture_config_unittest.cc", "codecs/test/videocodec_test_stats_impl_unittest.cc", "codecs/test/videoprocessor_unittest.cc", @@ -1207,9 +1245,11 @@ if (rtc_include_tests) { "../../api:rtp_packet_info", "../../api:scoped_refptr", "../../api:simulcast_test_fixture_api", + "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", "../../api/task_queue", "../../api/task_queue:default_task_queue_factory", + "../../api/task_queue/test:mock_task_queue_base", "../../api/test/video:function_video_factory", "../../api/units:data_size", "../../api/units:frequency", @@ -1217,6 +1257,7 @@ if (rtc_include_tests) { "../../api/units:timestamp", "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:encoded_frame", + "../../api/video:encoded_image", "../../api/video:render_resolution", "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocation", @@ -1233,6 +1274,7 @@ if (rtc_include_tests) { "../../media:rtc_media_base", "../../rtc_base", "../../rtc_base:checks", + "../../rtc_base:gunit_helpers", 
"../../rtc_base:histogram_percentile_counter", "../../rtc_base:platform_thread", "../../rtc_base:random", @@ -1259,6 +1301,7 @@ if (rtc_include_tests) { "../../test:video_test_common", "../../test:video_test_support", "../../test/time_controller:time_controller", + "../../third_party/libyuv:libyuv", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", "codecs/av1:video_coding_codecs_av1_tests", diff --git a/modules/video_coding/codecs/av1/BUILD.gn b/modules/video_coding/codecs/av1/BUILD.gn index 24be86c0ba..610f958ad1 100644 --- a/modules/video_coding/codecs/av1/BUILD.gn +++ b/modules/video_coding/codecs/av1/BUILD.gn @@ -57,10 +57,12 @@ rtc_library("libaom_av1_encoder") { "../../../../api:scoped_refptr", "../../../../api/video:encoded_image", "../../../../api/video:video_frame", + "../../../../api/video_codecs:scalability_mode", "../../../../api/video_codecs:video_codecs_api", "../../../../common_video", "../../../../rtc_base:checks", "../../../../rtc_base:logging", + "../../../../rtc_base:rtc_numerics", "../../svc:scalability_structures", "../../svc:scalable_video_controller", "//third_party/libaom", diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index 807513bc7b..4d8786c824 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -23,6 +23,7 @@ #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -32,6 +33,7 @@ #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/sequence_number_util.h" #include "third_party/libaom/source/libaom/aom/aom_codec.h" 
#include "third_party/libaom/source/libaom/aom/aom_encoder.h" #include "third_party/libaom/source/libaom/aom/aomcx.h" @@ -108,6 +110,7 @@ class LibaomAv1Encoder final : public VideoEncoder { void MaybeRewrapImgWithFormat(const aom_img_fmt_t fmt); std::unique_ptr svc_controller_; + absl::optional scalability_mode_; bool inited_; bool rates_configured_; absl::optional svc_params_; @@ -117,6 +120,7 @@ class LibaomAv1Encoder final : public VideoEncoder { aom_codec_ctx_t ctx_; aom_codec_enc_cfg_t cfg_; EncodedImageCallback* encoded_image_callback_; + SeqNumUnwrapper rtp_timestamp_unwrapper_; }; int32_t VerifyCodecSettings(const VideoCodec& codec_settings) { @@ -183,16 +187,15 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, RTC_LOG(LS_WARNING) << "Simulcast is not implemented by LibaomAv1Encoder."; return result; } - absl::optional scalability_mode = - encoder_settings_.GetScalabilityMode(); - if (!scalability_mode.has_value()) { + scalability_mode_ = encoder_settings_.GetScalabilityMode(); + if (!scalability_mode_.has_value()) { RTC_LOG(LS_WARNING) << "Scalability mode is not set, using 'L1T1'."; - scalability_mode = ScalabilityMode::kL1T1; + scalability_mode_ = ScalabilityMode::kL1T1; } - svc_controller_ = CreateScalabilityStructure(*scalability_mode); + svc_controller_ = CreateScalabilityStructure(*scalability_mode_); if (svc_controller_ == nullptr) { RTC_LOG(LS_WARNING) << "Failed to set scalability mode " - << static_cast(*scalability_mode); + << static_cast(*scalability_mode_); return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -636,9 +639,11 @@ int32_t LibaomAv1Encoder::Encode( layer_frame->TemporalId() > 0 ? 1 : 0); } - // Encode a frame. - aom_codec_err_t ret = aom_codec_encode(&ctx_, frame_for_encode_, - frame.timestamp(), duration, flags); + // Encode a frame. The presentation timestamp `pts` should never wrap, hence + // the unwrapping. 
+ aom_codec_err_t ret = aom_codec_encode( + &ctx_, frame_for_encode_, + rtp_timestamp_unwrapper_.Unwrap(frame.timestamp()), duration, flags); if (ret != AOM_CODEC_OK) { RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret << " on aom_codec_encode."; @@ -705,6 +710,7 @@ int32_t LibaomAv1Encoder::Encode( CodecSpecificInfo codec_specific_info; codec_specific_info.codecType = kVideoCodecAV1; codec_specific_info.end_of_picture = end_of_picture; + codec_specific_info.scalability_mode = scalability_mode_; bool is_keyframe = layer_frame->IsKeyframe(); codec_specific_info.generic_frame_info = svc_controller_->OnEncodeDone(*layer_frame); diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc index 5243edc1e4..d194cef35b 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc @@ -10,6 +10,7 @@ #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include #include #include @@ -235,5 +236,29 @@ TEST(LibaomAv1EncoderTest, PopulatesEncodedFrameSize) { codec_settings.height))))); } +TEST(LibaomAv1EncoderTest, RtpTimestampWrap) { + std::unique_ptr encoder = CreateLibaomAv1Encoder(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + + VideoEncoder::RateControlParameters rate_parameters; + rate_parameters.framerate_fps = 30; + rate_parameters.bitrate.SetBitrate(/*spatial_index=*/0, 0, 300'000); + encoder->SetRates(rate_parameters); + + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(2) + .SetRtpTimestamp(std::numeric_limits::max()) + .Encode(); + ASSERT_THAT(encoded_frames, SizeIs(2)); + EXPECT_THAT(encoded_frames[0].encoded_image._frameType, + Eq(VideoFrameType::kVideoFrameKey)); 
+ EXPECT_THAT(encoded_frames[1].encoded_image._frameType, + Eq(VideoFrameType::kVideoFrameDelta)); +} + } // namespace } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc index dbb62ea6dc..86e317f94b 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_unittest.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc @@ -44,6 +44,7 @@ using ::testing::Ge; using ::testing::IsEmpty; using ::testing::Not; using ::testing::NotNull; +using ::testing::Optional; using ::testing::Pointwise; using ::testing::SizeIs; using ::testing::Truly; @@ -248,6 +249,8 @@ TEST_P(LibaomAv1SvcTest, EncodeAndDecodeAllDecodeTargets) { requested_ids.push_back(frame_id); decoder.Decode(frame_id, frame.encoded_image); } + EXPECT_THAT(frame.codec_specific_info.scalability_mode, + Optional(param.GetScalabilityMode())); } ASSERT_THAT(requested_ids, SizeIs(Ge(2u))); diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc index fc3fd195fb..b8055ac85f 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -21,6 +21,9 @@ #include #include "absl/strings/match.h" +#include "absl/types/optional.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" @@ -31,10 +34,10 @@ #include "system_wrappers/include/metrics.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/scale.h" -#include "third_party/openh264/src/codec/api/svc/codec_api.h" -#include "third_party/openh264/src/codec/api/svc/codec_app_def.h" -#include "third_party/openh264/src/codec/api/svc/codec_def.h" -#include 
"third_party/openh264/src/codec/api/svc/codec_ver.h" +#include "third_party/openh264/src/codec/api/wels/codec_api.h" +#include "third_party/openh264/src/codec/api/wels/codec_app_def.h" +#include "third_party/openh264/src/codec/api/wels/codec_def.h" +#include "third_party/openh264/src/codec/api/wels/codec_ver.h" namespace webrtc { @@ -86,6 +89,23 @@ VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) { return VideoFrameType::kEmptyFrame; } +absl::optional ScalabilityModeFromTemporalLayers( + int num_temporal_layers) { + switch (num_temporal_layers) { + case 0: + break; + case 1: + return ScalabilityMode::kL1T1; + case 2: + return ScalabilityMode::kL1T2; + case 3: + return ScalabilityMode::kL1T3; + default: + RTC_DCHECK_NOTREACHED(); + } + return absl::nullopt; +} + } // namespace // Helper method used by H264EncoderImpl::Encode. @@ -199,6 +219,7 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, encoders_.resize(number_of_streams); pictures_.resize(number_of_streams); svc_controllers_.resize(number_of_streams); + scalability_modes_.resize(number_of_streams); configurations_.resize(number_of_streams); tl0sync_limit_.resize(number_of_streams); @@ -284,25 +305,10 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, encoded_images_[i].set_size(0); tl0sync_limit_[i] = configurations_[i].num_temporal_layers; - absl::optional scalability_mode; - switch (configurations_[i].num_temporal_layers) { - case 0: - break; - case 1: - scalability_mode = ScalabilityMode::kL1T1; - break; - case 2: - scalability_mode = ScalabilityMode::kL1T2; - break; - case 3: - scalability_mode = ScalabilityMode::kL1T3; - break; - default: - RTC_DCHECK_NOTREACHED(); - } - if (scalability_mode.has_value()) { - svc_controllers_[i] = - CreateScalabilityStructure(scalability_mode.value()); + scalability_modes_[i] = ScalabilityModeFromTemporalLayers( + configurations_[i].num_temporal_layers); + if (scalability_modes_[i].has_value()) { + svc_controllers_[i] = 
CreateScalabilityStructure(*scalability_modes_[i]); if (svc_controllers_[i] == nullptr) { RTC_LOG(LS_ERROR) << "Failed to create scalability structure"; Release(); @@ -335,6 +341,7 @@ int32_t H264EncoderImpl::Release() { pictures_.clear(); tl0sync_limit_.clear(); svc_controllers_.clear(); + scalability_modes_.clear(); return WEBRTC_VIDEO_CODEC_OK; } @@ -416,26 +423,17 @@ int32_t H264EncoderImpl::Encode( RTC_CHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 || frame_buffer->type() == VideoFrameBuffer::Type::kI420A); - bool send_key_frame = false; + bool is_keyframe_needed = false; for (size_t i = 0; i < configurations_.size(); ++i) { if (configurations_[i].key_frame_request && configurations_[i].sending) { - send_key_frame = true; + // This is legacy behavior, generating a keyframe on all layers + // when generating one for a layer that became active for the first time + // or after being disabled. + is_keyframe_needed = true; break; } } - if (!send_key_frame && frame_types) { - for (size_t i = 0; i < configurations_.size(); ++i) { - const size_t simulcast_idx = - static_cast(configurations_[i].simulcast_idx); - if (configurations_[i].sending && simulcast_idx < frame_types->size() && - (*frame_types)[simulcast_idx] == VideoFrameType::kVideoFrameKey) { - send_key_frame = true; - break; - } - } - } - RTC_DCHECK_EQ(configurations_[0].width, frame_buffer->width()); RTC_DCHECK_EQ(configurations_[0].height, frame_buffer->height()); @@ -480,12 +478,20 @@ int32_t H264EncoderImpl::Encode( if (!configurations_[i].sending) { continue; } - if (frame_types != nullptr) { + if (frame_types != nullptr && i < frame_types->size()) { // Skip frame? if ((*frame_types)[i] == VideoFrameType::kEmptyFrame) { continue; } } + // Send a key frame either when this layer is configured to require one + // or we have explicitly been asked to. 
+ const size_t simulcast_idx = + static_cast(configurations_[i].simulcast_idx); + bool send_key_frame = + is_keyframe_needed || + (frame_types && simulcast_idx < frame_types->size() && + (*frame_types)[simulcast_idx] == VideoFrameType::kVideoFrameKey); if (send_key_frame) { // API doc says ForceIntraFrame(false) does nothing, but calling this // function forces a key frame regardless of the `bIDR` argument's value. @@ -569,6 +575,7 @@ int32_t H264EncoderImpl::Encode( codec_specific.template_structure = svc_controllers_[i]->DependencyStructure(); } + codec_specific.scalability_mode = scalability_modes_[i]; } encoded_image_callback_->OnEncodedImage(encoded_images_[i], &codec_specific); diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.h b/modules/video_coding/codecs/h264/h264_encoder_impl.h index 780781be23..f02521f0dc 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.h +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.h @@ -24,13 +24,17 @@ #include #include +#include "absl/container/inlined_vector.h" +#include "api/transport/rtp/dependency_descriptor.h" #include "api/video/i420_buffer.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_encoder.h" #include "common_video/h264/h264_bitstream_parser.h" #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/quality_scaler.h" -#include "third_party/openh264/src/codec/api/svc/codec_app_def.h" +#include "third_party/openh264/src/codec/api/wels/codec_app_def.h" class ISVCEncoder; @@ -99,6 +103,8 @@ class H264EncoderImpl : public H264Encoder { std::vector configurations_; std::vector encoded_images_; std::vector> svc_controllers_; + absl::InlinedVector, kMaxSimulcastStreams> + scalability_modes_; VideoCodec codec_; H264PacketizationMode packetization_mode_; diff --git 
a/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc b/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc index 12b5da1404..2acb629a76 100644 --- a/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc +++ b/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc @@ -35,8 +35,12 @@ std::unique_ptr CreateSpecificSimulcastTestFixture() { } // namespace TEST(TestH264Simulcast, TestKeyFrameRequestsOnAllStreams) { + GTEST_SKIP() << "Not applicable to H264."; +} + +TEST(TestH264Simulcast, TestKeyFrameRequestsOnSpecificStreams) { auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestKeyFrameRequestsOnAllStreams(); + fixture->TestKeyFrameRequestsOnSpecificStreams(); } TEST(TestH264Simulcast, TestPaddingAllStreams) { diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.h b/modules/video_coding/codecs/test/encoded_video_frame_producer.h index 2216287b92..04f4a64950 100644 --- a/modules/video_coding/codecs/test/encoded_video_frame_producer.h +++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.h @@ -47,6 +47,8 @@ class EncodedVideoFrameProducer { EncodedVideoFrameProducer& SetFramerateFps(int value); + EncodedVideoFrameProducer& SetRtpTimestamp(uint32_t value); + // Generates input video frames and encodes them with `encoder` provided in // the constructor. Returns frame passed to the `OnEncodedImage` by wraping // `EncodedImageCallback` underneath. 
@@ -88,5 +90,11 @@ inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetFramerateFps( return *this; } +inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetRtpTimestamp( + uint32_t value) { + rtp_timestamp_ = value; + return *this; +} + } // namespace webrtc #endif // MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_ diff --git a/modules/video_coding/codecs/test/video_codec_analyzer.cc b/modules/video_coding/codecs/test/video_codec_analyzer.cc new file mode 100644 index 0000000000..50af417bcf --- /dev/null +++ b/modules/video_coding/codecs/test/video_codec_analyzer.cc @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/test/video_codec_analyzer.h" + +#include + +#include "api/task_queue/default_task_queue_factory.h" +#include "api/test/video_codec_tester.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_frame.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/time_utils.h" +#include "third_party/libyuv/include/libyuv/compare.h" + +namespace webrtc { +namespace test { + +namespace { + +struct Psnr { + double y; + double u; + double v; + double yuv; +}; + +Psnr CalcPsnr(const I420BufferInterface& ref_buffer, + const I420BufferInterface& dec_buffer) { + RTC_CHECK_EQ(ref_buffer.width(), dec_buffer.width()); + RTC_CHECK_EQ(ref_buffer.height(), dec_buffer.height()); + + uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataY(), dec_buffer.StrideY(), ref_buffer.DataY(), + ref_buffer.StrideY(), dec_buffer.width(), dec_buffer.height()); + + uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataU(), dec_buffer.StrideU(), ref_buffer.DataU(), + ref_buffer.StrideU(), dec_buffer.width() / 2, dec_buffer.height() / 2); + + uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataV(), dec_buffer.StrideV(), ref_buffer.DataV(), + ref_buffer.StrideV(), dec_buffer.width() / 2, dec_buffer.height() / 2); + + int num_y_samples = dec_buffer.width() * dec_buffer.height(); + Psnr psnr; + psnr.y = libyuv::SumSquareErrorToPsnr(sse_y, num_y_samples); + psnr.u = libyuv::SumSquareErrorToPsnr(sse_u, num_y_samples / 4); + psnr.v = libyuv::SumSquareErrorToPsnr(sse_v, num_y_samples / 4); + psnr.yuv = libyuv::SumSquareErrorToPsnr(sse_y + sse_u + sse_v, + num_y_samples + num_y_samples / 2); + return psnr; +} + +} // namespace + +VideoCodecAnalyzer::VideoCodecAnalyzer( + rtc::TaskQueue& task_queue, + ReferenceVideoSource* reference_video_source) + : task_queue_(task_queue), reference_video_source_(reference_video_source) { + 
sequence_checker_.Detach(); +} + +void VideoCodecAnalyzer::StartEncode(const VideoFrame& input_frame) { + int64_t encode_started_ns = rtc::TimeNanos(); + task_queue_.PostTask( + [this, timestamp_rtp = input_frame.timestamp(), encode_started_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetOrAddFrame(timestamp_rtp, /*spatial_idx=*/0); + fs->encode_start_ns = encode_started_ns; + }); +} + +void VideoCodecAnalyzer::FinishEncode(const EncodedImage& frame) { + int64_t encode_finished_ns = rtc::TimeNanos(); + + task_queue_.PostTask([this, timestamp_rtp = frame.Timestamp(), + spatial_idx = frame.SpatialIndex().value_or(0), + temporal_idx = frame.TemporalIndex().value_or(0), + frame_type = frame._frameType, qp = frame.qp_, + frame_size_bytes = frame.size(), encode_finished_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetOrAddFrame(timestamp_rtp, spatial_idx); + VideoCodecTestStats::FrameStatistics* fs_base = + stats_.GetOrAddFrame(timestamp_rtp, 0); + + fs->encode_start_ns = fs_base->encode_start_ns; + fs->spatial_idx = spatial_idx; + fs->temporal_idx = temporal_idx; + fs->frame_type = frame_type; + fs->qp = qp; + + fs->encode_time_us = (encode_finished_ns - fs->encode_start_ns) / + rtc::kNumNanosecsPerMicrosec; + fs->length_bytes = frame_size_bytes; + + fs->encoding_successful = true; + }); +} + +void VideoCodecAnalyzer::StartDecode(const EncodedImage& frame) { + int64_t decode_start_ns = rtc::TimeNanos(); + task_queue_.PostTask([this, timestamp_rtp = frame.Timestamp(), + spatial_idx = frame.SpatialIndex().value_or(0), + frame_size_bytes = frame.size(), decode_start_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetOrAddFrame(timestamp_rtp, spatial_idx); + if (fs->length_bytes == 0) { + // In encode-decode test the frame size is set in EncodeFinished. In + // decode-only test set it here. 
+ fs->length_bytes = frame_size_bytes; + } + fs->decode_start_ns = decode_start_ns; + }); +} + +void VideoCodecAnalyzer::FinishDecode(const VideoFrame& frame, + int spatial_idx) { + int64_t decode_finished_ns = rtc::TimeNanos(); + task_queue_.PostTask([this, timestamp_rtp = frame.timestamp(), spatial_idx, + width = frame.width(), height = frame.height(), + decode_finished_ns]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoCodecTestStats::FrameStatistics* fs = + stats_.GetFrameWithTimestamp(timestamp_rtp, spatial_idx); + fs->decode_time_us = (decode_finished_ns - fs->decode_start_ns) / + rtc::kNumNanosecsPerMicrosec; + fs->decoded_width = width; + fs->decoded_height = height; + fs->decoding_successful = true; + }); + + if (reference_video_source_ != nullptr) { + // Copy hardware-backed frame into main memory to release output buffers + // which number may be limited in hardware decoders. + rtc::scoped_refptr decoded_buffer = + frame.video_frame_buffer()->ToI420(); + + task_queue_.PostTask([this, decoded_buffer, + timestamp_rtp = frame.timestamp(), spatial_idx]() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoFrame ref_frame = reference_video_source_->GetFrame( + timestamp_rtp, {.width = decoded_buffer->width(), + .height = decoded_buffer->height()}); + rtc::scoped_refptr ref_buffer = + ref_frame.video_frame_buffer()->ToI420(); + + Psnr psnr = CalcPsnr(*decoded_buffer, *ref_buffer); + VideoCodecTestStats::FrameStatistics* fs = + this->stats_.GetFrameWithTimestamp(timestamp_rtp, spatial_idx); + fs->psnr_y = static_cast(psnr.y); + fs->psnr_u = static_cast(psnr.u); + fs->psnr_v = static_cast(psnr.v); + fs->psnr = static_cast(psnr.yuv); + + fs->quality_analysis_successful = true; + }); + } +} + +std::unique_ptr VideoCodecAnalyzer::GetStats() { + std::unique_ptr stats; + rtc::Event ready; + task_queue_.PostTask([this, &stats, &ready]() mutable { + RTC_DCHECK_RUN_ON(&sequence_checker_); + stats.reset(new VideoCodecTestStatsImpl(stats_)); + ready.Set(); + }); + 
ready.Wait(rtc::Event::kForever); + return stats; +} + +} // namespace test +} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_analyzer.h b/modules/video_coding/codecs/test/video_codec_analyzer.h new file mode 100644 index 0000000000..63a864e810 --- /dev/null +++ b/modules/video_coding/codecs/test/video_codec_analyzer.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_ +#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_ + +#include + +#include "absl/types/optional.h" +#include "api/sequence_checker.h" +#include "api/video/encoded_image.h" +#include "api/video/resolution.h" +#include "api/video/video_frame.h" +#include "modules/video_coding/codecs/test/videocodec_test_stats_impl.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/task_queue_for_test.h" + +namespace webrtc { +namespace test { + +// Analyzer measures and collects metrics necessary for evaluation of video +// codec quality and performance. This class is thread-safe. +class VideoCodecAnalyzer { + public: + // An interface that provides reference frames for spatial quality analysis. 
+ class ReferenceVideoSource { + public: + virtual ~ReferenceVideoSource() = default; + + virtual VideoFrame GetFrame(uint32_t timestamp_rtp, + Resolution resolution) = 0; + }; + + VideoCodecAnalyzer(rtc::TaskQueue& task_queue, + ReferenceVideoSource* reference_video_source = nullptr); + + void StartEncode(const VideoFrame& frame); + + void FinishEncode(const EncodedImage& frame); + + void StartDecode(const EncodedImage& frame); + + void FinishDecode(const VideoFrame& frame, int spatial_idx); + + std::unique_ptr GetStats(); + + protected: + rtc::TaskQueue& task_queue_; + ReferenceVideoSource* const reference_video_source_; + VideoCodecTestStatsImpl stats_ RTC_GUARDED_BY(sequence_checker_); + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; +}; + +} // namespace test +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_ diff --git a/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc b/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc new file mode 100644 index 0000000000..3f9de6dac2 --- /dev/null +++ b/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/test/video_codec_analyzer.h" + +#include "absl/types/optional.h" +#include "api/video/i420_buffer.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" + +namespace webrtc { +namespace test { + +namespace { +using ::testing::Return; +using ::testing::Values; + +const size_t kTimestamp = 3000; +const size_t kSpatialIdx = 2; + +class MockReferenceVideoSource + : public VideoCodecAnalyzer::ReferenceVideoSource { + public: + MOCK_METHOD(VideoFrame, GetFrame, (uint32_t, Resolution), (override)); +}; + +VideoFrame CreateVideoFrame(uint32_t timestamp_rtp, + uint8_t y = 0, + uint8_t u = 0, + uint8_t v = 0) { + rtc::scoped_refptr buffer(I420Buffer::Create(2, 2)); + + libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(), + buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), 0, 0, + buffer->width(), buffer->height(), y, u, v); + + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp) + .build(); +} + +EncodedImage CreateEncodedImage(uint32_t timestamp_rtp, int spatial_idx = 0) { + EncodedImage encoded_image; + encoded_image.SetTimestamp(timestamp_rtp); + encoded_image.SetSpatialIndex(spatial_idx); + return encoded_image; +} +} // namespace + +TEST(VideoCodecAnalyzerTest, EncodeStartedCreatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartEncode(CreateVideoFrame(kTimestamp)); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + EXPECT_EQ(fs[0].rtp_timestamp, kTimestamp); +} + +TEST(VideoCodecAnalyzerTest, EncodeFinishedUpdatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartEncode(CreateVideoFrame(kTimestamp)); + + EncodedImage encoded_frame = CreateEncodedImage(kTimestamp, kSpatialIdx); + analyzer.FinishEncode(encoded_frame); + + auto fs = 
analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(2u, fs.size()); + EXPECT_TRUE(fs[1].encoding_successful); +} + +TEST(VideoCodecAnalyzerTest, DecodeStartedNoFrameStatsCreatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + EXPECT_EQ(fs[0].rtp_timestamp, kTimestamp); +} + +TEST(VideoCodecAnalyzerTest, DecodeStartedFrameStatsExistsReusesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartEncode(CreateVideoFrame(kTimestamp)); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, /*spatial_idx=*/0)); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); +} + +TEST(VideoCodecAnalyzerTest, DecodeFinishedUpdatesFrameStats) { + TaskQueueForTest task_queue; + VideoCodecAnalyzer analyzer(task_queue); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); + VideoFrame decoded_frame = CreateVideoFrame(kTimestamp); + analyzer.FinishDecode(decoded_frame, kSpatialIdx); + + auto fs = analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + + EXPECT_TRUE(fs[0].decoding_successful); + EXPECT_EQ(static_cast(fs[0].decoded_width), decoded_frame.width()); + EXPECT_EQ(static_cast(fs[0].decoded_height), decoded_frame.height()); +} + +TEST(VideoCodecAnalyzerTest, DecodeFinishedComputesPsnr) { + TaskQueueForTest task_queue; + MockReferenceVideoSource reference_video_source; + VideoCodecAnalyzer analyzer(task_queue, &reference_video_source); + analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); + + EXPECT_CALL(reference_video_source, GetFrame) + .WillOnce(Return(CreateVideoFrame(kTimestamp, /*y=*/0, + /*u=*/0, /*v=*/0))); + + analyzer.FinishDecode( + CreateVideoFrame(kTimestamp, /*value_y=*/1, /*value_u=*/2, /*value_v=*/3), + kSpatialIdx); + + auto fs = 
analyzer.GetStats()->GetFrameStatistics(); + EXPECT_EQ(1u, fs.size()); + + EXPECT_NEAR(fs[0].psnr_y, 48, 1); + EXPECT_NEAR(fs[0].psnr_u, 42, 1); + EXPECT_NEAR(fs[0].psnr_v, 38, 1); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_test.cc b/modules/video_coding/codecs/test/video_codec_test.cc new file mode 100644 index 0000000000..bd4c8e07f2 --- /dev/null +++ b/modules/video_coding/codecs/test/video_codec_test.cc @@ -0,0 +1,456 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/video_codec.h" + +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/test/create_video_codec_tester.h" +#include "api/test/videocodec_test_stats.h" +#include "api/units/data_rate.h" +#include "api/units/frequency.h" +#include "api/video/i420_buffer.h" +#include "api/video/resolution.h" +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/builtin_video_encoder_factory.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_encoder.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "media/base/media_constants.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "rtc_base/strings/string_builder.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" + +namespace webrtc { +namespace 
test { + +namespace { +using ::testing::Combine; +using ::testing::Values; +using Layer = std::pair; + +struct VideoInfo { + std::string name; + Resolution resolution; +}; + +struct CodecInfo { + std::string type; + std::string encoder; + std::string decoder; +}; + +struct EncodingSettings { + ScalabilityMode scalability_mode; + // Spatial layer resolution. + std::map resolution; + // Top temporal layer frame rate. + Frequency framerate; + // Bitrate of spatial and temporal layers. + std::map bitrate; +}; + +struct EncodingTestSettings { + std::string name; + int num_frames = 1; + std::map frame_settings; +}; + +struct DecodingTestSettings { + std::string name; +}; + +struct QualityExpectations { + double min_apsnr_y; +}; + +struct EncodeDecodeTestParams { + CodecInfo codec; + VideoInfo video; + VideoCodecTester::EncoderSettings encoder_settings; + VideoCodecTester::DecoderSettings decoder_settings; + EncodingTestSettings encoding_settings; + DecodingTestSettings decoding_settings; + QualityExpectations quality_expectations; +}; + +const EncodingSettings kQvga64Kbps30Fps = { + .scalability_mode = ScalabilityMode::kL1T1, + .resolution = {{0, {.width = 320, .height = 180}}}, + .framerate = Frequency::Hertz(30), + .bitrate = {{Layer(0, 0), DataRate::KilobitsPerSec(64)}}}; + +const EncodingTestSettings kConstantRateQvga64Kbps30Fps = { + .name = "ConstantRateQvga64Kbps30Fps", + .num_frames = 300, + .frame_settings = {{/*frame_num=*/0, kQvga64Kbps30Fps}}}; + +const QualityExpectations kLowQuality = {.min_apsnr_y = 30}; + +const VideoInfo kFourPeople_1280x720_30 = { + .name = "FourPeople_1280x720_30", + .resolution = {.width = 1280, .height = 720}}; + +const CodecInfo kLibvpxVp8 = {.type = "VP8", + .encoder = "libvpx", + .decoder = "libvpx"}; + +const CodecInfo kLibvpxVp9 = {.type = "VP9", + .encoder = "libvpx", + .decoder = "libvpx"}; + +const CodecInfo kOpenH264 = {.type = "H264", + .encoder = "openh264", + .decoder = "ffmpeg"}; + +class TestRawVideoSource : public 
VideoCodecTester::RawVideoSource { + public: + static constexpr Frequency k90kHz = Frequency::Hertz(90000); + + TestRawVideoSource(std::unique_ptr frame_reader, + const EncodingTestSettings& test_settings) + : frame_reader_(std::move(frame_reader)), + test_settings_(test_settings), + frame_num_(0), + timestamp_rtp_(0) { + // Ensure settings for the first frame are provided. + RTC_CHECK_GT(test_settings_.frame_settings.size(), 0u); + RTC_CHECK_EQ(test_settings_.frame_settings.begin()->first, 0); + } + + // Pulls next frame. Frame RTP timestamp is set accordingly to + // `EncodingSettings::framerate`. + absl::optional PullFrame() override { + if (frame_num_ >= test_settings_.num_frames) { + // End of stream. + return absl::nullopt; + } + + EncodingSettings frame_settings = + std::prev(test_settings_.frame_settings.upper_bound(frame_num_)) + ->second; + + int pulled_frame; + auto buffer = frame_reader_->PullFrame( + &pulled_frame, frame_settings.resolution.rbegin()->second, + {.num = 30, .den = static_cast(frame_settings.framerate.hertz())}); + RTC_CHECK(buffer) << "Cannot pull frame " << frame_num_; + + auto frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp_) + .build(); + + pulled_frames_[timestamp_rtp_] = pulled_frame; + timestamp_rtp_ += k90kHz / frame_settings.framerate; + ++frame_num_; + + return frame; + } + + // Reads frame specified by `timestamp_rtp`, scales it to `resolution` and + // returns. Frame with the given `timestamp_rtp` is expected to be pulled + // before. 
+ VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override { + RTC_CHECK(pulled_frames_.find(timestamp_rtp) != pulled_frames_.end()) + << "Frame with RTP timestamp " << timestamp_rtp + << " was not pulled before"; + auto buffer = + frame_reader_->ReadFrame(pulled_frames_[timestamp_rtp], resolution); + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp) + .build(); + } + + protected: + std::unique_ptr frame_reader_; + const EncodingTestSettings& test_settings_; + int frame_num_; + uint32_t timestamp_rtp_; + std::map pulled_frames_; +}; + +class TestEncoder : public VideoCodecTester::Encoder, + public EncodedImageCallback { + public: + TestEncoder(std::unique_ptr encoder, + const CodecInfo& codec_info, + const std::map& frame_settings) + : encoder_(std::move(encoder)), + codec_info_(codec_info), + frame_settings_(frame_settings), + frame_num_(0) { + // Ensure settings for the first frame is provided. + RTC_CHECK_GT(frame_settings_.size(), 0u); + RTC_CHECK_EQ(frame_settings_.begin()->first, 0); + + encoder_->RegisterEncodeCompleteCallback(this); + } + + void Encode(const VideoFrame& frame, EncodeCallback callback) override { + callbacks_[frame.timestamp()] = std::move(callback); + + if (auto fs = frame_settings_.find(frame_num_); + fs != frame_settings_.end()) { + if (fs == frame_settings_.begin() || + ConfigChanged(fs->second, std::prev(fs)->second)) { + Configure(fs->second); + } + if (fs == frame_settings_.begin() || + RateChanged(fs->second, std::prev(fs)->second)) { + SetRates(fs->second); + } + } + + int result = encoder_->Encode(frame, nullptr); + RTC_CHECK_EQ(result, WEBRTC_VIDEO_CODEC_OK); + ++frame_num_; + } + + protected: + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) override { + auto cb = callbacks_.find(encoded_image.Timestamp()); + RTC_CHECK(cb != callbacks_.end()); + cb->second(encoded_image); + + 
callbacks_.erase(callbacks_.begin(), cb); + return Result(Result::Error::OK); + } + + void Configure(const EncodingSettings& es) { + VideoCodec vc; + const Resolution& resolution = es.resolution.rbegin()->second; + vc.width = resolution.width; + vc.height = resolution.height; + const DataRate& bitrate = es.bitrate.rbegin()->second; + vc.startBitrate = bitrate.kbps(); + vc.maxBitrate = bitrate.kbps(); + vc.minBitrate = 0; + vc.maxFramerate = static_cast(es.framerate.hertz()); + vc.active = true; + vc.qpMax = 0; + vc.numberOfSimulcastStreams = 0; + vc.mode = webrtc::VideoCodecMode::kRealtimeVideo; + vc.SetFrameDropEnabled(true); + + vc.codecType = PayloadStringToCodecType(codec_info_.type); + if (vc.codecType == kVideoCodecVP8) { + *(vc.VP8()) = VideoEncoder::GetDefaultVp8Settings(); + } else if (vc.codecType == kVideoCodecVP9) { + *(vc.VP9()) = VideoEncoder::GetDefaultVp9Settings(); + } else if (vc.codecType == kVideoCodecH264) { + *(vc.H264()) = VideoEncoder::GetDefaultH264Settings(); + } + + VideoEncoder::Settings ves( + VideoEncoder::Capabilities(/*loss_notification=*/false), + /*number_of_cores=*/1, + /*max_payload_size=*/1440); + + int result = encoder_->InitEncode(&vc, ves); + RTC_CHECK_EQ(result, WEBRTC_VIDEO_CODEC_OK); + } + + void SetRates(const EncodingSettings& es) { + VideoEncoder::RateControlParameters rc; + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(es.scalability_mode); + int num_temporal_layers = + ScalabilityModeToNumSpatialLayers(es.scalability_mode); + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + for (int tidx = 0; tidx < num_temporal_layers; ++tidx) { + RTC_CHECK(es.bitrate.find(Layer(sidx, tidx)) != es.bitrate.end()) + << "Bitrate for layer S=" << sidx << " T=" << tidx << " is not set"; + rc.bitrate.SetBitrate(sidx, tidx, + es.bitrate.at(Layer(sidx, tidx)).bps()); + } + } + + rc.framerate_fps = es.framerate.millihertz() / 1000.0; + encoder_->SetRates(rc); + } + + bool ConfigChanged(const EncodingSettings& es, + 
const EncodingSettings& prev_es) const { + return es.scalability_mode != prev_es.scalability_mode || + es.resolution != prev_es.resolution; + } + + bool RateChanged(const EncodingSettings& es, + const EncodingSettings& prev_es) const { + return es.bitrate != prev_es.bitrate || es.framerate != prev_es.framerate; + } + + std::unique_ptr encoder_; + const CodecInfo& codec_info_; + const std::map& frame_settings_; + int frame_num_; + std::map callbacks_; +}; + +class TestDecoder : public VideoCodecTester::Decoder, + public DecodedImageCallback { + public: + TestDecoder(std::unique_ptr decoder, + const CodecInfo& codec_info) + : decoder_(std::move(decoder)), codec_info_(codec_info), frame_num_(0) { + decoder_->RegisterDecodeCompleteCallback(this); + } + void Decode(const EncodedImage& frame, DecodeCallback callback) override { + callbacks_[frame.Timestamp()] = std::move(callback); + + if (frame_num_ == 0) { + Configure(); + } + + decoder_->Decode(frame, /*missing_frames=*/false, + /*render_time_ms=*/0); + ++frame_num_; + } + + void Configure() { + VideoDecoder::Settings ds; + ds.set_codec_type(PayloadStringToCodecType(codec_info_.type)); + ds.set_number_of_cores(1); + + bool result = decoder_->Configure(ds); + RTC_CHECK(result); + } + + protected: + int Decoded(VideoFrame& decoded_frame) override { + auto cb = callbacks_.find(decoded_frame.timestamp()); + RTC_CHECK(cb != callbacks_.end()); + cb->second(decoded_frame); + + callbacks_.erase(callbacks_.begin(), cb); + return WEBRTC_VIDEO_CODEC_OK; + } + + std::unique_ptr decoder_; + const CodecInfo& codec_info_; + int frame_num_; + std::map callbacks_; +}; + +std::unique_ptr CreateEncoder( + const CodecInfo& codec_info, + const std::map& frame_settings) { + auto factory = CreateBuiltinVideoEncoderFactory(); + auto encoder = factory->CreateVideoEncoder(SdpVideoFormat(codec_info.type)); + return std::make_unique(std::move(encoder), codec_info, + frame_settings); +} + +std::unique_ptr CreateDecoder( + const CodecInfo& 
codec_info) { + auto factory = CreateBuiltinVideoDecoderFactory(); + auto decoder = factory->CreateVideoDecoder(SdpVideoFormat(codec_info.type)); + return std::make_unique(std::move(decoder), codec_info); +} + +} // namespace + +class EncodeDecodeTest + : public ::testing::TestWithParam { + public: + EncodeDecodeTest() : test_params_(GetParam()) {} + + void SetUp() override { + std::unique_ptr frame_reader = + CreateYuvFrameReader(ResourcePath(test_params_.video.name, "yuv"), + test_params_.video.resolution, + YuvFrameReaderImpl::RepeatMode::kPingPong); + video_source_ = std::make_unique( + std::move(frame_reader), test_params_.encoding_settings); + + encoder_ = CreateEncoder(test_params_.codec, + test_params_.encoding_settings.frame_settings); + decoder_ = CreateDecoder(test_params_.codec); + + tester_ = CreateVideoCodecTester(); + } + + static std::string TestParametersToStr( + const ::testing::TestParamInfo& info) { + return std::string(info.param.encoding_settings.name + + info.param.codec.type + info.param.codec.encoder + + info.param.codec.decoder); + } + + protected: + EncodeDecodeTestParams test_params_; + std::unique_ptr video_source_; + std::unique_ptr encoder_; + std::unique_ptr decoder_; + std::unique_ptr tester_; +}; + +TEST_P(EncodeDecodeTest, DISABLED_TestEncodeDecode) { + std::unique_ptr stats = tester_->RunEncodeDecodeTest( + std::move(video_source_), std::move(encoder_), std::move(decoder_), + test_params_.encoder_settings, test_params_.decoder_settings); + + const auto& frame_settings = test_params_.encoding_settings.frame_settings; + for (auto fs = frame_settings.begin(); fs != frame_settings.end(); ++fs) { + int first_frame = fs->first; + int last_frame = std::next(fs) != frame_settings.end() + ? 
std::next(fs)->first - 1 + : test_params_.encoding_settings.num_frames - 1; + + const EncodingSettings& encoding_settings = fs->second; + auto metrics = stats->CalcVideoStatistic( + first_frame, last_frame, encoding_settings.bitrate.rbegin()->second, + encoding_settings.framerate); + + EXPECT_GE(metrics.avg_psnr_y, + test_params_.quality_expectations.min_apsnr_y); + } +} + +std::list ConstantRateTestParameters() { + std::list test_params; + std::vector codecs = {kLibvpxVp8}; + std::vector videos = {kFourPeople_1280x720_30}; + std::vector> + encoding_settings = {{kConstantRateQvga64Kbps30Fps, kLowQuality}}; + for (const CodecInfo& codec : codecs) { + for (const VideoInfo& video : videos) { + for (const auto& es : encoding_settings) { + EncodeDecodeTestParams p; + p.codec = codec; + p.video = video; + p.encoding_settings = es.first; + p.quality_expectations = es.second; + test_params.push_back(p); + } + } + } + return test_params; +} + +INSTANTIATE_TEST_SUITE_P(ConstantRate, + EncodeDecodeTest, + ::testing::ValuesIn(ConstantRateTestParameters()), + EncodeDecodeTest::TestParametersToStr); +} // namespace test + +} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_tester_impl.cc b/modules/video_coding/codecs/test/video_codec_tester_impl.cc new file mode 100644 index 0000000000..3000c1adee --- /dev/null +++ b/modules/video_coding/codecs/test/video_codec_tester_impl.cc @@ -0,0 +1,325 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" + +#include +#include +#include + +#include "api/task_queue/default_task_queue_factory.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/video_coding/codecs/test/video_codec_analyzer.h" +#include "rtc_base/event.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/sleep.h" + +namespace webrtc { +namespace test { + +namespace { +using RawVideoSource = VideoCodecTester::RawVideoSource; +using CodedVideoSource = VideoCodecTester::CodedVideoSource; +using Decoder = VideoCodecTester::Decoder; +using Encoder = VideoCodecTester::Encoder; +using EncoderSettings = VideoCodecTester::EncoderSettings; +using DecoderSettings = VideoCodecTester::DecoderSettings; +using PacingSettings = VideoCodecTester::PacingSettings; +using PacingMode = PacingSettings::PacingMode; + +constexpr Frequency k90kHz = Frequency::Hertz(90000); + +// A thread-safe wrapper for video source to be shared with the quality analyzer +// that reads reference frames from a separate thread. +class SyncRawVideoSource : public VideoCodecAnalyzer::ReferenceVideoSource { + public: + explicit SyncRawVideoSource(std::unique_ptr video_source) + : video_source_(std::move(video_source)) {} + + absl::optional PullFrame() { + MutexLock lock(&mutex_); + return video_source_->PullFrame(); + } + + VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override { + MutexLock lock(&mutex_); + return video_source_->GetFrame(timestamp_rtp, resolution); + } + + protected: + std::unique_ptr video_source_ RTC_GUARDED_BY(mutex_); + Mutex mutex_; +}; + +// Pacer calculates delay necessary to keep frame encode or decode call spaced +// from the previous calls by the pacing time. 
`Delay` is expected to be called +// as close as possible to posting frame encode or decode task. This class is +// not thread safe. +class Pacer { + public: + explicit Pacer(PacingSettings settings) + : settings_(settings), delay_(TimeDelta::Zero()) {} + TimeDelta Delay(Timestamp beat) { + if (settings_.mode == PacingMode::kNoPacing) { + return TimeDelta::Zero(); + } + + Timestamp now = Timestamp::Micros(rtc::TimeMicros()); + if (prev_time_.has_value()) { + delay_ += PacingTime(beat); + delay_ -= (now - *prev_time_); + if (delay_.ns() < 0) { + delay_ = TimeDelta::Zero(); + } + } + + prev_beat_ = beat; + prev_time_ = now; + return delay_; + } + + private: + TimeDelta PacingTime(Timestamp beat) { + if (settings_.mode == PacingMode::kRealTime) { + return beat - *prev_beat_; + } + RTC_CHECK_EQ(PacingMode::kConstantRate, settings_.mode); + return 1 / settings_.constant_rate; + } + + PacingSettings settings_; + absl::optional prev_beat_; + absl::optional prev_time_; + TimeDelta delay_; +}; + +// Task queue that keeps the number of queued tasks below a certain limit. If +// the limit is reached, posting of a next task is blocked until execution of a +// previously posted task starts. This class is not thread-safe. +class LimitedTaskQueue { + public: + // The codec tester reads frames from video source in the main thread. + // Encoding and decoding are done in separate threads. If encoding or + // decoding is slow, the reading may go far ahead and may buffer too many + // frames in memory. To prevent this we limit the encoding/decoding queue + // size. When the queue is full, the main thread and, hence, reading frames + // from video source is blocked until a previously posted encoding/decoding + // task starts. 
+ static constexpr int kMaxTaskQueueSize = 3; + + explicit LimitedTaskQueue(rtc::TaskQueue& task_queue) + : task_queue_(task_queue), queue_size_(0) {} + + void PostDelayedTask(absl::AnyInvocable task, TimeDelta delay) { + ++queue_size_; + task_queue_.PostDelayedTask( + [this, task = std::move(task)]() mutable { + std::move(task)(); + --queue_size_; + task_executed_.Set(); + }, + delay); + + task_executed_.Reset(); + if (queue_size_ > kMaxTaskQueueSize) { + task_executed_.Wait(rtc::Event::kForever); + } + RTC_CHECK(queue_size_ <= kMaxTaskQueueSize); + } + + void WaitForPreviouslyPostedTasks() { + while (queue_size_ > 0) { + task_executed_.Wait(rtc::Event::kForever); + task_executed_.Reset(); + } + } + + rtc::TaskQueue& task_queue_; + std::atomic_int queue_size_; + rtc::Event task_executed_; +}; + +class TesterDecoder { + public: + TesterDecoder(std::unique_ptr decoder, + VideoCodecAnalyzer* analyzer, + const DecoderSettings& settings, + rtc::TaskQueue& task_queue) + : decoder_(std::move(decoder)), + analyzer_(analyzer), + settings_(settings), + pacer_(settings.pacing), + task_queue_(task_queue) { + RTC_CHECK(analyzer_) << "Analyzer must be provided"; + } + + void Decode(const EncodedImage& frame) { + Timestamp timestamp = Timestamp::Micros((frame.Timestamp() / k90kHz).us()); + + task_queue_.PostDelayedTask( + [this, frame] { + analyzer_->StartDecode(frame); + decoder_->Decode(frame, [this](const VideoFrame& decoded_frame) { + this->analyzer_->FinishDecode(decoded_frame, /*spatial_idx=*/0); + }); + }, + pacer_.Delay(timestamp)); + } + + void Flush() { task_queue_.WaitForPreviouslyPostedTasks(); } + + protected: + std::unique_ptr decoder_; + VideoCodecAnalyzer* const analyzer_; + const DecoderSettings& settings_; + Pacer pacer_; + LimitedTaskQueue task_queue_; +}; + +class TesterEncoder { + public: + TesterEncoder(std::unique_ptr encoder, + TesterDecoder* decoder, + VideoCodecAnalyzer* analyzer, + const EncoderSettings& settings, + rtc::TaskQueue& task_queue) + : 
encoder_(std::move(encoder)), + decoder_(decoder), + analyzer_(analyzer), + settings_(settings), + pacer_(settings.pacing), + task_queue_(task_queue) { + RTC_CHECK(analyzer_) << "Analyzer must be provided"; + } + + void Encode(const VideoFrame& frame) { + Timestamp timestamp = Timestamp::Micros((frame.timestamp() / k90kHz).us()); + + task_queue_.PostDelayedTask( + [this, frame] { + analyzer_->StartEncode(frame); + encoder_->Encode(frame, [this](const EncodedImage& encoded_frame) { + this->analyzer_->FinishEncode(encoded_frame); + if (decoder_ != nullptr) { + this->decoder_->Decode(encoded_frame); + } + }); + }, + pacer_.Delay(timestamp)); + } + + void Flush() { task_queue_.WaitForPreviouslyPostedTasks(); } + + protected: + std::unique_ptr encoder_; + TesterDecoder* const decoder_; + VideoCodecAnalyzer* const analyzer_; + const EncoderSettings& settings_; + Pacer pacer_; + LimitedTaskQueue task_queue_; +}; + +} // namespace + +VideoCodecTesterImpl::VideoCodecTesterImpl() + : VideoCodecTesterImpl(/*task_queue_factory=*/nullptr) {} + +VideoCodecTesterImpl::VideoCodecTesterImpl(TaskQueueFactory* task_queue_factory) + : task_queue_factory_(task_queue_factory) { + if (task_queue_factory_ == nullptr) { + owned_task_queue_factory_ = CreateDefaultTaskQueueFactory(); + task_queue_factory_ = owned_task_queue_factory_.get(); + } +} + +std::unique_ptr VideoCodecTesterImpl::RunDecodeTest( + std::unique_ptr video_source, + std::unique_ptr decoder, + const DecoderSettings& decoder_settings) { + rtc::TaskQueue analyser_task_queue(task_queue_factory_->CreateTaskQueue( + "Analyzer", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue decoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Decoder", TaskQueueFactory::Priority::NORMAL)); + + VideoCodecAnalyzer perf_analyzer(analyser_task_queue); + TesterDecoder tester_decoder(std::move(decoder), &perf_analyzer, + decoder_settings, decoder_task_queue); + + while (auto frame = video_source->PullFrame()) { + 
tester_decoder.Decode(*frame); + } + + tester_decoder.Flush(); + + return perf_analyzer.GetStats(); +} + +std::unique_ptr VideoCodecTesterImpl::RunEncodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + const EncoderSettings& encoder_settings) { + rtc::TaskQueue analyser_task_queue(task_queue_factory_->CreateTaskQueue( + "Analyzer", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue encoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Encoder", TaskQueueFactory::Priority::NORMAL)); + + SyncRawVideoSource sync_source(std::move(video_source)); + VideoCodecAnalyzer perf_analyzer(analyser_task_queue); + TesterEncoder tester_encoder(std::move(encoder), /*decoder=*/nullptr, + &perf_analyzer, encoder_settings, + encoder_task_queue); + + while (auto frame = sync_source.PullFrame()) { + tester_encoder.Encode(*frame); + } + + tester_encoder.Flush(); + + return perf_analyzer.GetStats(); +} + +std::unique_ptr VideoCodecTesterImpl::RunEncodeDecodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + std::unique_ptr decoder, + const EncoderSettings& encoder_settings, + const DecoderSettings& decoder_settings) { + rtc::TaskQueue analyser_task_queue(task_queue_factory_->CreateTaskQueue( + "Analyzer", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue decoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Decoder", TaskQueueFactory::Priority::NORMAL)); + rtc::TaskQueue encoder_task_queue(task_queue_factory_->CreateTaskQueue( + "Encoder", TaskQueueFactory::Priority::NORMAL)); + + SyncRawVideoSource sync_source(std::move(video_source)); + VideoCodecAnalyzer perf_analyzer(analyser_task_queue, &sync_source); + TesterDecoder tester_decoder(std::move(decoder), &perf_analyzer, + decoder_settings, decoder_task_queue); + TesterEncoder tester_encoder(std::move(encoder), &tester_decoder, + &perf_analyzer, encoder_settings, + encoder_task_queue); + + while (auto frame = sync_source.PullFrame()) { + tester_encoder.Encode(*frame); + } + + 
tester_encoder.Flush(); + tester_decoder.Flush(); + + return perf_analyzer.GetStats(); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_tester_impl.h b/modules/video_coding/codecs/test/video_codec_tester_impl.h new file mode 100644 index 0000000000..b64adeb882 --- /dev/null +++ b/modules/video_coding/codecs/test/video_codec_tester_impl.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_ +#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_ + +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/test/video_codec_tester.h" + +namespace webrtc { +namespace test { + +// A stateless implementation of `VideoCodecTester`. This class is thread safe. 
+class VideoCodecTesterImpl : public VideoCodecTester { + public: + VideoCodecTesterImpl(); + explicit VideoCodecTesterImpl(TaskQueueFactory* task_queue_factory); + + std::unique_ptr RunDecodeTest( + std::unique_ptr video_source, + std::unique_ptr decoder, + const DecoderSettings& decoder_settings) override; + + std::unique_ptr RunEncodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + const EncoderSettings& encoder_settings) override; + + std::unique_ptr RunEncodeDecodeTest( + std::unique_ptr video_source, + std::unique_ptr encoder, + std::unique_ptr decoder, + const EncoderSettings& encoder_settings, + const DecoderSettings& decoder_settings) override; + + protected: + std::unique_ptr owned_task_queue_factory_; + TaskQueueFactory* task_queue_factory_; +}; + +} // namespace test +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_ diff --git a/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc b/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc new file mode 100644 index 0000000000..29fb006fb5 --- /dev/null +++ b/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc @@ -0,0 +1,259 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" + +#include +#include +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/task_queue/test/mock_task_queue_base.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/gunit.h" +#include "rtc_base/task_queue_for_test.h" +#include "rtc_base/time_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { + +namespace { +using ::testing::_; +using ::testing::Invoke; +using ::testing::InvokeWithoutArgs; +using ::testing::Return; + +using Decoder = VideoCodecTester::Decoder; +using Encoder = VideoCodecTester::Encoder; +using CodedVideoSource = VideoCodecTester::CodedVideoSource; +using RawVideoSource = VideoCodecTester::RawVideoSource; +using DecoderSettings = VideoCodecTester::DecoderSettings; +using EncoderSettings = VideoCodecTester::EncoderSettings; +using PacingSettings = VideoCodecTester::PacingSettings; +using PacingMode = PacingSettings::PacingMode; + +constexpr Frequency k90kHz = Frequency::Hertz(90000); + +VideoFrame CreateVideoFrame(uint32_t timestamp_rtp) { + rtc::scoped_refptr buffer(I420Buffer::Create(2, 2)); + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(timestamp_rtp) + .build(); +} + +EncodedImage CreateEncodedImage(uint32_t timestamp_rtp) { + EncodedImage encoded_image; + encoded_image.SetTimestamp(timestamp_rtp); + return encoded_image; +} + +class MockRawVideoSource : public RawVideoSource { + public: + MOCK_METHOD(absl::optional, PullFrame, (), (override)); + MOCK_METHOD(VideoFrame, + GetFrame, + (uint32_t timestamp_rtp, Resolution), + (override)); +}; + +class MockCodedVideoSource : public CodedVideoSource { + public: + MOCK_METHOD(absl::optional, PullFrame, (), (override)); +}; + 
+class MockDecoder : public Decoder { + public: + MOCK_METHOD(void, + Decode, + (const EncodedImage& frame, DecodeCallback callback), + (override)); +}; + +class MockEncoder : public Encoder { + public: + MOCK_METHOD(void, + Encode, + (const VideoFrame& frame, EncodeCallback callback), + (override)); +}; + +class MockTaskQueueFactory : public TaskQueueFactory { + public: + explicit MockTaskQueueFactory(TaskQueueBase& task_queue) + : task_queue_(task_queue) {} + + std::unique_ptr CreateTaskQueue( + absl::string_view name, + Priority priority) const override { + return std::unique_ptr(&task_queue_); + } + + protected: + TaskQueueBase& task_queue_; +}; +} // namespace + +class VideoCodecTesterImplPacingTest + : public ::testing::TestWithParam, + std::vector, + std::vector>> { + public: + VideoCodecTesterImplPacingTest() + : pacing_settings_(std::get<0>(GetParam())), + frame_timestamp_ms_(std::get<1>(GetParam())), + frame_capture_delay_ms_(std::get<2>(GetParam())), + expected_frame_start_ms_(std::get<3>(GetParam())), + num_frames_(frame_timestamp_ms_.size()), + task_queue_factory_(task_queue_) {} + + void SetUp() override { + ON_CALL(task_queue_, PostTask) + .WillByDefault(Invoke( + [](absl::AnyInvocable task) { std::move(task)(); })); + + ON_CALL(task_queue_, PostDelayedTask) + .WillByDefault( + Invoke([&](absl::AnyInvocable task, TimeDelta delay) { + clock_.AdvanceTime(delay); + std::move(task)(); + })); + } + + protected: + PacingSettings pacing_settings_; + std::vector frame_timestamp_ms_; + std::vector frame_capture_delay_ms_; + std::vector expected_frame_start_ms_; + size_t num_frames_; + + rtc::ScopedFakeClock clock_; + MockTaskQueueBase task_queue_; + MockTaskQueueFactory task_queue_factory_; +}; + +TEST_P(VideoCodecTesterImplPacingTest, PaceEncode) { + auto video_source = std::make_unique(); + + size_t frame_num = 0; + EXPECT_CALL(*video_source, PullFrame).WillRepeatedly(Invoke([&]() mutable { + if (frame_num >= num_frames_) { + return absl::optional(); + } + 
clock_.AdvanceTime(TimeDelta::Millis(frame_capture_delay_ms_[frame_num])); + + uint32_t timestamp_rtp = frame_timestamp_ms_[frame_num] * k90kHz.hertz() / + rtc::kNumMillisecsPerSec; + ++frame_num; + return absl::optional(CreateVideoFrame(timestamp_rtp)); + })); + + auto encoder = std::make_unique(); + EncoderSettings encoder_settings; + encoder_settings.pacing = pacing_settings_; + + VideoCodecTesterImpl tester(&task_queue_factory_); + auto fs = tester + .RunEncodeTest(std::move(video_source), std::move(encoder), + encoder_settings) + ->GetFrameStatistics(); + ASSERT_EQ(fs.size(), num_frames_); + + for (size_t i = 0; i < fs.size(); ++i) { + int encode_start_ms = (fs[i].encode_start_ns - fs[0].encode_start_ns) / + rtc::kNumNanosecsPerMillisec; + EXPECT_NEAR(encode_start_ms, expected_frame_start_ms_[i], 10); + } +} + +TEST_P(VideoCodecTesterImplPacingTest, PaceDecode) { + auto video_source = std::make_unique(); + + size_t frame_num = 0; + EXPECT_CALL(*video_source, PullFrame).WillRepeatedly(Invoke([&]() mutable { + if (frame_num >= num_frames_) { + return absl::optional(); + } + clock_.AdvanceTime(TimeDelta::Millis(frame_capture_delay_ms_[frame_num])); + + uint32_t timestamp_rtp = frame_timestamp_ms_[frame_num] * k90kHz.hertz() / + rtc::kNumMillisecsPerSec; + ++frame_num; + return absl::optional(CreateEncodedImage(timestamp_rtp)); + })); + + auto decoder = std::make_unique(); + DecoderSettings decoder_settings; + decoder_settings.pacing = pacing_settings_; + + VideoCodecTesterImpl tester(&task_queue_factory_); + auto fs = tester + .RunDecodeTest(std::move(video_source), std::move(decoder), + decoder_settings) + ->GetFrameStatistics(); + ASSERT_EQ(fs.size(), num_frames_); + + for (size_t i = 0; i < fs.size(); ++i) { + int decode_start_ms = (fs[i].decode_start_ns - fs[0].decode_start_ns) / + rtc::kNumNanosecsPerMillisec; + EXPECT_NEAR(decode_start_ms, expected_frame_start_ms_[i], 10); + } +} + +INSTANTIATE_TEST_SUITE_P( + All, + VideoCodecTesterImplPacingTest, + 
::testing::ValuesIn( + {std::make_tuple(PacingSettings({.mode = PacingMode::kNoPacing}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 0}, + /*expected_frame_start_ms=*/std::vector{0, 0}), + // Pace with rate equal to the source frame rate. Frames are captured + // instantly. Verify that frames are paced with the source frame rate. + std::make_tuple(PacingSettings({.mode = PacingMode::kRealTime}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 0}, + /*expected_frame_start_ms=*/std::vector{0, 100}), + // Pace with rate equal to the source frame rate. Frame capture is + // delayed by more than pacing time. Verify that no extra delay is + // added. + std::make_tuple(PacingSettings({.mode = PacingMode::kRealTime}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 200}, + /*expected_frame_start_ms=*/std::vector{0, 200}), + // Pace with constant rate less then source frame rate. Frames are + // captured instantly. Verify that frames are paced with the requested + // constant rate. + std::make_tuple( + PacingSettings({.mode = PacingMode::kConstantRate, + .constant_rate = Frequency::Hertz(20)}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 0}, + /*expected_frame_start_ms=*/std::vector{0, 50}), + // Pace with constant rate less then source frame rate. Frame capture + // is delayed by more than the pacing time. Verify that no extra delay + // is added. 
+ std::make_tuple( + PacingSettings({.mode = PacingMode::kConstantRate, + .constant_rate = Frequency::Hertz(20)}), + /*frame_timestamp_ms=*/std::vector{0, 100}, + /*frame_capture_delay_ms=*/std::vector{0, 200}, + /*expected_frame_start_ms=*/std::vector{0, 200})})); +} // namespace test +} // namespace webrtc diff --git a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc index e7028f6fe1..e56e8a92af 100644 --- a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc +++ b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc @@ -736,13 +736,10 @@ bool VideoCodecTestFixtureImpl::SetUpAndInitObjects( int clip_height = config_.clip_height.value_or(config_.codec_settings.height); // Create file objects for quality analysis. - source_frame_reader_.reset(new YuvFrameReaderImpl( - config_.filepath, clip_width, clip_height, - config_.reference_width.value_or(clip_width), - config_.reference_height.value_or(clip_height), - YuvFrameReaderImpl::RepeatMode::kPingPong, config_.clip_fps, - config_.codec_settings.maxFramerate)); - EXPECT_TRUE(source_frame_reader_->Init()); + source_frame_reader_ = CreateYuvFrameReader( + config_.filepath, + Resolution({.width = clip_width, .height = clip_height}), + YuvFrameReaderImpl::RepeatMode::kPingPong); RTC_DCHECK(encoded_frame_writers_.empty()); RTC_DCHECK(decoded_frame_writers_.empty()); @@ -820,7 +817,7 @@ void VideoCodecTestFixtureImpl::ReleaseAndCloseObjects( DestroyEncoderAndDecoder(); }); - source_frame_reader_->Close(); + source_frame_reader_.reset(); // Close visualization files. 
for (auto& encoded_frame_writer : encoded_frame_writers_) { diff --git a/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc b/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc index efb7502e5d..390348b97a 100644 --- a/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc +++ b/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc @@ -58,7 +58,20 @@ FrameStatistics* VideoCodecTestStatsImpl::GetFrameWithTimestamp( return GetFrame(rtp_timestamp_to_frame_num_[layer_idx][timestamp], layer_idx); } -std::vector VideoCodecTestStatsImpl::GetFrameStatistics() { +FrameStatistics* VideoCodecTestStatsImpl::GetOrAddFrame(size_t timestamp_rtp, + size_t spatial_idx) { + if (rtp_timestamp_to_frame_num_[spatial_idx].count(timestamp_rtp) > 0) { + return GetFrameWithTimestamp(timestamp_rtp, spatial_idx); + } + + size_t frame_num = layer_stats_[spatial_idx].size(); + AddFrame(FrameStatistics(frame_num, timestamp_rtp, spatial_idx)); + + return GetFrameWithTimestamp(timestamp_rtp, spatial_idx); +} + +std::vector VideoCodecTestStatsImpl::GetFrameStatistics() + const { size_t capacity = 0; for (const auto& layer_stat : layer_stats_) { capacity += layer_stat.second.size(); @@ -92,7 +105,8 @@ VideoCodecTestStatsImpl::SliceAndCalcLayerVideoStatistic( for (size_t temporal_idx = 0; temporal_idx < num_temporal_layers; ++temporal_idx) { VideoStatistics layer_stat = SliceAndCalcVideoStatistic( - first_frame_num, last_frame_num, spatial_idx, temporal_idx, false); + first_frame_num, last_frame_num, spatial_idx, temporal_idx, false, + /*target_bitrate=*/absl::nullopt, /*target_framerate=*/absl::nullopt); layer_stats.push_back(layer_stat); } } @@ -110,9 +124,24 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcAggregatedVideoStatistic( RTC_CHECK_GT(num_spatial_layers, 0); RTC_CHECK_GT(num_temporal_layers, 0); - return SliceAndCalcVideoStatistic(first_frame_num, last_frame_num, - num_spatial_layers - 1, - num_temporal_layers - 1, true); + return 
SliceAndCalcVideoStatistic( + first_frame_num, last_frame_num, num_spatial_layers - 1, + num_temporal_layers - 1, true, /*target_bitrate=*/absl::nullopt, + /*target_framerate=*/absl::nullopt); +} + +VideoStatistics VideoCodecTestStatsImpl::CalcVideoStatistic( + size_t first_frame_num, + size_t last_frame_num, + DataRate target_bitrate, + Frequency target_framerate) { + size_t num_spatial_layers = 0; + size_t num_temporal_layers = 0; + GetNumberOfEncodedLayers(first_frame_num, last_frame_num, &num_spatial_layers, + &num_temporal_layers); + return SliceAndCalcVideoStatistic( + first_frame_num, last_frame_num, num_spatial_layers - 1, + num_temporal_layers - 1, true, target_bitrate, target_framerate); } size_t VideoCodecTestStatsImpl::Size(size_t spatial_idx) { @@ -175,7 +204,9 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( size_t last_frame_num, size_t spatial_idx, size_t temporal_idx, - bool aggregate_independent_layers) { + bool aggregate_independent_layers, + absl::optional target_bitrate, + absl::optional target_framerate) { VideoStatistics video_stat; float buffer_level_bits = 0.0f; @@ -200,8 +231,11 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( FrameStatistics last_successfully_decoded_frame(0, 0, 0); const size_t target_bitrate_kbps = - CalcLayerTargetBitrateKbps(first_frame_num, last_frame_num, spatial_idx, - temporal_idx, aggregate_independent_layers); + target_bitrate.has_value() + ? target_bitrate->kbps() + : CalcLayerTargetBitrateKbps(first_frame_num, last_frame_num, + spatial_idx, temporal_idx, + aggregate_independent_layers); const size_t target_bitrate_bps = 1000 * target_bitrate_kbps; RTC_CHECK_GT(target_bitrate_kbps, 0); // We divide by `target_bitrate_kbps`. 
@@ -303,7 +337,9 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( GetFrame(first_frame_num, spatial_idx)->rtp_timestamp; RTC_CHECK_GT(timestamp_delta, 0); const float input_framerate_fps = - 1.0 * kVideoPayloadTypeFrequency / timestamp_delta; + target_framerate.has_value() + ? target_framerate->millihertz() / 1000.0 + : 1.0 * kVideoPayloadTypeFrequency / timestamp_delta; RTC_CHECK_GT(input_framerate_fps, 0); const float duration_sec = num_frames / input_framerate_fps; diff --git a/modules/video_coding/codecs/test/videocodec_test_stats_impl.h b/modules/video_coding/codecs/test/videocodec_test_stats_impl.h index 61850d3622..1a7980aa0a 100644 --- a/modules/video_coding/codecs/test/videocodec_test_stats_impl.h +++ b/modules/video_coding/codecs/test/videocodec_test_stats_impl.h @@ -35,8 +35,12 @@ class VideoCodecTestStatsImpl : public VideoCodecTestStats { FrameStatistics* GetFrame(size_t frame_number, size_t spatial_idx); FrameStatistics* GetFrameWithTimestamp(size_t timestamp, size_t spatial_idx); + // Creates FrameStatisticts if it doesn't exists and/or returns + // created/existing FrameStatisticts. + FrameStatistics* GetOrAddFrame(size_t timestamp_rtp, size_t spatial_idx); + // Implements VideoCodecTestStats. 
- std::vector GetFrameStatistics() override; + std::vector GetFrameStatistics() const override; std::vector SliceAndCalcLayerVideoStatistic( size_t first_frame_num, size_t last_frame_num) override; @@ -44,6 +48,11 @@ class VideoCodecTestStatsImpl : public VideoCodecTestStats { VideoStatistics SliceAndCalcAggregatedVideoStatistic(size_t first_frame_num, size_t last_frame_num); + VideoStatistics CalcVideoStatistic(size_t first_frame, + size_t last_frame, + DataRate target_bitrate, + Frequency target_framerate) override; + size_t Size(size_t spatial_idx); void Clear(); @@ -65,7 +74,9 @@ class VideoCodecTestStatsImpl : public VideoCodecTestStats { size_t last_frame_num, size_t spatial_idx, size_t temporal_idx, - bool aggregate_independent_layers); + bool aggregate_independent_layers, + absl::optional target_bitrate, + absl::optional target_framerate); void GetNumberOfEncodedLayers(size_t first_frame_num, size_t last_frame_num, diff --git a/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc b/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc index 6477b6ab8c..89e7d2e1c4 100644 --- a/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc +++ b/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc @@ -38,6 +38,21 @@ TEST(StatsTest, AddAndGetFrame) { EXPECT_EQ(kTimestamp, frame_stat->rtp_timestamp); } +TEST(StatsTest, GetOrAddFrame_noFrame_createsNewFrameStat) { + VideoCodecTestStatsImpl stats; + stats.GetOrAddFrame(kTimestamp, 0); + FrameStatistics* frame_stat = stats.GetFrameWithTimestamp(kTimestamp, 0); + EXPECT_EQ(kTimestamp, frame_stat->rtp_timestamp); +} + +TEST(StatsTest, GetOrAddFrame_frameExists_returnsExistingFrameStat) { + VideoCodecTestStatsImpl stats; + stats.AddFrame(FrameStatistics(0, kTimestamp, 0)); + FrameStatistics* frame_stat1 = stats.GetFrameWithTimestamp(kTimestamp, 0); + FrameStatistics* frame_stat2 = stats.GetOrAddFrame(kTimestamp, 0); + EXPECT_EQ(frame_stat1, 
frame_stat2); +} + TEST(StatsTest, AddAndGetFrames) { VideoCodecTestStatsImpl stats; const size_t kNumFrames = 1000; diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc index 353a00df79..13266c40df 100644 --- a/modules/video_coding/codecs/test/videoprocessor.cc +++ b/modules/video_coding/codecs/test/videoprocessor.cc @@ -153,7 +153,6 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder, bitrate_allocator_( CreateBuiltinVideoBitrateAllocatorFactory() ->CreateVideoBitrateAllocator(config_.codec_settings)), - framerate_fps_(0), encode_callback_(this), input_frame_reader_(input_frame_reader), merged_encoded_frames_(num_simulcast_or_spatial_layers_), @@ -231,15 +230,27 @@ void VideoProcessor::ProcessFrame() { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!is_finalized_); + RTC_DCHECK_GT(target_rates_.size(), 0u); + RTC_DCHECK_EQ(target_rates_.begin()->first, 0u); + RateProfile target_rate = + std::prev(target_rates_.upper_bound(last_inputed_frame_num_))->second; + const size_t frame_number = last_inputed_frame_num_++; // Get input frame and store for future quality calculation. 
+ Resolution resolution = Resolution({.width = config_.codec_settings.width, + .height = config_.codec_settings.height}); + FrameReader::Ratio framerate_scale = FrameReader::Ratio( + {.num = config_.clip_fps.value_or(config_.codec_settings.maxFramerate), + .den = static_cast(config_.codec_settings.maxFramerate)}); rtc::scoped_refptr buffer = - input_frame_reader_->ReadFrame(); + input_frame_reader_->PullFrame( + /*frame_num*/ nullptr, resolution, framerate_scale); + RTC_CHECK(buffer) << "Tried to read too many frames from the file."; const size_t timestamp = last_inputed_timestamp_ + - static_cast(kVideoPayloadTypeFrequency / framerate_fps_); + static_cast(kVideoPayloadTypeFrequency / target_rate.input_fps); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) @@ -303,8 +314,10 @@ void VideoProcessor::ProcessFrame() { // Encode. const std::vector frame_types = (frame_number == 0) - ? std::vector{VideoFrameType::kVideoFrameKey} - : std::vector{VideoFrameType::kVideoFrameDelta}; + ? 
std::vector(num_simulcast_or_spatial_layers_, + VideoFrameType::kVideoFrameKey) + : std::vector(num_simulcast_or_spatial_layers_, + VideoFrameType::kVideoFrameDelta); const int encode_return_code = encoder_->Encode(input_frame, &frame_types); for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) { FrameStatistics* frame_stat = stats_->GetFrame(frame_number, i); @@ -316,12 +329,14 @@ void VideoProcessor::SetRates(size_t bitrate_kbps, double framerate_fps) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!is_finalized_); - framerate_fps_ = framerate_fps; - bitrate_allocation_ = + target_rates_[last_inputed_frame_num_] = + RateProfile({.target_kbps = bitrate_kbps, .input_fps = framerate_fps}); + + auto bitrate_allocation = bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( - static_cast(bitrate_kbps * 1000), framerate_fps_)); + static_cast(bitrate_kbps * 1000), framerate_fps)); encoder_->SetRates( - VideoEncoder::RateControlParameters(bitrate_allocation_, framerate_fps_)); + VideoEncoder::RateControlParameters(bitrate_allocation, framerate_fps)); } int32_t VideoProcessor::VideoProcessorDecodeCompleteCallback::Decoded( @@ -389,13 +404,20 @@ void VideoProcessor::FrameEncoded( first_encoded_frame_[spatial_idx] = false; last_encoded_frame_num_[spatial_idx] = frame_number; + RateProfile target_rate = + std::prev(target_rates_.upper_bound(frame_number))->second; + auto bitrate_allocation = + bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( + static_cast(target_rate.target_kbps * 1000), + target_rate.input_fps)); + // Update frame statistics. 
frame_stat->encoding_successful = true; frame_stat->encode_time_us = GetElapsedTimeMicroseconds( frame_stat->encode_start_ns, encode_stop_ns - post_encode_time_ns_); frame_stat->target_bitrate_kbps = - bitrate_allocation_.GetTemporalLayerSum(spatial_idx, temporal_idx) / 1000; - frame_stat->target_framerate_fps = framerate_fps_; + bitrate_allocation.GetTemporalLayerSum(spatial_idx, temporal_idx) / 1000; + frame_stat->target_framerate_fps = target_rate.input_fps; frame_stat->length_bytes = encoded_image.size(); frame_stat->frame_type = encoded_image._frameType; frame_stat->temporal_idx = temporal_idx; diff --git a/modules/video_coding/codecs/test/videoprocessor.h b/modules/video_coding/codecs/test/videoprocessor.h index 4c89c790a9..0a5fdf8622 100644 --- a/modules/video_coding/codecs/test/videoprocessor.h +++ b/modules/video_coding/codecs/test/videoprocessor.h @@ -25,6 +25,7 @@ #include "api/test/videocodec_test_fixture.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" +#include "api/video/resolution.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_frame.h" @@ -191,8 +192,9 @@ class VideoProcessor { webrtc::VideoEncoder* const encoder_; VideoDecoderList* const decoders_; const std::unique_ptr bitrate_allocator_; - VideoBitrateAllocation bitrate_allocation_ RTC_GUARDED_BY(sequence_checker_); - double framerate_fps_ RTC_GUARDED_BY(sequence_checker_); + + // Target bitrate and framerate per frame. + std::map target_rates_ RTC_GUARDED_BY(sequence_checker_); // Adapters for the codec callbacks. 
VideoProcessorEncodeCompleteCallback encode_callback_; diff --git a/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/modules/video_coding/codecs/test/videoprocessor_unittest.cc index 6af775cece..f1774af5df 100644 --- a/modules/video_coding/codecs/test/videoprocessor_unittest.cc +++ b/modules/video_coding/codecs/test/videoprocessor_unittest.cc @@ -38,7 +38,6 @@ namespace { const int kWidth = 352; const int kHeight = 288; -const int kFrameSize = kWidth * kHeight * 3 / 2; // I420. } // namespace @@ -52,8 +51,6 @@ class VideoProcessorTest : public ::testing::Test { decoders_.push_back(std::unique_ptr(decoder_mock_)); ExpectInit(); - EXPECT_CALL(frame_reader_mock_, FrameLength()) - .WillRepeatedly(Return(kFrameSize)); q_.SendTask( [this] { video_processor_ = std::make_unique( @@ -107,7 +104,7 @@ TEST_F(VideoProcessorTest, ProcessFrames_FixedFramerate) { .Times(1); q_.SendTask([=] { video_processor_->SetRates(kBitrateKbps, kFramerateFps); }); - EXPECT_CALL(frame_reader_mock_, ReadFrame()) + EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); EXPECT_CALL( encoder_mock_, @@ -136,7 +133,7 @@ TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) { q_.SendTask( [=] { video_processor_->SetRates(kBitrateKbps, kStartFramerateFps); }); - EXPECT_CALL(frame_reader_mock_, ReadFrame()) + EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::timestamp, kStartTimestamp), _)) diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 49ccf2dade..8e401fcc7b 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -25,6 +25,7 @@ #include "api/video/video_content_type.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_timing.h" 
+#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/vp8_temporal_layers.h" #include "api/video_codecs/vp8_temporal_layers_factory.h" #include "modules/video_coding/codecs/interface/common_constants.h" @@ -1103,6 +1104,17 @@ void LibvpxVp8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, codec_specific->template_structure->resolutions = { RenderResolution(pkt.data.frame.width[0], pkt.data.frame.height[0])}; } + switch (vpx_configs_[encoder_idx].ts_number_layers) { + case 1: + codec_specific->scalability_mode = ScalabilityMode::kL1T1; + break; + case 2: + codec_specific->scalability_mode = ScalabilityMode::kL1T2; + break; + case 3: + codec_specific->scalability_mode = ScalabilityMode::kL1T3; + break; + } } int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc index 67c9110b3c..4ca3de20d5 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc @@ -39,6 +39,10 @@ TEST(LibvpxVp8SimulcastTest, TestKeyFrameRequestsOnAllStreams) { fixture->TestKeyFrameRequestsOnAllStreams(); } +TEST(LibvpxVp8SimulcastTest, TestKeyFrameRequestsOnSpecificStreams) { + GTEST_SKIP() << "Not applicable to VP8."; +} + TEST(LibvpxVp8SimulcastTest, TestPaddingAllStreams) { auto fixture = CreateSpecificSimulcastTestFixture(); fixture->TestPaddingAllStreams(); diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc index 8cf761742e..c5a8b659c4 100644 --- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc +++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc @@ -640,7 +640,7 @@ TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) { EXPECT_FALSE(info.is_hardware_accelerated); EXPECT_TRUE(info.supports_simulcast); 
EXPECT_EQ(info.implementation_name, "libvpx"); - EXPECT_EQ(info.requested_resolution_alignment, 1); + EXPECT_EQ(info.requested_resolution_alignment, 1u); EXPECT_THAT(info.preferred_pixel_formats, testing::UnorderedElementsAre(VideoFrameBuffer::Type::kNV12, VideoFrameBuffer::Type::kI420)); @@ -655,7 +655,7 @@ TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) { LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); - EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10); + EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10u); EXPECT_FALSE( encoder.GetEncoderInfo().apply_alignment_to_all_simulcast_layers); EXPECT_TRUE(encoder.GetEncoderInfo().resolution_bitrate_limits.empty()); diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index 4c27f4ce22..c2884c0395 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -9,23 +9,26 @@ * */ +#include #ifdef RTC_ENABLE_VP9 -#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" - #include #include +#include #include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "absl/types/optional.h" #include "api/video/color_space.h" #include "api/video/i010_buffer.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/include/video_frame_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" @@ -79,13 +82,17 @@ std::pair GetActiveLayers( return {0, 0}; } -std::unique_ptr CreateVp9ScalabilityStructure( +using 
Vp9ScalabilityStructure = + std::tuple, ScalabilityMode>; +absl::optional CreateVp9ScalabilityStructure( const VideoCodec& codec) { int num_spatial_layers = codec.VP9().numberOfSpatialLayers; int num_temporal_layers = std::max(1, int{codec.VP9().numberOfTemporalLayers}); if (num_spatial_layers == 1 && num_temporal_layers == 1) { - return std::make_unique(); + return absl::make_optional( + std::make_unique(), + ScalabilityMode::kL1T1); } char name[20]; @@ -93,7 +100,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( if (codec.mode == VideoCodecMode::kScreensharing) { // TODO(bugs.webrtc.org/11999): Compose names of the structures when they // are implemented. - return nullptr; + return absl::nullopt; } else if (codec.VP9().interLayerPred == InterLayerPredMode::kOn || num_spatial_layers == 1) { ss << "L" << num_spatial_layers << "T" << num_temporal_layers; @@ -110,7 +117,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( codec.height != codec.spatialLayers[num_spatial_layers - 1].height) { RTC_LOG(LS_WARNING) << "Top layer resolution expected to match overall resolution"; - return nullptr; + return absl::nullopt; } // Check if the ratio is one of the supported. int numerator; @@ -128,7 +135,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( RTC_LOG(LS_WARNING) << "Unsupported scalability ratio " << codec.spatialLayers[0].width << ":" << codec.spatialLayers[1].width; - return nullptr; + return absl::nullopt; } // Validate ratio is consistent for all spatial layer transitions. 
for (int sid = 1; sid < num_spatial_layers; ++sid) { @@ -138,7 +145,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( codec.spatialLayers[sid - 1].height * denominator) { RTC_LOG(LS_WARNING) << "Inconsistent scalability ratio " << numerator << ":" << denominator; - return nullptr; + return absl::nullopt; } } } @@ -147,7 +154,7 @@ std::unique_ptr CreateVp9ScalabilityStructure( ScalabilityModeFromString(name); if (!scalability_mode.has_value()) { RTC_LOG(LS_WARNING) << "Invalid scalability mode " << name; - return nullptr; + return absl::nullopt; } auto scalability_structure_controller = CreateScalabilityStructure(*scalability_mode); @@ -156,7 +163,8 @@ std::unique_ptr CreateVp9ScalabilityStructure( } else { RTC_LOG(LS_INFO) << "Created scalability structure " << name; } - return scalability_structure_controller; + return absl::make_optional( + std::move(scalability_structure_controller), *scalability_mode); } vpx_svc_ref_frame_config_t Vp9References( @@ -570,12 +578,12 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, force_key_frame_ = true; pics_since_key_ = 0; - absl::optional scalability_mode = inst->GetScalabilityMode(); - if (scalability_mode.has_value()) { + scalability_mode_ = inst->GetScalabilityMode(); + if (scalability_mode_.has_value()) { // Use settings from `ScalabilityMode` identifier. 
RTC_LOG(LS_INFO) << "Create scalability structure " - << ScalabilityModeToString(*scalability_mode); - svc_controller_ = CreateScalabilityStructure(*scalability_mode); + << ScalabilityModeToString(*scalability_mode_); + svc_controller_ = CreateScalabilityStructure(*scalability_mode_); if (!svc_controller_) { RTC_LOG(LS_WARNING) << "Failed to create scalability structure."; return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; @@ -584,7 +592,7 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, svc_controller_->StreamConfig(); num_spatial_layers_ = info.num_spatial_layers; num_temporal_layers_ = info.num_temporal_layers; - inter_layer_pred_ = ScalabilityModeToInterLayerPredMode(*scalability_mode); + inter_layer_pred_ = ScalabilityModeToInterLayerPredMode(*scalability_mode_); } else { num_spatial_layers_ = inst->VP9().numberOfSpatialLayers; RTC_DCHECK_GT(num_spatial_layers_, 0); @@ -593,7 +601,14 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, num_temporal_layers_ = 1; } inter_layer_pred_ = inst->VP9().interLayerPred; - svc_controller_ = CreateVp9ScalabilityStructure(*inst); + auto vp9_scalability = CreateVp9ScalabilityStructure(*inst); + if (vp9_scalability.has_value()) { + std::tie(svc_controller_, scalability_mode_) = + std::move(vp9_scalability.value()); + } else { + svc_controller_ = nullptr; + scalability_mode_ = absl::nullopt; + } } framerate_controller_ = std::vector( @@ -1443,6 +1458,7 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, } } } + codec_specific->scalability_mode = scalability_mode_; return true; } diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h index fb6f234ead..6b662ae8f9 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h @@ -20,6 +20,7 @@ #include "api/fec_controller_override.h" #include "api/field_trials_view.h" +#include "api/video_codecs/scalability_mode.h" 
#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp9_profile.h" #include "common_video/include/video_frame_buffer_pool.h" @@ -151,6 +152,7 @@ class LibvpxVp9Encoder : public VP9Encoder { bool force_all_active_layers_; std::unique_ptr svc_controller_; + absl::optional scalability_mode_; std::vector framerate_controller_; // Used for flexible mode. diff --git a/modules/video_coding/codecs/vp9/svc_config.cc b/modules/video_coding/codecs/vp9/svc_config.cc index 77eee3dbf5..3a32a43622 100644 --- a/modules/video_coding/codecs/vp9/svc_config.cc +++ b/modules/video_coding/codecs/vp9/svc_config.cc @@ -172,6 +172,21 @@ std::vector GetVp9SvcConfig(VideoCodec& codec) { absl::optional scalability_mode = codec.GetScalabilityMode(); RTC_DCHECK(scalability_mode.has_value()); + // Limit number of spatial layers for given resolution. + int limited_num_spatial_layers = + GetLimitedNumSpatialLayers(codec.width, codec.height); + if (limited_num_spatial_layers < + ScalabilityModeToNumSpatialLayers(*scalability_mode)) { + ScalabilityMode limited_scalability_mode = + LimitNumSpatialLayers(*scalability_mode, limited_num_spatial_layers); + RTC_LOG(LS_WARNING) + << "Reducing number of spatial layers due to low input resolution: " + << ScalabilityModeToString(*scalability_mode) << " to " + << ScalabilityModeToString(limited_scalability_mode); + scalability_mode = limited_scalability_mode; + codec.SetScalabilityMode(limited_scalability_mode); + } + absl::optional info = ScalabilityStructureConfig(*scalability_mode); if (!info.has_value()) { @@ -180,16 +195,6 @@ std::vector GetVp9SvcConfig(VideoCodec& codec) { return {}; } - if (static_cast(GetLimitedNumSpatialLayers(codec.width, codec.height)) < - info->num_spatial_layers) { - // Layers will be reduced, do not use scalability mode for now. - // TODO(bugs.webrtc.org/11607): Use a lower scalability mode once all lower - // modes are supported. 
- codec.UnsetScalabilityMode(); - codec.VP9()->interLayerPred = - ScalabilityModeToInterLayerPredMode(*scalability_mode); - } - // TODO(bugs.webrtc.org/11607): Add support for screensharing. std::vector spatial_layers = GetSvcConfig(codec.width, codec.height, codec.maxFramerate, diff --git a/modules/video_coding/codecs/vp9/svc_config_unittest.cc b/modules/video_coding/codecs/vp9/svc_config_unittest.cc index 4de3c5b2a6..762fd39287 100644 --- a/modules/video_coding/codecs/vp9/svc_config_unittest.cc +++ b/modules/video_coding/codecs/vp9/svc_config_unittest.cc @@ -72,15 +72,14 @@ TEST(SvcConfig, NumSpatialLayersLimitedWithScalabilityMode) { codec.height = 270; codec.SetScalabilityMode(ScalabilityMode::kL3T3_KEY); - // Scalability mode reset, configuration should be in accordance to L2T3_KEY. + // Scalability mode updated. std::vector spatial_layers = GetVp9SvcConfig(codec); EXPECT_THAT(spatial_layers, ElementsAre(Field(&SpatialLayer::height, 135), Field(&SpatialLayer::height, 270))); EXPECT_THAT(spatial_layers, ElementsAre(Field(&SpatialLayer::numberOfTemporalLayers, 3), Field(&SpatialLayer::numberOfTemporalLayers, 3))); - EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOnKeyPic); - EXPECT_EQ(codec.GetScalabilityMode(), absl::nullopt); + EXPECT_EQ(codec.GetScalabilityMode(), ScalabilityMode::kL2T3_KEY); } TEST(SvcConfig, NumSpatialLayersLimitedWithScalabilityModePortrait) { @@ -90,15 +89,14 @@ TEST(SvcConfig, NumSpatialLayersLimitedWithScalabilityModePortrait) { codec.height = 480; codec.SetScalabilityMode(ScalabilityMode::kL3T1); - // Scalability mode reset, configuration should be in accordance to L2T1. + // Scalability mode updated. 
std::vector spatial_layers = GetVp9SvcConfig(codec); EXPECT_THAT(spatial_layers, ElementsAre(Field(&SpatialLayer::width, 135), Field(&SpatialLayer::width, 270))); EXPECT_THAT(spatial_layers, ElementsAre(Field(&SpatialLayer::numberOfTemporalLayers, 1), Field(&SpatialLayer::numberOfTemporalLayers, 1))); - EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOn); - EXPECT_EQ(codec.GetScalabilityMode(), absl::nullopt); + EXPECT_EQ(codec.GetScalabilityMode(), ScalabilityMode::kL2T1); } TEST(SvcConfig, NumSpatialLayersWithScalabilityModeResolutionRatio1_5) { @@ -122,15 +120,14 @@ TEST(SvcConfig, NumSpatialLayersLimitedWithScalabilityModeResolutionRatio1_5) { codec.codecType = kVideoCodecVP9; codec.width = 320; codec.height = 180; - codec.SetScalabilityMode(ScalabilityMode::kL2T1h); // 1.5:1 + codec.SetScalabilityMode(ScalabilityMode::kL3T1h); // 1.5:1 - // Scalability mode reset, configuration should be in accordance to L1T1. + // Scalability mode updated. std::vector spatial_layers = GetVp9SvcConfig(codec); EXPECT_THAT(spatial_layers, ElementsAre(Field(&SpatialLayer::width, 320))); EXPECT_THAT(spatial_layers, ElementsAre(Field(&SpatialLayer::numberOfTemporalLayers, 1))); - EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOn); - EXPECT_EQ(codec.GetScalabilityMode(), absl::nullopt); + EXPECT_EQ(codec.GetScalabilityMode(), ScalabilityMode::kL1T1); } TEST(SvcConfig, AlwaysSendsAtLeastOneLayer) { diff --git a/modules/video_coding/generic_decoder.cc b/modules/video_coding/generic_decoder.cc index dac8f2cd43..b660e02b72 100644 --- a/modules/video_coding/generic_decoder.cc +++ b/modules/video_coding/generic_decoder.cc @@ -20,6 +20,7 @@ #include "absl/algorithm/container.h" #include "absl/types/optional.h" #include "api/video/video_timing.h" +#include "api/video_codecs/video_decoder.h" #include "modules/include/module_common_types_public.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" @@ -202,9 +203,9 @@ void 
VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, frame_info->content_type); } -void VCMDecodedFrameCallback::OnDecoderImplementationName( - const char* implementation_name) { - _receiveCallback->OnDecoderImplementationName(implementation_name); +void VCMDecodedFrameCallback::OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo& decoder_info) { + _receiveCallback->OnDecoderInfoChanged(decoder_info); } void VCMDecodedFrameCallback::Map(FrameInfo frameInfo) { @@ -254,8 +255,7 @@ bool VCMGenericDecoder::Configure(const VideoDecoder::Settings& settings) { decoder_info_ = decoder_->GetDecoderInfo(); RTC_LOG(LS_INFO) << "Decoder implementation: " << decoder_info_.ToString(); if (_callback) { - _callback->OnDecoderImplementationName( - decoder_info_.implementation_name.c_str()); + _callback->OnDecoderInfoChanged(decoder_info_); } return ok; } @@ -293,10 +293,10 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { RTC_LOG(LS_INFO) << "Changed decoder implementation to: " << decoder_info.ToString(); decoder_info_ = decoder_info; - _callback->OnDecoderImplementationName( - decoder_info.implementation_name.empty() - ? 
"unknown" - : decoder_info.implementation_name.c_str()); + if (decoder_info.implementation_name.empty()) { + decoder_info.implementation_name = "unknown"; + } + _callback->OnDecoderInfoChanged(std::move(decoder_info)); } if (ret < WEBRTC_VIDEO_CODEC_OK) { RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp " @@ -314,8 +314,7 @@ int32_t VCMGenericDecoder::RegisterDecodeCompleteCallback( _callback = callback; int32_t ret = decoder_->RegisterDecodeCompleteCallback(callback); if (callback && !decoder_info_.implementation_name.empty()) { - callback->OnDecoderImplementationName( - decoder_info_.implementation_name.c_str()); + callback->OnDecoderInfoChanged(decoder_info_); } return ret; } diff --git a/modules/video_coding/generic_decoder.h b/modules/video_coding/generic_decoder.h index d7e1850abb..7dc6d34c01 100644 --- a/modules/video_coding/generic_decoder.h +++ b/modules/video_coding/generic_decoder.h @@ -63,7 +63,7 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { absl::optional decode_time_ms, absl::optional qp) override; - void OnDecoderImplementationName(const char* implementation_name); + void OnDecoderInfoChanged(const VideoDecoder::DecoderInfo& decoder_info); void Map(FrameInfo frameInfo); void ClearTimestampMap(); diff --git a/modules/video_coding/include/video_codec_interface.h b/modules/video_coding/include/video_codec_interface.h index 261ffb11c1..46ae0d29e1 100644 --- a/modules/video_coding/include/video_codec_interface.h +++ b/modules/video_coding/include/video_codec_interface.h @@ -16,6 +16,7 @@ #include "absl/base/attributes.h" #include "absl/types/optional.h" #include "api/video/video_frame.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" @@ -112,6 +113,7 @@ struct RTC_EXPORT CodecSpecificInfo { bool end_of_picture = true; absl::optional generic_frame_info; absl::optional 
template_structure; + absl::optional scalability_mode; }; } // namespace webrtc diff --git a/modules/video_coding/include/video_coding_defines.h b/modules/video_coding/include/video_coding_defines.h index 8b93b07aa0..8f70e0298d 100644 --- a/modules/video_coding/include/video_coding_defines.h +++ b/modules/video_coding/include/video_coding_defines.h @@ -18,6 +18,7 @@ #include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video/video_timing.h" +#include "api/video_codecs/video_decoder.h" namespace webrtc { @@ -58,7 +59,8 @@ class VCMReceiveCallback { // Called when the current receive codec changes. virtual void OnIncomingPayloadType(int payload_type); - virtual void OnDecoderImplementationName(const char* implementation_name); + virtual void OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo& decoder_info); protected: virtual ~VCMReceiveCallback() {} diff --git a/modules/video_coding/svc/scalability_mode_util.cc b/modules/video_coding/svc/scalability_mode_util.cc index 39a4f1fd1e..35d66df203 100644 --- a/modules/video_coding/svc/scalability_mode_util.cc +++ b/modules/video_coding/svc/scalability_mode_util.cc @@ -292,4 +292,99 @@ absl::optional ScalabilityModeToResolutionRatio( RTC_CHECK_NOTREACHED(); } +ScalabilityMode LimitNumSpatialLayers(ScalabilityMode scalability_mode, + int max_spatial_layers) { + int num_spatial_layers = ScalabilityModeToNumSpatialLayers(scalability_mode); + if (max_spatial_layers >= num_spatial_layers) { + return scalability_mode; + } + + switch (scalability_mode) { + case ScalabilityMode::kL1T1: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL1T2: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL1T3: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL2T1: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL2T1h: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL2T1_KEY: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL2T2: + return 
ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T2h: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T2_KEY: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T2_KEY_SHIFT: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T3: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL2T3h: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL2T3_KEY: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL3T1: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T1 + : ScalabilityMode::kL1T1; + case ScalabilityMode::kL3T1h: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T1h + : ScalabilityMode::kL1T1; + case ScalabilityMode::kL3T1_KEY: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T1_KEY + : ScalabilityMode::kL1T1; + case ScalabilityMode::kL3T2: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T2 + : ScalabilityMode::kL1T2; + case ScalabilityMode::kL3T2h: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T2h + : ScalabilityMode::kL1T2; + case ScalabilityMode::kL3T2_KEY: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T2_KEY + : ScalabilityMode::kL1T2; + case ScalabilityMode::kL3T3: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T3 + : ScalabilityMode::kL1T3; + case ScalabilityMode::kL3T3h: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T3h + : ScalabilityMode::kL1T3; + case ScalabilityMode::kL3T3_KEY: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T3_KEY + : ScalabilityMode::kL1T3; + case ScalabilityMode::kS2T1: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kS2T1h: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kS2T2: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kS2T2h: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kS2T3: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kS2T3h: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kS3T1: + return max_spatial_layers == 2 ? 
ScalabilityMode::kS2T1 + : ScalabilityMode::kL1T1; + case ScalabilityMode::kS3T1h: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T1h + : ScalabilityMode::kL1T1; + case ScalabilityMode::kS3T2: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T2 + : ScalabilityMode::kL1T2; + case ScalabilityMode::kS3T2h: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T2h + : ScalabilityMode::kL1T2; + case ScalabilityMode::kS3T3: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T3 + : ScalabilityMode::kL1T3; + case ScalabilityMode::kS3T3h: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T3h + : ScalabilityMode::kL1T3; + } + RTC_CHECK_NOTREACHED(); +} + } // namespace webrtc diff --git a/modules/video_coding/svc/scalability_mode_util.h b/modules/video_coding/svc/scalability_mode_util.h index fa0b730e52..aef955a9a5 100644 --- a/modules/video_coding/svc/scalability_mode_util.h +++ b/modules/video_coding/svc/scalability_mode_util.h @@ -36,6 +36,9 @@ int ScalabilityModeToNumTemporalLayers(ScalabilityMode scalability_mode); absl::optional ScalabilityModeToResolutionRatio( ScalabilityMode scalability_mode); +ScalabilityMode LimitNumSpatialLayers(ScalabilityMode scalability_mode, + int max_spatial_layers); + } // namespace webrtc #endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_MODE_UTIL_H_ diff --git a/modules/video_coding/svc/scalability_mode_util_unittest.cc b/modules/video_coding/svc/scalability_mode_util_unittest.cc index 7fb103631f..448494ffcc 100644 --- a/modules/video_coding/svc/scalability_mode_util_unittest.cc +++ b/modules/video_coding/svc/scalability_mode_util_unittest.cc @@ -10,6 +10,10 @@ #include "modules/video_coding/svc/scalability_mode_util.h" +#include +#include +#include + #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/video_codecs/scalability_mode.h" @@ -30,7 +34,7 @@ TEST(ScalabilityModeUtil, RejectsUnknownString) { // Check roundtrip conversion of all enum values. 
TEST(ScalabilityModeUtil, ConvertsAllToAndFromString) { - const ScalabilityMode kLastEnum = ScalabilityMode::kS3T3; + const ScalabilityMode kLastEnum = ScalabilityMode::kS3T3h; for (int numerical_enum = 0; numerical_enum <= static_cast(kLastEnum); numerical_enum++) { ScalabilityMode scalability_mode = @@ -43,5 +47,70 @@ TEST(ScalabilityModeUtil, ConvertsAllToAndFromString) { } } +struct TestParams { + std::string scalability_mode; + std::vector, std::string>> + limited_scalability_mode; +}; + +class NumSpatialLayersTest : public ::testing::TestWithParam {}; + +INSTANTIATE_TEST_SUITE_P( + MaxLayers, + NumSpatialLayersTest, + ::testing::ValuesIn( + {{"L1T1", {{{0, 1}, "L1T1"}, {{2}, "L1T1"}, {{3}, "L1T1"}}}, + {"L1T2", {{{0, 1}, "L1T2"}, {{2}, "L1T2"}, {{3}, "L1T2"}}}, + {"L1T3", {{{0, 1}, "L1T3"}, {{2}, "L1T3"}, {{3}, "L1T3"}}}, + {"L2T1", {{{0, 1}, "L1T1"}, {{2}, "L2T1"}, {{3}, "L2T1"}}}, + {"L2T1h", {{{0, 1}, "L1T1"}, {{2}, "L2T1h"}, {{3}, "L2T1h"}}}, + {"L2T1_KEY", {{{0, 1}, "L1T1"}, {{2}, "L2T1_KEY"}, {{3}, "L2T1_KEY"}}}, + {"L2T2", {{{0, 1}, "L1T2"}, {{2}, "L2T2"}, {{3}, "L2T2"}}}, + {"L2T2h", {{{0, 1}, "L1T2"}, {{2}, "L2T2h"}, {{3}, "L2T2h"}}}, + {"L2T2_KEY", {{{0, 1}, "L1T2"}, {{2}, "L2T2_KEY"}, {{3}, "L2T2_KEY"}}}, + {"L2T2_KEY_SHIFT", + {{{0, 1}, "L1T2"}, {{2}, "L2T2_KEY_SHIFT"}, {{3}, "L2T2_KEY_SHIFT"}}}, + {"L2T3", {{{0, 1}, "L1T3"}, {{2}, "L2T3"}, {{3}, "L2T3"}}}, + {"L2T3h", {{{0, 1}, "L1T3"}, {{2}, "L2T3h"}, {{3}, "L2T3h"}}}, + {"L2T3_KEY", {{{0, 1}, "L1T3"}, {{2}, "L2T3_KEY"}, {{3}, "L2T3_KEY"}}}, + {"L3T1", {{{0, 1}, "L1T1"}, {{2}, "L2T1"}, {{3}, "L3T1"}}}, + {"L3T1h", {{{0, 1}, "L1T1"}, {{2}, "L2T1h"}, {{3}, "L3T1h"}}}, + {"L3T1_KEY", {{{0, 1}, "L1T1"}, {{2}, "L2T1_KEY"}, {{3}, "L3T1_KEY"}}}, + {"L3T2", {{{0, 1}, "L1T2"}, {{2}, "L2T2"}, {{3}, "L3T2"}}}, + {"L3T2h", {{{0, 1}, "L1T2"}, {{2}, "L2T2h"}, {{3}, "L3T2h"}}}, + {"L3T2_KEY", {{{0, 1}, "L1T2"}, {{2}, "L2T2_KEY"}, {{3}, "L3T2_KEY"}}}, + {"L3T3", {{{0, 1}, "L1T3"}, {{2}, "L2T3"}, {{3}, 
"L3T3"}}}, + {"L3T3h", {{{0, 1}, "L1T3"}, {{2}, "L2T3h"}, {{3}, "L3T3h"}}}, + {"L3T3_KEY", {{{0, 1}, "L1T3"}, {{2}, "L2T3_KEY"}, {{3}, "L3T3_KEY"}}}, + {"S2T1", {{{0, 1}, "L1T1"}, {{2}, "S2T1"}, {{3}, "S2T1"}}}, + {"S2T1h", {{{0, 1}, "L1T1"}, {{2}, "S2T1h"}, {{3}, "S2T1h"}}}, + {"S2T2", {{{0, 1}, "L1T2"}, {{2}, "S2T2"}, {{3}, "S2T2"}}}, + {"S2T2h", {{{0, 1}, "L1T2"}, {{2}, "S2T2h"}, {{3}, "S2T2h"}}}, + {"S2T3", {{{0, 1}, "L1T3"}, {{2}, "S2T3"}, {{3}, "S2T3"}}}, + {"S2T3h", {{{0, 1}, "L1T3"}, {{2}, "S2T3h"}, {{3}, "S2T3h"}}}, + {"S3T1", {{{0, 1}, "L1T1"}, {{2}, "S2T1"}, {{3}, "S3T1"}}}, + {"S3T1h", {{{0, 1}, "L1T1"}, {{2}, "S2T1h"}, {{3}, "S3T1h"}}}, + {"S3T2", {{{0, 1}, "L1T2"}, {{2}, "S2T2"}, {{3}, "S3T2"}}}, + {"S3T2h", {{{0, 1}, "L1T2"}, {{2}, "S2T2h"}, {{3}, "S3T2h"}}}, + {"S3T3", {{{0, 1}, "L1T3"}, {{2}, "S2T3"}, {{3}, "S3T3"}}}, + {"S3T3h", {{{0, 1}, "L1T3"}, {{2}, "S2T3h"}, {{3}, "S3T3h"}}}}), + [](const ::testing::TestParamInfo& info) { + return info.param.scalability_mode; + }); + +TEST_P(NumSpatialLayersTest, LimitsSpatialLayers) { + const ScalabilityMode mode = + *ScalabilityModeFromString(GetParam().scalability_mode); + for (const auto& param : GetParam().limited_scalability_mode) { + const std::vector max_num_spatial_layers = + std::get>(param); + const ScalabilityMode expected_mode = + *ScalabilityModeFromString(std::get(param)); + for (const auto& max_layers : max_num_spatial_layers) { + EXPECT_EQ(expected_mode, LimitNumSpatialLayers(mode, max_layers)); + } + } +} + } // namespace } // namespace webrtc diff --git a/modules/video_coding/utility/ivf_file_reader.cc b/modules/video_coding/utility/ivf_file_reader.cc index 85d1fa00d7..13092b5e24 100644 --- a/modules/video_coding/utility/ivf_file_reader.cc +++ b/modules/video_coding/utility/ivf_file_reader.cc @@ -30,6 +30,9 @@ constexpr uint8_t kVp9Header[kCodecTypeBytesCount] = {'V', 'P', '9', '0'}; constexpr uint8_t kAv1Header[kCodecTypeBytesCount] = {'A', 'V', '0', '1'}; constexpr uint8_t 
kH264Header[kCodecTypeBytesCount] = {'H', '2', '6', '4'}; +// RTP standard required 90kHz clock rate. +constexpr int32_t kRtpClockRateHz = 90000; + } // namespace std::unique_ptr IvfFileReader::Create(FileWrapper file) { @@ -77,13 +80,9 @@ bool IvfFileReader::Reset() { return false; } - uint32_t time_scale = ByteReader::ReadLittleEndian(&ivf_header[16]); - if (time_scale == 1000) { - using_capture_timestamps_ = true; - } else if (time_scale == 90000) { - using_capture_timestamps_ = false; - } else { - RTC_LOG(LS_ERROR) << "Invalid IVF header: Unknown time scale"; + time_scale_ = ByteReader::ReadLittleEndian(&ivf_header[16]); + if (time_scale_ == 0) { + RTC_LOG(LS_ERROR) << "Invalid IVF header: time scale can't be 0"; return false; } @@ -106,8 +105,7 @@ bool IvfFileReader::Reset() { const char* codec_name = CodecTypeToPayloadString(codec_type_); RTC_LOG(LS_INFO) << "Opened IVF file with codec data of type " << codec_name << " at resolution " << width_ << " x " << height_ - << ", using " << (using_capture_timestamps_ ? 
"1" : "90") - << "kHz clock resolution."; + << ", using " << time_scale_ << "Hz clock resolution."; return true; } @@ -157,12 +155,9 @@ absl::optional IvfFileReader::NextFrame() { } EncodedImage image; - if (using_capture_timestamps_) { - image.capture_time_ms_ = current_timestamp; - image.SetTimestamp(static_cast(90 * current_timestamp)); - } else { - image.SetTimestamp(static_cast(current_timestamp)); - } + image.capture_time_ms_ = current_timestamp; + image.SetTimestamp( + static_cast(current_timestamp * kRtpClockRateHz / time_scale_)); image.SetEncodedData(payload); image.SetSpatialIndex(static_cast(layer_sizes.size()) - 1); for (size_t i = 0; i < layer_sizes.size(); ++i) { diff --git a/modules/video_coding/utility/ivf_file_reader.h b/modules/video_coding/utility/ivf_file_reader.h index 75f2e3ac8c..db4fc25575 100644 --- a/modules/video_coding/utility/ivf_file_reader.h +++ b/modules/video_coding/utility/ivf_file_reader.h @@ -70,7 +70,7 @@ class IvfFileReader { size_t num_read_frames_; uint16_t width_; uint16_t height_; - bool using_capture_timestamps_; + uint32_t time_scale_; FileWrapper file_; absl::optional next_frame_header_; diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/utility/simulcast_test_fixture_impl.cc index 84cd2e1589..35224b17ed 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc +++ b/modules/video_coding/utility/simulcast_test_fixture_impl.cc @@ -44,6 +44,7 @@ const int kMaxBitrates[kNumberOfSimulcastStreams] = {150, 600, 1200}; const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600}; const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000}; const float kMaxFramerates[kNumberOfSimulcastStreams] = {30, 30, 30}; +const int kScaleResolutionDownBy[kNumberOfSimulcastStreams] = {4, 2, 1}; const int kDefaultTemporalLayerProfile[3] = {3, 3, 3}; const int kNoTemporalLayerProfile[3] = {0, 0, 0}; @@ -333,45 +334,30 @@ void 
SimulcastTestFixtureImpl::UpdateActiveStreams( EXPECT_EQ(0, encoder_->InitEncode(&settings_, kSettings)); } +void SimulcastTestFixtureImpl::ExpectStream(VideoFrameType frame_type, + int scaleResolutionDownBy) { + EXPECT_CALL( + encoder_callback_, + OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, frame_type), + Field(&EncodedImage::_encodedWidth, + kDefaultWidth / scaleResolutionDownBy), + Field(&EncodedImage::_encodedHeight, + kDefaultHeight / scaleResolutionDownBy)), + _)) + .Times(1) + .WillRepeatedly(Return( + EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); +} + void SimulcastTestFixtureImpl::ExpectStreams( VideoFrameType frame_type, const std::vector expected_streams_active) { ASSERT_EQ(static_cast(expected_streams_active.size()), kNumberOfSimulcastStreams); - if (expected_streams_active[0]) { - EXPECT_CALL( - encoder_callback_, - OnEncodedImage( - AllOf(Field(&EncodedImage::_frameType, frame_type), - Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4), - Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)), - _)) - .Times(1) - .WillRepeatedly(Return( - EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); - } - if (expected_streams_active[1]) { - EXPECT_CALL( - encoder_callback_, - OnEncodedImage( - AllOf(Field(&EncodedImage::_frameType, frame_type), - Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2), - Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)), - _)) - .Times(1) - .WillRepeatedly(Return( - EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); - } - if (expected_streams_active[2]) { - EXPECT_CALL(encoder_callback_, - OnEncodedImage( - AllOf(Field(&EncodedImage::_frameType, frame_type), - Field(&EncodedImage::_encodedWidth, kDefaultWidth), - Field(&EncodedImage::_encodedHeight, kDefaultHeight)), - _)) - .Times(1) - .WillRepeatedly(Return( - EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); + for (size_t i = 0; i < kNumberOfSimulcastStreams; i++) { + if 
(expected_streams_active[i]) { + ExpectStream(frame_type, kScaleResolutionDownBy[i]); + } } } @@ -400,8 +386,8 @@ void SimulcastTestFixtureImpl::VerifyTemporalIdxAndSyncForAllSpatialLayers( } } -// We currently expect all active streams to generate a key frame even though -// a key frame was only requested for some of them. +// For some codecs (VP8) expect all active streams to generate a key frame even +// though a key frame was only requested for some of them. void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() { SetRates(kMaxBitrates[2], 30); // To get all three streams. std::vector frame_types(kNumberOfSimulcastStreams, @@ -439,6 +425,69 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } +// For some codecs (H264) expect only particular active streams to generate a +// key frame when a key frame was only requested for some of them. +void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { + SetRates(kMaxBitrates[2], 30); // To get all three streams. 
+ std::vector frame_types(kNumberOfSimulcastStreams, + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + frame_types[0] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[1] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[2] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + frame_types[0] = VideoFrameType::kVideoFrameKey; + 
frame_types[2] = VideoFrameType::kVideoFrameKey; + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameKey); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); + ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); + + std::fill(frame_types.begin(), frame_types.end(), + VideoFrameType::kVideoFrameDelta); + ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); + input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); +} + void SimulcastTestFixtureImpl::TestPaddingAllStreams() { // We should always encode the base layer. SetRates(kMinBitrates[0] - 1, 30); diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.h b/modules/video_coding/utility/simulcast_test_fixture_impl.h index cdfdc609d5..f142ab4813 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.h +++ b/modules/video_coding/utility/simulcast_test_fixture_impl.h @@ -35,6 +35,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { // Implements SimulcastTestFixture. 
void TestKeyFrameRequestsOnAllStreams() override; + void TestKeyFrameRequestsOnSpecificStreams() override; void TestPaddingAllStreams() override; void TestPaddingTwoStreams() override; void TestPaddingTwoStreamsOneMaxedOut() override; @@ -66,6 +67,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { void SetRates(uint32_t bitrate_kbps, uint32_t fps); void RunActiveStreamsTest(std::vector active_streams); void UpdateActiveStreams(std::vector active_streams); + void ExpectStream(VideoFrameType frame_type, int scaleResolutionDownBy); void ExpectStreams(VideoFrameType frame_type, std::vector expected_streams_active); void ExpectStreams(VideoFrameType frame_type, int expected_video_streams); diff --git a/modules/video_coding/video_coding_defines.cc b/modules/video_coding/video_coding_defines.cc index 424b23f971..436b1a6490 100644 --- a/modules/video_coding/video_coding_defines.cc +++ b/modules/video_coding/video_coding_defines.cc @@ -14,7 +14,7 @@ namespace webrtc { void VCMReceiveCallback::OnDroppedFrames(uint32_t frames_dropped) {} void VCMReceiveCallback::OnIncomingPayloadType(int payload_type) {} -void VCMReceiveCallback::OnDecoderImplementationName( - const char* implementation_name) {} +void VCMReceiveCallback::OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo&) {} } // namespace webrtc diff --git a/modules/video_coding/video_receiver2_unittest.cc b/modules/video_coding/video_receiver2_unittest.cc index 703c6c31f7..6edf1230d8 100644 --- a/modules/video_coding/video_receiver2_unittest.cc +++ b/modules/video_coding/video_receiver2_unittest.cc @@ -41,7 +41,10 @@ class MockVCMReceiveCallback : public VCMReceiveCallback { (VideoFrame&, absl::optional, TimeDelta, VideoContentType), (override)); MOCK_METHOD(void, OnIncomingPayloadType, (int), (override)); - MOCK_METHOD(void, OnDecoderImplementationName, (const char*), (override)); + MOCK_METHOD(void, + OnDecoderInfoChanged, + (const VideoDecoder::DecoderInfo&), + (override)); }; class 
TestEncodedFrame : public EncodedFrame { @@ -126,7 +129,7 @@ TEST_F(VideoReceiver2Test, RegisterReceiveCodecs) { EXPECT_TRUE(receiver_.IsExternalDecoderRegistered(kPayloadType)); EXPECT_CALL(receive_callback_, OnIncomingPayloadType(kPayloadType)); - EXPECT_CALL(receive_callback_, OnDecoderImplementationName); + EXPECT_CALL(receive_callback_, OnDecoderInfoChanged); // Call `Decode`. This triggers the above call expectations. EXPECT_EQ(receiver_.Decode(&frame), VCM_OK); diff --git a/modules/video_coding/video_receiver_unittest.cc b/modules/video_coding/video_receiver_unittest.cc index f2ebce8ec2..fe9674e521 100644 --- a/modules/video_coding/video_receiver_unittest.cc +++ b/modules/video_coding/video_receiver_unittest.cc @@ -44,7 +44,10 @@ class MockVCMReceiveCallback : public VCMReceiveCallback { (VideoFrame&, absl::optional, TimeDelta, VideoContentType), (override)); MOCK_METHOD(void, OnIncomingPayloadType, (int), (override)); - MOCK_METHOD(void, OnDecoderImplementationName, (const char*), (override)); + MOCK_METHOD(void, + OnDecoderInfoChanged, + (const VideoDecoder::DecoderInfo&), + (override)); }; class TestVideoReceiver : public ::testing::Test { @@ -74,8 +77,7 @@ class TestVideoReceiver : public ::testing::Test { // Since we call Decode, we need to provide a valid receive callback. // However, for the purposes of these tests, we ignore the callbacks. 
EXPECT_CALL(receive_callback_, OnIncomingPayloadType(_)).Times(AnyNumber()); - EXPECT_CALL(receive_callback_, OnDecoderImplementationName(_)) - .Times(AnyNumber()); + EXPECT_CALL(receive_callback_, OnDecoderInfoChanged).Times(AnyNumber()); receiver_.RegisterReceiveCallback(&receive_callback_); } diff --git a/net/dcsctp/common/sequence_numbers.h b/net/dcsctp/common/sequence_numbers.h index 919fc5014a..c3422c2ccd 100644 --- a/net/dcsctp/common/sequence_numbers.h +++ b/net/dcsctp/common/sequence_numbers.h @@ -119,6 +119,14 @@ class UnwrappedSequenceNumber { return value_ <= other.value_; } + // Const accessors for underlying value. + constexpr const int64_t* operator->() const { return &value_; } + constexpr const int64_t& operator*() const& { return value_; } + constexpr const int64_t&& operator*() const&& { return std::move(value_); } + constexpr const int64_t& value() const& { return value_; } + constexpr const int64_t&& value() const&& { return std::move(value_); } + constexpr explicit operator const int64_t&() const& { return value_; } + // Increments the value. 
void Increment() { ++value_; } diff --git a/p2p/BUILD.gn b/p2p/BUILD.gn index dc552e086d..ea252e0839 100644 --- a/p2p/BUILD.gn +++ b/p2p/BUILD.gn @@ -114,6 +114,7 @@ rtc_library("rtc_p2p") { "../api/transport:field_trial_based_config", "../api/transport:stun_types", "../api/units:time_delta", + "../api/units:timestamp", "../logging:ice_log", "../rtc_base", "../rtc_base:async_resolver_interface", @@ -207,6 +208,7 @@ if (rtc_include_tests) { "base/fake_packet_transport.h", "base/mock_active_ice_controller.h", "base/mock_async_resolver.h", + "base/mock_dns_resolving_packet_socket_factory.h", "base/mock_ice_agent.h", "base/mock_ice_controller.h", "base/mock_ice_transport.h", @@ -222,6 +224,7 @@ if (rtc_include_tests) { ":rtc_p2p", "../api:dtls_transport_interface", "../api:libjingle_peerconnection_api", + "../api:mock_async_dns_resolver", "../api:packet_socket_factory", "../api:sequence_checker", "../api:turn_customizer", diff --git a/p2p/base/connection.cc b/p2p/base/connection.cc index 931d3bf6d9..ce0b2fdcf4 100644 --- a/p2p/base/connection.cc +++ b/p2p/base/connection.cc @@ -18,6 +18,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/escaping.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" #include "p2p/base/port_allocator.h" @@ -234,6 +235,7 @@ Connection::Connection(rtc::WeakPtr port, last_ping_response_received_(0), state_(IceCandidatePairState::WAITING), time_created_ms_(rtc::TimeMillis()), + delta_internal_unix_epoch_ms_(rtc::TimeUTCMillis() - rtc::TimeMillis()), field_trials_(&kDefaultFieldTrials), rtt_estimate_(DEFAULT_RTT_ESTIMATE_HALF_TIME_MS) { RTC_DCHECK_RUN_ON(network_thread_); @@ -474,22 +476,20 @@ void Connection::OnReadPacket(const char* data, rtc::LoggingSeverity sev = (!writable() ? rtc::LS_INFO : rtc::LS_VERBOSE); switch (msg->integrity()) { case StunMessage::IntegrityStatus::kNotSet: - // Late computation of integrity status, but not an error. + // This packet did not come through Port processing? 
+ // TODO(bugs.webrtc.org/14578): Clean up this situation. msg->ValidateMessageIntegrity(remote_candidate().password()); break; case StunMessage::IntegrityStatus::kIntegrityOk: if (remote_candidate().password() != msg->password()) { - // Password has changed. Recheck message. - // TODO(crbug.com/1177125): Redesign logic to check only once. - msg->RevalidateMessageIntegrity(remote_candidate().password()); + // TODO(bugs.webrtc.org/14578): Do a better thing + RTC_LOG(LS_INFO) << "STUN code error - Different passwords, old = " + << absl::CHexEscape(msg->password()) << ", new " + << absl::CHexEscape(remote_candidate().password()); } break; - case StunMessage::IntegrityStatus::kIntegrityBad: - // Possibly we have a new password to try. - // TODO(crbug.com/1177125): Redesign logic to check only once. - msg->RevalidateMessageIntegrity(remote_candidate().password()); - break; default: + // kIntegrityBad and kNoIntegrity. // This shouldn't happen. RTC_DCHECK_NOTREACHED(); break; @@ -1527,6 +1527,14 @@ ConnectionInfo Connection::stats() { stats_.total_round_trip_time_ms = total_round_trip_time_ms_; stats_.current_round_trip_time_ms = current_round_trip_time_ms_; stats_.remote_candidate = remote_candidate(); + if (last_data_received_ > 0) { + stats_.last_data_received = webrtc::Timestamp::Millis( + last_data_received_ + delta_internal_unix_epoch_ms_); + } + if (last_send_data_ > 0) { + stats_.last_data_sent = webrtc::Timestamp::Millis( + last_send_data_ + delta_internal_unix_epoch_ms_); + } return stats_; } diff --git a/p2p/base/connection.h b/p2p/base/connection.h index 7baff0287c..c0a19b510f 100644 --- a/p2p/base/connection.h +++ b/p2p/base/connection.h @@ -316,10 +316,22 @@ class Connection : public CandidatePairInterface { Port* PortForTest() { return port_.get(); } const Port* PortForTest() const { return port_.get(); } + std::unique_ptr BuildPingRequestForTest() { + RTC_DCHECK_RUN_ON(network_thread_); + return BuildPingRequest(); + } + // Public for unit tests. 
uint32_t acked_nomination() const; void set_remote_nomination(uint32_t remote_nomination); + const std::string& remote_password_for_test() const { + return remote_candidate().password(); + } + void set_remote_password_for_test(absl::string_view pwd) { + remote_candidate_.set_password(pwd); + } + protected: // A ConnectionRequest is a simple STUN ping used to determine writability. class ConnectionRequest; @@ -442,7 +454,8 @@ class Connection : public CandidatePairInterface { IceCandidatePairState state_ RTC_GUARDED_BY(network_thread_); // Time duration to switch from receiving to not receiving. absl::optional receiving_timeout_ RTC_GUARDED_BY(network_thread_); - int64_t time_created_ms_ RTC_GUARDED_BY(network_thread_); + const int64_t time_created_ms_ RTC_GUARDED_BY(network_thread_); + const int64_t delta_internal_unix_epoch_ms_ RTC_GUARDED_BY(network_thread_); int num_pings_sent_ RTC_GUARDED_BY(network_thread_) = 0; absl::optional log_description_ diff --git a/p2p/base/connection_info.h b/p2p/base/connection_info.h index a30b636d86..cd2a913451 100644 --- a/p2p/base/connection_info.h +++ b/p2p/base/connection_info.h @@ -15,6 +15,7 @@ #include "absl/types/optional.h" #include "api/candidate.h" +#include "api/units/timestamp.h" namespace cricket { @@ -72,6 +73,10 @@ struct ConnectionInfo { uint64_t total_round_trip_time_ms; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-currentroundtriptime absl::optional current_round_trip_time_ms; + + // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-lastpacketreceivedtimestamp + absl::optional last_data_received; + absl::optional last_data_sent; }; // Information about all the candidate pairs of a channel. 
diff --git a/p2p/base/dtls_transport.cc b/p2p/base/dtls_transport.cc index 904a0cbbc9..af16efad78 100644 --- a/p2p/base/dtls_transport.cc +++ b/p2p/base/dtls_transport.cc @@ -16,6 +16,7 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/dtls_transport_interface.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" @@ -71,10 +72,9 @@ StreamInterfaceChannel::StreamInterfaceChannel( state_(rtc::SS_OPEN), packets_(kMaxPendingPackets, kMaxDtlsPacketLen) {} -rtc::StreamResult StreamInterfaceChannel::Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) { +rtc::StreamResult StreamInterfaceChannel::Read(rtc::ArrayView buffer, + size_t& read, + int& error) { RTC_DCHECK_RUN_ON(&sequence_checker_); if (state_ == rtc::SS_CLOSED) @@ -82,27 +82,25 @@ rtc::StreamResult StreamInterfaceChannel::Read(void* buffer, if (state_ == rtc::SS_OPENING) return rtc::SR_BLOCK; - if (!packets_.ReadFront(buffer, buffer_len, read)) { + if (!packets_.ReadFront(buffer.data(), buffer.size(), &read)) { return rtc::SR_BLOCK; } return rtc::SR_SUCCESS; } -rtc::StreamResult StreamInterfaceChannel::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { +rtc::StreamResult StreamInterfaceChannel::Write( + rtc::ArrayView data, + size_t& written, + int& error) { RTC_DCHECK_RUN_ON(&sequence_checker_); // Always succeeds, since this is an unreliable transport anyway. // TODO(zhihuang): Should this block if ice_transport_'s temporarily // unwritable? 
rtc::PacketOptions packet_options; - ice_transport_->SendPacket(static_cast(data), data_len, - packet_options); - if (written) { - *written = data_len; - } + ice_transport_->SendPacket(reinterpret_cast(data.data()), + data.size(), packet_options); + written = data.size(); return rtc::SR_SUCCESS; } @@ -445,7 +443,12 @@ int DtlsTransport::SendPacket(const char* data, return ice_transport_->SendPacket(data, size, options); } else { - return (dtls_->WriteAll(data, size, NULL, NULL) == rtc::SR_SUCCESS) + size_t written; + int error; + return (dtls_->WriteAll( + rtc::MakeArrayView(reinterpret_cast(data), + size), + written, error) == rtc::SR_SUCCESS) ? static_cast(size) : -1; } @@ -691,16 +694,17 @@ void DtlsTransport::OnDtlsEvent(rtc::StreamInterface* dtls, int sig, int err) { } } if (sig & rtc::SE_READ) { - char buf[kMaxDtlsPacketLen]; + uint8_t buf[kMaxDtlsPacketLen]; size_t read; int read_error; rtc::StreamResult ret; // The underlying DTLS stream may have received multiple DTLS records in // one packet, so read all of them. do { - ret = dtls_->Read(buf, sizeof(buf), &read, &read_error); + ret = dtls_->Read(buf, read, read_error); if (ret == rtc::SR_SUCCESS) { - SignalReadPacket(this, buf, read, rtc::TimeMicros(), 0); + SignalReadPacket(this, reinterpret_cast(buf), read, + rtc::TimeMicros(), 0); } else if (ret == rtc::SR_EOS) { // Remote peer shut down the association with no error. RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed by remote"; @@ -754,7 +758,9 @@ void DtlsTransport::MaybeStartDtls() { set_dtls_state(webrtc::DtlsTransportState::kFailed); return; } - RTC_LOG(LS_INFO) << ToString() << ": DtlsTransport: Started DTLS handshake"; + RTC_LOG(LS_INFO) << ToString() + << ": DtlsTransport: Started DTLS handshake active=" + << IsDtlsActive(); set_dtls_state(webrtc::DtlsTransportState::kConnecting); // Now that the handshake has started, we can process a cached ClientHello // (if one exists). 
diff --git a/p2p/base/dtls_transport.h b/p2p/base/dtls_transport.h index 2b26e2553f..4e21410b76 100644 --- a/p2p/base/dtls_transport.h +++ b/p2p/base/dtls_transport.h @@ -49,14 +49,12 @@ class StreamInterfaceChannel : public rtc::StreamInterface { // Implementations of StreamInterface rtc::StreamState GetState() const override; void Close() override; - rtc::StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override; - rtc::StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; + rtc::StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override; + rtc::StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) override; private: RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; diff --git a/p2p/base/fake_port_allocator.h b/p2p/base/fake_port_allocator.h index c311e55a93..5646a40c9e 100644 --- a/p2p/base/fake_port_allocator.h +++ b/p2p/base/fake_port_allocator.h @@ -112,6 +112,7 @@ class FakePortAllocatorSession : public PortAllocatorSession { field_trials_(field_trials) { ipv4_network_.AddIP(rtc::IPAddress(INADDR_LOOPBACK)); ipv6_network_.AddIP(rtc::IPAddress(in6addr_loopback)); + set_ice_tiebreaker(/*kTiebreakerDefault = */ 44444); } void SetCandidateFilter(uint32_t filter) override { @@ -128,6 +129,7 @@ class FakePortAllocatorSession : public PortAllocatorSession { username(), password(), false, &field_trials_)); RTC_DCHECK(port_); + port_->SetIceTiebreaker(ice_tiebreaker()); // RingRTC change to support ICE forking port_->SignalDestroyed.connect(this, &FakePortAllocatorSession::OnPortDestroyed); AddPort(port_.get()); diff --git a/p2p/base/ice_agent_interface.h b/p2p/base/ice_agent_interface.h index c00204cbc5..30b6ade6e6 100644 --- a/p2p/base/ice_agent_interface.h +++ b/p2p/base/ice_agent_interface.h @@ -11,8 +11,7 @@ #ifndef P2P_BASE_ICE_AGENT_INTERFACE_H_ #define P2P_BASE_ICE_AGENT_INTERFACE_H_ -#include - +#include "api/array_view.h" 
#include "p2p/base/connection.h" #include "p2p/base/ice_switch_reason.h" @@ -61,7 +60,7 @@ class IceAgentInterface { // // SignalStateChange will not be triggered. virtual void ForgetLearnedStateForConnections( - std::vector connections) = 0; + rtc::ArrayView connections) = 0; // Send a STUN ping request for the given connection. virtual void SendPingRequest(const Connection* connection) = 0; @@ -72,7 +71,8 @@ class IceAgentInterface { // Prune away the given connections. Returns true if pruning is permitted and // successfully performed. - virtual bool PruneConnections(std::vector connections) = 0; + virtual bool PruneConnections( + rtc::ArrayView connections) = 0; }; } // namespace cricket diff --git a/p2p/base/mock_dns_resolving_packet_socket_factory.h b/p2p/base/mock_dns_resolving_packet_socket_factory.h new file mode 100644 index 0000000000..8f18e9b0e1 --- /dev/null +++ b/p2p/base/mock_dns_resolving_packet_socket_factory.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ +#define P2P_BASE_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ + +#include +#include + +#include "api/test/mock_async_dns_resolver.h" +#include "p2p/base/basic_packet_socket_factory.h" + +namespace rtc { + +// A PacketSocketFactory implementation for tests that uses a mock DnsResolver +// and allows setting expectations on the resolver and results. 
+class MockDnsResolvingPacketSocketFactory : public BasicPacketSocketFactory { + public: + using Expectations = std::function; + + explicit MockDnsResolvingPacketSocketFactory(SocketFactory* socket_factory) + : BasicPacketSocketFactory(socket_factory) {} + + std::unique_ptr CreateAsyncDnsResolver() + override { + std::unique_ptr resolver = + std::make_unique(); + if (expectations_) { + expectations_(resolver.get(), &resolver_result_); + } + return resolver; + } + + void SetExpectations(Expectations expectations) { + expectations_ = expectations; + } + + private: + webrtc::MockAsyncDnsResolverResult resolver_result_; + Expectations expectations_; +}; + +} // namespace rtc + +#endif // P2P_BASE_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ diff --git a/p2p/base/mock_ice_agent.h b/p2p/base/mock_ice_agent.h index e4100ecd7a..a1c0ebffbf 100644 --- a/p2p/base/mock_ice_agent.h +++ b/p2p/base/mock_ice_agent.h @@ -32,7 +32,7 @@ class MockIceAgent : public IceAgentInterface { MOCK_METHOD(void, UpdateState, (), (override)); MOCK_METHOD(void, ForgetLearnedStateForConnections, - (std::vector), + (rtc::ArrayView), (override)); MOCK_METHOD(void, SendPingRequest, (const Connection*), (override)); MOCK_METHOD(void, @@ -41,7 +41,7 @@ class MockIceAgent : public IceAgentInterface { (override)); MOCK_METHOD(bool, PruneConnections, - (std::vector), + (rtc::ArrayView), (override)); }; diff --git a/p2p/base/p2p_transport_channel.cc b/p2p/base/p2p_transport_channel.cc index cfcdf467b2..52f17d927e 100644 --- a/p2p/base/p2p_transport_channel.cc +++ b/p2p/base/p2p_transport_channel.cc @@ -381,7 +381,7 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection( } void P2PTransportChannel::ForgetLearnedStateForConnections( - std::vector connections) { + rtc::ArrayView connections) { for (const Connection* con : connections) { FromIceController(con)->ForgetLearnedState(); } @@ -782,9 +782,6 @@ void P2PTransportChannel::ParseFieldTrials( if (field_trials->IsEnabled("WebRTC-ExtraICEPing")) { 
RTC_LOG(LS_INFO) << "Set WebRTC-ExtraICEPing: Enabled"; } - if (field_trials->IsEnabled("WebRTC-TurnAddMultiMapping")) { - RTC_LOG(LS_INFO) << "Set WebRTC-TurnAddMultiMapping: Enabled"; - } webrtc::StructParametersParser::Create( // go/skylift-light @@ -2026,7 +2023,7 @@ void P2PTransportChannel::PruneConnections() { } bool P2PTransportChannel::PruneConnections( - std::vector connections) { + rtc::ArrayView connections) { RTC_DCHECK_RUN_ON(network_thread_); if (!AllowedToPruneConnections()) { RTC_LOG(LS_WARNING) << "Not allowed to prune connections"; diff --git a/p2p/base/p2p_transport_channel.h b/p2p/base/p2p_transport_channel.h index 14ec3fab54..8cc2e9842b 100644 --- a/p2p/base/p2p_transport_channel.h +++ b/p2p/base/p2p_transport_channel.h @@ -187,8 +187,9 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, void SwitchSelectedConnection(const Connection* connection, IceSwitchReason reason) override; void ForgetLearnedStateForConnections( - std::vector connections) override; - bool PruneConnections(std::vector connections) override; + rtc::ArrayView connections) override; + bool PruneConnections( + rtc::ArrayView connections) override; // TODO(honghaiz): Remove this method once the reference of it in // Chromoting is removed. 
diff --git a/p2p/base/p2p_transport_channel_unittest.cc b/p2p/base/p2p_transport_channel_unittest.cc index 0744d3bac6..0e255a58d8 100644 --- a/p2p/base/p2p_transport_channel_unittest.cc +++ b/p2p/base/p2p_transport_channel_unittest.cc @@ -142,6 +142,7 @@ const cricket::IceParameters kIceParams[4] = { const uint64_t kLowTiebreaker = 11111; const uint64_t kHighTiebreaker = 22222; +const uint64_t kTiebreakerDefault = 44444; cricket::IceConfig CreateIceConfig( int receiving_timeout, @@ -301,6 +302,11 @@ class P2PTransportChannelTestBase : public ::testing::Test, ep2_.allocator_.reset(CreateBasicPortAllocator( &ep2_.network_manager_, ss_.get(), stun_servers, kTurnUdpIntAddr, rtc::SocketAddress())); + + ep1_.SetIceTiebreaker(kTiebreakerDefault); + ep1_.allocator_->SetIceTiebreaker(kTiebreakerDefault); + ep2_.SetIceTiebreaker(kTiebreakerDefault); + ep2_.allocator_->SetIceTiebreaker(kTiebreakerDefault); webrtc::metrics::Reset(); } @@ -2732,8 +2738,8 @@ TEST_P(P2PTransportChannelMultihomedTest, TestFailoverWithManyConnections) { RelayServerConfig turn_server; turn_server.credentials = kRelayCredentials; turn_server.ports.push_back(ProtocolAddress(kTurnTcpIntAddr, PROTO_TCP)); - GetAllocator(0)->AddTurnServer(turn_server); - GetAllocator(1)->AddTurnServer(turn_server); + GetAllocator(0)->AddTurnServerForTesting(turn_server); + GetAllocator(1)->AddTurnServerForTesting(turn_server); // Enable IPv6 SetAllocatorFlags( 0, PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); @@ -3477,6 +3483,7 @@ class P2PTransportChannelPingTest : public TestWithParam, protected: void PrepareChannel(P2PTransportChannel* ch) { ch->SetIceRole(ICEROLE_CONTROLLING); + ch->SetIceTiebreaker(kTiebreakerDefault); ch->SetIceParameters(kIceParams[0]); ch->SetRemoteIceParameters(kIceParams[1]); ch->SignalNetworkRouteChanged.connect( @@ -3841,6 +3848,7 @@ TEST_P(P2PTransportChannelPingTest, PingingStartedAsSoonAsPossible) { FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory()); 
P2PTransportChannel ch("TestChannel", 1, &pa, &field_trials_); ch.SetIceRole(ICEROLE_CONTROLLING); + ch.SetIceTiebreaker(kTiebreakerDefault); ch.SetIceParameters(kIceParams[0]); ch.MaybeStartGathering(); EXPECT_EQ_WAIT(IceGatheringState::kIceGatheringComplete, ch.gathering_state(), @@ -5231,7 +5239,7 @@ TEST_P(P2PTransportChannelMostLikelyToWorkFirstTest, TestTcpTurn) { RelayServerConfig config; config.credentials = kRelayCredentials; config.ports.push_back(ProtocolAddress(kTurnTcpIntAddr, PROTO_TCP)); - allocator()->AddTurnServer(config); + allocator()->AddTurnServerForTesting(config); P2PTransportChannel& ch = StartTransportChannel(true, 500, &field_trials_); EXPECT_TRUE_WAIT(ch.ports().size() == 3, kDefaultTimeout); diff --git a/p2p/base/port.cc b/p2p/base/port.cc index 168292c50e..9b4369dd97 100644 --- a/p2p/base/port.cc +++ b/p2p/base/port.cc @@ -209,6 +209,7 @@ void Port::SetIceRole(IceRole role) { void Port::SetIceTiebreaker(uint64_t tiebreaker) { tiebreaker_ = tiebreaker; } + uint64_t Port::IceTiebreaker() const { return tiebreaker_; } diff --git a/p2p/base/port_allocator.h b/p2p/base/port_allocator.h index d7e5cb8cc5..4f8fecd7e7 100644 --- a/p2p/base/port_allocator.h +++ b/p2p/base/port_allocator.h @@ -173,14 +173,12 @@ struct RTC_EXPORT RelayServerConfig { ~RelayServerConfig(); bool operator==(const RelayServerConfig& o) const { - return ports == o.ports && credentials == o.credentials && - priority == o.priority; + return ports == o.ports && credentials == o.credentials; } bool operator!=(const RelayServerConfig& o) const { return !(*this == o); } PortList ports; RelayCredentials credentials; - int priority = 0; TlsCertPolicy tls_cert_policy = TlsCertPolicy::TLS_CERT_POLICY_SECURE; std::vector tls_alpn_protocols; std::vector tls_elliptic_curves; diff --git a/p2p/base/port_allocator_unittest.cc b/p2p/base/port_allocator_unittest.cc index 4d8f055f3a..839df4338b 100644 --- a/p2p/base/port_allocator_unittest.cc +++ b/p2p/base/port_allocator_unittest.cc @@ 
-25,6 +25,7 @@ static const char kIceUfrag[] = "UF00"; static const char kIcePwd[] = "TESTICEPWD00000000000000"; static const char kTurnUsername[] = "test"; static const char kTurnPassword[] = "test"; +constexpr uint64_t kTiebreakerDefault = 44444; class PortAllocatorTest : public ::testing::Test, public sigslot::has_slots<> { public: @@ -35,7 +36,9 @@ class PortAllocatorTest : public ::testing::Test, public sigslot::has_slots<> { std::make_unique(vss_.get())), allocator_(std::make_unique( rtc::Thread::Current(), - packet_socket_factory_.get())) {} + packet_socket_factory_.get())) { + allocator_->SetIceTiebreaker(kTiebreakerDefault); + } protected: void SetConfigurationWithPoolSize(int candidate_pool_size) { diff --git a/p2p/base/port_unittest.cc b/p2p/base/port_unittest.cc index d4eeb99747..eb322efce8 100644 --- a/p2p/base/port_unittest.cc +++ b/p2p/base/port_unittest.cc @@ -105,6 +105,7 @@ const uint32_t kDefaultPrflxPriority = ICE_TYPE_PREFERENCE_PRFLX << 24 | constexpr int kTiebreaker1 = 11111; constexpr int kTiebreaker2 = 22222; +constexpr int kTiebreakerDefault = 44444; const char* data = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"; @@ -534,35 +535,44 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { } std::unique_ptr CreateUdpPort(const SocketAddress& addr, PacketSocketFactory* socket_factory) { - return UDPPort::Create(&main_, socket_factory, MakeNetwork(addr), 0, 0, - username_, password_, true, absl::nullopt, - &field_trials_); + auto port = UDPPort::Create(&main_, socket_factory, MakeNetwork(addr), 0, 0, + username_, password_, true, absl::nullopt, + &field_trials_); + port->SetIceTiebreaker(kTiebreakerDefault); + return port; } + std::unique_ptr CreateUdpPortMultipleAddrs( const SocketAddress& global_addr, const SocketAddress& link_local_addr, PacketSocketFactory* socket_factory, const webrtc::test::ScopedKeyValueConfig& field_trials) { - return UDPPort::Create( + auto port = UDPPort::Create( &main_, socket_factory, 
MakeNetworkMultipleAddrs(global_addr, link_local_addr, &field_trials), 0, 0, username_, password_, true, absl::nullopt, &field_trials); + port->SetIceTiebreaker(kTiebreakerDefault); + return port; } std::unique_ptr CreateTcpPort(const SocketAddress& addr) { return CreateTcpPort(addr, &socket_factory_); } std::unique_ptr CreateTcpPort(const SocketAddress& addr, PacketSocketFactory* socket_factory) { - return TCPPort::Create(&main_, socket_factory, MakeNetwork(addr), 0, 0, - username_, password_, true, &field_trials_); + auto port = TCPPort::Create(&main_, socket_factory, MakeNetwork(addr), 0, 0, + username_, password_, true, &field_trials_); + port->SetIceTiebreaker(kTiebreakerDefault); + return port; } std::unique_ptr CreateStunPort(const SocketAddress& addr, rtc::PacketSocketFactory* factory) { ServerAddresses stun_servers; stun_servers.insert(kStunAddr); - return StunPort::Create(&main_, factory, MakeNetwork(addr), 0, 0, username_, - password_, stun_servers, absl::nullopt, - &field_trials_); + auto port = StunPort::Create(&main_, factory, MakeNetwork(addr), 0, 0, + username_, password_, stun_servers, + absl::nullopt, &field_trials_); + port->SetIceTiebreaker(kTiebreakerDefault); + return port; } std::unique_ptr CreateRelayPort(const SocketAddress& addr, ProtocolType int_proto, @@ -597,7 +607,9 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { args.config = &config; args.field_trials = &field_trials_; - return TurnPort::Create(args, 0, 0); + auto port = TurnPort::Create(args, 0, 0); + port->SetIceTiebreaker(kTiebreakerDefault); + return port; } std::unique_ptr CreateNatServer(const SocketAddress& addr, @@ -1772,6 +1784,7 @@ TEST_F(PortTest, TestUdpMultipleAddressesV6CrossTypePorts) { factory.set_next_udp_socket(socket); ports[i] = CreateUdpPortMultipleAddrs(addresses[i], kLinkLocalIPv6Addr, &factory, field_trials); + ports[i]->SetIceTiebreaker(kTiebreakerDefault); socket->set_state(AsyncPacketSocket::STATE_BINDING); 
socket->SignalAddressReady(socket, addresses[i]); ports[i]->PrepareAddress(); @@ -2468,10 +2481,10 @@ TEST_F(PortTest, TEST_F(PortTest, TestHandleStunResponseWithUnknownComprehensionRequiredAttribute) { // Generic setup. - auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); - auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass", + cricket::ICEROLE_CONTROLLING, kTiebreakerDefault); + auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass", + cricket::ICEROLE_CONTROLLED, kTiebreakerDefault); lport->PrepareAddress(); rport->PrepareAddress(); ASSERT_FALSE(lport->Candidates().empty()); @@ -2505,10 +2518,10 @@ TEST_F(PortTest, TEST_F(PortTest, TestHandleStunIndicationWithUnknownComprehensionRequiredAttribute) { // Generic set up. - auto lport = CreateTestPort(kLocalAddr2, "lfrag", "lpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); - auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + auto lport = CreateTestPort(kLocalAddr2, "lfrag", "lpass", + cricket::ICEROLE_CONTROLLING, kTiebreakerDefault); + auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass", + cricket::ICEROLE_CONTROLLED, kTiebreakerDefault); lport->PrepareAddress(); rport->PrepareAddress(); ASSERT_FALSE(lport->Candidates().empty()); @@ -2530,9 +2543,8 @@ TEST_F(PortTest, // Test handling of STUN binding indication messages . STUN binding // indications are allowed only to the connection which is in read mode. 
TEST_F(PortTest, TestHandleStunBindingIndication) { - auto lport = CreateTestPort(kLocalAddr2, "lfrag", "lpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); - lport->SetIceTiebreaker(kTiebreaker1); + auto lport = CreateTestPort(kLocalAddr2, "lfrag", "lpass", + cricket::ICEROLE_CONTROLLING, kTiebreaker1); // Verifying encoding and decoding STUN indication message. std::unique_ptr in_msg, out_msg; @@ -2586,6 +2598,7 @@ TEST_F(PortTest, TestHandleStunBindingIndication) { TEST_F(PortTest, TestComputeCandidatePriority) { auto port = CreateTestPort(kLocalAddr1, "name", "pass"); + port->SetIceTiebreaker(kTiebreakerDefault); port->set_type_preference(90); port->set_component(177); port->AddCandidateAddress(SocketAddress("192.168.1.4", 1234)); @@ -2623,6 +2636,7 @@ TEST_F(PortTest, TestComputeCandidatePriority) { // Test that candidates with different types will have different foundation. TEST_F(PortTest, TestFoundation) { auto testport = CreateTestPort(kLocalAddr1, "name", "pass"); + testport->SetIceTiebreaker(kTiebreakerDefault); testport->AddCandidateAddress(kLocalAddr1, kLocalAddr1, LOCAL_PORT_TYPE, cricket::ICE_TYPE_PREFERENCE_HOST, false); testport->AddCandidateAddress(kLocalAddr2, kLocalAddr1, STUN_PORT_TYPE, @@ -2743,8 +2757,11 @@ TEST_F(PortTest, TestCandidatePriority) { // Test the Connection priority is calculated correctly. TEST_F(PortTest, TestConnectionPriority) { auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); + lport->SetIceTiebreaker(kTiebreakerDefault); lport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_HOST); + auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); + rport->SetIceTiebreaker(kTiebreakerDefault); rport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_RELAY_UDP); lport->set_component(123); lport->AddCandidateAddress(SocketAddress("192.168.1.4", 1234)); @@ -3629,6 +3646,7 @@ TEST_F(PortTest, TestSupportsProtocol) { // on both the port itself and its candidates. 
TEST_F(PortTest, TestSetIceParameters) { auto port = CreateTestPort(kLocalAddr1, "ufrag1", "password1"); + port->SetIceTiebreaker(kTiebreakerDefault); port->PrepareAddress(); EXPECT_EQ(1UL, port->Candidates().size()); port->SetIceParameters(1, "ufrag2", "password2"); @@ -3643,6 +3661,7 @@ TEST_F(PortTest, TestSetIceParameters) { TEST_F(PortTest, TestAddConnectionWithSameAddress) { auto port = CreateTestPort(kLocalAddr1, "ufrag1", "password1"); + port->SetIceTiebreaker(kTiebreakerDefault); port->PrepareAddress(); EXPECT_EQ(1u, port->Candidates().size()); rtc::SocketAddress address("1.1.1.1", 5000); diff --git a/p2p/base/pseudo_tcp_unittest.cc b/p2p/base/pseudo_tcp_unittest.cc index debddb217e..e56c6fa2c5 100644 --- a/p2p/base/pseudo_tcp_unittest.cc +++ b/p2p/base/pseudo_tcp_unittest.cc @@ -232,8 +232,10 @@ class PseudoTcpTest : public PseudoTcpTestBase { // Create some dummy data to send. send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, NULL, NULL); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); // Prepare the receive stream. 
@@ -296,7 +298,11 @@ class PseudoTcpTest : public PseudoTcpTestBase { do { rcvd = remote_.Recv(block, sizeof(block)); if (rcvd != -1) { - recv_stream_.Write(block, rcvd, NULL, NULL); + size_t written; + int error; + recv_stream_.Write( + rtc::MakeArrayView(reinterpret_cast(block), rcvd), + written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; } @@ -308,8 +314,10 @@ class PseudoTcpTest : public PseudoTcpTestBase { char block[kBlockSize]; do { send_stream_.GetPosition(&position); - if (send_stream_.Read(block, sizeof(block), &tosend, NULL) != - rtc::SR_EOS) { + int error; + if (send_stream_.Read( + rtc::MakeArrayView(reinterpret_cast(block), kBlockSize), + tosend, error) != rtc::SR_EOS) { sent = local_.Send(block, tosend); UpdateLocalClock(); if (sent != -1) { @@ -347,8 +355,10 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { // Create some dummy data to send. send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, NULL, NULL); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); // Prepare the receive stream. @@ -411,7 +421,11 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { do { rcvd = receiver_->Recv(block, sizeof(block)); if (rcvd != -1) { - recv_stream_.Write(block, rcvd, NULL, NULL); + size_t written; + int error; + recv_stream_.Write( + rtc::MakeArrayView(reinterpret_cast(block), rcvd), + written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; } @@ -424,7 +438,10 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { do { send_stream_.GetPosition(&position); tosend = bytes_per_send_ ? 
bytes_per_send_ : sizeof(block); - if (send_stream_.Read(block, tosend, &tosend, NULL) != rtc::SR_EOS) { + int error; + if (send_stream_.Read( + rtc::MakeArrayView(reinterpret_cast(block), tosend), + tosend, error) != rtc::SR_EOS) { sent = sender_->Send(block, tosend); UpdateLocalClock(); if (sent != -1) { @@ -458,8 +475,10 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { // Create some dummy data to send. send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, NULL, NULL); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); @@ -510,7 +529,11 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { do { rcvd = remote_.Recv(block, sizeof(block)); if (rcvd != -1) { - recv_stream_.Write(block, rcvd, NULL, NULL); + size_t written; + int error; + recv_stream_.Write( + rtc::MakeArrayView(reinterpret_cast(block), rcvd), + written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; } @@ -534,8 +557,11 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { char block[kBlockSize]; do { send_stream_.GetPosition(&position); - if (send_stream_.Read(block, sizeof(block), &tosend, NULL) != - rtc::SR_EOS) { + int error; + if (send_stream_.Read( + rtc::MakeArrayView(reinterpret_cast(block), + sizeof(block)), + tosend, error) != rtc::SR_EOS) { sent = local_.Send(block, tosend); UpdateLocalClock(); if (sent != -1) { diff --git a/p2p/base/regathering_controller_unittest.cc b/p2p/base/regathering_controller_unittest.cc index 597bcf3f35..e795fee914 100644 --- a/p2p/base/regathering_controller_unittest.cc +++ b/p2p/base/regathering_controller_unittest.cc @@ -39,6 +39,7 @@ const rtc::SocketAddress kTurnUdpIntAddr("99.99.99.3", const cricket::RelayCredentials kRelayCredentials("test", "test"); const char kIceUfrag[] = "UF00"; const char kIcePwd[] = 
"TESTICEPWD00000000000000"; +constexpr uint64_t kTiebreakerDefault = 44444; } // namespace @@ -56,6 +57,7 @@ class RegatheringControllerTest : public ::testing::Test, allocator_(std::make_unique( rtc::Thread::Current(), packet_socket_factory_.get())) { + allocator_->SetIceTiebreaker(kTiebreakerDefault); BasicRegatheringController::Config regathering_config; regathering_config.regather_on_failed_networks_interval = 0; regathering_controller_.reset(new BasicRegatheringController( diff --git a/p2p/base/stun_port.cc b/p2p/base/stun_port.cc index 8f80693978..fade14c738 100644 --- a/p2p/base/stun_port.cc +++ b/p2p/base/stun_port.cc @@ -550,11 +550,12 @@ void UDPPort::OnStunBindingRequestSucceeded( } bind_request_succeeded_servers_.insert(stun_server_addr); // If socket is shared and `stun_reflected_addr` is equal to local socket - // address, or if the same address has been added by another STUN server, - // then discarding the stun address. + // address and mDNS obfuscation is not enabled, or if the same address has + // been added by another STUN server, then discarding the stun address. // For STUN, related address is the local socket address. - if ((!SharedSocket() || stun_reflected_addr != socket_->GetLocalAddress()) && - !HasCandidateWithAddress(stun_reflected_addr)) { + if ((!SharedSocket() || stun_reflected_addr != socket_->GetLocalAddress() || + Network()->GetMdnsResponder() != nullptr) && + !HasStunCandidateWithAddress(stun_reflected_addr)) { rtc::SocketAddress related_address = socket_->GetLocalAddress(); // If we can't stamp the related address correctly, empty it to avoid leak. 
if (!MaybeSetDefaultLocalAddress(&related_address)) { @@ -637,11 +638,12 @@ void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) { stats_.stun_binding_requests_sent++; } -bool UDPPort::HasCandidateWithAddress(const rtc::SocketAddress& addr) const { +bool UDPPort::HasStunCandidateWithAddress( + const rtc::SocketAddress& addr) const { const std::vector& existing_candidates = Candidates(); std::vector::const_iterator it = existing_candidates.begin(); for (; it != existing_candidates.end(); ++it) { - if (it->address() == addr) + if (it->type() == STUN_PORT_TYPE && it->address() == addr) return true; } return false; diff --git a/p2p/base/stun_port.h b/p2p/base/stun_port.h index 06b5e1ae1c..13970070ed 100644 --- a/p2p/base/stun_port.h +++ b/p2p/base/stun_port.h @@ -234,7 +234,7 @@ class UDPPort : public Port { // changed to SignalPortReady. void MaybeSetPortCompleteOrError(); - bool HasCandidateWithAddress(const rtc::SocketAddress& addr) const; + bool HasStunCandidateWithAddress(const rtc::SocketAddress& addr) const; // If this is a low-cost network, it will keep on sending STUN binding // requests indefinitely to keep the NAT binding alive. 
Otherwise, stop diff --git a/p2p/base/stun_port_unittest.cc b/p2p/base/stun_port_unittest.cc index a6f5d67c5c..3d56636a9b 100644 --- a/p2p/base/stun_port_unittest.cc +++ b/p2p/base/stun_port_unittest.cc @@ -14,6 +14,7 @@ #include "api/test/mock_async_dns_resolver.h" #include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/mock_dns_resolving_packet_socket_factory.h" #include "p2p/base/test_stun_server.h" #include "rtc_base/gunit.h" #include "rtc_base/helpers.h" @@ -34,10 +35,6 @@ using ::testing::Return; using ::testing::ReturnPointee; using ::testing::SetArgPointee; -using DnsResolverExpectations = - std::function; - static const SocketAddress kLocalAddr("127.0.0.1", 0); static const SocketAddress kIPv6LocalAddr("::1", 0); static const SocketAddress kStunAddr1("127.0.0.1", 5000); @@ -59,32 +56,29 @@ static const uint32_t kIPv6StunCandidatePriority = static const int kInfiniteLifetime = -1; static const int kHighCostPortKeepaliveLifetimeMs = 2 * 60 * 1000; -// A PacketSocketFactory implementation that uses a mock DnsResolver and allows -// setting expectations on the resolver and results. 
-class MockDnsResolverPacketSocketFactory - : public rtc::BasicPacketSocketFactory { - public: - explicit MockDnsResolverPacketSocketFactory( - rtc::SocketFactory* socket_factory) - : rtc::BasicPacketSocketFactory(socket_factory) {} +constexpr uint64_t kTiebreakerDefault = 44444; - std::unique_ptr CreateAsyncDnsResolver() - override { - std::unique_ptr resolver = - std::make_unique(); - if (expectations_) { - expectations_(resolver.get(), &resolver_result_); - } - return resolver; +class FakeMdnsResponder : public webrtc::MdnsResponderInterface { + public: + void CreateNameForAddress(const rtc::IPAddress& addr, + NameCreatedCallback callback) override { + callback(addr, std::string("unittest-mdns-host-name.local")); } - void SetExpectations(DnsResolverExpectations expectations) { - expectations_ = expectations; + void RemoveNameForAddress(const rtc::IPAddress& addr, + NameRemovedCallback callback) override {} +}; + +class FakeMdnsResponderProvider : public rtc::MdnsResponderProvider { + public: + FakeMdnsResponderProvider() : mdns_responder_(new FakeMdnsResponder()) {} + + webrtc::MdnsResponderInterface* GetMdnsResponder() const override { + return mdns_responder_.get(); } private: - webrtc::MockAsyncDnsResolverResult resolver_result_; - DnsResolverExpectations expectations_; + std::unique_ptr mdns_responder_; }; // Base class for tests connecting a StunPort to a fake STUN server @@ -103,6 +97,7 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { socket_factory_(ss_.get()), stun_server_1_(cricket::TestStunServer::Create(ss_.get(), kStunAddr1)), stun_server_2_(cricket::TestStunServer::Create(ss_.get(), kStunAddr2)), + mdns_responder_provider_(new FakeMdnsResponderProvider()), done_(false), error_(false), stun_keepalive_delay_(1), @@ -141,6 +136,7 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { rtc::Thread::Current(), socket_factory(), &network_, 0, 0, rtc::CreateRandomString(16), 
rtc::CreateRandomString(22), stun_servers, absl::nullopt, field_trials); + stun_port_->SetIceTiebreaker(kTiebreakerDefault); stun_port_->set_stun_keepalive_delay(stun_keepalive_delay_); // If `stun_keepalive_lifetime_` is negative, let the stun port // choose its lifetime from the network type. @@ -171,6 +167,7 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { rtc::CreateRandomString(16), rtc::CreateRandomString(22), false, absl::nullopt, field_trials); ASSERT_TRUE(stun_port_ != NULL); + stun_port_->SetIceTiebreaker(kTiebreakerDefault); ServerAddresses stun_servers; stun_servers.insert(server_addr); stun_port_->set_server_addresses(stun_servers); @@ -196,6 +193,10 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { /* packet_time_us */ -1); } + void EnableMdnsObfuscation() { + network_.set_mdns_responder_provider(mdns_responder_provider_.get()); + } + protected: static void SetUpTestSuite() { // Ensure the RNG is inited. @@ -233,6 +234,7 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { std::unique_ptr stun_server_1_; std::unique_ptr stun_server_2_; std::unique_ptr socket_; + std::unique_ptr mdns_responder_provider_; bool done_; bool error_; int stun_keepalive_delay_; @@ -301,12 +303,13 @@ class StunPortWithMockDnsResolverTest : public StunPortTest { return &socket_factory_; } - void SetDnsResolverExpectations(DnsResolverExpectations expectations) { + void SetDnsResolverExpectations( + rtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { socket_factory_.SetExpectations(expectations); } private: - MockDnsResolverPacketSocketFactory socket_factory_; + rtc::MockDnsResolvingPacketSocketFactory socket_factory_; }; // Test that we can get an address from a STUN server specified by a hostname. @@ -382,6 +385,41 @@ TEST_F(StunPortTestWithRealClock, // No crash is success. 
} +// Test that a stun candidate (srflx candidate) is discarded whose address is +// equal to that of a local candidate if mDNS obfuscation is not enabled. +TEST_F(StunPortTest, TestStunCandidateDiscardedWithMdnsObfuscationNotEnabled) { + CreateSharedUdpPort(kStunAddr1, nullptr); + PrepareAddress(); + EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + ASSERT_EQ(1U, port()->Candidates().size()); + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_EQ(port()->Candidates()[0].type(), cricket::LOCAL_PORT_TYPE); +} + +// Test that a stun candidate (srflx candidate) is generated whose address is +// equal to that of a local candidate if mDNS obfuscation is enabled. +TEST_F(StunPortTest, TestStunCandidateGeneratedWithMdnsObfuscationEnabled) { + EnableMdnsObfuscation(); + CreateSharedUdpPort(kStunAddr1, nullptr); + PrepareAddress(); + EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + ASSERT_EQ(2U, port()->Candidates().size()); + + // The addresses of the candidates are both equal to kLocalAddr. + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[1].address())); + + // One of the generated candidates is a local candidate and the other is a + // stun candidate. + EXPECT_NE(port()->Candidates()[0].type(), port()->Candidates()[1].type()); + if (port()->Candidates()[0].type() == cricket::LOCAL_PORT_TYPE) { + EXPECT_EQ(port()->Candidates()[1].type(), cricket::STUN_PORT_TYPE); + } else { + EXPECT_EQ(port()->Candidates()[0].type(), cricket::STUN_PORT_TYPE); + EXPECT_EQ(port()->Candidates()[1].type(), cricket::LOCAL_PORT_TYPE); + } +} + // Test that the same address is added only once if two STUN servers are in // use. 
TEST_F(StunPortTest, TestNoDuplicatedAddressWithTwoStunServers) { @@ -614,12 +652,13 @@ class StunIPv6PortTestWithMockDnsResolver : public StunIPv6PortTest { return &socket_factory_; } - void SetDnsResolverExpectations(DnsResolverExpectations expectations) { + void SetDnsResolverExpectations( + rtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { socket_factory_.SetExpectations(expectations); } private: - MockDnsResolverPacketSocketFactory socket_factory_; + rtc::MockDnsResolvingPacketSocketFactory socket_factory_; }; // Test that we can get an address from a STUN server specified by a hostname. diff --git a/p2p/base/tcp_port_unittest.cc b/p2p/base/tcp_port_unittest.cc index 8adf35ca4c..1bb59811b8 100644 --- a/p2p/base/tcp_port_unittest.cc +++ b/p2p/base/tcp_port_unittest.cc @@ -43,6 +43,8 @@ static const SocketAddress kRemoteAddr("22.22.22.22", 0); static const SocketAddress kRemoteIPv6Addr("2401:fa00:4:1000:be30:5bff:fee5:c4", 0); +constexpr uint64_t kTiebreakerDefault = 44444; + class ConnectionObserver : public sigslot::has_slots<> { public: explicit ConnectionObserver(Connection* conn) : conn_(conn) { @@ -81,15 +83,19 @@ class TCPPortTest : public ::testing::Test, public sigslot::has_slots<> { } std::unique_ptr CreateTCPPort(const SocketAddress& addr) { - return std::unique_ptr( + auto port = std::unique_ptr( TCPPort::Create(&main_, &socket_factory_, MakeNetwork(addr), 0, 0, username_, password_, true, &field_trials_)); + port->SetIceTiebreaker(kTiebreakerDefault); + return port; } std::unique_ptr CreateTCPPort(const rtc::Network* network) { - return std::unique_ptr( + auto port = std::unique_ptr( TCPPort::Create(&main_, &socket_factory_, network, 0, 0, username_, password_, true, &field_trials_)); + port->SetIceTiebreaker(kTiebreakerDefault); + return port; } protected: diff --git a/p2p/base/turn_port.cc b/p2p/base/turn_port.cc index c34c078c74..e36c9b8fbf 100644 --- a/p2p/base/turn_port.cc +++ b/p2p/base/turn_port.cc @@ -19,12 +19,14 @@ 
#include "absl/strings/match.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/stun.h" #include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/net_helpers.h" #include "rtc_base/socket_address.h" @@ -32,6 +34,26 @@ namespace cricket { +namespace { + +bool ResolveTurnHostnameForFamily(const webrtc::FieldTrialsView& field_trials) { + // Bug fix for TURN hostname resolution on IPv6. + // Field trial key reserved in bugs.webrtc.org/14334 + static constexpr char field_trial_name[] = + "WebRTC-IPv6NetworkResolutionFixes"; + if (!field_trials.IsEnabled(field_trial_name)) { + return false; + } + + webrtc::FieldTrialParameter resolve_turn_hostname_for_family( + "ResolveTurnHostnameForFamily", /*default_value=*/false); + webrtc::ParseFieldTrial({&resolve_turn_hostname_for_family}, + field_trials.Lookup(field_trial_name)); + return resolve_turn_hostname_for_family; +} + +} // namespace + using ::webrtc::SafeTask; using ::webrtc::TaskQueueBase; using ::webrtc::TimeDelta; @@ -42,7 +64,6 @@ static const int TURN_ALLOCATE_REQUEST = STUN_ALLOCATE_REQUEST; // Attributes in comprehension-optional range, // ignored by TURN server that doesn't know about them. 
// https://tools.ietf.org/html/rfc5389#section-18.2 -static const int STUN_ATTR_MULTI_MAPPING = 0xff04; const int STUN_ATTR_TURN_LOGGING_ID = 0xff05; // TODO(juberti): Extract to turnmessage.h @@ -108,8 +129,7 @@ class TurnCreatePermissionRequest : public StunRequest { public: TurnCreatePermissionRequest(TurnPort* port, TurnEntry* entry, - const rtc::SocketAddress& ext_addr, - absl::string_view remote_ufrag); + const rtc::SocketAddress& ext_addr); ~TurnCreatePermissionRequest() override; void OnSent() override; void OnResponse(StunMessage* response) override; @@ -120,7 +140,6 @@ class TurnCreatePermissionRequest : public StunRequest { TurnPort* port_; TurnEntry* entry_; rtc::SocketAddress ext_addr_; - std::string remote_ufrag_; }; class TurnChannelBindRequest : public StunRequest { @@ -147,10 +166,8 @@ class TurnChannelBindRequest : public StunRequest { class TurnEntry : public sigslot::has_slots<> { public: enum BindState { STATE_UNBOUND, STATE_BINDING, STATE_BOUND }; - TurnEntry(TurnPort* port, - int channel_id, - const rtc::SocketAddress& ext_addr, - absl::string_view remote_ufrag); + TurnEntry(TurnPort* port, Connection* conn, int channel_id); + ~TurnEntry(); TurnPort* port() { return port_; } @@ -161,15 +178,20 @@ class TurnEntry : public sigslot::has_slots<> { const rtc::SocketAddress& address() const { return ext_addr_; } BindState state() const { return state_; } - // If the destruction timestamp is set, that means destruction has been - // scheduled (will occur kTurnPermissionTimeout after it's scheduled). - absl::optional destruction_timestamp() { - return destruction_timestamp_; - } - void set_destruction_timestamp(int64_t destruction_timestamp) { - destruction_timestamp_.emplace(destruction_timestamp); - } - void reset_destruction_timestamp() { destruction_timestamp_.reset(); } + // Adds a new connection object to the list of connections that are associated + // with this entry. If prior to this call there were no connections being + // tracked (i.e. 
count goes from 0 -> 1), the internal safety flag is reset + // which cancels any potential pending deletion tasks. + void TrackConnection(Connection* conn); + + // Removes a connection from the list of tracked connections. + // * If `conn` was the last connection removed, the function returns a + // safety flag that's used to schedule the deletion of the entry after a + // timeout expires. If during this timeout `TrackConnection` is called, the + // flag will be reset and pending tasks associated with it, cancelled. + // * If `conn` was not the last connection, the return value will be nullptr. + rtc::scoped_refptr UntrackConnection( + Connection* conn); // Helper methods to send permission and channel bind requests. void SendCreatePermissionRequest(int delay); @@ -190,23 +212,16 @@ class TurnEntry : public sigslot::has_slots<> { // Signal sent when TurnEntry is destroyed. webrtc::CallbackList destroyed_callback_list_; - const std::string& get_remote_ufrag() const { return remote_ufrag_; } - void set_remote_ufrag(absl::string_view remote_ufrag) { - remote_ufrag_ = std::string(remote_ufrag); - } - private: TurnPort* port_; int channel_id_; rtc::SocketAddress ext_addr_; BindState state_; - // An unset value indicates that this entry is scheduled to be destroyed. It - // is also used as an ID of the event scheduling. When the destruction event - // actually fires, the TurnEntry will be destroyed only if the timestamp here - // matches the one in the firing event. - absl::optional destruction_timestamp_; - - std::string remote_ufrag_; + // List of associated connection instances to keep track of how many and + // which connections are associated with this entry. Once this is empty, + // the entry can be deleted. 
+ std::vector connections_; + webrtc::ScopedTaskSafety task_safety_; }; TurnPort::TurnPort(TaskQueueBase* thread, @@ -278,7 +293,7 @@ TurnPort::TurnPort(TaskQueueBase* thread, tls_elliptic_curves_(tls_elliptic_curves), tls_cert_verifier_(tls_cert_verifier), credentials_(credentials), - socket_(NULL), + socket_(nullptr), error_(0), stun_dscp_value_(rtc::DSCP_NO_CHANGE), request_manager_( @@ -301,9 +316,7 @@ TurnPort::~TurnPort() { Release(); } - while (!entries_.empty()) { - DestroyEntry(entries_.front()); - } + entries_.clear(); if (socket_) socket_->UnsubscribeClose(this); @@ -554,7 +567,7 @@ void TurnPort::OnAllocateMismatch() { } else { delete socket_; } - socket_ = NULL; + socket_ = nullptr; ResetNonce(); PrepareAddress(); @@ -587,15 +600,13 @@ Connection* TurnPort::CreateConnection(const Candidate& remote_candidate, if (local_candidate.type() == RELAY_PORT_TYPE && local_candidate.address().family() == remote_candidate.address().family()) { - // Create an entry, if needed, so we can get our permissions set up - // correctly. - if (CreateOrRefreshEntry(remote_candidate.address(), next_channel_number_, - remote_candidate.username())) { - // An entry was created. - next_channel_number_++; - } ProxyConnection* conn = new ProxyConnection(NewWeakPtr(), index, remote_candidate); + // Create an entry, if needed, so we can get our permissions set up + // correctly. + if (CreateOrRefreshEntry(conn, next_channel_number_)) { + next_channel_number_++; + } AddOrReplaceConnection(conn); return conn; } @@ -650,11 +661,7 @@ int TurnPort::SendTo(const void* data, bool payload) { // Try to find an entry for this specific address; we should have one. 
TurnEntry* entry = FindEntry(addr); - if (!entry) { - RTC_LOG(LS_ERROR) << "Did not find the TurnEntry for address " - << addr.ToSensitiveString(); - return 0; - } + RTC_DCHECK(entry); if (!ready()) { error_ = ENOTCONN; @@ -666,6 +673,7 @@ int TurnPort::SendTo(const void* data, CopyPortInformationToPacketInfo(&modified_options.info_signaled_after_sent); int sent = entry->Send(data, size, payload, modified_options); if (sent <= 0) { + error_ = socket_->GetError(); return SOCKET_ERROR; } @@ -679,6 +687,16 @@ bool TurnPort::CanHandleIncomingPacketsFrom( return server_address_.address == addr; } +void TurnPort::SendBindingErrorResponse(StunMessage* message, + const rtc::SocketAddress& addr, + int error_code, + absl::string_view reason) { + if (!GetConnection(addr)) + return; + + Port::SendBindingErrorResponse(message, addr, error_code, reason); +} + bool TurnPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, const char* data, size_t size, @@ -811,7 +829,7 @@ void TurnPort::ResolveTurnAddress(const rtc::SocketAddress& address) { RTC_LOG(LS_INFO) << ToString() << ": Starting TURN host lookup for " << address.ToSensitiveString(); resolver_ = socket_factory()->CreateAsyncDnsResolver(); - resolver_->Start(address, [this] { + auto callback = [this] { // If DNS resolve is failed when trying to connect to the server using TCP, // one of the reason could be due to DNS queries blocked by firewall. // In such cases we will try to connect to the server with hostname, @@ -842,7 +860,13 @@ void TurnPort::ResolveTurnAddress(const rtc::SocketAddress& address) { } server_address_.address = resolved_address; PrepareAddress(); - }); + }; + // TODO(bugs.webrtc.org/14733): remove duplicate resolution with STUN port. 
+ if (ResolveTurnHostnameForFamily(field_trials())) { + resolver_->Start(address, Network()->family(), std::move(callback)); + } else { + resolver_->Start(address, std::move(callback)); + } } void TurnPort::OnSendStunPacket(const void* data, @@ -1179,115 +1203,62 @@ void TurnPort::ResetNonce() { } bool TurnPort::HasPermission(const rtc::IPAddress& ipaddr) const { - return absl::c_any_of(entries_, [&ipaddr](const TurnEntry* e) { + return absl::c_any_of(entries_, [&ipaddr](const auto& e) { return e->address().ipaddr() == ipaddr; }); } TurnEntry* TurnPort::FindEntry(const rtc::SocketAddress& addr) const { auto it = absl::c_find_if( - entries_, [&addr](const TurnEntry* e) { return e->address() == addr; }); - return (it != entries_.end()) ? *it : NULL; + entries_, [&addr](const auto& e) { return e->address() == addr; }); + return (it != entries_.end()) ? it->get() : nullptr; } TurnEntry* TurnPort::FindEntry(int channel_id) const { - auto it = absl::c_find_if(entries_, [&channel_id](const TurnEntry* e) { + auto it = absl::c_find_if(entries_, [&channel_id](const auto& e) { return e->channel_id() == channel_id; }); - return (it != entries_.end()) ? *it : NULL; + return (it != entries_.end()) ? 
it->get() : nullptr; } -bool TurnPort::EntryExists(TurnEntry* e) { - return absl::c_linear_search(entries_, e); -} - -bool TurnPort::CreateOrRefreshEntry(const rtc::SocketAddress& addr, - int channel_number) { - return CreateOrRefreshEntry(addr, channel_number, ""); -} - -bool TurnPort::CreateOrRefreshEntry(const rtc::SocketAddress& addr, - int channel_number, - absl::string_view remote_ufrag) { - TurnEntry* entry = FindEntry(addr); +bool TurnPort::CreateOrRefreshEntry(Connection* conn, int channel_number) { + const Candidate& remote_candidate = conn->remote_candidate(); + TurnEntry* entry = FindEntry(remote_candidate.address()); if (entry == nullptr) { - entry = new TurnEntry(this, channel_number, addr, remote_ufrag); - entries_.push_back(entry); + entries_.push_back(std::make_unique(this, conn, channel_number)); return true; - } else { - if (entry->destruction_timestamp()) { - // Destruction should have only been scheduled (indicated by - // destruction_timestamp being set) if there were no connections using - // this address. - RTC_DCHECK(!GetConnection(addr)); - // Resetting the destruction timestamp will ensure that any queued - // destruction tasks, when executed, will see that the timestamp doesn't - // match and do nothing. We do this because (currently) there's not a - // convenient way to cancel queued tasks. - entry->reset_destruction_timestamp(); - } else { - // The only valid reason for destruction not being scheduled is that - // there's still one connection. - RTC_DCHECK(GetConnection(addr)); - } - - if (field_trials().IsEnabled("WebRTC-TurnAddMultiMapping")) { - if (entry->get_remote_ufrag() != remote_ufrag) { - RTC_LOG(LS_INFO) << ToString() - << ": remote ufrag updated." - " Sending new permission request"; - entry->set_remote_ufrag(remote_ufrag); - entry->SendCreatePermissionRequest(0); - } - } } + + // Associate this connection object with an existing entry. If the entry + // has been scheduled for deletion, this will cancel that task. 
+ entry->TrackConnection(conn); + return false; } -void TurnPort::DestroyEntry(TurnEntry* entry) { - RTC_DCHECK(entry != NULL); - entry->destroyed_callback_list_.Send(entry); - entries_.remove(entry); - delete entry; -} - -void TurnPort::DestroyEntryIfNotCancelled(TurnEntry* entry, int64_t timestamp) { - if (!EntryExists(entry)) { - return; - } - // The destruction timestamp is used to manage pending destructions. Proceed - // with destruction if it's set, and matches the timestamp from the posted - // task. Note that CreateOrRefreshEntry will unset the timestamp, canceling - // destruction. - if (entry->destruction_timestamp() && - timestamp == *entry->destruction_timestamp()) { - DestroyEntry(entry); - } -} - void TurnPort::HandleConnectionDestroyed(Connection* conn) { // Schedule an event to destroy TurnEntry for the connection, which is - // already destroyed. + // being destroyed. const rtc::SocketAddress& remote_address = conn->remote_candidate().address(); + // We should always have an entry for this connection. TurnEntry* entry = FindEntry(remote_address); - if (!entry) { - // TODO(chromium:1374310): This happens because more than one connection - // may be associated with an entry. Previously a connection with the same - // address has been destroyed and subsequently the entry removed - // (prematurely.) - RTC_DLOG_F(LS_WARNING) << "Entry has been removed."; - return; + rtc::scoped_refptr flag = + entry->UntrackConnection(conn); + if (flag) { + // An assumption here is that the lifetime flag for the entry, is within + // the lifetime scope of `task_safety_` and therefore use of `this` is safe. + // If an entry gets reused (associated with a new connection) while this + // task is pending, the entry will reset the safety flag, thus cancel this + // task. 
+ thread()->PostDelayedTask(SafeTask(flag, + [this, entry] { + entries_.erase(absl::c_find_if( + entries_, [entry](const auto& e) { + return e.get() == entry; + })); + }), + kTurnPermissionTimeout); } - - RTC_DCHECK(!entry->destruction_timestamp().has_value()); - int64_t timestamp = rtc::TimeMillis(); - entry->set_destruction_timestamp(timestamp); - thread()->PostDelayedTask(SafeTask(task_safety_.flag(), - [this, entry, timestamp] { - DestroyEntryIfNotCancelled(entry, - timestamp); - }), - kTurnPermissionTimeout); } void TurnPort::SetCallbacksForTest(CallbacksForTest* callbacks) { @@ -1636,15 +1607,13 @@ void TurnRefreshRequest::OnTimeout() { TurnCreatePermissionRequest::TurnCreatePermissionRequest( TurnPort* port, TurnEntry* entry, - const rtc::SocketAddress& ext_addr, - absl::string_view remote_ufrag) + const rtc::SocketAddress& ext_addr) : StunRequest( port->request_manager(), std::make_unique(TURN_CREATE_PERMISSION_REQUEST)), port_(port), entry_(entry), - ext_addr_(ext_addr), - remote_ufrag_(remote_ufrag) { + ext_addr_(ext_addr) { RTC_DCHECK(entry_); entry_->destroyed_callback_list_.AddReceiver(this, [this](TurnEntry* entry) { RTC_DCHECK(entry_ == entry); @@ -1655,10 +1624,6 @@ TurnCreatePermissionRequest::TurnCreatePermissionRequest( RTC_DCHECK_EQ(message->type(), TURN_CREATE_PERMISSION_REQUEST); message->AddAttribute(std::make_unique( STUN_ATTR_XOR_PEER_ADDRESS, ext_addr_)); - if (port_->field_trials().IsEnabled("WebRTC-TurnAddMultiMapping")) { - message->AddAttribute(std::make_unique( - STUN_ATTR_MULTI_MAPPING, remote_ufrag_)); - } port_->AddRequestAuthInfo(message); port_->TurnCustomizerMaybeModifyOutgoingStunMessage(message); } @@ -1789,23 +1754,37 @@ void TurnChannelBindRequest::OnTimeout() { } } -TurnEntry::TurnEntry(TurnPort* port, - int channel_id, - const rtc::SocketAddress& ext_addr, - absl::string_view remote_ufrag) +TurnEntry::TurnEntry(TurnPort* port, Connection* conn, int channel_id) : port_(port), channel_id_(channel_id), - ext_addr_(ext_addr), 
+ ext_addr_(conn->remote_candidate().address()), state_(STATE_UNBOUND), - remote_ufrag_(remote_ufrag) { + connections_({conn}) { // Creating permission for `ext_addr_`. SendCreatePermissionRequest(0); } +TurnEntry::~TurnEntry() { + destroyed_callback_list_.Send(this); +} + +void TurnEntry::TrackConnection(Connection* conn) { + RTC_DCHECK(absl::c_find(connections_, conn) == connections_.end()); + if (connections_.empty()) { + task_safety_.reset(); + } + connections_.push_back(conn); +} + +rtc::scoped_refptr TurnEntry::UntrackConnection( + Connection* conn) { + connections_.erase(absl::c_find(connections_, conn)); + return connections_.empty() ? task_safety_.flag() : nullptr; +} + void TurnEntry::SendCreatePermissionRequest(int delay) { - port_->SendRequest( - new TurnCreatePermissionRequest(port_, this, ext_addr_, remote_ufrag_), - delay); + port_->SendRequest(new TurnCreatePermissionRequest(port_, this, ext_addr_), + delay); } void TurnEntry::SendChannelBindRequest(int delay) { diff --git a/p2p/base/turn_port.h b/p2p/base/turn_port.h index e51468770a..ac660d6599 100644 --- a/p2p/base/turn_port.h +++ b/p2p/base/turn_port.h @@ -13,7 +13,6 @@ #include -#include #include #include #include @@ -82,7 +81,7 @@ class TurnPort : public Port { return absl::WrapUnique( new TurnPort(args.network_thread, args.socket_factory, args.network, socket, args.username, args.password, *args.server_address, - args.config->credentials, args.config->priority, + args.config->credentials, args.relative_priority, args.config->tls_alpn_protocols, args.config->tls_elliptic_curves, args.turn_customizer, args.config->tls_cert_verifier, args.field_trials)); @@ -101,7 +100,7 @@ class TurnPort : public Port { new TurnPort(args.network_thread, args.socket_factory, args.network, min_port, max_port, args.username, args.password, *args.server_address, args.config->credentials, - args.config->priority, args.config->tls_alpn_protocols, + args.relative_priority, args.config->tls_alpn_protocols, 
args.config->tls_elliptic_curves, args.turn_customizer, args.config->tls_cert_verifier, args.field_trials)); } @@ -151,6 +150,14 @@ class TurnPort : public Port { int64_t packet_time_us) override; bool CanHandleIncomingPacketsFrom( const rtc::SocketAddress& addr) const override; + + // Checks if a connection exists for `addr` before forwarding the call to + // the base class. + void SendBindingErrorResponse(StunMessage* message, + const rtc::SocketAddress& addr, + int error_code, + absl::string_view reason) override; + virtual void OnReadPacket(rtc::AsyncPacketSocket* socket, const char* data, size_t size, @@ -231,11 +238,7 @@ class TurnPort : public Port { // NOTE: This method needs to be accessible for StunPort // return true if entry was created (i.e channel_number consumed). - bool CreateOrRefreshEntry(const rtc::SocketAddress& addr, int channel_number); - - bool CreateOrRefreshEntry(const rtc::SocketAddress& addr, - int channel_number, - absl::string_view remote_ufrag); + bool CreateOrRefreshEntry(Connection* conn, int channel_number); rtc::DiffServCodePoint StunDscpValue() const override; @@ -243,7 +246,6 @@ class TurnPort : public Port { void Close(); private: - typedef std::list EntryList; typedef std::map SocketOptionsMap; typedef std::set AttemptedServerSet; @@ -300,11 +302,6 @@ class TurnPort : public Port { bool HasPermission(const rtc::IPAddress& ipaddr) const; TurnEntry* FindEntry(const rtc::SocketAddress& address) const; TurnEntry* FindEntry(int channel_id) const; - bool EntryExists(TurnEntry* e); - void DestroyEntry(TurnEntry* entry); - // Destroys the entry only if `timestamp` matches the destruction timestamp - // in `entry`. - void DestroyEntryIfNotCancelled(TurnEntry* entry, int64_t timestamp); // Marks the connection with remote address `address` failed and // pruned (a.k.a. write-timed-out). Returns true if a connection is found. 
@@ -340,7 +337,7 @@ class TurnPort : public Port { std::string hash_; // Digest of username:realm:password int next_channel_number_; - EntryList entries_; + std::vector> entries_; PortState state_; // By default the value will be set to 0. This value will be used in diff --git a/p2p/base/turn_port_unittest.cc b/p2p/base/turn_port_unittest.cc index 83cebd1538..976bc53a86 100644 --- a/p2p/base/turn_port_unittest.cc +++ b/p2p/base/turn_port_unittest.cc @@ -22,6 +22,7 @@ #include "api/units/time_delta.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/connection.h" +#include "p2p/base/mock_dns_resolving_packet_socket_factory.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port_allocator.h" #include "p2p/base/stun_port.h" @@ -44,8 +45,16 @@ #include "test/gtest.h" #include "test/scoped_key_value_config.h" +namespace { using rtc::SocketAddress; +using ::testing::_; +using ::testing::DoAll; +using ::testing::InvokeArgument; +using ::testing::Return; +using ::testing::ReturnPointee; +using ::testing::SetArgPointee; + static const SocketAddress kLocalAddr1("11.11.11.11", 0); static const SocketAddress kLocalAddr2("22.22.22.22", 0); static const SocketAddress kLocalIPv6Addr("2401:fa00:4:1000:be30:5bff:fee5:c3", @@ -77,6 +86,7 @@ static const SocketAddress kTurnUdpIPv6IntAddr( "2400:4030:1:2c00:be30:abcd:efab:cdef", cricket::TURN_SERVER_PORT); static const SocketAddress kTurnInvalidAddr("www.google.invalid.", 3478); +static const SocketAddress kTurnValidAddr("www.google.valid.", 3478); static const char kCandidateFoundation[] = "foundation"; static const char kIceUfrag1[] = "TESTICEUFRAG0001"; @@ -95,6 +105,8 @@ static constexpr unsigned int kConnectionDestructionDelay = 1; // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5191 static constexpr unsigned int kResolverTimeout = 10000; +constexpr uint64_t kTiebreakerDefault = 44444; + static const cricket::ProtocolAddress kTurnUdpProtoAddr(kTurnUdpIntAddr, cricket::PROTO_UDP); static 
const cricket::ProtocolAddress kTurnTcpProtoAddr(kTurnTcpIntAddr, @@ -112,9 +124,12 @@ static const cricket::ProtocolAddress kTurnPort80ProtoAddr(kTurnPort80Addr, cricket::PROTO_TCP); static const cricket::ProtocolAddress kTurnPort443ProtoAddr(kTurnPort443Addr, cricket::PROTO_TCP); -static const cricket::ProtocolAddress kTurnPortHostnameProtoAddr( +static const cricket::ProtocolAddress kTurnPortInvalidHostnameProtoAddr( kTurnInvalidAddr, cricket::PROTO_UDP); +static const cricket::ProtocolAddress kTurnPortValidHostnameProtoAddr( + kTurnValidAddr, + cricket::PROTO_UDP); #if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) static int GetFDCount() { @@ -131,6 +146,8 @@ static int GetFDCount() { } #endif +} // unnamed namespace + namespace cricket { class TurnPortTestVirtualSocketServer : public rtc::VirtualSocketServer { @@ -178,8 +195,8 @@ class TurnPortTest : public ::testing::Test, TurnPortTest() : ss_(new TurnPortTestVirtualSocketServer()), main_(ss_.get()), - socket_factory_(ss_.get()), - turn_server_(&main_, ss_.get(), kTurnUdpIntAddr, kTurnUdpExtAddr) { + turn_server_(&main_, ss_.get(), kTurnUdpIntAddr, kTurnUdpExtAddr), + socket_factory_(ss_.get()) { // Some code uses "last received time == 0" to represent "nothing received // so far", so we need to start the fake clock at a nonzero time... // TODO(deadbeef): Fix this. @@ -278,7 +295,7 @@ class TurnPortTest : public ::testing::Test, config.credentials = RelayCredentials(username, password); CreateRelayPortArgs args; args.network_thread = &main_; - args.socket_factory = &socket_factory_; + args.socket_factory = socket_factory(); args.network = network; args.username = kIceUfrag1; args.password = kIcePwd1; @@ -293,6 +310,7 @@ class TurnPortTest : public ::testing::Test, } // This TURN port will be the controlling. 
turn_port_->SetIceRole(ICEROLE_CONTROLLING); + turn_port_->SetIceTiebreaker(kTiebreakerDefault); ConnectSignals(); if (server_address.proto == cricket::PROTO_TLS) { @@ -311,7 +329,7 @@ class TurnPortTest : public ::testing::Test, RTC_CHECK(server_address.proto == PROTO_UDP); if (!socket_) { - socket_.reset(socket_factory_.CreateUdpSocket( + socket_.reset(socket_factory()->CreateUdpSocket( rtc::SocketAddress(kLocalAddr1.ipaddr(), 0), 0, 0)); ASSERT_TRUE(socket_ != NULL); socket_->SignalReadPacket.connect(this, @@ -322,7 +340,7 @@ class TurnPortTest : public ::testing::Test, config.credentials = RelayCredentials(username, password); CreateRelayPortArgs args; args.network_thread = &main_; - args.socket_factory = &socket_factory_; + args.socket_factory = socket_factory(); args.network = MakeNetwork(kLocalAddr1); args.username = kIceUfrag1; args.password = kIcePwd1; @@ -333,6 +351,7 @@ class TurnPortTest : public ::testing::Test, turn_port_ = TurnPort::Create(args, socket_.get()); // This TURN port will be the controlling. turn_port_->SetIceRole(ICEROLE_CONTROLLING); + turn_port_->SetIceTiebreaker(kTiebreakerDefault); ConnectSignals(); } @@ -352,11 +371,12 @@ class TurnPortTest : public ::testing::Test, void CreateUdpPort() { CreateUdpPort(kLocalAddr2); } void CreateUdpPort(const SocketAddress& address) { - udp_port_ = UDPPort::Create(&main_, &socket_factory_, MakeNetwork(address), + udp_port_ = UDPPort::Create(&main_, socket_factory(), MakeNetwork(address), 0, 0, kIceUfrag2, kIcePwd2, false, absl::nullopt, &field_trials_); // UDP port will be controlled. 
udp_port_->SetIceRole(ICEROLE_CONTROLLED); + udp_port_->SetIceTiebreaker(kTiebreakerDefault); udp_port_->SignalPortComplete.connect(this, &TurnPortTest::OnUdpPortComplete); } @@ -431,8 +451,19 @@ class TurnPortTest : public ::testing::Test, return true; } + void TestTurnAllocateSucceeds(unsigned int timeout) { + ASSERT_TRUE(turn_port_); + turn_port_->PrepareAddress(); + EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, timeout, fake_clock_); + ASSERT_EQ(1U, turn_port_->Candidates().size()); + EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), + turn_port_->Candidates()[0].address().ipaddr()); + EXPECT_NE(0, turn_port_->Candidates()[0].address().port()); + } + void TestReconstructedServerUrl(ProtocolType protocol_type, absl::string_view expected_url) { + ASSERT_TRUE(turn_port_); turn_port_->PrepareAddress(); ASSERT_TRUE_SIMULATED_WAIT( turn_ready_, TimeToGetTurnCandidate(protocol_type), fake_clock_); @@ -652,6 +683,30 @@ class TurnPortTest : public ::testing::Test, // unknown address. turn_unknown_address_ = false; fake_clock_.AdvanceTime(webrtc::TimeDelta::Seconds(5 * 60)); + + // TODO(chromium:1395625): When `TurnPort` doesn't find connection objects + // for incoming packets, it forwards calls to the parent class, `Port`. This + // happens inside `TurnPort::DispatchPacket`. The `Port` implementation may + // need to send a binding error back over a connection which, unless the + // `TurnPort` implementation handles it, could result in a null deref. + // This special check tests if dispatching messages via `TurnPort` for which + // there's no connection, results in a no-op rather than crashing. + // See `TurnPort::SendBindingErrorResponse` for the check. + // This should probably be done in a neater way both from a testing pov and + // how incoming messages are handled in the `Port` class, when an assumption + // is made about connection objects existing and when those assumptions + // may not hold. 
+ std::string pwd = conn1->remote_password_for_test(); + conn1->set_remote_password_for_test("bad"); + auto msg = conn1->BuildPingRequestForTest(); + + rtc::ByteBufferWriter buf; + msg->Write(&buf); + conn1->Send(buf.Data(), buf.Length(), options); + + // Now restore the password before continuing. + conn1->set_remote_password_for_test(pwd); + conn1->Ping(0); EXPECT_TRUE_SIMULATED_WAIT(turn_unknown_address_, kSimulatedRtt, fake_clock_); @@ -757,6 +812,10 @@ class TurnPortTest : public ::testing::Test, } protected: + virtual rtc::PacketSocketFactory* socket_factory() { + return &socket_factory_; + } + webrtc::test::ScopedKeyValueConfig field_trials_; rtc::ScopedFakeClock fake_clock_; // When a "create port" helper method is called with an IP, we create a @@ -765,7 +824,6 @@ class TurnPortTest : public ::testing::Test, std::list networks_; std::unique_ptr ss_; rtc::AutoSocketServerThread main_; - rtc::BasicPacketSocketFactory socket_factory_; std::unique_ptr socket_; TestTurnServer turn_server_; std::unique_ptr turn_port_; @@ -784,6 +842,9 @@ class TurnPortTest : public ::testing::Test, rtc::PacketOptions options; std::unique_ptr turn_customizer_; cricket::IceCandidateErrorEvent error_event_; + + private: + rtc::BasicPacketSocketFactory socket_factory_; }; TEST_F(TurnPortTest, TestTurnPortType) { @@ -820,7 +881,8 @@ TEST_F(TurnPortTest, TestReconstructedServerUrlForTls) { } TEST_F(TurnPortTest, TestReconstructedServerUrlForHostname) { - CreateTurnPort(kTurnUsername, kTurnPassword, kTurnPortHostnameProtoAddr); + CreateTurnPort(kTurnUsername, kTurnPassword, + kTurnPortInvalidHostnameProtoAddr); // This test follows the pattern from TestTurnTcpOnAddressResolveFailure. // As VSS doesn't provide DNS resolution, name resolve will fail, // the error will be set and contain the url. 
@@ -835,12 +897,7 @@ TEST_F(TurnPortTest, TestReconstructedServerUrlForHostname) { TEST_F(TurnPortTest, TestTurnAllocate) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); EXPECT_EQ(0, turn_port_->SetOption(rtc::Socket::OPT_SNDBUF, 10 * 1024)); - turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); - ASSERT_EQ(1U, turn_port_->Candidates().size()); - EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), - turn_port_->Candidates()[0].address().ipaddr()); - EXPECT_NE(0, turn_port_->Candidates()[0].address().port()); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); } class TurnLoggingIdValidator : public StunMessageObserver { @@ -871,24 +928,14 @@ TEST_F(TurnPortTest, TestTurnAllocateWithLoggingId) { turn_port_->SetTurnLoggingId("KESO"); turn_server_.server()->SetStunMessageObserver( std::make_unique("KESO")); - turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); - ASSERT_EQ(1U, turn_port_->Candidates().size()); - EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), - turn_port_->Candidates()[0].address().ipaddr()); - EXPECT_NE(0, turn_port_->Candidates()[0].address().port()); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); } TEST_F(TurnPortTest, TestTurnAllocateWithoutLoggingId) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); turn_server_.server()->SetStunMessageObserver( std::make_unique(nullptr)); - turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); - ASSERT_EQ(1U, turn_port_->Candidates().size()); - EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), - turn_port_->Candidates()[0].address().ipaddr()); - EXPECT_NE(0, turn_port_->Candidates()[0].address().port()); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); } // Test bad credentials. 
@@ -907,12 +954,7 @@ TEST_F(TurnPortTest, TestTurnTcpAllocate) { turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); EXPECT_EQ(0, turn_port_->SetOption(rtc::Socket::OPT_SNDBUF, 10 * 1024)); - turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 3, fake_clock_); - ASSERT_EQ(1U, turn_port_->Candidates().size()); - EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), - turn_port_->Candidates()[0].address().ipaddr()); - EXPECT_NE(0, turn_port_->Candidates()[0].address().port()); + TestTurnAllocateSucceeds(kSimulatedRtt * 3); } // Test case for WebRTC issue 3927 where a proxy binds to the local host address @@ -927,12 +969,7 @@ TEST_F(TurnPortTest, TestTurnTcpAllocationWhenProxyChangesAddressToLocalHost) { turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); CreateTurnPort(kLocalAddr1, kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); EXPECT_EQ(0, turn_port_->SetOption(rtc::Socket::OPT_SNDBUF, 10 * 1024)); - turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 3, fake_clock_); - ASSERT_EQ(1U, turn_port_->Candidates().size()); - EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), - turn_port_->Candidates()[0].address().ipaddr()); - EXPECT_NE(0, turn_port_->Candidates()[0].address().port()); + TestTurnAllocateSucceeds(kSimulatedRtt * 3); // Verify that the socket actually used localhost, otherwise this test isn't // doing what it meant to. 
@@ -1520,12 +1557,7 @@ TEST_F(TurnPortTest, TestTurnLocalIPv6AddressServerIPv6ExtenalIPv4) { turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, kTurnUdpIPv6ProtoAddr); - turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); - ASSERT_EQ(1U, turn_port_->Candidates().size()); - EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), - turn_port_->Candidates()[0].address().ipaddr()); - EXPECT_NE(0, turn_port_->Candidates()[0].address().port()); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); } // Tests that the local and remote candidate address families should match when @@ -1856,4 +1888,139 @@ TEST_F(TurnPortTest, TestTurnDangerousServerAllowedWithFieldTrial) { ASSERT_TRUE(turn_port_); } +class TurnPortWithMockDnsResolverTest : public TurnPortTest { + public: + TurnPortWithMockDnsResolverTest() + : TurnPortTest(), socket_factory_(ss_.get()) {} + + rtc::PacketSocketFactory* socket_factory() override { + return &socket_factory_; + } + + void SetDnsResolverExpectations( + rtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { + socket_factory_.SetExpectations(expectations); + } + + private: + rtc::MockDnsResolvingPacketSocketFactory socket_factory_; +}; + +// Test an allocation from a TURN server specified by a hostname. 
+TEST_F(TurnPortWithMockDnsResolverTest, TestHostnameResolved) { + CreateTurnPort(kTurnUsername, kTurnPassword, kTurnPortValidHostnameProtoAddr); + SetDnsResolverExpectations( + [](webrtc::MockAsyncDnsResolver* resolver, + webrtc::MockAsyncDnsResolverResult* resolver_result) { + EXPECT_CALL(*resolver, Start(kTurnValidAddr, _)) + .WillOnce(InvokeArgument<1>()); + EXPECT_CALL(*resolver, result) + .WillRepeatedly(ReturnPointee(resolver_result)); + EXPECT_CALL(*resolver_result, GetError).WillRepeatedly(Return(0)); + EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET, _)) + .WillOnce(DoAll(SetArgPointee<1>(kTurnUdpIntAddr), Return(true))); + }); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); +} + +// Test an allocation from a TURN server specified by a hostname on an IPv6 +// network. +TEST_F(TurnPortWithMockDnsResolverTest, TestHostnameResolvedIPv6Network) { + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, + kTurnPortValidHostnameProtoAddr); + SetDnsResolverExpectations( + [](webrtc::MockAsyncDnsResolver* resolver, + webrtc::MockAsyncDnsResolverResult* resolver_result) { + EXPECT_CALL(*resolver, Start(kTurnValidAddr, _)) + .WillOnce(InvokeArgument<1>()); + EXPECT_CALL(*resolver, result) + .WillRepeatedly(ReturnPointee(resolver_result)); + EXPECT_CALL(*resolver_result, GetError).WillRepeatedly(Return(0)); + EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET6, _)) + .WillOnce( + DoAll(SetArgPointee<1>(kTurnUdpIPv6IntAddr), Return(true))); + }); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); +} + +// Test an allocation from a TURN server specified by a hostname on an IPv6 +// network, without network family-specific resolution. 
+TEST_F(TurnPortWithMockDnsResolverTest, + TestHostnameResolvedIPv6NetworkFamilyFieldTrialDisabled) { + webrtc::test::ScopedKeyValueConfig override_field_trials( + field_trials_, "WebRTC-IPv6NetworkResolutionFixes/Disabled/"); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, + kTurnPortValidHostnameProtoAddr); + SetDnsResolverExpectations( + [](webrtc::MockAsyncDnsResolver* resolver, + webrtc::MockAsyncDnsResolverResult* resolver_result) { + // Expect to call Resolver::Start without family arg. + EXPECT_CALL(*resolver, Start(kTurnValidAddr, _)) + .WillOnce(InvokeArgument<1>()); + EXPECT_CALL(*resolver, result) + .WillRepeatedly(ReturnPointee(resolver_result)); + EXPECT_CALL(*resolver_result, GetError).WillRepeatedly(Return(0)); + EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET6, _)) + .WillOnce( + DoAll(SetArgPointee<1>(kTurnUdpIPv6IntAddr), Return(true))); + }); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); +} + +// Test an allocation from a TURN server specified by a hostname on an IPv6 +// network, without network family-specific resolution. +TEST_F(TurnPortWithMockDnsResolverTest, + TestHostnameResolvedIPv6NetworkFamilyFieldTrialParamDisabled) { + webrtc::test::ScopedKeyValueConfig override_field_trials( + field_trials_, + "WebRTC-IPv6NetworkResolutionFixes/" + "Enabled,ResolveTurnHostnameForFamily:false/"); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, + kTurnPortValidHostnameProtoAddr); + SetDnsResolverExpectations( + [](webrtc::MockAsyncDnsResolver* resolver, + webrtc::MockAsyncDnsResolverResult* resolver_result) { + // Expect to call Resolver::Start without family arg. 
+ EXPECT_CALL(*resolver, Start(kTurnValidAddr, _)) + .WillOnce(InvokeArgument<1>()); + EXPECT_CALL(*resolver, result) + .WillRepeatedly(ReturnPointee(resolver_result)); + EXPECT_CALL(*resolver_result, GetError).WillRepeatedly(Return(0)); + EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET6, _)) + .WillOnce( + DoAll(SetArgPointee<1>(kTurnUdpIPv6IntAddr), Return(true))); + }); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); +} + +// Test an allocation from a TURN server specified by a hostname on an IPv6 +// network, with network family-specific resolution. +TEST_F(TurnPortWithMockDnsResolverTest, + TestHostnameResolvedIPv6NetworkFieldTrialEnabled) { + webrtc::test::ScopedKeyValueConfig override_field_trials( + field_trials_, + "WebRTC-IPv6NetworkResolutionFixes/" + "Enabled,ResolveTurnHostnameForFamily:true/"); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, + kTurnPortValidHostnameProtoAddr); + SetDnsResolverExpectations( + [](webrtc::MockAsyncDnsResolver* resolver, + webrtc::MockAsyncDnsResolverResult* resolver_result) { + // Expect to call Resolver::Start _with_ family arg. 
+ EXPECT_CALL(*resolver, Start(kTurnValidAddr, /*family=*/AF_INET6, _)) + .WillOnce(InvokeArgument<2>()); + EXPECT_CALL(*resolver, result) + .WillRepeatedly(ReturnPointee(resolver_result)); + EXPECT_CALL(*resolver_result, GetError).WillRepeatedly(Return(0)); + EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET6, _)) + .WillOnce( + DoAll(SetArgPointee<1>(kTurnUdpIPv6IntAddr), Return(true))); + }); + TestTurnAllocateSucceeds(kSimulatedRtt * 2); +} + } // namespace cricket diff --git a/p2p/base/wrapping_active_ice_controller_unittest.cc b/p2p/base/wrapping_active_ice_controller_unittest.cc index 7dfdfef0f5..b4811bd297 100644 --- a/p2p/base/wrapping_active_ice_controller_unittest.cc +++ b/p2p/base/wrapping_active_ice_controller_unittest.cc @@ -37,6 +37,8 @@ using ::cricket::NominationMode; using ::cricket::WrappingActiveIceController; using ::testing::_; +using ::testing::ElementsAreArray; +using ::testing::IsEmpty; using ::testing::NiceMock; using ::testing::Ref; using ::testing::Return; @@ -132,7 +134,8 @@ TEST(WrappingActiveIceControllerTest, HandlesImmediateSwitchRequest) { .WillOnce(Return(switch_result)); EXPECT_CALL(agent, SwitchSelectedConnection(kConnection, reason)) .InSequence(check_then_switch); - EXPECT_CALL(agent, ForgetLearnedStateForConnections(conns_to_forget)); + EXPECT_CALL(agent, ForgetLearnedStateForConnections( + ElementsAreArray(conns_to_forget))); EXPECT_TRUE(controller.OnImmediateSwitchRequest(reason, kConnection)); @@ -146,7 +149,7 @@ TEST(WrappingActiveIceControllerTest, HandlesImmediateSwitchRequest) { SortAndSwitchConnection(IceSwitchReason::ICE_CONTROLLER_RECHECK)) .InSequence(recheck_sort) .WillOnce(Return(IceControllerInterface::SwitchResult{})); - EXPECT_CALL(agent, ForgetLearnedStateForConnections(kEmptyConnsList)); + EXPECT_CALL(agent, ForgetLearnedStateForConnections(IsEmpty())); clock.AdvanceTime(kTick); } @@ -180,7 +183,7 @@ TEST(WrappingActiveIceControllerTest, HandlesImmediateSortAndSwitchRequest) { EXPECT_CALL(*wrapped, 
PruneConnections()) .InSequence(sort_and_switch) .WillOnce(Return(conns_to_prune)); - EXPECT_CALL(agent, PruneConnections(conns_to_prune)) + EXPECT_CALL(agent, PruneConnections(ElementsAreArray(conns_to_prune))) .InSequence(sort_and_switch); controller.OnImmediateSortAndSwitchRequest(reason); @@ -198,8 +201,7 @@ TEST(WrappingActiveIceControllerTest, HandlesImmediateSortAndSwitchRequest) { EXPECT_CALL(*wrapped, PruneConnections()) .InSequence(recheck_sort) .WillOnce(Return(kEmptyConnsList)); - EXPECT_CALL(agent, PruneConnections(kEmptyConnsList)) - .InSequence(recheck_sort); + EXPECT_CALL(agent, PruneConnections(IsEmpty())).InSequence(recheck_sort); clock.AdvanceTime(kTick); } diff --git a/p2p/client/basic_port_allocator.cc b/p2p/client/basic_port_allocator.cc index aa6d36ce72..ecce84ebbc 100644 --- a/p2p/client/basic_port_allocator.cc +++ b/p2p/client/basic_port_allocator.cc @@ -354,7 +354,8 @@ BasicPortAllocator::CreateIceGatherer(const std::string& name) { } -void BasicPortAllocator::AddTurnServer(const RelayServerConfig& turn_server) { +void BasicPortAllocator::AddTurnServerForTesting( + const RelayServerConfig& turn_server) { CheckRunOnValidThreadAndInitialized(); std::vector new_turn_servers = turn_servers(); new_turn_servers.push_back(turn_server); @@ -1703,12 +1704,17 @@ void AllocationSequence::CreateRelayPorts() { return; } + // Relative priority of candidates from this TURN server in relation + // to the candidates from other servers. Required because ICE priorities + // need to be unique. 
+ int relative_priority = config_->relays.size(); for (RelayServerConfig& relay : config_->relays) { - CreateTurnPort(relay); + CreateTurnPort(relay, relative_priority--); } } -void AllocationSequence::CreateTurnPort(const RelayServerConfig& config) { +void AllocationSequence::CreateTurnPort(const RelayServerConfig& config, + int relative_priority) { PortList::const_iterator relay_port; for (relay_port = config.ports.begin(); relay_port != config.ports.end(); ++relay_port) { @@ -1741,6 +1747,7 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config) { args.config = &config; args.turn_customizer = session_->allocator()->turn_customizer(); args.field_trials = session_->allocator()->field_trials(); + args.relative_priority = relative_priority; std::unique_ptr port; // Shared socket mode must be enabled only for UDP based ports. Hence diff --git a/p2p/client/basic_port_allocator.h b/p2p/client/basic_port_allocator.h index bbfc583c83..453d1fca43 100644 --- a/p2p/client/basic_port_allocator.h +++ b/p2p/client/basic_port_allocator.h @@ -83,7 +83,7 @@ class RTC_EXPORT BasicPortAllocator : public PortAllocator { const std::string& content_name) override; // Convenience method that adds a TURN server to the configuration. - void AddTurnServer(const RelayServerConfig& turn_server); + void AddTurnServerForTesting(const RelayServerConfig& turn_server); RelayPortFactoryInterface* relay_port_factory() { CheckRunOnValidThreadIfInitialized(); @@ -390,11 +390,9 @@ class AllocationSequence : public sigslot::has_slots<> { void Start(); void Stop(); - protected: - // For testing. 
- void CreateTurnPort(const RelayServerConfig& config); - private: + void CreateTurnPort(const RelayServerConfig& config, int relative_priority); + typedef std::vector ProtocolList; void Process(int epoch); diff --git a/p2p/client/basic_port_allocator_unittest.cc b/p2p/client/basic_port_allocator_unittest.cc index ac0258de84..03d5524066 100644 --- a/p2p/client/basic_port_allocator_unittest.cc +++ b/p2p/client/basic_port_allocator_unittest.cc @@ -112,6 +112,8 @@ static const char kTurnPassword[] = "test"; // Add some margin of error for slow bots. static const int kStunTimeoutMs = cricket::STUN_TOTAL_TIMEOUT; +constexpr uint64_t kTiebreakerDefault = 44444; + namespace { void CheckStunKeepaliveIntervalOfAllReadyPorts( @@ -175,6 +177,7 @@ class BasicPortAllocatorTestBase : public ::testing::Test, stun_servers, &field_trials_); allocator_->Initialize(); allocator_->set_step_delay(kMinimumStepDelay); + allocator_->SetIceTiebreaker(kTiebreakerDefault); webrtc::metrics::Reset(); } @@ -213,6 +216,7 @@ class BasicPortAllocatorTestBase : public ::testing::Test, &network_manager_, std::make_unique(fss_.get()))); allocator_->Initialize(); + allocator_->SetIceTiebreaker(kTiebreakerDefault); allocator_->set_step_delay(kMinimumStepDelay); } // Endpoint is behind a NAT, with STUN specified. 
@@ -248,7 +252,7 @@ class BasicPortAllocatorTestBase : public ::testing::Test, void AddTurnServers(const rtc::SocketAddress& udp_turn, const rtc::SocketAddress& tcp_turn) { RelayServerConfig turn_server = CreateTurnServers(udp_turn, tcp_turn); - allocator_->AddTurnServer(turn_server); + allocator_->AddTurnServerForTesting(turn_server); } bool CreateSession(int component) { @@ -297,6 +301,7 @@ class BasicPortAllocatorTestBase : public ::testing::Test, this, &BasicPortAllocatorTestBase::OnCandidatesRemoved); session->SignalCandidatesAllocationDone.connect( this, &BasicPortAllocatorTestBase::OnCandidatesAllocationDone); + session->set_ice_tiebreaker(kTiebreakerDefault); return session; } @@ -1814,7 +1819,7 @@ TEST_F(BasicPortAllocatorTestWithRealClock, turn_server.credentials = credentials; turn_server.ports.push_back( ProtocolAddress(rtc::SocketAddress("localhost", 3478), PROTO_UDP)); - allocator_->AddTurnServer(turn_server); + allocator_->AddTurnServerForTesting(turn_server); allocator_->set_step_delay(kMinimumStepDelay); allocator_->set_flags(allocator().flags() | @@ -2525,6 +2530,29 @@ TEST_F(BasicPortAllocatorTest, TestCreateIceGathererForForking) { EXPECT_EQ(allocator_->stun_candidate_keepalive_interval(), forked->stun_candidate_keepalive_interval()); } +// Test that candidates from different servers get assigned a unique local +// preference (the middle 16 bits of the priority). +TEST_F(BasicPortAllocatorTest, AssignsUniqueLocalPreferenceToRelayCandidates) { + allocator_->SetCandidateFilter(CF_RELAY); + allocator_->AddTurnServerForTesting( + CreateTurnServers(kTurnUdpIntAddr, SocketAddress())); + allocator_->AddTurnServerForTesting( + CreateTurnServers(kTurnUdpIntAddr, SocketAddress())); + allocator_->AddTurnServerForTesting( + CreateTurnServers(kTurnUdpIntAddr, SocketAddress())); + + AddInterface(kClientAddr); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); + session_->StartGettingPorts(); + ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, + 
kDefaultAllocationTimeout, fake_clock); + EXPECT_EQ(3u, candidates_.size()); + EXPECT_GT((candidates_[0].priority() >> 8) & 0xFFFF, + (candidates_[1].priority() >> 8) & 0xFFFF); + EXPECT_GT((candidates_[1].priority() >> 8) & 0xFFFF, + (candidates_[2].priority() >> 8) & 0xFFFF); +} + // Test that no more than allocator.max_ipv6_networks() IPv6 networks are used // to gather candidates. TEST_F(BasicPortAllocatorTest, TwoIPv6AreSelectedBecauseOfMaxIpv6Limit) { diff --git a/p2p/client/relay_port_factory_interface.h b/p2p/client/relay_port_factory_interface.h index 4eec5dbf28..edfca3697b 100644 --- a/p2p/client/relay_port_factory_interface.h +++ b/p2p/client/relay_port_factory_interface.h @@ -45,6 +45,10 @@ struct CreateRelayPortArgs { std::string password; webrtc::TurnCustomizer* turn_customizer = nullptr; const webrtc::FieldTrialsView* field_trials = nullptr; + // Relative priority of candidates from this TURN server in relation + // to the candidates from other servers. Required because ICE priorities + // need to be unique. + int relative_priority = 0; }; // A factory for creating RelayPort's. 
diff --git a/pc/BUILD.gn b/pc/BUILD.gn index 26a044b615..15c87dda25 100644 --- a/pc/BUILD.gn +++ b/pc/BUILD.gn @@ -415,6 +415,7 @@ rtc_source_set("rtp_sender_proxy") { deps = [ ":proxy", "../api:libjingle_peerconnection_api", + "../api:rtp_sender_interface", ] } @@ -1007,6 +1008,7 @@ rtc_source_set("rtc_stats_collector") { "../api/task_queue:task_queue", "../api/units:time_delta", "../api/video:video_rtp_headers", + "../api/video_codecs:scalability_mode", "../call:call_interfaces", "../common_video:common_video", "../media:rtc_media_base", @@ -1091,6 +1093,7 @@ rtc_source_set("sdp_offer_answer") { "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1181,6 +1184,7 @@ rtc_source_set("peer_connection") { "../api:rtc_error", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1276,6 +1280,7 @@ rtc_source_set("legacy_stats_collector") { "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api/audio_codecs:audio_codecs_api", @@ -1544,6 +1549,7 @@ rtc_library("rtp_transceiver") { "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1588,6 +1594,7 @@ rtc_library("rtp_transmission_manager") { "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1618,6 +1625,7 @@ rtc_library("transceiver_list") { "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:rtp_parameters", + 
"../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../rtc_base:checks", @@ -1853,12 +1861,14 @@ rtc_library("rtp_sender") { ":legacy_stats_collector_interface", "../api:audio_options_api", "../api:dtls_transport_interface", + "../api:dtmf_sender_interface", "../api:frame_transformer_interface", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:priority", "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api/crypto:frame_encryptor_interface", @@ -1912,6 +1922,7 @@ rtc_library("dtmf_sender") { ] deps = [ ":proxy", + "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:scoped_refptr", "../api:sequence_checker", @@ -2179,6 +2190,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:rtc_error", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../rtc_base:checks", "../rtc_base:gunit_helpers", @@ -2193,6 +2205,7 @@ if (rtc_include_tests && !build_with_chromium) { deps = [ ":integration_test_helpers", ":pc_test_utils", + "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:scoped_refptr", "../api/units:time_delta", @@ -2241,6 +2254,7 @@ if (rtc_include_tests && !build_with_chromium) { "peer_connection_rtp_unittest.cc", "peer_connection_signaling_unittest.cc", "peer_connection_simulcast_unittest.cc", + "peer_connection_svc_integrationtest.cc", "peer_connection_wrapper.cc", "peer_connection_wrapper.h", "proxy_unittest.cc", @@ -2313,6 +2327,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:candidate", "../api:create_peerconnection_factory", "../api:dtls_transport_interface", + "../api:dtmf_sender_interface", "../api:fake_frame_decryptor", "../api:fake_frame_encryptor", "../api:field_trials_view", @@ -2328,6 +2343,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:packet_socket_factory", 
"../api:priority", "../api:rtc_error", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api/adaptation:resource_adaptation_api", @@ -2352,6 +2368,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api/video:video_codec_constants", "../api/video:video_frame", "../api/video:video_rtp_headers", + "../api/video_codecs:scalability_mode", "../call/adaptation:resource_adaptation_test_utilities", "../common_video", "../logging:fake_rtc_event_log", @@ -2522,6 +2539,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api:rtc_error", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api/audio:audio_mixer_api", @@ -2710,9 +2728,13 @@ if (rtc_include_tests && !build_with_chromium) { "../api:media_stream_interface", "../api:network_emulation_manager_api", "../api:peer_connection_quality_test_fixture_api", + "../api:rtc_stats_api", "../api:simulated_network_api", "../api:time_controller", "../api/test/metrics:global_metrics_logger_and_exporter", + "../api/test/pclf:media_configuration", + "../api/test/pclf:media_quality_test_params", + "../api/test/pclf:peer_configurer", "../api/video_codecs:video_codecs_api", "../call:simulated_network", "../modules/video_coding:webrtc_vp9", @@ -2723,8 +2745,8 @@ if (rtc_include_tests && !build_with_chromium) { "../test:fileutils", "../test:test_main", "../test:test_support", - "../test/pc/e2e:default_video_quality_analyzer", "../test/pc/e2e:network_quality_metrics_reporter", + "../test/pc/e2e/analyzer/video:default_video_quality_analyzer", ] if (is_ios) { diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc index 0dbdf0b713..7af460b80e 100644 --- a/pc/audio_rtp_receiver.cc +++ b/pc/audio_rtp_receiver.cc @@ -28,7 +28,7 @@ AudioRtpReceiver::AudioRtpReceiver( std::string receiver_id, std::vector stream_ids, bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel /*= 
nullptr*/) + cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) : AudioRtpReceiver(worker_thread, receiver_id, CreateStreamsFromIds(std::move(stream_ids)), @@ -40,7 +40,7 @@ AudioRtpReceiver::AudioRtpReceiver( const std::string& receiver_id, const std::vector>& streams, bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) + cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) : worker_thread_(worker_thread), id_(receiver_id), source_(rtc::make_ref_counted( @@ -314,7 +314,8 @@ void AudioRtpReceiver::SetJitterBufferMinimumDelay( media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); } -void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { +void AudioRtpReceiver::SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); @@ -323,7 +324,8 @@ void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { media_channel ? worker_thread_safety_->SetAlive() : worker_thread_safety_->SetNotAlive(); - media_channel_ = static_cast(media_channel); + media_channel_ = + static_cast(media_channel); } void AudioRtpReceiver::NotifyFirstPacketReceived() { diff --git a/pc/audio_rtp_receiver.h b/pc/audio_rtp_receiver.h index c68315882c..2e0f77c85c 100644 --- a/pc/audio_rtp_receiver.h +++ b/pc/audio_rtp_receiver.h @@ -50,18 +50,19 @@ class AudioRtpReceiver : public ObserverInterface, // However, when using that, the assumption is that right after construction, // a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel` // will be made, which will internally start the source on the worker thread. 
- AudioRtpReceiver(rtc::Thread* worker_thread, - std::string receiver_id, - std::vector stream_ids, - bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel = nullptr); + AudioRtpReceiver( + rtc::Thread* worker_thread, + std::string receiver_id, + std::vector stream_ids, + bool is_unified_plan, + cricket::VoiceMediaReceiveChannelInterface* voice_channel = nullptr); // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed. AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, const std::vector>& streams, bool is_unified_plan, - cricket::VoiceMediaChannel* media_channel = nullptr); + cricket::VoiceMediaReceiveChannelInterface* media_channel = nullptr); virtual ~AudioRtpReceiver(); // ObserverInterface implementation @@ -111,7 +112,8 @@ class AudioRtpReceiver : public ObserverInterface, void SetJitterBufferMinimumDelay( absl::optional delay_seconds) override; - void SetMediaChannel(cricket::MediaChannel* media_channel) override; + void SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) override; std::vector GetSources() const override; int AttachmentId() const override { return attachment_id_; } @@ -134,8 +136,8 @@ class AudioRtpReceiver : public ObserverInterface, const std::string id_; const rtc::scoped_refptr source_; const rtc::scoped_refptr> track_; - cricket::VoiceMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = - nullptr; + cricket::VoiceMediaReceiveChannelInterface* media_channel_ + RTC_GUARDED_BY(worker_thread_) = nullptr; absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); diff --git a/pc/audio_rtp_receiver_unittest.cc b/pc/audio_rtp_receiver_unittest.cc index bab6b74f9f..eb77212b2a 100644 --- a/pc/audio_rtp_receiver_unittest.cc +++ b/pc/audio_rtp_receiver_unittest.cc @@ -66,7 +66,7 @@ TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) { receiver_->track(); receiver_->track()->set_enabled(true); - 
receiver_->SetMediaChannel(&media_channel_); + receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel()); EXPECT_CALL(media_channel_, SetDefaultRawAudioSink(_)).Times(0); receiver_->SetupMediaChannel(kSsrc); @@ -86,7 +86,7 @@ TEST_F(AudioRtpReceiverTest, VolumesSetBeforeStartingAreRespected) { receiver_->OnSetVolume(kVolume); receiver_->track()->set_enabled(true); - receiver_->SetMediaChannel(&media_channel_); + receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel()); // The previosly set initial volume should be propagated to the provided // media_channel_ as soon as SetupMediaChannel is called. diff --git a/pc/channel.cc b/pc/channel.cc index 0bf7b5b120..6d63fa1ca7 100644 --- a/pc/channel.cc +++ b/pc/channel.cc @@ -154,7 +154,7 @@ std::string BaseChannel::ToString() const { bool BaseChannel::ConnectToRtpTransport_n() { RTC_DCHECK(rtp_transport_); - RTC_DCHECK(media_channel()); + RTC_DCHECK(media_send_channel()); // We don't need to call OnDemuxerCriteriaUpdatePending/Complete because // there's no previous criteria to worry about. @@ -174,7 +174,7 @@ bool BaseChannel::ConnectToRtpTransport_n() { void BaseChannel::DisconnectFromRtpTransport_n() { RTC_DCHECK(rtp_transport_); - RTC_DCHECK(media_channel()); + RTC_DCHECK(media_send_channel()); rtp_transport_->UnregisterRtpDemuxerSink(this); rtp_transport_->SignalReadyToSend.disconnect(this); rtp_transport_->SignalNetworkRouteChanged.disconnect(this); @@ -458,7 +458,7 @@ bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( // TODO(bugs.webrtc.org/13536): See if we can do this asynchronously. 
if (update_demuxer) - media_channel()->OnDemuxerCriteriaUpdatePending(); + media_receive_channel()->OnDemuxerCriteriaUpdatePending(); bool success = network_thread()->BlockingCall([&]() mutable { RTC_DCHECK_RUN_ON(network_thread()); @@ -481,7 +481,7 @@ bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( }); if (update_demuxer) - media_channel()->OnDemuxerCriteriaUpdateComplete(); + media_receive_channel()->OnDemuxerCriteriaUpdateComplete(); return success; } @@ -584,7 +584,7 @@ bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { // were matched to this channel by MID or RID. Ideally we'd remove only the // streams that were matched based on payload type alone, but currently // there is no straightforward way to identify those streams. - media_channel()->ResetUnsignaledRecvStream(); + media_receive_channel()->ResetUnsignaledRecvStream(); if (!demuxer_criteria_.payload_types().empty()) { config_changed = true; demuxer_criteria_.payload_types().clear(); @@ -629,7 +629,7 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, GetStream(streams, StreamFinder(&old_stream))) { continue; } - if (!media_channel()->RemoveSendStream(old_stream.first_ssrc())) { + if (!media_send_channel()->RemoveSendStream(old_stream.first_ssrc())) { error_desc = StringFormat( "Failed to remove send stream with ssrc %u from m-section with " "mid='%s'.", @@ -672,7 +672,7 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, /* flex_fec = */ false, ssrc_generator_); } - if (media_channel()->AddSendStream(new_stream)) { + if (media_send_channel()->AddSendStream(new_stream)) { RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0] << " into " << ToString(); } else { @@ -709,12 +709,12 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, // If we no longer have an unsignaled stream, we would like to remove // the unsignaled stream params that are cached. 
if (!old_stream.has_ssrcs() && !new_has_unsignaled_ssrcs) { - media_channel()->ResetUnsignaledRecvStream(); + media_receive_channel()->ResetUnsignaledRecvStream(); RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString() << "."; } else if (old_stream.has_ssrcs() && !GetStreamBySsrc(streams, old_stream.first_ssrc())) { - if (media_channel()->RemoveRecvStream(old_stream.first_ssrc())) { + if (media_receive_channel()->RemoveRecvStream(old_stream.first_ssrc())) { RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc() << " from " << ToString() << "."; } else { @@ -735,7 +735,7 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, // stream received later. if ((!new_stream.has_ssrcs() && !old_has_unsignaled_ssrcs) || !GetStreamBySsrc(remote_streams_, new_stream.first_ssrc())) { - if (media_channel()->AddRecvStream(new_stream)) { + if (media_receive_channel()->AddRecvStream(new_stream)) { RTC_LOG(LS_INFO) << "Add remote ssrc: " << (new_stream.has_ssrcs() ? std::to_string(new_stream.first_ssrc()) @@ -808,7 +808,7 @@ bool BaseChannel::ClearHandledPayloadTypes() { void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(network_initialized()); - media_channel()->OnPacketSent(sent_packet); + media_send_channel()->OnPacketSent(sent_packet); } VoiceChannel::VoiceChannel(rtc::Thread* worker_thread, @@ -839,12 +839,12 @@ void VoiceChannel::UpdateMediaSendRecvState_w() { // content. We receive data on the default channel and multiplexed streams. bool ready_to_receive = enabled() && webrtc::RtpTransceiverDirectionHasRecv( local_content_direction()); - media_channel()->SetPlayout(ready_to_receive); + media_receive_channel()->SetPlayout(ready_to_receive); // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. 
bool send = IsReadyToSendMedia_w(); - media_channel()->SetSend(send); + media_send_channel()->SetSend(send); RTC_LOG(LS_INFO) << "Changing voice state, recv=" << ready_to_receive << " send=" << send << " for " << ToString(); @@ -861,7 +861,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, RtpHeaderExtensions header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); bool update_header_extensions = true; - media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); + media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); AudioRecvParameters recv_params = last_recv_params_; RtpParametersFromMediaDescription( @@ -869,7 +869,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); - if (!media_channel()->SetRecvParameters(recv_params)) { + if (!media_receive_channel()->SetRecvParameters(recv_params)) { error_desc = StringFormat( "Failed to set local audio description recv parameters for m-section " "with mid='%s'.", @@ -921,7 +921,8 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, extensions_filter(), &send_params); send_params.mid = mid(); - bool parameters_applied = media_channel()->SetSendParameters(send_params); + bool parameters_applied = + media_send_channel()->SetSendParameters(send_params); if (!parameters_applied) { error_desc = StringFormat( "Failed to set remote audio description send parameters for m-section " @@ -979,7 +980,7 @@ void VideoChannel::UpdateMediaSendRecvState_w() { // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. 
bool send = IsReadyToSendMedia_w(); - media_channel()->SetSend(send); + media_send_channel()->SetSend(send); RTC_LOG(LS_INFO) << "Changing video state, send=" << send << " for " << ToString(); } @@ -995,7 +996,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, RtpHeaderExtensions header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); bool update_header_extensions = true; - media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); + media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); VideoRecvParameters recv_params = last_recv_params_; @@ -1025,7 +1026,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, } } - if (!media_channel()->SetRecvParameters(recv_params)) { + if (!media_receive_channel()->SetRecvParameters(recv_params)) { error_desc = StringFormat( "Failed to set local video description recv parameters for m-section " "with mid='%s'.", @@ -1044,7 +1045,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, last_recv_params_ = recv_params; if (needs_send_params_update) { - if (!media_channel()->SetSendParameters(send_params)) { + if (!media_send_channel()->SetSendParameters(send_params)) { error_desc = StringFormat( "Failed to set send parameters for m-section with mid='%s'.", mid().c_str()); @@ -1110,7 +1111,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, } } - if (!media_channel()->SetSendParameters(send_params)) { + if (!media_send_channel()->SetSendParameters(send_params)) { error_desc = StringFormat( "Failed to set remote video description send parameters for m-section " "with mid='%s'.", @@ -1120,7 +1121,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, last_send_params_ = send_params; if (needs_recv_params_update) { - if (!media_channel()->SetRecvParameters(recv_params)) { + if 
(!media_receive_channel()->SetRecvParameters(recv_params)) { error_desc = StringFormat( "Failed to set recv parameters for m-section with mid='%s'.", mid().c_str()); diff --git a/pc/channel.h b/pc/channel.h index 985ac22827..38a2d3a9ff 100644 --- a/pc/channel.h +++ b/pc/channel.h @@ -32,6 +32,7 @@ #include "call/rtp_demuxer.h" #include "call/rtp_packet_sink_interface.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "media/base/stream_params.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "pc/channel_interface.h" @@ -70,7 +71,7 @@ class BaseChannel : public ChannelInterface, public sigslot::has_slots<>, // TODO(tommi): Consider implementing these interfaces // via composition. - public MediaChannel::NetworkInterface, + public MediaChannelNetworkInterface, public webrtc::RtpPacketSinkInterface { public: // If `srtp_required` is true, the channel will not send or receive any @@ -155,14 +156,29 @@ class BaseChannel : public ChannelInterface, // RtpPacketSinkInterface overrides. 
void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; - MediaChannel* media_channel() const override { - return media_channel_.get(); + MediaChannel* media_channel() const override { return media_channel_.get(); } + + MediaSendChannelInterface* media_send_channel() const override { + return media_channel_->AsSendChannel(); } - VideoMediaChannel* video_media_channel() const override { + VideoMediaSendChannelInterface* video_media_send_channel() const override { RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; return nullptr; } - VoiceMediaChannel* voice_media_channel() const override { + VoiceMediaSendChannelInterface* voice_media_send_channel() const override { + RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice"; + return nullptr; + } + MediaReceiveChannelInterface* media_receive_channel() const override { + return media_channel_->AsReceiveChannel(); + } + VideoMediaReceiveChannelInterface* video_media_receive_channel() + const override { + RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; + return nullptr; + } + VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const override { RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice"; return nullptr; } @@ -368,12 +384,22 @@ class VoiceChannel : public BaseChannel { ~VoiceChannel(); // downcasts a MediaChannel - VoiceMediaChannel* media_channel() const override { - return static_cast(BaseChannel::media_channel()); + VoiceMediaSendChannelInterface* media_send_channel() const override { + return media_channel()->AsVoiceChannel()->AsVoiceSendChannel(); } - VoiceMediaChannel* voice_media_channel() const override { - return static_cast(media_channel()); + VoiceMediaSendChannelInterface* voice_media_send_channel() const override { + return media_send_channel(); + } + + // downcasts a MediaChannel + VoiceMediaReceiveChannelInterface* media_receive_channel() const override { + return 
media_channel()->AsVoiceChannel()->AsVoiceReceiveChannel(); + } + + VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const override { + return media_receive_channel(); } cricket::MediaType media_type() const override { @@ -424,12 +450,22 @@ class VideoChannel : public BaseChannel { ~VideoChannel(); // downcasts a MediaChannel - VideoMediaChannel* media_channel() const override { - return static_cast(BaseChannel::media_channel()); + VideoMediaSendChannelInterface* media_send_channel() const override { + return media_channel()->AsVideoChannel()->AsVideoSendChannel(); } - VideoMediaChannel* video_media_channel() const override { - return static_cast(media_channel()); + VideoMediaSendChannelInterface* video_media_send_channel() const override { + return media_send_channel(); + } + + // downcasts a MediaChannel + VideoMediaReceiveChannelInterface* media_receive_channel() const override { + return media_channel()->AsVideoChannel()->AsVideoReceiveChannel(); + } + + VideoMediaReceiveChannelInterface* video_media_receive_channel() + const override { + return media_receive_channel(); } cricket::MediaType media_type() const override { diff --git a/pc/channel_interface.h b/pc/channel_interface.h index 3c6ca6fe6a..445712b41f 100644 --- a/pc/channel_interface.h +++ b/pc/channel_interface.h @@ -28,6 +28,7 @@ class VideoBitrateAllocatorFactory; namespace cricket { +class MediaChannel; class MediaContentDescription; struct MediaConfig; @@ -47,11 +48,20 @@ class ChannelInterface { virtual ~ChannelInterface() = default; virtual cricket::MediaType media_type() const = 0; + // Temporary fix while MediaChannel is being reconstructed virtual MediaChannel* media_channel() const = 0; + virtual MediaSendChannelInterface* media_send_channel() const = 0; // Typecasts of media_channel(). Will cause an exception if the // channel is of the wrong type. 
- virtual VideoMediaChannel* video_media_channel() const = 0; - virtual VoiceMediaChannel* voice_media_channel() const = 0; + virtual VideoMediaSendChannelInterface* video_media_send_channel() const = 0; + virtual VoiceMediaSendChannelInterface* voice_media_send_channel() const = 0; + virtual MediaReceiveChannelInterface* media_receive_channel() const = 0; + // Typecasts of media_channel(). Will cause an exception if the + // channel is of the wrong type. + virtual VideoMediaReceiveChannelInterface* video_media_receive_channel() + const = 0; + virtual VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const = 0; // Returns a string view for the transport name. Fetching the transport name // must be done on the network thread only and note that the lifetime of diff --git a/pc/channel_unittest.cc b/pc/channel_unittest.cc index 2dd5d090bf..583c2923df 100644 --- a/pc/channel_unittest.cc +++ b/pc/channel_unittest.cc @@ -429,7 +429,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } void SendRtp1(rtc::Buffer data) { - SendRtp(media_channel1(), std::move(data)); + SendRtp(media_send_channel1(), std::move(data)); } void SendRtp2() { @@ -449,7 +449,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } bool CheckRtp1() { - return media_channel1()->CheckRtp(rtp_packet_.data(), rtp_packet_.size()); + return media_send_channel1()->CheckRtp(rtp_packet_.data(), + rtp_packet_.size()); } bool CheckRtp2() { return media_channel2()->CheckRtp(rtp_packet_.data(), rtp_packet_.size()); @@ -457,7 +458,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { // Methods to check custom data. 
bool CheckCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) { rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); - return media_channel1()->CheckRtp(data.data(), data.size()); + return media_send_channel1()->CheckRtp(data.data(), data.size()); } bool CheckCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) { rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); @@ -474,7 +475,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { return data; } - bool CheckNoRtp1() { return media_channel1()->CheckNoRtp(); } + bool CheckNoRtp1() { return media_send_channel1()->CheckNoRtp(); } bool CheckNoRtp2() { return media_channel2()->CheckNoRtp(); } void CreateContent(int flags, @@ -557,13 +558,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { void TestInit() { CreateChannels(0, 0); EXPECT_FALSE(IsSrtpActive(channel1_)); - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->codecs().empty()); - EXPECT_TRUE(media_channel1()->recv_streams().empty()); - EXPECT_TRUE(media_channel1()->rtp_packets().empty()); + EXPECT_TRUE(media_send_channel1()->codecs().empty()); + EXPECT_TRUE(media_send_channel1()->recv_streams().empty()); + EXPECT_TRUE(media_send_channel1()->rtp_packets().empty()); } // Test that SetLocalContent and SetRemoteContent properly configure @@ -574,11 +575,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateContent(0, kPcmuCodec, kH264Codec, &content); std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); - EXPECT_EQ(0U, media_channel1()->codecs().size()); + EXPECT_EQ(0U, media_send_channel1()->codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - ASSERT_EQ(1U, 
media_channel1()->codecs().size()); + ASSERT_EQ(1U, media_send_channel1()->codecs().size()); EXPECT_TRUE( - CodecMatches(content.codecs()[0], media_channel1()->codecs()[0])); + CodecMatches(content.codecs()[0], media_send_channel1()->codecs()[0])); } // Test that SetLocalContent and SetRemoteContent properly configure @@ -596,7 +597,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); content.set_extmap_allow_mixed_enum(answer_enum); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - EXPECT_EQ(answer, media_channel1()->ExtmapAllowMixed()); + EXPECT_EQ(answer, media_send_channel1()->ExtmapAllowMixed()); } void TestSetContentsExtmapAllowMixedCallee(bool offer, bool answer) { // For a callee, SetRemoteContent() is called first with an offer and next @@ -611,7 +612,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kOffer, err)); content.set_extmap_allow_mixed_enum(answer_enum); EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kAnswer, err)); - EXPECT_EQ(answer, media_channel1()->ExtmapAllowMixed()); + EXPECT_EQ(answer, media_send_channel1()->ExtmapAllowMixed()); } // Test that SetLocalContent and SetRemoteContent properly deals @@ -622,11 +623,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); CreateContent(0, kPcmuCodec, kH264Codec, &content); - EXPECT_EQ(0U, media_channel1()->codecs().size()); + EXPECT_EQ(0U, media_send_channel1()->codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - ASSERT_EQ(1U, media_channel1()->codecs().size()); + ASSERT_EQ(1U, media_send_channel1()->codecs().size()); EXPECT_TRUE( - CodecMatches(content.codecs()[0], media_channel1()->codecs()[0])); + 
CodecMatches(content.codecs()[0], media_send_channel1()->codecs()[0])); } // Test that SetLocalContent and SetRemoteContent properly set RTCP @@ -668,7 +669,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, err)); channel1_->Enable(true); - EXPECT_EQ(1u, media_channel1()->send_streams().size()); + EXPECT_EQ(1u, media_send_channel1()->send_streams().size()); EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, err)); EXPECT_EQ(1u, media_channel2()->recv_streams().size()); @@ -678,7 +679,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { typename T::Content content2; CreateContent(0, kPcmuCodec, kH264Codec, &content2); EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err)); - EXPECT_EQ(0u, media_channel1()->recv_streams().size()); + EXPECT_EQ(0u, media_send_channel1()->recv_streams().size()); EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kAnswer, err)); channel2_->Enable(true); EXPECT_EQ(0u, media_channel2()->send_streams().size()); @@ -696,14 +697,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_EQ(stream2, media_channel2()->send_streams()[0]); EXPECT_TRUE(channel1_->SetRemoteContent(&content3, SdpType::kOffer, err)); - ASSERT_EQ(1u, media_channel1()->recv_streams().size()); - EXPECT_EQ(stream2, media_channel1()->recv_streams()[0]); + ASSERT_EQ(1u, media_send_channel1()->recv_streams().size()); + EXPECT_EQ(stream2, media_send_channel1()->recv_streams()[0]); // Channel 1 replies but stop sending stream1. 
typename T::Content content4; CreateContent(0, kPcmuCodec, kH264Codec, &content4); EXPECT_TRUE(channel1_->SetLocalContent(&content4, SdpType::kAnswer, err)); - EXPECT_EQ(0u, media_channel1()->send_streams().size()); + EXPECT_EQ(0u, media_send_channel1()->send_streams().size()); EXPECT_TRUE(channel2_->SetRemoteContent(&content4, SdpType::kAnswer, err)); EXPECT_EQ(0u, media_channel2()->recv_streams().size()); @@ -717,9 +718,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { void TestPlayoutAndSendingStates() { CreateChannels(0, 0); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); } @@ -727,16 +728,16 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { channel1_->Enable(true); FlushCurrentThread(); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_, SdpType::kOffer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); EXPECT_TRUE(channel2_->SetRemoteContent(&local_media_content1_, SdpType::kOffer, err)); if (verify_playout_) { @@ -751,9 +752,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_FALSE(media_channel2()->sending()); ConnectFakeTransports(); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + 
EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); } @@ -767,9 +768,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetRemoteContent(&local_media_content2_, SdpType::kAnswer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); } // Test that changing the MediaContentDirection in the local and remote @@ -787,9 +788,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { channel2_->Enable(true); FlushCurrentThread(); if (verify_playout_) { - EXPECT_FALSE(media_channel1()->playout()); + EXPECT_FALSE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); } @@ -804,9 +805,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { ConnectFakeTransports(); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); // remote InActive + EXPECT_FALSE(media_send_channel1()->sending()); // remote InActive if (verify_playout_) { EXPECT_FALSE(media_channel2()->playout()); // local InActive } @@ -819,9 +820,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { channel1_->SetRemoteContent(&content2, SdpType::kPrAnswer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_TRUE(media_channel2()->playout()); // local RecvOnly } @@ -833,9 +834,9 @@ class ChannelTest : public ::testing::Test, public 
sigslot::has_slots<> { EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err)); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); if (verify_playout_) { EXPECT_TRUE(media_channel2()->playout()); } @@ -855,15 +856,15 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateChannels(DTLS, DTLS); SendInitiate(); - typename T::MediaChannel* media_channel1 = - static_cast(channel1_->media_channel()); - ASSERT_TRUE(media_channel1); + typename T::MediaChannel* media_send_channel1 = + static_cast(channel1_->media_send_channel()); + ASSERT_TRUE(media_send_channel1); // Need to wait for the threads before calling // `set_num_network_route_changes` because the network route would be set // when creating the channel. WaitForThreads(); - media_channel1->set_num_network_route_changes(0); + media_send_channel1->set_num_network_route_changes(0); SendTask(network_thread_, [this] { rtc::NetworkRoute network_route; // The transport channel becomes disconnected. 
@@ -871,9 +872,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { absl::optional(network_route)); }); WaitForThreads(); - EXPECT_EQ(1, media_channel1->num_network_route_changes()); - EXPECT_FALSE(media_channel1->last_network_route().connected); - media_channel1->set_num_network_route_changes(0); + EXPECT_EQ(1, media_send_channel1->num_network_route_changes()); + EXPECT_FALSE(media_send_channel1->last_network_route().connected); + media_send_channel1->set_num_network_route_changes(0); SendTask(network_thread_, [this] { rtc::NetworkRoute network_route; @@ -890,16 +891,16 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { absl::optional(network_route)); }); WaitForThreads(); - EXPECT_EQ(1, media_channel1->num_network_route_changes()); - EXPECT_TRUE(media_channel1->last_network_route().connected); + EXPECT_EQ(1, media_send_channel1->num_network_route_changes()); + EXPECT_TRUE(media_send_channel1->last_network_route().connected); EXPECT_EQ(kLocalNetId, - media_channel1->last_network_route().local.network_id()); + media_send_channel1->last_network_route().local.network_id()); EXPECT_EQ(kRemoteNetId, - media_channel1->last_network_route().remote.network_id()); + media_send_channel1->last_network_route().remote.network_id()); EXPECT_EQ(kLastPacketId, - media_channel1->last_network_route().last_sent_packet_id); + media_send_channel1->last_network_route().last_sent_packet_id); EXPECT_EQ(kTransportOverheadPerPacket + kSrtpOverheadPerPacket, - media_channel1->transport_overhead_per_packet()); + media_send_channel1->transport_overhead_per_packet()); } // Test setting up a call. 
@@ -908,13 +909,13 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_FALSE(IsSrtpActive(channel1_)); EXPECT_TRUE(SendInitiate()); if (verify_playout_) { - EXPECT_TRUE(media_channel1()->playout()); + EXPECT_TRUE(media_send_channel1()->playout()); } - EXPECT_FALSE(media_channel1()->sending()); + EXPECT_FALSE(media_send_channel1()->sending()); EXPECT_TRUE(SendAccept()); EXPECT_FALSE(IsSrtpActive(channel1_)); - EXPECT_TRUE(media_channel1()->sending()); - EXPECT_EQ(1U, media_channel1()->codecs().size()); + EXPECT_TRUE(media_send_channel1()->sending()); + EXPECT_EQ(1U, media_send_channel1()->codecs().size()); if (verify_playout_) { EXPECT_TRUE(media_channel2()->playout()); } @@ -1046,7 +1047,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { // Regain writability SendTask(network_thread_, [this] { fake_rtp_dtls_transport1_->SetWritable(true); }); - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); SendRtp1(); SendRtp2(); WaitForThreads(); @@ -1060,7 +1061,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { bool asymmetric = true; fake_rtp_dtls_transport1_->SetDestination(nullptr, asymmetric); }); - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); // Should fail also. 
SendRtp1(); @@ -1076,7 +1077,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { fake_rtp_dtls_transport1_->SetDestination(fake_rtp_dtls_transport2_.get(), asymmetric); }); - EXPECT_TRUE(media_channel1()->sending()); + EXPECT_TRUE(media_send_channel1()->sending()); SendRtp1(); SendRtp2(); WaitForThreads(); @@ -1129,17 +1130,17 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::unique_ptr content( CreateMediaContentWithStream(1)); - media_channel1()->set_fail_set_recv_codecs(true); + media_send_channel1()->set_fail_set_recv_codecs(true); EXPECT_FALSE( channel1_->SetLocalContent(content.get(), SdpType::kOffer, err)); EXPECT_FALSE( channel1_->SetLocalContent(content.get(), SdpType::kAnswer, err)); - media_channel1()->set_fail_set_send_codecs(true); + media_send_channel1()->set_fail_set_send_codecs(true); EXPECT_FALSE( channel1_->SetRemoteContent(content.get(), SdpType::kOffer, err)); - media_channel1()->set_fail_set_send_codecs(true); + media_send_channel1()->set_fail_set_send_codecs(true); EXPECT_FALSE( channel1_->SetRemoteContent(content.get(), SdpType::kAnswer, err)); } @@ -1152,14 +1153,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( channel1_->SetLocalContent(content1.get(), SdpType::kOffer, err)); - EXPECT_TRUE(media_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); std::unique_ptr content2( CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetLocalContent(content2.get(), SdpType::kOffer, err)); - EXPECT_FALSE(media_channel1()->HasSendStream(1)); - EXPECT_TRUE(media_channel1()->HasSendStream(2)); + EXPECT_FALSE(media_send_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(2)); } void TestReceiveTwoOffers() { @@ -1170,14 +1171,14 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( 
channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); std::unique_ptr content2( CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetRemoteContent(content2.get(), SdpType::kOffer, err)); - EXPECT_FALSE(media_channel1()->HasRecvStream(1)); - EXPECT_TRUE(media_channel1()->HasRecvStream(2)); + EXPECT_FALSE(media_send_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(2)); } void TestSendPrAnswer() { @@ -1189,24 +1190,24 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); // Send PR answer std::unique_ptr content2( CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetLocalContent(content2.get(), SdpType::kPrAnswer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); - EXPECT_TRUE(media_channel1()->HasSendStream(2)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(2)); // Send answer std::unique_ptr content3( CreateMediaContentWithStream(3)); EXPECT_TRUE( channel1_->SetLocalContent(content3.get(), SdpType::kAnswer, err)); - EXPECT_TRUE(media_channel1()->HasRecvStream(1)); - EXPECT_FALSE(media_channel1()->HasSendStream(2)); - EXPECT_TRUE(media_channel1()->HasSendStream(3)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(1)); + EXPECT_FALSE(media_send_channel1()->HasSendStream(2)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(3)); } void TestReceivePrAnswer() { @@ -1218,39 +1219,39 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateMediaContentWithStream(1)); EXPECT_TRUE( channel1_->SetLocalContent(content1.get(), SdpType::kOffer, err)); - 
EXPECT_TRUE(media_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); // Receive PR answer std::unique_ptr content2( CreateMediaContentWithStream(2)); EXPECT_TRUE( channel1_->SetRemoteContent(content2.get(), SdpType::kPrAnswer, err)); - EXPECT_TRUE(media_channel1()->HasSendStream(1)); - EXPECT_TRUE(media_channel1()->HasRecvStream(2)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(2)); // Receive answer std::unique_ptr content3( CreateMediaContentWithStream(3)); EXPECT_TRUE( channel1_->SetRemoteContent(content3.get(), SdpType::kAnswer, err)); - EXPECT_TRUE(media_channel1()->HasSendStream(1)); - EXPECT_FALSE(media_channel1()->HasRecvStream(2)); - EXPECT_TRUE(media_channel1()->HasRecvStream(3)); + EXPECT_TRUE(media_send_channel1()->HasSendStream(1)); + EXPECT_FALSE(media_send_channel1()->HasRecvStream(2)); + EXPECT_TRUE(media_send_channel1()->HasRecvStream(3)); } void TestOnTransportReadyToSend() { CreateChannels(0, 0); - EXPECT_FALSE(media_channel1()->ready_to_send()); + EXPECT_FALSE(media_send_channel1()->ready_to_send()); network_thread_->PostTask( [this] { channel1_->OnTransportReadyToSend(true); }); WaitForThreads(); - EXPECT_TRUE(media_channel1()->ready_to_send()); + EXPECT_TRUE(media_send_channel1()->ready_to_send()); network_thread_->PostTask( [this] { channel1_->OnTransportReadyToSend(false); }); WaitForThreads(); - EXPECT_FALSE(media_channel1()->ready_to_send()); + EXPECT_FALSE(media_send_channel1()->ready_to_send()); } bool SetRemoteContentWithBitrateLimit(int remote_limit) { @@ -1279,8 +1280,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_, SdpType::kOffer, err)); - EXPECT_EQ(media_channel1()->max_bps(), -1); - VerifyMaxBitrate(media_channel1()->GetRtpSendParameters(kSsrc1), + EXPECT_EQ(media_send_channel1()->max_bps(), -1); + 
VerifyMaxBitrate(media_send_channel1()->GetRtpSendParameters(kSsrc1), absl::nullopt); } @@ -1397,16 +1398,18 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { ProcessThreadQueue(rtc::Thread::Current()); } - typename T::MediaChannel* media_channel1() { + typename T::MediaChannel* media_send_channel1() { RTC_DCHECK(channel1_); - RTC_DCHECK(channel1_->media_channel()); - return static_cast(channel1_->media_channel()); + RTC_DCHECK(channel1_->media_send_channel()); + return static_cast( + channel1_->media_send_channel()); } typename T::MediaChannel* media_channel2() { RTC_DCHECK(channel2_); - RTC_DCHECK(channel2_->media_channel()); - return static_cast(channel2_->media_channel()); + RTC_DCHECK(channel2_->media_send_channel()); + return static_cast( + channel2_->media_send_channel()); } rtc::AutoThread main_thread_; @@ -1595,8 +1598,8 @@ class VideoChannelDoubleThreadTest : public ChannelTest { TEST_F(VoiceChannelSingleThreadTest, TestInit) { Base::TestInit(); - EXPECT_FALSE(media_channel1()->IsStreamMuted(0)); - EXPECT_TRUE(media_channel1()->dtmf_info_queue().empty()); + EXPECT_FALSE(media_send_channel1()->IsStreamMuted(0)); + EXPECT_TRUE(media_send_channel1()->dtmf_info_queue().empty()); } TEST_F(VoiceChannelSingleThreadTest, TestDeinit) { @@ -1732,8 +1735,8 @@ TEST_F(VoiceChannelSingleThreadTest, SocketOptionsMergedOnSetTransport) { // VoiceChannelDoubleThreadTest TEST_F(VoiceChannelDoubleThreadTest, TestInit) { Base::TestInit(); - EXPECT_FALSE(media_channel1()->IsStreamMuted(0)); - EXPECT_TRUE(media_channel1()->dtmf_info_queue().empty()); + EXPECT_FALSE(media_send_channel1()->IsStreamMuted(0)); + EXPECT_TRUE(media_send_channel1()->dtmf_info_queue().empty()); } TEST_F(VoiceChannelDoubleThreadTest, TestDeinit) { @@ -2016,14 +2019,15 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalOfferWithPacketization) { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&video, SdpType::kOffer, err)); - EXPECT_THAT(media_channel1()->send_codecs(), 
testing::IsEmpty()); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - media_channel1()->recv_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->recv_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[1].packetization, + EXPECT_THAT(media_send_channel1()->send_codecs(), testing::IsEmpty()); + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[1].Matches(vp9_codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[1].packetization, cricket::kPacketizationParamRaw); } @@ -2039,14 +2043,15 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteOfferWithPacketization) { std::string err; EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kOffer, err)); EXPECT_TRUE(err.empty()); - EXPECT_THAT(media_channel1()->recv_codecs(), testing::IsEmpty()); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - media_channel1()->send_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->send_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[1].packetization, + EXPECT_THAT(media_send_channel1()->recv_codecs(), testing::IsEmpty()); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->send_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->send_codecs()[1].Matches(vp9_codec, + 
&field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[1].packetization, cricket::kPacketizationParamRaw); } @@ -2064,21 +2069,23 @@ TEST_F(VideoChannelSingleThreadTest, TestSetAnswerWithPacketization) { EXPECT_TRUE(err.empty()); EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kAnswer, err)); EXPECT_TRUE(err.empty()); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - media_channel1()->recv_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->recv_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->recv_codecs()[1].packetization, + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->recv_codecs()[1].Matches(vp9_codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[1].packetization, cricket::kPacketizationParamRaw); - EXPECT_THAT(media_channel1()->send_codecs(), testing::SizeIs(2)); - EXPECT_TRUE( - media_channel1()->send_codecs()[0].Matches(kVp8Codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); - EXPECT_TRUE( - media_channel1()->send_codecs()[1].Matches(vp9_codec, &field_trials_)); - EXPECT_EQ(media_channel1()->send_codecs()[1].packetization, + EXPECT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(2)); + EXPECT_TRUE(media_send_channel1()->send_codecs()[0].Matches(kVp8Codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); + EXPECT_TRUE(media_send_channel1()->send_codecs()[1].Matches(vp9_codec, + &field_trials_)); + EXPECT_EQ(media_send_channel1()->send_codecs()[1].packetization, 
cricket::kPacketizationParamRaw); } @@ -2096,10 +2103,12 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalAnswerWithoutPacketization) { std::string err; EXPECT_TRUE(channel1_->SetRemoteContent(&remote_video, SdpType::kOffer, err)); EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, err)); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); } TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) { @@ -2117,10 +2126,12 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) { EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kOffer, err)); EXPECT_TRUE( channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, err)); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, absl::nullopt); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, + absl::nullopt); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); } TEST_F(VideoChannelSingleThreadTest, @@ -2142,10 +2153,10 @@ TEST_F(VideoChannelSingleThreadTest, EXPECT_FALSE( 
channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, err)); EXPECT_FALSE(err.empty()); - ASSERT_THAT(media_channel1()->recv_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->recv_codecs()[0].packetization, + ASSERT_THAT(media_send_channel1()->recv_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->recv_codecs()[0].packetization, cricket::kPacketizationParamRaw); - EXPECT_THAT(media_channel1()->send_codecs(), testing::IsEmpty()); + EXPECT_THAT(media_send_channel1()->send_codecs(), testing::IsEmpty()); } TEST_F(VideoChannelSingleThreadTest, @@ -2165,9 +2176,10 @@ TEST_F(VideoChannelSingleThreadTest, EXPECT_TRUE(err.empty()); EXPECT_FALSE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, err)); EXPECT_FALSE(err.empty()); - EXPECT_THAT(media_channel1()->recv_codecs(), testing::IsEmpty()); - ASSERT_THAT(media_channel1()->send_codecs(), testing::SizeIs(1)); - EXPECT_EQ(media_channel1()->send_codecs()[0].packetization, absl::nullopt); + EXPECT_THAT(media_send_channel1()->recv_codecs(), testing::IsEmpty()); + ASSERT_THAT(media_send_channel1()->send_codecs(), testing::SizeIs(1)); + EXPECT_EQ(media_send_channel1()->send_codecs()[0].packetization, + absl::nullopt); } // VideoChannelDoubleThreadTest diff --git a/pc/connection_context.h b/pc/connection_context.h index 415ae121b5..0fe20c7890 100644 --- a/pc/connection_context.h +++ b/pc/connection_context.h @@ -32,10 +32,6 @@ #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace cricket { -class ChannelManager; -} - namespace rtc { class BasicPacketSocketFactory; class UniqueRandomIdGenerator; diff --git a/pc/ice_server_parsing.cc b/pc/ice_server_parsing.cc index 1a30d2def5..9322fd12d4 100644 --- a/pc/ice_server_parsing.cc +++ b/pc/ice_server_parsing.cc @@ -338,13 +338,6 @@ RTCError ParseIceServersOrError( "ICE server parsing failed: Empty uri."); } } - // Candidates must have unique priorities, so that connectivity checks - // are performed in a well-defined 
order. - int priority = static_cast(turn_servers->size() - 1); - for (cricket::RelayServerConfig& turn_server : *turn_servers) { - // First in the list gets highest priority. - turn_server.priority = priority--; - } return RTCError::OK(); } diff --git a/pc/ice_server_parsing_unittest.cc b/pc/ice_server_parsing_unittest.cc index 408c790346..4cb7c47b0b 100644 --- a/pc/ice_server_parsing_unittest.cc +++ b/pc/ice_server_parsing_unittest.cc @@ -237,22 +237,4 @@ TEST_F(IceServerParsingTest, ParseMultipleUrls) { EXPECT_EQ(1U, turn_servers_.size()); } -// Ensure that TURN servers are given unique priorities, -// so that their resulting candidates have unique priorities. -TEST_F(IceServerParsingTest, TurnServerPrioritiesUnique) { - PeerConnectionInterface::IceServers servers; - PeerConnectionInterface::IceServer server; - server.urls.push_back("turn:hostname"); - server.urls.push_back("turn:hostname2"); - server.username = "foo"; - server.password = "bar"; - servers.push_back(server); - - EXPECT_TRUE( - webrtc::ParseIceServersOrError(servers, &stun_servers_, &turn_servers_) - .ok()); - EXPECT_EQ(2U, turn_servers_.size()); - EXPECT_NE(turn_servers_[0].priority, turn_servers_[1].priority); -} - } // namespace webrtc diff --git a/pc/jsep_transport_controller.cc b/pc/jsep_transport_controller.cc index 9b341d1c87..4f62d28815 100644 --- a/pc/jsep_transport_controller.cc +++ b/pc/jsep_transport_controller.cc @@ -433,8 +433,13 @@ JsepTransportController::CreateIceTransport(const std::string& transport_name, init.set_async_dns_resolver_factory(async_dns_resolver_factory_); init.set_event_log(config_.event_log); init.set_field_trials(config_.field_trials); - return config_.ice_transport_factory->CreateIceTransport( + auto transport = config_.ice_transport_factory->CreateIceTransport( transport_name, component, std::move(init)); + RTC_DCHECK(transport); + transport->internal()->SetIceRole(ice_role_); + transport->internal()->SetIceTiebreaker(ice_tiebreaker_); + 
transport->internal()->SetIceConfig(ice_config_); + return transport; } std::unique_ptr @@ -455,9 +460,8 @@ JsepTransportController::CreateDtlsTransport( } RTC_DCHECK(dtls); - dtls->ice_transport()->SetIceRole(ice_role_); - dtls->ice_transport()->SetIceTiebreaker(ice_tiebreaker_); - dtls->ice_transport()->SetIceConfig(ice_config_); + RTC_DCHECK_EQ(ice, dtls->ice_transport()); + if (certificate_) { bool set_cert_success = dtls->SetLocalCertificate(certificate_); RTC_DCHECK(set_cert_success); @@ -1069,7 +1073,6 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( rtc::scoped_refptr ice = CreateIceTransport(content_info.name, /*rtcp=*/false); - RTC_DCHECK(ice); std::unique_ptr rtp_dtls_transport = CreateDtlsTransport(content_info, ice->internal()); diff --git a/pc/legacy_stats_collector.cc b/pc/legacy_stats_collector.cc index b710bc16e6..ad9f7ad007 100644 --- a/pc/legacy_stats_collector.cc +++ b/pc/legacy_stats_collector.cc @@ -34,6 +34,7 @@ #include "api/video/video_timing.h" #include "call/call.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "modules/audio_processing/include/audio_processing_statistics.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" @@ -1043,7 +1044,7 @@ void LegacyStatsCollector::ExtractBweInfo() { auto* video_channel = transceiver->internal()->channel(); if (video_channel) { video_media_channels.push_back(static_cast( - video_channel->media_channel())); + video_channel->video_media_send_channel())); } } @@ -1155,11 +1156,11 @@ std::unique_ptr CreateMediaChannelStatsGatherer( RTC_DCHECK(channel); if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) { return std::make_unique( - static_cast(channel)); + channel->AsVoiceChannel()); } else { RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO); return std::make_unique( - static_cast(channel)); + channel->AsVideoChannel()); } } diff --git a/pc/legacy_stats_collector.h b/pc/legacy_stats_collector.h 
index 21f51c5143..cedd36c853 100644 --- a/pc/legacy_stats_collector.h +++ b/pc/legacy_stats_collector.h @@ -27,10 +27,10 @@ #include "absl/types/optional.h" #include "api/field_trials_view.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" -#include "api/stats_types.h" #include "p2p/base/connection_info.h" #include "p2p/base/port.h" #include "pc/legacy_stats_collector_interface.h" diff --git a/pc/legacy_stats_collector_interface.h b/pc/legacy_stats_collector_interface.h index 3cddb284f8..a0c6f3bd65 100644 --- a/pc/legacy_stats_collector_interface.h +++ b/pc/legacy_stats_collector_interface.h @@ -17,8 +17,8 @@ #include +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" -#include "api/stats_types.h" namespace webrtc { diff --git a/pc/media_session.h b/pc/media_session.h index 8e0d7c17c2..240eef0333 100644 --- a/pc/media_session.h +++ b/pc/media_session.h @@ -46,7 +46,6 @@ class ConnectionContext; namespace cricket { -class ChannelManager; class MediaEngineInterface; // Default RTCP CNAME for unit tests. 
diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index 3145822528..789f8a1873 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -288,7 +288,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( RtcpMuxPolicy rtcp_mux_policy; std::vector> certificates; int ice_candidate_pool_size; - bool disable_ipv6; bool disable_ipv6_on_wifi; int max_ipv6_networks; bool disable_link_local_networks; @@ -334,6 +333,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( webrtc::VpnPreference vpn_preference; std::vector vpn_list; PortAllocatorConfig port_allocator_config; + absl::optional pacer_burst_interval; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -356,7 +356,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( certificates == o.certificates && prioritize_most_likely_ice_candidate_pairs == o.prioritize_most_likely_ice_candidate_pairs && - media_config == o.media_config && disable_ipv6 == o.disable_ipv6 && + media_config == o.media_config && disable_ipv6_on_wifi == o.disable_ipv6_on_wifi && max_ipv6_networks == o.max_ipv6_networks && disable_link_local_networks == o.disable_link_local_networks && @@ -399,7 +399,8 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( vpn_preference == o.vpn_preference && vpn_list == o.vpn_list && port_allocator_config.min_port == o.port_allocator_config.min_port && port_allocator_config.max_port == o.port_allocator_config.max_port && - port_allocator_config.flags == o.port_allocator_config.flags; + port_allocator_config.flags == o.port_allocator_config.flags && + pacer_burst_interval == o.pacer_burst_interval; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -593,6 +594,16 @@ RTCError PeerConnection::Initialize( return parse_error; } + // Restrict number of TURN servers. 
+ if (!trials().IsDisabled("WebRTC-LimitTurnServers") && + turn_servers.size() > cricket::kMaxTurnServers) { + RTC_LOG(LS_WARNING) << "Number of configured TURN servers is " + << turn_servers.size() + << " which exceeds the maximum allowed number of " + << cricket::kMaxTurnServers; + turn_servers.resize(cricket::kMaxTurnServers); + } + // Add the turn logging id to all turn servers for (cricket::RelayServerConfig& turn_server : turn_servers) { turn_server.turn_logging_id = configuration.turn_logging_id; @@ -831,6 +842,20 @@ void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { RTCErrorOr> PeerConnection::AddTrack( rtc::scoped_refptr track, const std::vector& stream_ids) { + return AddTrack(std::move(track), stream_ids, nullptr); +} + +RTCErrorOr> PeerConnection::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) { + return AddTrack(std::move(track), stream_ids, &init_send_encodings); +} + +RTCErrorOr> PeerConnection::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "PeerConnection::AddTrack"); if (!ConfiguredForMedia()) { @@ -854,7 +879,8 @@ RTCErrorOr> PeerConnection::AddTrack( RTCErrorType::INVALID_PARAMETER, "Sender already exists for track " + track->id() + "."); } - auto sender_or_error = rtp_manager()->AddTrack(track, stream_ids); + auto sender_or_error = + rtp_manager()->AddTrack(track, stream_ids, init_send_encodings); if (sender_or_error.ok()) { sdp_handler_->UpdateNegotiationNeeded(); legacy_stats_->AddTrack(track.get()); @@ -1070,7 +1096,14 @@ PeerConnection::AddTransceiver( "Attempted to set an unimplemented parameter of RtpParameters."); } - auto result = cricket::CheckRtpParametersValues(parameters); + std::vector codecs; + if (media_type == cricket::MEDIA_TYPE_VIDEO) { + // Gather the current codec capabilities to allow checking scalabilityMode + 
// against supported values. + codecs = context_->media_engine()->video().send_codecs(false); + } + + auto result = cricket::CheckRtpParametersValues(parameters, codecs); if (!result.ok()) { LOG_AND_RETURN_ERROR(result.type(), result.message()); } @@ -1136,14 +1169,14 @@ rtc::scoped_refptr PeerConnection::CreateSender( auto audio_sender = AudioRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(), legacy_stats_.get(), rtp_manager()); - audio_sender->SetMediaChannel(rtp_manager()->voice_media_channel()); + audio_sender->SetMediaChannel(rtp_manager()->voice_media_send_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), audio_sender); rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender); } else if (kind == MediaStreamTrackInterface::kVideoKind) { auto video_sender = VideoRtpSender::Create( worker_thread(), rtc::CreateRandomUuid(), rtp_manager()); - video_sender->SetMediaChannel(rtp_manager()->video_media_channel()); + video_sender->SetMediaChannel(rtp_manager()->video_media_send_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), video_sender); rtp_manager()->GetVideoTransceiver()->internal()->AddSender(new_sender); @@ -1519,6 +1552,17 @@ RTCError PeerConnection::SetConfiguration( if (!parse_error.ok()) { return parse_error; } + + // Restrict number of TURN servers. 
+ if (!trials().IsDisabled("WebRTC-LimitTurnServers") && + turn_servers.size() > cricket::kMaxTurnServers) { + RTC_LOG(LS_WARNING) << "Number of configured TURN servers is " + << turn_servers.size() + << " which exceeds the maximum allowed number of " + << cricket::kMaxTurnServers; + turn_servers.resize(cricket::kMaxTurnServers); + } + // Add the turn logging id to all turn servers for (cricket::RelayServerConfig& turn_server : turn_servers) { turn_server.turn_logging_id = configuration.turn_logging_id; @@ -1580,15 +1624,16 @@ RTCError PeerConnection::SetConfiguration( } if (modified_config.allow_codec_switching.has_value()) { - std::vector channels; + std::vector channels; for (const auto& transceiver : rtp_manager()->transceivers()->List()) { if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) continue; auto* video_channel = transceiver->internal()->channel(); if (video_channel) - channels.push_back(static_cast( - video_channel->media_channel())); + channels.push_back( + static_cast( + video_channel->media_send_channel())); } worker_thread()->BlockingCall( @@ -1943,29 +1988,6 @@ void PeerConnection::ReportFirstConnectUsageMetrics() { RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.BundlePolicy", policy, kBundlePolicyUsageMax); - // Record configured ice candidate pool size depending on the - // BUNDLE policy. See - // https://w3c.github.io/webrtc-pc/#dom-rtcconfiguration-icecandidatepoolsize - // The ICE candidate pool size is an optimization and it may be desirable - // to restrict the maximum size of the pre-gathered candidates. 
- switch (configuration_.bundle_policy) { - case kBundlePolicyBalanced: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.PeerConnection.CandidatePoolUsage.Balanced", - configuration_.ice_candidate_pool_size, 0, 255, 256); - break; - case kBundlePolicyMaxBundle: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.PeerConnection.CandidatePoolUsage.MaxBundle", - configuration_.ice_candidate_pool_size, 0, 255, 256); - break; - case kBundlePolicyMaxCompat: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.PeerConnection.CandidatePoolUsage.MaxCompat", - configuration_.ice_candidate_pool_size, 0, 255, 256); - break; - } - // Record whether there was a local or remote provisional answer. ProvisionalAnswerUsage pranswer = kProvisionalAnswerNotUsed; if (local_description()->GetType() == SdpType::kPrAnswer) { @@ -2086,11 +2108,7 @@ PeerConnection::InitializePortAllocator_n( // enable BUNDLE here. int port_allocator_flags = port_allocator_->flags(); // RingRTC change to default flags (code removed) - // If the disable-IPv6 flag was specified, we'll not override it - // by experiment. - if (configuration.disable_ipv6) { - port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); - } else if (trials().IsDisabled("WebRTC-IPv6Default")) { + if (trials().IsDisabled("WebRTC-IPv6Default")) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); } if (configuration.disable_ipv6_on_wifi) { @@ -2621,8 +2639,19 @@ void PeerConnection::AddRemoteCandidate(const std::string& mid, const cricket::Candidate& candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (candidate.network_type() != rtc::ADAPTER_TYPE_UNKNOWN) { + RTC_DLOG(LS_WARNING) << "Using candidate with adapter type set - this " + "should only happen in test"; + } + + // Clear fields that do not make sense as remote candidates. 
+ cricket::Candidate new_candidate(candidate); + new_candidate.set_network_type(rtc::ADAPTER_TYPE_UNKNOWN); + new_candidate.set_relay_protocol(""); + new_candidate.set_underlying_type_for_vpn(rtc::ADAPTER_TYPE_UNKNOWN); + network_thread()->PostTask(SafeTask( - network_thread_safety_, [this, mid = mid, candidate = candidate] { + network_thread_safety_, [this, mid = mid, candidate = new_candidate] { RTC_DCHECK_RUN_ON(network_thread()); std::vector candidates = {candidate}; RTCError error = diff --git a/pc/peer_connection.h b/pc/peer_connection.h index 14eb1b83e1..f67ab67119 100644 --- a/pc/peer_connection.h +++ b/pc/peer_connection.h @@ -84,10 +84,6 @@ #include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" -namespace cricket { -class ChannelManager; -} - namespace webrtc { // PeerConnection is the implementation of the PeerConnection object as defined @@ -129,6 +125,14 @@ class PeerConnection : public PeerConnectionInternal, RTCErrorOr> AddTrack( rtc::scoped_refptr track, const std::vector& stream_ids) override; + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) override; + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector* init_send_encodings); RTCError RemoveTrackOrError( rtc::scoped_refptr sender) override; diff --git a/pc/peer_connection_factory.cc b/pc/peer_connection_factory.cc index bf270cfb3c..b61a4a9919 100644 --- a/pc/peer_connection_factory.cc +++ b/pc/peer_connection_factory.cc @@ -110,6 +110,10 @@ PeerConnectionFactory::PeerConnectionFactory( PeerConnectionFactory::~PeerConnectionFactory() { RTC_DCHECK_RUN_ON(signaling_thread()); + worker_thread()->BlockingCall([this] { + RTC_DCHECK_RUN_ON(worker_thread()); + metronome_ = nullptr; + }); } void PeerConnectionFactory::SetOptions(const Options& options) { @@ -246,8 +250,8 @@ PeerConnectionFactory::CreatePeerConnectionOrError( const FieldTrialsView* trials = 
dependencies.trials ? dependencies.trials.get() : &field_trials(); std::unique_ptr call = - worker_thread()->BlockingCall([this, &event_log, trials] { - return CreateCall_w(event_log.get(), *trials); + worker_thread()->BlockingCall([this, &event_log, trials, &configuration] { + return CreateCall_w(event_log.get(), *trials, configuration); }); auto result = PeerConnection::Create(context_, options_, std::move(event_log), @@ -306,7 +310,8 @@ std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { std::unique_ptr PeerConnectionFactory::CreateCall_w( RtcEventLog* event_log, - const FieldTrialsView& field_trials) { + const FieldTrialsView& field_trials, + const PeerConnectionInterface::RTCConfiguration& configuration) { RTC_DCHECK_RUN_ON(worker_thread()); webrtc::Call::Config call_config(event_log, network_thread()); @@ -350,6 +355,7 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( call_config.rtp_transport_controller_send_factory = transport_controller_send_factory_.get(); call_config.metronome = metronome_.get(); + call_config.pacer_burst_interval = configuration.pacer_burst_interval; return std::unique_ptr( context_->call_factory()->CreateCall(call_config)); } diff --git a/pc/peer_connection_factory.h b/pc/peer_connection_factory.h index 036329f57a..dac3702e37 100644 --- a/pc/peer_connection_factory.h +++ b/pc/peer_connection_factory.h @@ -46,10 +46,6 @@ #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace cricket { -class ChannelManager; -} - namespace rtc { class BasicNetworkManager; class BasicPacketSocketFactory; @@ -140,8 +136,10 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { bool IsTrialEnabled(absl::string_view key) const; std::unique_ptr CreateRtcEventLog_w(); - std::unique_ptr CreateCall_w(RtcEventLog* event_log, - const FieldTrialsView& field_trials); + std::unique_ptr CreateCall_w( + RtcEventLog* event_log, + const FieldTrialsView& field_trials, + const 
PeerConnectionInterface::RTCConfiguration& configuration); rtc::scoped_refptr context_; PeerConnectionFactoryInterface::Options options_ @@ -156,7 +154,7 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { std::unique_ptr neteq_factory_; const std::unique_ptr transport_controller_send_factory_; - std::unique_ptr metronome_; + std::unique_ptr metronome_ RTC_GUARDED_BY(worker_thread()); }; } // namespace webrtc diff --git a/pc/peer_connection_field_trial_tests.cc b/pc/peer_connection_field_trial_tests.cc index 0e6e451a9a..784cfa4ad5 100644 --- a/pc/peer_connection_field_trial_tests.cc +++ b/pc/peer_connection_field_trial_tests.cc @@ -17,6 +17,7 @@ #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" #include "api/peer_connection_interface.h" +#include "api/stats/rtcstats_objects.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/video_codecs/builtin_video_decoder_factory.h" #include "api/video_codecs/builtin_video_encoder_factory.h" @@ -228,6 +229,10 @@ TEST_F(PeerConnectionFieldTrialTest, ApplyFakeNetworkConfig) { CreatePCFactory(std::move(field_trials)); WrapperPtr caller = CreatePeerConnection(); + BitrateSettings bitrate_settings; + bitrate_settings.start_bitrate_bps = 1'000'000; + bitrate_settings.max_bitrate_bps = 1'000'000; + caller->pc()->SetBitrate(bitrate_settings); FrameGeneratorCapturerVideoTrackSource::Config config; auto video_track_source = rtc::make_ref_counted( @@ -259,9 +264,14 @@ TEST_F(PeerConnectionFieldTrialTest, ApplyFakeNetworkConfig) { ASSERT_TRUE_WAIT(caller->IsIceConnected(), kDefaultTimeoutMs); // Send packets for kDefaultTimeoutMs - // For now, whether this field trial works or not is checked by - // whether a crash occurs. Additional validation can be added later. 
WAIT(false, kDefaultTimeoutMs); + + std::vector outbound_rtp_stats = + caller->GetStats()->GetStatsOfType(); + ASSERT_GE(outbound_rtp_stats.size(), 1u); + ASSERT_TRUE(outbound_rtp_stats[0]->target_bitrate.is_defined()); + // Link capacity is limited to 500k, so BWE is expected to be close to 500k. + ASSERT_LE(*outbound_rtp_stats[0]->target_bitrate, 500'000 * 1.1); } } // namespace webrtc diff --git a/pc/peer_connection_ice_unittest.cc b/pc/peer_connection_ice_unittest.cc index 9a822be62c..b0d56fcb97 100644 --- a/pc/peer_connection_ice_unittest.cc +++ b/pc/peer_connection_ice_unittest.cc @@ -85,6 +85,7 @@ using ::testing::Values; constexpr int kIceCandidatesTimeout = 10000; constexpr int64_t kWaitTimeout = 10000; +constexpr uint64_t kTiebreakerDefault = 44444; class PeerConnectionWrapperForIceTest : public PeerConnectionWrapper { public: @@ -1426,6 +1427,7 @@ class PeerConnectionIceConfigTest : public ::testing::Test { new cricket::FakePortAllocator(rtc::Thread::Current(), packet_socket_factory_.get())); port_allocator_ = port_allocator.get(); + port_allocator_->SetIceTiebreaker(kTiebreakerDefault); PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.allocator = std::move(port_allocator); auto result = pc_factory_->CreatePeerConnectionOrError( diff --git a/pc/peer_connection_integrationtest.cc b/pc/peer_connection_integrationtest.cc index 19cc6ce3cf..7fa94527f1 100644 --- a/pc/peer_connection_integrationtest.cc +++ b/pc/peer_connection_integrationtest.cc @@ -1364,7 +1364,8 @@ TEST_P(PeerConnectionIntegrationTest, NewGetStatsManyAudioAndManyVideoStreams) { } ASSERT_TRUE(stat->track_id.is_defined()); const auto* track_stat = - caller_report->GetAs(*stat->track_id); + caller_report->GetAs( + *stat->track_id); ASSERT_TRUE(track_stat); outbound_track_ids.push_back(*track_stat->track_identifier); } @@ -1388,7 +1389,8 @@ TEST_P(PeerConnectionIntegrationTest, NewGetStatsManyAudioAndManyVideoStreams) { } ASSERT_TRUE(stat->track_id.is_defined()); const 
auto* track_stat = - callee_report->GetAs(*stat->track_id); + callee_report->GetAs( + *stat->track_id); ASSERT_TRUE(track_stat); inbound_track_ids.push_back(*track_stat->track_identifier); } @@ -1465,7 +1467,8 @@ TEST_P(PeerConnectionIntegrationTest, callee()->NewGetStats(); ASSERT_NE(nullptr, report); - auto media_stats = report->GetStatsOfType(); + auto media_stats = + report->GetStatsOfType(); auto audio_index = FindFirstMediaStatsIndexByKind("audio", media_stats); ASSERT_GE(audio_index, 0); EXPECT_TRUE(media_stats[audio_index]->audio_level.is_defined()); @@ -1483,11 +1486,11 @@ void ModifySsrcs(cricket::SessionDescription* desc) { } } -// Test that the "RTCMediaSteamTrackStats" object is updated correctly when -// SSRCs are unsignaled, and the SSRC of the received (audio) stream changes. -// This should result in two "RTCInboundRTPStreamStats", but only one -// "RTCMediaStreamTrackStats", whose counters go up continuously rather than -// being reset to 0 once the SSRC change occurs. +// Test that the "DEPRECATED_RTCMediaStreamTrackStats" object is updated +// correctly when SSRCs are unsignaled, and the SSRC of the received (audio) +// stream changes. This should result in two "RTCInboundRTPStreamStats", but +// only one "DEPRECATED_RTCMediaStreamTrackStats", whose counters go up +// continuously rather than being reset to 0 once the SSRC change occurs. 
// // Regression test for this bug: // https://bugs.chromium.org/p/webrtc/issues/detail?id=8158 @@ -1519,7 +1522,8 @@ TEST_P(PeerConnectionIntegrationTest, rtc::scoped_refptr report = callee()->NewGetStats(); ASSERT_NE(nullptr, report); - auto track_stats = report->GetStatsOfType(); + auto track_stats = + report->GetStatsOfType(); ASSERT_EQ(1U, track_stats.size()); ASSERT_TRUE(track_stats[0]->total_samples_received.is_defined()); ASSERT_GT(*track_stats[0]->total_samples_received, 0U); @@ -1539,7 +1543,8 @@ TEST_P(PeerConnectionIntegrationTest, report = callee()->NewGetStats(); ASSERT_NE(nullptr, report); - track_stats = report->GetStatsOfType(); + track_stats = + report->GetStatsOfType(); ASSERT_EQ(1U, track_stats.size()); ASSERT_TRUE(track_stats[0]->total_samples_received.is_defined()); // The "total samples received" stat should only be greater than it was @@ -2878,8 +2883,9 @@ TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioPlayout) { double GetAudioEnergyStat(PeerConnectionIntegrationWrapper* pc) { auto report = pc->NewGetStats(); auto track_stats_list = - report->GetStatsOfType(); - const webrtc::RTCMediaStreamTrackStats* remote_track_stats = nullptr; + report->GetStatsOfType(); + const webrtc::DEPRECATED_RTCMediaStreamTrackStats* remote_track_stats = + nullptr; for (const auto* track_stats : track_stats_list) { if (track_stats->remote_source.is_defined() && *track_stats->remote_source) { diff --git a/pc/peer_connection_interface_unittest.cc b/pc/peer_connection_interface_unittest.cc index 145179f3d4..dfca4868b5 100644 --- a/pc/peer_connection_interface_unittest.cc +++ b/pc/peer_connection_interface_unittest.cc @@ -106,6 +106,7 @@ static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"}; static const char kRecvonly[] = "recvonly"; static const char kSendrecv[] = "sendrecv"; +constexpr uint64_t kTiebreakerDefault = 44444; // Reference SDP with a MediaStream with label "stream1" and audio track with // id "audio_1" and a video track with 
id "video_1; @@ -114,7 +115,7 @@ static const char kSdpStringWithStream1PlanB[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -122,7 +123,7 @@ static const char kSdpStringWithStream1PlanB[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -144,7 +145,7 @@ static const char kSdpStringWithStream1UnifiedPlan[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -152,7 +153,7 @@ static const char kSdpStringWithStream1UnifiedPlan[] = "a=mid:0\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=msid:stream1 audiotrack0\r\n" "a=ssrc:1 cname:stream1\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -174,14 +175,14 @@ static const char kSdpStringWithStream1AudioTrackOnly[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n" "a=mid:audio\r\n" "a=sendrecv\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "a=rtcp-mux\r\n"; @@ -195,7 +196,7 @@ static const char kSdpStringWithStream1And2PlanB[] = "s=-\r\n" "t=0 0\r\n" "a=msid-semantic: WMS stream1 stream2\r\n" - "m=audio 1 
RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -203,7 +204,7 @@ static const char kSdpStringWithStream1And2PlanB[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "a=ssrc:3 cname:stream2\r\n" @@ -227,7 +228,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "s=-\r\n" "t=0 0\r\n" "a=msid-semantic: WMS stream1 stream2\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -235,7 +236,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "a=mid:0\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:1 cname:stream1\r\n" "a=ssrc:1 msid:stream1 audiotrack0\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -249,7 +250,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "a=rtpmap:120 VP8/0\r\n" "a=ssrc:2 cname:stream1\r\n" "a=ssrc:2 msid:stream1 videotrack0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -257,7 +258,7 @@ static const char kSdpStringWithStream1And2UnifiedPlan[] = "a=mid:2\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "a=ssrc:3 cname:stream2\r\n" "a=ssrc:3 msid:stream2 audiotrack1\r\n" "m=video 1 RTP/AVPF 120\r\n" @@ -278,7 +279,7 @@ static const char kSdpStringWithoutStreams[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" 
"a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -286,7 +287,7 @@ static const char kSdpStringWithoutStreams[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "m=video 1 RTP/AVPF 120\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" @@ -304,7 +305,7 @@ static const char kSdpStringWithMsidWithoutStreams[] = "s=-\r\n" "t=0 0\r\n" "a=msid-semantic: WMS\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -312,7 +313,7 @@ static const char kSdpStringWithMsidWithoutStreams[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "m=video 1 RTP/AVPF 120\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" @@ -329,7 +330,7 @@ static const char kSdpStringWithoutStreamsAudioOnly[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -337,7 +338,7 @@ static const char kSdpStringWithoutStreamsAudioOnly[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n"; + "a=rtpmap:111 OPUS/48000/2\r\n"; // Reference SENDONLY SDP without MediaStreams. Msid is not supported. 
static const char kSdpStringSendOnlyWithoutStreams[] = @@ -345,7 +346,7 @@ static const char kSdpStringSendOnlyWithoutStreams[] = "o=- 0 0 IN IP4 127.0.0.1\r\n" "s=-\r\n" "t=0 0\r\n" - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -354,7 +355,7 @@ static const char kSdpStringSendOnlyWithoutStreams[] = "a=sendrecv\r\n" "a=sendonly\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n" + "a=rtpmap:111 OPUS/48000/2\r\n" "m=video 1 RTP/AVPF 120\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" @@ -374,7 +375,7 @@ static const char kSdpStringInit[] = "a=msid-semantic: WMS\r\n"; static const char kSdpStringAudio[] = - "m=audio 1 RTP/AVPF 103\r\n" + "m=audio 1 RTP/AVPF 111\r\n" "a=ice-ufrag:e5785931\r\n" "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:" @@ -382,7 +383,7 @@ static const char kSdpStringAudio[] = "a=mid:audio\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" - "a=rtpmap:103 ISAC/16000\r\n"; + "a=rtpmap:111 OPUS/48000/2\r\n"; static const char kSdpStringVideo[] = "m=video 1 RTP/AVPF 120\r\n" @@ -441,7 +442,9 @@ class RtcEventLogOutputNull final : public RtcEventLogOutput { }; using ::cricket::StreamParams; +using ::testing::Eq; using ::testing::Exactly; +using ::testing::SizeIs; using ::testing::Values; using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; @@ -732,6 +735,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { rtc::Thread::Current(), std::make_unique(vss_.get()))); port_allocator_ = port_allocator.get(); + port_allocator_->SetIceTiebreaker(kTiebreakerDefault); // Create certificate generator unless DTLS constraint is explicitly set to // false. 
@@ -1330,7 +1334,6 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) { server.uri = kStunAddressOnly; config.servers.push_back(server); config.type = PeerConnectionInterface::kRelay; - config.disable_ipv6 = true; config.tcp_candidate_policy = PeerConnectionInterface::kTcpCandidatePolicyDisabled; config.candidate_network_policy = @@ -1343,7 +1346,6 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) { port_allocator_->GetPooledSession()); ASSERT_NE(nullptr, session); EXPECT_EQ(1UL, session->stun_servers().size()); - EXPECT_EQ(0U, session->flags() & cricket::PORTALLOCATOR_ENABLE_IPV6); EXPECT_LT(0U, session->flags() & cricket::PORTALLOCATOR_DISABLE_TCP); EXPECT_LT(0U, session->flags() & cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); @@ -1580,6 +1582,57 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackRemoveTrack) { EXPECT_FALSE(pc_->RemoveTrackOrError(video_sender).ok()); } +// Test for AddTrack with init_send_encoding. +TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackWithSendEncodings) { + CreatePeerConnectionWithoutDtls(); + rtc::scoped_refptr audio_track( + CreateAudioTrack("audio_track")); + rtc::scoped_refptr video_track( + CreateVideoTrack("video_track")); + RtpEncodingParameters audio_encodings; + audio_encodings.active = false; + auto audio_sender = + pc_->AddTrack(audio_track, {kStreamId1}, {audio_encodings}).MoveValue(); + RtpEncodingParameters video_encodings; + video_encodings.active = true; + auto video_sender = + pc_->AddTrack(video_track, {kStreamId1}, {video_encodings}).MoveValue(); + EXPECT_EQ(1UL, audio_sender->stream_ids().size()); + EXPECT_EQ(kStreamId1, audio_sender->stream_ids()[0]); + EXPECT_EQ("audio_track", audio_sender->id()); + EXPECT_EQ(audio_track, audio_sender->track()); + EXPECT_EQ(1UL, video_sender->stream_ids().size()); + EXPECT_EQ(kStreamId1, video_sender->stream_ids()[0]); + EXPECT_EQ("video_track", video_sender->id()); + EXPECT_EQ(video_track, video_sender->track()); 
+ + // Now create an offer and check for the senders. + std::unique_ptr offer; + ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); + + const cricket::ContentInfo* audio_content = + cricket::GetFirstAudioContent(offer->description()); + EXPECT_TRUE(ContainsTrack(audio_content->media_description()->streams(), + kStreamId1, "audio_track")); + + const cricket::ContentInfo* video_content = + cricket::GetFirstVideoContent(offer->description()); + EXPECT_TRUE(ContainsTrack(video_content->media_description()->streams(), + kStreamId1, "video_track")); + + EXPECT_TRUE(DoSetLocalDescription(std::move(offer))); + + // Check the encodings. + ASSERT_THAT(audio_sender->GetParameters().encodings, SizeIs(1)); + EXPECT_THAT(audio_sender->GetParameters().encodings[0].active, Eq(false)); + ASSERT_THAT(video_sender->GetParameters().encodings, SizeIs(1)); + EXPECT_THAT(video_sender->GetParameters().encodings[0].active, Eq(true)); + + // Now try removing the tracks. + EXPECT_TRUE(pc_->RemoveTrackOrError(audio_sender).ok()); + EXPECT_TRUE(pc_->RemoveTrackOrError(video_sender).ok()); +} + // Test creating senders without a stream specified, // expecting a random stream ID to be generated. 
TEST_P(PeerConnectionInterfaceTest, AddTrackWithoutStream) { @@ -3773,10 +3826,6 @@ TEST(RTCConfigurationTest, ComparisonOperators) { f.ice_connection_receiving_timeout = 1337; EXPECT_NE(a, f); - PeerConnectionInterface::RTCConfiguration g; - g.disable_ipv6 = true; - EXPECT_NE(a, g); - PeerConnectionInterface::RTCConfiguration h( PeerConnectionInterface::RTCConfigurationType::kAggressive); EXPECT_NE(a, h); diff --git a/pc/peer_connection_message_handler.cc b/pc/peer_connection_message_handler.cc index 2d674aad4d..8ddeddea58 100644 --- a/pc/peer_connection_message_handler.cc +++ b/pc/peer_connection_message_handler.cc @@ -13,11 +13,11 @@ #include #include "api/jsep.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "api/stats_types.h" #include "api/task_queue/pending_task_safety_flag.h" #include "pc/legacy_stats_collector_interface.h" #include "rtc_base/checks.h" diff --git a/pc/peer_connection_message_handler.h b/pc/peer_connection_message_handler.h index 1351a279b6..8bd0e5ebb1 100644 --- a/pc/peer_connection_message_handler.h +++ b/pc/peer_connection_message_handler.h @@ -14,10 +14,10 @@ #include #include "api/jsep.h" +#include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" -#include "api/stats_types.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "pc/legacy_stats_collector_interface.h" diff --git a/pc/peer_connection_proxy.h b/pc/peer_connection_proxy.h index 84ce41e033..146afda194 100644 --- a/pc/peer_connection_proxy.h +++ b/pc/peer_connection_proxy.h @@ -35,6 +35,11 @@ PROXY_METHOD2(RTCErrorOr>, AddTrack, rtc::scoped_refptr, const std::vector&) +PROXY_METHOD3(RTCErrorOr>, + AddTrack, + rtc::scoped_refptr, + const std::vector&, + const std::vector&) 
PROXY_METHOD1(RTCError, RemoveTrackOrError, rtc::scoped_refptr) diff --git a/pc/peer_connection_rtp_unittest.cc b/pc/peer_connection_rtp_unittest.cc index e17c52b5ab..e5861ea815 100644 --- a/pc/peer_connection_rtp_unittest.cc +++ b/pc/peer_connection_rtp_unittest.cc @@ -1145,6 +1145,23 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackReusesTransceiver) { EXPECT_EQ(audio_track, sender->track()); } +TEST_F(PeerConnectionRtpTestUnifiedPlan, + AddTrackWithSendEncodingDoesNotReuseTransceiver) { + auto caller = CreatePeerConnection(); + + auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto audio_track = caller->CreateAudioTrack("a"); + RtpEncodingParameters encoding; + auto sender = caller->AddTrack(audio_track, {}, {encoding}); + ASSERT_TRUE(sender); + + auto transceivers = caller->pc()->GetTransceivers(); + ASSERT_EQ(2u, transceivers.size()); + EXPECT_EQ(transceiver, transceivers[0]); + EXPECT_NE(sender, transceiver->sender()); + EXPECT_EQ(audio_track, sender->track()); +} + // Test that adding two tracks to a new PeerConnection creates two // RtpTransceivers in the same order. TEST_F(PeerConnectionRtpTestUnifiedPlan, TwoAddTrackCreatesTwoTransceivers) { diff --git a/pc/peer_connection_svc_integrationtest.cc b/pc/peer_connection_svc_integrationtest.cc new file mode 100644 index 0000000000..f53205ee32 --- /dev/null +++ b/pc/peer_connection_svc_integrationtest.cc @@ -0,0 +1,243 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Integration tests for PeerConnection. +// These tests exercise a full stack for the SVC extension. 
+ +#include + +#include + +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "pc/test/integration_test_helpers.h" +#include "rtc_base/gunit.h" +#include "rtc_base/helpers.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +class PeerConnectionSVCIntegrationTest + : public PeerConnectionIntegrationBaseTest { + protected: + PeerConnectionSVCIntegrationTest() + : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {} + + RTCError SetCodecPreferences( + rtc::scoped_refptr transceiver, + absl::string_view codec_name) { + webrtc::RtpCapabilities capabilities = + caller()->pc_factory()->GetRtpSenderCapabilities( + cricket::MEDIA_TYPE_VIDEO); + std::vector codecs; + for (const webrtc::RtpCodecCapability& codec_capability : + capabilities.codecs) { + if (codec_capability.name == codec_name) + codecs.push_back(codec_capability); + } + return transceiver->SetCodecPreferences(codecs); + } +}; + +TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverAcceptsL1T1) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + encoding_parameters.scalability_mode = "L1T1"; + init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + EXPECT_TRUE(transceiver_or_error.ok()); +} + +TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverAcceptsL3T3) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + encoding_parameters.scalability_mode = "L3T3"; + init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + EXPECT_TRUE(transceiver_or_error.ok()); +} + 
+TEST_F(PeerConnectionSVCIntegrationTest, + AddTransceiverRejectsUnknownScalabilityMode) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + encoding_parameters.scalability_mode = "FOOBAR"; + init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + EXPECT_FALSE(transceiver_or_error.ok()); + EXPECT_EQ(transceiver_or_error.error().type(), + webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + +TEST_F(PeerConnectionSVCIntegrationTest, SetParametersAcceptsL1T3WithVP8) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + webrtc::RtpCapabilities capabilities = + caller()->pc_factory()->GetRtpSenderCapabilities( + cricket::MEDIA_TYPE_VIDEO); + std::vector vp8_codec; + for (const webrtc::RtpCodecCapability& codec_capability : + capabilities.codecs) { + if (codec_capability.name == cricket::kVp8CodecName) + vp8_codec.push_back(codec_capability); + } + + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + ASSERT_TRUE(transceiver_or_error.ok()); + auto transceiver = transceiver_or_error.MoveValue(); + EXPECT_TRUE(transceiver->SetCodecPreferences(vp8_codec).ok()); + + webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "L1T3"; + auto result = transceiver->sender()->SetParameters(parameters); + EXPECT_TRUE(result.ok()); +} + +TEST_F(PeerConnectionSVCIntegrationTest, SetParametersRejectsL3T3WithVP8) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters 
encoding_parameters; + init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + ASSERT_TRUE(transceiver_or_error.ok()); + auto transceiver = transceiver_or_error.MoveValue(); + EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok()); + + webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "L3T3"; + auto result = transceiver->sender()->SetParameters(parameters); + EXPECT_FALSE(result.ok()); + EXPECT_EQ(result.type(), webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + +TEST_F(PeerConnectionSVCIntegrationTest, + SetParametersAcceptsL1T3WithVP8AfterNegotiation) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + ASSERT_TRUE(transceiver_or_error.ok()); + auto transceiver = transceiver_or_error.MoveValue(); + EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok()); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + + webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "L1T3"; + auto result = transceiver->sender()->SetParameters(parameters); + EXPECT_TRUE(result.ok()); +} + +TEST_F(PeerConnectionSVCIntegrationTest, + SetParametersAcceptsL3T3WithVP9AfterNegotiation) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + init.send_encodings.push_back(encoding_parameters); + auto 
transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + ASSERT_TRUE(transceiver_or_error.ok()); + auto transceiver = transceiver_or_error.MoveValue(); + EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp9CodecName).ok()); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + + webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "L3T3"; + auto result = transceiver->sender()->SetParameters(parameters); + EXPECT_TRUE(result.ok()); +} + +TEST_F(PeerConnectionSVCIntegrationTest, + SetParametersRejectsL3T3WithVP8AfterNegotiation) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + ASSERT_TRUE(transceiver_or_error.ok()); + auto transceiver = transceiver_or_error.MoveValue(); + EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok()); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + + webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "L3T3"; + auto result = transceiver->sender()->SetParameters(parameters); + EXPECT_FALSE(result.ok()); + EXPECT_EQ(result.type(), webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + +TEST_F(PeerConnectionSVCIntegrationTest, + SetParametersRejectsInvalidModeWithVP9AfterNegotiation) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + webrtc::RtpTransceiverInit init; + webrtc::RtpEncodingParameters encoding_parameters; + 
init.send_encodings.push_back(encoding_parameters); + auto transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); + ASSERT_TRUE(transceiver_or_error.ok()); + auto transceiver = transceiver_or_error.MoveValue(); + EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp9CodecName).ok()); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + + webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "FOOBAR"; + auto result = transceiver->sender()->SetParameters(parameters); + EXPECT_FALSE(result.ok()); + EXPECT_EQ(result.type(), webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + +} // namespace + +} // namespace webrtc diff --git a/pc/peer_connection_wrapper.cc b/pc/peer_connection_wrapper.cc index af662fc5ca..69fc0355b2 100644 --- a/pc/peer_connection_wrapper.cc +++ b/pc/peer_connection_wrapper.cc @@ -326,6 +326,16 @@ rtc::scoped_refptr PeerConnectionWrapper::AddTrack( return result.MoveValue(); } +rtc::scoped_refptr PeerConnectionWrapper::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) { + RTCErrorOr> result = + pc()->AddTrack(track, stream_ids, init_send_encodings); + EXPECT_EQ(RTCErrorType::NONE, result.error().type()); + return result.MoveValue(); +} + rtc::scoped_refptr PeerConnectionWrapper::AddAudioTrack( const std::string& track_label, const std::vector& stream_ids) { diff --git a/pc/peer_connection_wrapper.h b/pc/peer_connection_wrapper.h index 5c55668d2a..0df8868e4f 100644 --- a/pc/peer_connection_wrapper.h +++ b/pc/peer_connection_wrapper.h @@ -169,6 +169,11 @@ class PeerConnectionWrapper { rtc::scoped_refptr track, const std::vector& stream_ids = {}); + rtc::scoped_refptr AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings); + // 
Calls the underlying PeerConnection's AddTrack method with an audio media // stream track not bound to any source. rtc::scoped_refptr AddAudioTrack( diff --git a/pc/remote_audio_source.cc b/pc/remote_audio_source.cc index 1058d1cbf9..a516c57617 100644 --- a/pc/remote_audio_source.cc +++ b/pc/remote_audio_source.cc @@ -70,8 +70,9 @@ RemoteAudioSource::~RemoteAudioSource() { } } -void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel, - absl::optional ssrc) { +void RemoteAudioSource::Start( + cricket::VoiceMediaReceiveChannelInterface* media_channel, + absl::optional ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); // Register for callbacks immediately before AddSink so that we always get @@ -84,8 +85,9 @@ void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel, std::make_unique(this)); } -void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel, - absl::optional ssrc) { +void RemoteAudioSource::Stop( + cricket::VoiceMediaReceiveChannelInterface* media_channel, + absl::optional ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel); ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr) diff --git a/pc/remote_audio_source.h b/pc/remote_audio_source.h index d294a0f0fb..0fac606ad4 100644 --- a/pc/remote_audio_source.h +++ b/pc/remote_audio_source.h @@ -49,9 +49,9 @@ class RemoteAudioSource : public Notifier { // Register and unregister remote audio source with the underlying media // engine. 
- void Start(cricket::VoiceMediaChannel* media_channel, + void Start(cricket::VoiceMediaReceiveChannelInterface* media_channel, absl::optional ssrc); - void Stop(cricket::VoiceMediaChannel* media_channel, + void Stop(cricket::VoiceMediaReceiveChannelInterface* media_channel, absl::optional ssrc); void SetState(SourceState new_state); diff --git a/pc/rtc_stats_collector.cc b/pc/rtc_stats_collector.cc index 907e14673d..32c5406aa7 100644 --- a/pc/rtc_stats_collector.cc +++ b/pc/rtc_stats_collector.cc @@ -33,8 +33,10 @@ #include "api/stats/rtcstats_objects.h" #include "api/units/time_delta.h" #include "api/video/video_content_type.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -99,7 +101,7 @@ std::string RTCIceCandidatePairStatsIDFromConnectionInfo( } // `direction` is either kDirectionInbound or kDirectionOutbound. 
-std::string RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( +std::string DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( const char direction, int attachment_id) { char buf[1024]; @@ -380,7 +382,7 @@ std::string GetCodecIdAndMaybeCreateCodecStats( std::make_unique(codec_id, timestamp_us)); codec_stats->payload_type = payload_type; codec_stats->mime_type = codec_params.mime_type(); - if (codec_params.clock_rate) { + if (codec_params.clock_rate.has_value()) { codec_stats->clock_rate = static_cast(*codec_params.clock_rate); } if (codec_params.num_channels) { @@ -398,7 +400,7 @@ std::string GetCodecIdAndMaybeCreateCodecStats( void SetMediaStreamTrackStatsFromMediaStreamTrackInterface( const MediaStreamTrackInterface& track, - RTCMediaStreamTrackStats* track_stats) { + DEPRECATED_RTCMediaStreamTrackStats* track_stats) { track_stats->track_identifier = track.id(); track_stats->ended = (track.state() == MediaStreamTrackInterface::kEnded); } @@ -419,17 +421,17 @@ void SetInboundRTPStreamStatsFromMediaReceiverInfo( static_cast(media_receiver_info.packets_lost); inbound_stats->jitter_buffer_delay = media_receiver_info.jitter_buffer_delay_seconds; - if (media_receiver_info.jitter_buffer_target_delay_seconds) { + if (media_receiver_info.jitter_buffer_target_delay_seconds.has_value()) { inbound_stats->jitter_buffer_target_delay = *media_receiver_info.jitter_buffer_target_delay_seconds; } - if (media_receiver_info.jitter_buffer_minimum_delay_seconds) { + if (media_receiver_info.jitter_buffer_minimum_delay_seconds.has_value()) { inbound_stats->jitter_buffer_minimum_delay = *media_receiver_info.jitter_buffer_minimum_delay_seconds; } inbound_stats->jitter_buffer_emitted_count = media_receiver_info.jitter_buffer_emitted_count; - if (media_receiver_info.nacks_sent) { + if (media_receiver_info.nacks_sent.has_value()) { inbound_stats->nack_count = *media_receiver_info.nacks_sent; } } @@ -482,11 +484,11 @@ std::unique_ptr CreateInboundAudioStreamStats( 
voice_receiver_info.total_output_duration; // `fir_count`, `pli_count` and `sli_count` are only valid for video and are // purposefully left undefined for audio. - if (voice_receiver_info.last_packet_received_timestamp_ms) { + if (voice_receiver_info.last_packet_received_timestamp_ms.has_value()) { inbound_audio->last_packet_received_timestamp = static_cast( *voice_receiver_info.last_packet_received_timestamp_ms); } - if (voice_receiver_info.estimated_playout_ntp_timestamp_ms) { + if (voice_receiver_info.estimated_playout_ntp_timestamp_ms.has_value()) { // TODO(bugs.webrtc.org/10529): Fix time origin. inbound_audio->estimated_playout_timestamp = static_cast( *voice_receiver_info.estimated_playout_ntp_timestamp_ms); @@ -551,7 +553,7 @@ CreateRemoteOutboundAudioStreamStats( stats->remote_timestamp = static_cast( voice_receiver_info.last_sender_report_remote_timestamp_ms.value()); stats->reports_sent = voice_receiver_info.sender_reports_reports_count; - if (voice_receiver_info.round_trip_time) { + if (voice_receiver_info.round_trip_time.has_value()) { stats->round_trip_time = voice_receiver_info.round_trip_time->seconds(); } @@ -607,7 +609,7 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( if (video_receiver_info.framerate_decoded > 0) { inbound_video->frames_per_second = video_receiver_info.framerate_decoded; } - if (video_receiver_info.qp_sum) { + if (video_receiver_info.qp_sum.has_value()) { inbound_video->qp_sum = *video_receiver_info.qp_sum; } if (video_receiver_info.timing_frame_info.has_value()) { @@ -637,11 +639,11 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( inbound_video->min_playout_delay = static_cast(video_receiver_info.min_playout_delay_ms) / rtc::kNumMillisecsPerSec; - if (video_receiver_info.last_packet_received_timestamp_ms) { + if (video_receiver_info.last_packet_received_timestamp_ms.has_value()) { inbound_video->last_packet_received_timestamp = static_cast( *video_receiver_info.last_packet_received_timestamp_ms); } - if 
(video_receiver_info.estimated_playout_ntp_timestamp_ms) { + if (video_receiver_info.estimated_playout_ntp_timestamp_ms.has_value()) { // TODO(bugs.webrtc.org/10529): Fix time origin if needed. inbound_video->estimated_playout_timestamp = static_cast( *video_receiver_info.estimated_playout_ntp_timestamp_ms); @@ -654,6 +656,10 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( inbound_video->decoder_implementation = video_receiver_info.decoder_implementation_name; } + if (video_receiver_info.power_efficient_decoder.has_value()) { + inbound_video->power_efficient_decoder = + video_receiver_info.power_efficient_decoder.value(); + } } // Provides the media independent counters and information (both audio and @@ -665,6 +671,8 @@ void SetOutboundRTPStreamStatsFromMediaSenderInfo( outbound_stats->ssrc = media_sender_info.ssrc(); outbound_stats->packets_sent = static_cast(media_sender_info.packets_sent); + outbound_stats->total_packet_send_delay = + media_sender_info.total_packet_send_delay.seconds(); outbound_stats->retransmitted_packets_sent = media_sender_info.retransmitted_packets_sent; outbound_stats->bytes_sent = @@ -674,7 +682,6 @@ void SetOutboundRTPStreamStatsFromMediaSenderInfo( outbound_stats->retransmitted_bytes_sent = media_sender_info.retransmitted_bytes_sent; outbound_stats->nack_count = media_sender_info.nacks_rcvd; - if (media_sender_info.active.has_value()) { outbound_stats->active = *media_sender_info.active; } @@ -693,7 +700,7 @@ void SetOutboundRTPStreamStatsFromVoiceSenderInfo( outbound_audio->mid = mid; outbound_audio->media_type = "audio"; outbound_audio->kind = "audio"; - if (voice_sender_info.target_bitrate && + if (voice_sender_info.target_bitrate.has_value() && *voice_sender_info.target_bitrate > 0) { outbound_audio->target_bitrate = *voice_sender_info.target_bitrate; } @@ -738,10 +745,10 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( static_cast(video_sender_info.firs_rcvd); outbound_video->pli_count = 
static_cast(video_sender_info.plis_rcvd); - if (video_sender_info.qp_sum) + if (video_sender_info.qp_sum.has_value()) outbound_video->qp_sum = *video_sender_info.qp_sum; - if (video_sender_info.target_bitrate && - video_sender_info.target_bitrate > 0) { + if (video_sender_info.target_bitrate.has_value() && + *video_sender_info.target_bitrate > 0) { outbound_video->target_bitrate = *video_sender_info.target_bitrate; } outbound_video->frames_encoded = video_sender_info.frames_encoded; @@ -764,8 +771,6 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( } outbound_video->frames_sent = video_sender_info.frames_sent; outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent; - outbound_video->total_packet_send_delay = - video_sender_info.total_packet_send_delay.seconds(); outbound_video->quality_limitation_reason = QualityLimitationReasonToRTCQualityLimitationReason( video_sender_info.quality_limitation_reason); @@ -782,9 +787,17 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( outbound_video->encoder_implementation = video_sender_info.encoder_implementation_name; } - if (video_sender_info.rid) { + if (video_sender_info.rid.has_value()) { outbound_video->rid = *video_sender_info.rid; } + if (video_sender_info.power_efficient_encoder.has_value()) { + outbound_video->power_efficient_encoder = + video_sender_info.power_efficient_encoder.value(); + } + if (video_sender_info.scalability_mode) { + outbound_video->scalability_mode = std::string( + ScalabilityModeToString(*video_sender_info.scalability_mode)); + } } std::unique_ptr @@ -808,9 +821,11 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( remote_inbound->packets_lost = report_block.packets_lost; remote_inbound->fraction_lost = static_cast(report_block.fraction_lost) / (1 << 8); - remote_inbound->round_trip_time = - static_cast(report_block_data.last_rtt_ms()) / - rtc::kNumMillisecsPerSec; + if (report_block_data.num_rtts() > 0) { + remote_inbound->round_trip_time = + 
static_cast(report_block_data.last_rtt_ms()) / + rtc::kNumMillisecsPerSec; + } remote_inbound->total_round_trip_time = static_cast(report_block_data.sum_rtt_ms()) / rtc::kNumMillisecsPerSec; @@ -938,6 +953,9 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, } else { // We don't expect to know the adapter type of remote candidates. RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, candidate.network_type()); + RTC_DCHECK_EQ(0, candidate.relay_protocol().compare("")); + RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, + candidate.underlying_type_for_vpn()); } candidate_stats->ip = candidate.address().ipaddr().ToString(); candidate_stats->address = candidate.address().ipaddr().ToString(); @@ -969,10 +987,10 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, template void SetAudioProcessingStats(StatsType* stats, const AudioProcessingStats& apm_stats) { - if (apm_stats.echo_return_loss) { + if (apm_stats.echo_return_loss.has_value()) { stats->echo_return_loss = *apm_stats.echo_return_loss; } - if (apm_stats.echo_return_loss_enhancement) { + if (apm_stats.echo_return_loss_enhancement.has_value()) { stats->echo_return_loss_enhancement = *apm_stats.echo_return_loss_enhancement; } @@ -982,15 +1000,15 @@ void SetAudioProcessingStats(StatsType* stats, } } -std::unique_ptr +std::unique_ptr ProduceMediaStreamTrackStatsFromVoiceSenderInfo( int64_t timestamp_us, AudioTrackInterface& audio_track, const cricket::VoiceSenderInfo& voice_sender_info, int attachment_id) { - std::unique_ptr audio_track_stats( - std::make_unique( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + std::unique_ptr audio_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionOutbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kAudio)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( @@ -1015,7 +1033,7 @@ ProduceMediaStreamTrackStatsFromVoiceSenderInfo( return audio_track_stats; } -std::unique_ptr 
+std::unique_ptr ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( int64_t timestamp_us, const AudioTrackInterface& audio_track, @@ -1023,9 +1041,9 @@ ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( int attachment_id) { // Since receiver tracks can't be reattached, we use the SSRC as // an attachment identifier. - std::unique_ptr audio_track_stats( - std::make_unique( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + std::unique_ptr audio_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionInbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kAudio)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( @@ -1056,34 +1074,18 @@ ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( audio_track_stats->concealment_events = voice_receiver_info.concealment_events; - // TODO(crbug.com/webrtc/14524): These metrics have been moved from "track" - // stats, delete them. - audio_track_stats->jitter_buffer_flushes = - voice_receiver_info.jitter_buffer_flushes; - audio_track_stats->delayed_packet_outage_samples = - voice_receiver_info.delayed_packet_outage_samples; - audio_track_stats->relative_packet_arrival_delay = - voice_receiver_info.relative_packet_arrival_delay_seconds; - audio_track_stats->interruption_count = - voice_receiver_info.interruption_count >= 0 - ? 
voice_receiver_info.interruption_count - : 0; - audio_track_stats->total_interruption_duration = - static_cast(voice_receiver_info.total_interruption_duration_ms) / - rtc::kNumMillisecsPerSec; - return audio_track_stats; } -std::unique_ptr +std::unique_ptr ProduceMediaStreamTrackStatsFromVideoSenderInfo( int64_t timestamp_us, const VideoTrackInterface& video_track, const cricket::VideoSenderInfo& video_sender_info, int attachment_id) { - std::unique_ptr video_track_stats( - std::make_unique( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + std::unique_ptr video_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionOutbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kVideo)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( @@ -1104,15 +1106,15 @@ ProduceMediaStreamTrackStatsFromVideoSenderInfo( return video_track_stats; } -std::unique_ptr +std::unique_ptr ProduceMediaStreamTrackStatsFromVideoReceiverInfo( int64_t timestamp_us, const VideoTrackInterface& video_track, const cricket::VideoReceiverInfo& video_receiver_info, int attachment_id) { - std::unique_ptr video_track_stats( - std::make_unique( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + std::unique_ptr video_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionInbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kVideo)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( @@ -1137,22 +1139,6 @@ ProduceMediaStreamTrackStatsFromVideoReceiverInfo( // value as "RTCInboundRTPStreamStats.framesDecoded". 
https://crbug.com/659137 video_track_stats->frames_decoded = video_receiver_info.frames_decoded; video_track_stats->frames_dropped = video_receiver_info.frames_dropped; - video_track_stats->total_frames_duration = - static_cast(video_receiver_info.total_frames_duration_ms) / - rtc::kNumMillisecsPerSec; - video_track_stats->sum_squared_frame_durations = - video_receiver_info.sum_squared_frame_durations; - - // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete them - // from "track". - video_track_stats->freeze_count = video_receiver_info.freeze_count; - video_track_stats->pause_count = video_receiver_info.pause_count; - video_track_stats->total_freezes_duration = - static_cast(video_receiver_info.total_freezes_duration_ms) / - rtc::kNumMillisecsPerSec; - video_track_stats->total_pauses_duration = - static_cast(video_receiver_info.total_pauses_duration_ms) / - rtc::kNumMillisecsPerSec; return video_track_stats; } @@ -1191,7 +1177,7 @@ void ProduceSenderMediaTrackStats( << sender->ssrc(); } } - std::unique_ptr audio_track_stats = + std::unique_ptr audio_track_stats = ProduceMediaStreamTrackStatsFromVoiceSenderInfo( timestamp_us, *track, *voice_sender_info, sender->AttachmentId()); report->AddStats(std::move(audio_track_stats)); @@ -1218,7 +1204,7 @@ void ProduceSenderMediaTrackStats( << "No video sender info for sender with ssrc " << sender->ssrc(); } } - std::unique_ptr video_track_stats = + std::unique_ptr video_track_stats = ProduceMediaStreamTrackStatsFromVideoSenderInfo( timestamp_us, *track, *video_sender_info, sender->AttachmentId()); report->AddStats(std::move(video_track_stats)); @@ -1243,7 +1229,7 @@ void ProduceReceiverMediaTrackStats( if (!voice_receiver_info) { continue; } - std::unique_ptr audio_track_stats = + std::unique_ptr audio_track_stats = ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( timestamp_us, *track, *voice_receiver_info, receiver->AttachmentId()); @@ -1256,7 +1242,7 @@ void ProduceReceiverMediaTrackStats( if 
(!video_receiver_info) { continue; } - std::unique_ptr video_track_stats = + std::unique_ptr video_track_stats = ProduceMediaStreamTrackStatsFromVideoReceiverInfo( timestamp_us, *track, *video_receiver_info, receiver->AttachmentId()); @@ -1281,7 +1267,7 @@ rtc::scoped_refptr CreateReportFilteredBySelector( // Because we do not implement sender stats, we look at outbound-rtp(s) // that reference the track attachment stats for the sender instead. std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionOutbound, sender_selector->AttachmentId()); for (const auto& stats : *report) { if (stats.type() != RTCOutboundRTPStreamStats::kType) @@ -1301,7 +1287,7 @@ rtc::scoped_refptr CreateReportFilteredBySelector( // Because we do not implement receiver stats, we look at inbound-rtp(s) // that reference the track attachment stats for the receiver instead. std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionInbound, receiver_selector->AttachmentId()); for (const auto& stats : *report) { if (stats.type() != RTCInboundRTPStreamStats::kType) @@ -1728,7 +1714,7 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( candidate_pair_stats->total_round_trip_time = static_cast(info.total_round_trip_time_ms) / rtc::kNumMillisecsPerSec; - if (info.current_round_trip_time_ms) { + if (info.current_round_trip_time_ms.has_value()) { candidate_pair_stats->current_round_trip_time = static_cast(*info.current_round_trip_time_ms) / rtc::kNumMillisecsPerSec; @@ -1761,6 +1747,15 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( info.sent_ping_requests_total - info.sent_ping_requests_before_first_response); + if (info.last_data_received.has_value()) { + candidate_pair_stats->last_packet_received_timestamp = + static_cast(info.last_data_received->ms()); + } + if 
(info.last_data_sent) { + candidate_pair_stats->last_packet_sent_timestamp = + static_cast(info.last_data_sent->ms()); + } + report->AddStats(std::move(candidate_pair_stats)); } @@ -1787,7 +1782,7 @@ void RTCStatsCollector::ProduceMediaStreamStats_s( for (const auto& stats : transceiver_stats_infos_) { for (const auto& sender : stats.transceiver->senders()) { std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionOutbound, sender->internal()->AttachmentId()); for (auto& stream_id : sender->stream_ids()) { track_ids[stream_id].push_back(track_id); @@ -1795,7 +1790,7 @@ void RTCStatsCollector::ProduceMediaStreamStats_s( } for (const auto& receiver : stats.transceiver->receivers()) { std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionInbound, receiver->internal()->AttachmentId()); for (auto& stream : receiver->streams()) { track_ids[stream->id()].push_back(track_id); @@ -1805,9 +1800,9 @@ void RTCStatsCollector::ProduceMediaStreamStats_s( // Build stats for each stream ID known. 
for (auto& it : track_ids) { - std::unique_ptr stream_stats( - std::make_unique("DEPRECATED_S" + it.first, - timestamp_us)); + std::unique_ptr stream_stats( + std::make_unique( + "DEPRECATED_S" + it.first, timestamp_us)); stream_stats->stream_identifier = it.first; stream_stats->track_ids = it.second; report->AddStats(std::move(stream_stats)); @@ -1999,7 +1994,7 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( stats.track_media_info_map.GetAudioTrack(voice_receiver_info); if (audio_track) { inbound_audio->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionInbound, stats.track_media_info_map .GetAttachmentIdByTrack(audio_track.get()) .value()); @@ -2049,7 +2044,7 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( stats.track_media_info_map.GetAttachmentIdByTrack(audio_track.get()) .value(); outbound_audio->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionOutbound, attachment_id); outbound_audio->media_source_id = RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_AUDIO, @@ -2111,7 +2106,7 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( stats.track_media_info_map.GetVideoTrack(video_receiver_info); if (video_track) { inbound_video->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionInbound, stats.track_media_info_map .GetAttachmentIdByTrack(video_track.get()) .value()); @@ -2143,7 +2138,7 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( stats.track_media_info_map.GetAttachmentIdByTrack(video_track.get()) .value(); outbound_video->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( kDirectionOutbound, attachment_id); outbound_video->media_source_id = 
RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_VIDEO, @@ -2201,16 +2196,17 @@ void RTCStatsCollector::ProduceTransportStats_n( // exist. const auto& certificate_stats_it = transport_cert_stats.find(transport_name); + std::string local_certificate_id, remote_certificate_id; RTC_DCHECK(certificate_stats_it != transport_cert_stats.cend()); - std::string local_certificate_id; - if (certificate_stats_it->second.local) { - local_certificate_id = RTCCertificateIDFromFingerprint( - certificate_stats_it->second.local->fingerprint); - } - std::string remote_certificate_id; - if (certificate_stats_it->second.remote) { - remote_certificate_id = RTCCertificateIDFromFingerprint( - certificate_stats_it->second.remote->fingerprint); + if (certificate_stats_it != transport_cert_stats.cend()) { + if (certificate_stats_it->second.local) { + local_certificate_id = RTCCertificateIDFromFingerprint( + certificate_stats_it->second.local->fingerprint); + } + if (certificate_stats_it->second.remote) { + remote_certificate_id = RTCCertificateIDFromFingerprint( + certificate_stats_it->second.remote->fingerprint); + } } // There is one transport stats for each channel. 
@@ -2375,13 +2371,15 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { if (media_type == cricket::MEDIA_TYPE_AUDIO) { cricket::VoiceMediaChannel* voice_channel = - static_cast(channel->media_channel()); + static_cast( + channel->voice_media_send_channel()); RTC_DCHECK(voice_stats.find(voice_channel) == voice_stats.end()); voice_stats.insert( std::make_pair(voice_channel, cricket::VoiceMediaInfo())); } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { cricket::VideoMediaChannel* video_channel = - static_cast(channel->media_channel()); + static_cast( + channel->video_media_send_channel()); RTC_DCHECK(video_stats.find(video_channel) == video_stats.end()); video_stats.insert( std::make_pair(video_channel, cricket::VideoMediaInfo())); @@ -2421,13 +2419,13 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { if (media_type == cricket::MEDIA_TYPE_AUDIO) { cricket::VoiceMediaChannel* voice_channel = static_cast( - channel->media_channel()); + channel->voice_media_send_channel()); RTC_DCHECK(voice_stats.find(voice_channel) != voice_stats.end()); voice_media_info = std::move(voice_stats[voice_channel]); } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { cricket::VideoMediaChannel* video_channel = static_cast( - channel->media_channel()); + channel->video_media_send_channel()); RTC_DCHECK(video_stats.find(video_channel) != video_stats.end()); video_media_info = std::move(video_stats[video_channel]); } diff --git a/pc/rtc_stats_collector_unittest.cc b/pc/rtc_stats_collector_unittest.cc index 547826296a..5388355eb7 100644 --- a/pc/rtc_stats_collector_unittest.cc +++ b/pc/rtc_stats_collector_unittest.cc @@ -39,6 +39,7 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_timing.h" +#include "api/video_codecs/scalability_mode.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/media_channel.h" #include 
"modules/audio_processing/include/audio_processing_statistics.h" @@ -110,11 +111,12 @@ void PrintTo(const RTCPeerConnectionStats& stats, ::std::ostream* os) { *os << stats.ToJson(); } -void PrintTo(const RTCMediaStreamStats& stats, ::std::ostream* os) { +void PrintTo(const DEPRECATED_RTCMediaStreamStats& stats, ::std::ostream* os) { *os << stats.ToJson(); } -void PrintTo(const RTCMediaStreamTrackStats& stats, ::std::ostream* os) { +void PrintTo(const DEPRECATED_RTCMediaStreamTrackStats& stats, + ::std::ostream* os) { *os << stats.ToJson(); } @@ -794,8 +796,9 @@ class RTCStatsCollectorTest : public ::testing::Test { EXPECT_TRUE(graph.full_report->Get(graph.remote_stream_id)); EXPECT_TRUE(graph.full_report->Get(graph.peer_connection_id)); EXPECT_TRUE(graph.full_report->Get(graph.media_source_id)); - const auto& sender_track = graph.full_report->Get(graph.sender_track_id) - ->cast_to(); + const auto& sender_track = + graph.full_report->Get(graph.sender_track_id) + ->cast_to(); EXPECT_EQ(*sender_track.media_source_id, graph.media_source_id); const auto& outbound_rtp = graph.full_report->Get(graph.outbound_rtp_id) ->cast_to(); @@ -910,8 +913,9 @@ class RTCStatsCollectorTest : public ::testing::Test { // `graph.remote_outbound_rtp_id` is omitted on purpose so that expectations // can be added by the caller depending on what value it sets for the // `add_remote_outbound_stats` argument. 
- const auto& sender_track = graph.full_report->Get(graph.sender_track_id) - ->cast_to(); + const auto& sender_track = + graph.full_report->Get(graph.sender_track_id) + ->cast_to(); EXPECT_EQ(*sender_track.media_source_id, graph.media_source_id); const auto& outbound_rtp = graph.full_report->Get(graph.outbound_rtp_id) ->cast_to(); @@ -1937,6 +1941,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidatePairStats) { connection_info.state = cricket::IceCandidatePairState::IN_PROGRESS; connection_info.priority = 5555; connection_info.nominated = false; + connection_info.last_data_received = Timestamp::Millis(2500); + connection_info.last_data_sent = Timestamp::Millis(5200); cricket::TransportChannelStats transport_channel_stats; transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; @@ -1971,6 +1977,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidatePairStats) { expected_pair.responses_received = 4321; expected_pair.responses_sent = 1000; expected_pair.consent_requests_sent = (2222 - 2000); + expected_pair.last_packet_received_timestamp = 2500; + expected_pair.last_packet_sent_timestamp = 5200; + // `expected_pair.current_round_trip_time` should be undefined because the // current RTT is not set. 
// `expected_pair.available_[outgoing/incoming]_bitrate` should be undefined @@ -2177,21 +2186,22 @@ TEST_F(RTCStatsCollectorTest, rtc::scoped_refptr report = stats_->GetStatsReport(); - RTCMediaStreamStats expected_local_stream( - IdForType(report.get()), report->timestamp_us()); + DEPRECATED_RTCMediaStreamStats expected_local_stream( + IdForType(report.get()), + report->timestamp_us()); expected_local_stream.stream_identifier = local_stream->id(); expected_local_stream.track_ids = { - IdForType(report.get())}; + IdForType(report.get())}; ASSERT_TRUE(report->Get(expected_local_stream.id())) << "Did not find " << expected_local_stream.id() << " in " << report->ToJson(); - EXPECT_EQ( - expected_local_stream, - report->Get(expected_local_stream.id())->cast_to()); + EXPECT_EQ(expected_local_stream, + report->Get(expected_local_stream.id()) + ->cast_to()); - RTCMediaStreamTrackStats expected_local_audio_track_ssrc1( - IdForType(report.get()), report->timestamp_us(), - RTCMediaStreamTrackKind::kAudio); + DEPRECATED_RTCMediaStreamTrackStats expected_local_audio_track_ssrc1( + IdForType(report.get()), + report->timestamp_us(), RTCMediaStreamTrackKind::kAudio); expected_local_audio_track_ssrc1.track_identifier = local_audio_track->id(); expected_local_audio_track_ssrc1.media_source_id = "SA11"; // Attachment ID = SSRC + 10 @@ -2205,7 +2215,7 @@ TEST_F(RTCStatsCollectorTest, << report->ToJson(); EXPECT_EQ(expected_local_audio_track_ssrc1, report->Get(expected_local_audio_track_ssrc1.id()) - ->cast_to()); + ->cast_to()); } TEST_F(RTCStatsCollectorTest, @@ -2235,13 +2245,6 @@ TEST_F(RTCStatsCollectorTest, voice_receiver_info.silent_concealed_samples = 765; voice_receiver_info.jitter_buffer_delay_seconds = 3.456; voice_receiver_info.jitter_buffer_emitted_count = 13; - // TODO(crbug.com/webrtc/14524): These metrics have been moved from "track" - // stats, no need to test these here. 
- voice_receiver_info.jitter_buffer_flushes = 7; - voice_receiver_info.delayed_packet_outage_samples = 15; - voice_receiver_info.relative_packet_arrival_delay_seconds = 16; - voice_receiver_info.interruption_count = 7788; - voice_receiver_info.total_interruption_duration_ms = 778899; stats_->CreateMockRtpSendersReceiversAndChannels( {}, {std::make_pair(remote_audio_track.get(), voice_receiver_info)}, {}, @@ -2249,21 +2252,22 @@ TEST_F(RTCStatsCollectorTest, rtc::scoped_refptr report = stats_->GetStatsReport(); - RTCMediaStreamStats expected_remote_stream( - IdForType(report.get()), report->timestamp_us()); + DEPRECATED_RTCMediaStreamStats expected_remote_stream( + IdForType(report.get()), + report->timestamp_us()); expected_remote_stream.stream_identifier = remote_stream->id(); expected_remote_stream.track_ids = std::vector( - {IdForType(report.get())}); + {IdForType(report.get())}); ASSERT_TRUE(report->Get(expected_remote_stream.id())) << "Did not find " << expected_remote_stream.id() << " in " << report->ToJson(); - EXPECT_EQ( - expected_remote_stream, - report->Get(expected_remote_stream.id())->cast_to()); + EXPECT_EQ(expected_remote_stream, + report->Get(expected_remote_stream.id()) + ->cast_to()); - RTCMediaStreamTrackStats expected_remote_audio_track( - IdForType(report.get()), report->timestamp_us(), - RTCMediaStreamTrackKind::kAudio); + DEPRECATED_RTCMediaStreamTrackStats expected_remote_audio_track( + IdForType(report.get()), + report->timestamp_us(), RTCMediaStreamTrackKind::kAudio); expected_remote_audio_track.track_identifier = remote_audio_track->id(); // `expected_remote_audio_track.media_source_id` should be undefined // because the track is remote. 
@@ -2281,17 +2285,10 @@ TEST_F(RTCStatsCollectorTest, expected_remote_audio_track.silent_concealed_samples = 765; expected_remote_audio_track.jitter_buffer_delay = 3.456; expected_remote_audio_track.jitter_buffer_emitted_count = 13; - // TODO(crbug.com/webrtc/14524): These metrics have been moved from "track" - // stats, delete them. - expected_remote_audio_track.jitter_buffer_flushes = 7; - expected_remote_audio_track.delayed_packet_outage_samples = 15; - expected_remote_audio_track.relative_packet_arrival_delay = 16; - expected_remote_audio_track.interruption_count = 7788; - expected_remote_audio_track.total_interruption_duration = 778.899; ASSERT_TRUE(report->Get(expected_remote_audio_track.id())); EXPECT_EQ(expected_remote_audio_track, report->Get(expected_remote_audio_track.id()) - ->cast_to()); + ->cast_to()); } TEST_F(RTCStatsCollectorTest, @@ -2322,23 +2319,25 @@ TEST_F(RTCStatsCollectorTest, rtc::scoped_refptr report = stats_->GetStatsReport(); - auto stats_of_my_type = report->GetStatsOfType(); + auto stats_of_my_type = + report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_my_type.size()) << "No stream in " << report->ToJson(); - auto stats_of_track_type = report->GetStatsOfType(); + auto stats_of_track_type = + report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_track_type.size()) << "Wrong number of tracks in " << report->ToJson(); - RTCMediaStreamStats expected_local_stream(stats_of_my_type[0]->id(), - report->timestamp_us()); + DEPRECATED_RTCMediaStreamStats expected_local_stream( + stats_of_my_type[0]->id(), report->timestamp_us()); expected_local_stream.stream_identifier = local_stream->id(); expected_local_stream.track_ids = std::vector({stats_of_track_type[0]->id()}); ASSERT_TRUE(report->Get(expected_local_stream.id())); - EXPECT_EQ( - expected_local_stream, - report->Get(expected_local_stream.id())->cast_to()); + EXPECT_EQ(expected_local_stream, + report->Get(expected_local_stream.id()) + ->cast_to()); - RTCMediaStreamTrackStats 
expected_local_video_track_ssrc1( + DEPRECATED_RTCMediaStreamTrackStats expected_local_video_track_ssrc1( stats_of_track_type[0]->id(), report->timestamp_us(), RTCMediaStreamTrackKind::kVideo); expected_local_video_track_ssrc1.track_identifier = local_video_track->id(); @@ -2354,7 +2353,7 @@ TEST_F(RTCStatsCollectorTest, ASSERT_TRUE(report->Get(expected_local_video_track_ssrc1.id())); EXPECT_EQ(expected_local_video_track_ssrc1, report->Get(expected_local_video_track_ssrc1.id()) - ->cast_to()); + ->cast_to()); } TEST_F(RTCStatsCollectorTest, @@ -2381,14 +2380,6 @@ TEST_F(RTCStatsCollectorTest, video_receiver_info_ssrc3.frames_decoded = 995; video_receiver_info_ssrc3.frames_dropped = 10; video_receiver_info_ssrc3.frames_rendered = 990; - video_receiver_info_ssrc3.total_frames_duration_ms = 15000; - video_receiver_info_ssrc3.sum_squared_frame_durations = 1.5; - // TODO(crbug.com/webrtc/14521): When removed from "track", there's no need to - // test these here. - video_receiver_info_ssrc3.freeze_count = 3; - video_receiver_info_ssrc3.pause_count = 2; - video_receiver_info_ssrc3.total_freezes_duration_ms = 1000; - video_receiver_info_ssrc3.total_pauses_duration_ms = 10000; stats_->CreateMockRtpSendersReceiversAndChannels( {}, {}, {}, @@ -2398,24 +2389,26 @@ TEST_F(RTCStatsCollectorTest, rtc::scoped_refptr report = stats_->GetStatsReport(); - auto stats_of_my_type = report->GetStatsOfType(); + auto stats_of_my_type = + report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_my_type.size()) << "No stream in " << report->ToJson(); - auto stats_of_track_type = report->GetStatsOfType(); + auto stats_of_track_type = + report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_track_type.size()) << "Wrong number of tracks in " << report->ToJson(); ASSERT_TRUE(*(stats_of_track_type[0]->remote_source)); - RTCMediaStreamStats expected_remote_stream(stats_of_my_type[0]->id(), - report->timestamp_us()); + DEPRECATED_RTCMediaStreamStats expected_remote_stream( + stats_of_my_type[0]->id(), 
report->timestamp_us()); expected_remote_stream.stream_identifier = remote_stream->id(); expected_remote_stream.track_ids = std::vector({stats_of_track_type[0]->id()}); ASSERT_TRUE(report->Get(expected_remote_stream.id())); - EXPECT_EQ( - expected_remote_stream, - report->Get(expected_remote_stream.id())->cast_to()); + EXPECT_EQ(expected_remote_stream, + report->Get(expected_remote_stream.id()) + ->cast_to()); - RTCMediaStreamTrackStats expected_remote_video_track_ssrc3( + DEPRECATED_RTCMediaStreamTrackStats expected_remote_video_track_ssrc3( stats_of_track_type[0]->id(), report->timestamp_us(), RTCMediaStreamTrackKind::kVideo); expected_remote_video_track_ssrc3.track_identifier = @@ -2432,18 +2425,11 @@ TEST_F(RTCStatsCollectorTest, expected_remote_video_track_ssrc3.frames_received = 1000; expected_remote_video_track_ssrc3.frames_decoded = 995; expected_remote_video_track_ssrc3.frames_dropped = 10; - expected_remote_video_track_ssrc3.total_frames_duration = 15; - expected_remote_video_track_ssrc3.sum_squared_frame_durations = 1.5; - // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete them. 
- expected_remote_video_track_ssrc3.freeze_count = 3; - expected_remote_video_track_ssrc3.pause_count = 2; - expected_remote_video_track_ssrc3.total_freezes_duration = 1; - expected_remote_video_track_ssrc3.total_pauses_duration = 10; ASSERT_TRUE(report->Get(expected_remote_video_track_ssrc3.id())); EXPECT_EQ(expected_remote_video_track_ssrc3, report->Get(expected_remote_video_track_ssrc3.id()) - ->cast_to()); + ->cast_to()); } TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Audio) { @@ -2500,7 +2486,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Audio) { rtc::scoped_refptr report = stats_->GetStatsReport(); - auto stats_of_track_type = report->GetStatsOfType(); + auto stats_of_track_type = + report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_track_type.size()); RTCInboundRTPStreamStats expected_audio("ITTransportName1A1", @@ -2611,6 +2598,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) { absl::nullopt; video_media_info.receivers[0].decoder_implementation_name = ""; video_media_info.receivers[0].min_playout_delay_ms = 50; + video_media_info.receivers[0].power_efficient_decoder = false; // Note: these two values intentionally differ, // only the decoded one should show up. @@ -2639,7 +2627,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) { expected_video.kind = "video"; expected_video.track_identifier = "RemoteVideoTrackID"; expected_video.mid = "VideoMid"; - expected_video.track_id = IdForType(report.get()); + expected_video.track_id = + IdForType(report.get()); expected_video.transport_id = "TTransportName1"; expected_video.codec_id = "CITTransportName1_42"; expected_video.fir_count = 5; @@ -2674,6 +2663,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) { // `expected_video.decoder_implementation` should be undefined. 
expected_video.min_playout_delay = 0.05; expected_video.frames_per_second = 5; + expected_video.power_efficient_decoder = false; ASSERT_TRUE(report->Get(expected_video.id())); EXPECT_EQ( @@ -2691,6 +2681,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) { expected_video.estimated_playout_timestamp = 1234; video_media_info.receivers[0].decoder_implementation_name = "libfoodecoder"; expected_video.decoder_implementation = "libfoodecoder"; + video_media_info.receivers[0].power_efficient_decoder = true; + expected_video.power_efficient_decoder = true; video_media_channel->SetStats(video_media_info); report = stats_->GetFreshStatsReport(); @@ -2747,6 +2739,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Audio) { voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); voice_media_info.senders[0].local_stats[0].ssrc = 1; voice_media_info.senders[0].packets_sent = 2; + voice_media_info.senders[0].total_packet_send_delay = TimeDelta::Seconds(1); voice_media_info.senders[0].retransmitted_packets_sent = 20; voice_media_info.senders[0].payload_bytes_sent = 3; voice_media_info.senders[0].header_and_padding_bytes_sent = 12; @@ -2779,10 +2772,12 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Audio) { expected_audio.ssrc = 1; expected_audio.media_type = "audio"; expected_audio.kind = "audio"; - expected_audio.track_id = IdForType(report.get()); + expected_audio.track_id = + IdForType(report.get()); expected_audio.transport_id = "TTransportName1"; expected_audio.codec_id = "COTTransportName1_42"; expected_audio.packets_sent = 2; + expected_audio.total_packet_send_delay = 1; expected_audio.retransmitted_packets_sent = 20; expected_audio.bytes_sent = 3; expected_audio.header_bytes_sent = 12; @@ -2834,12 +2829,14 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { video_media_info.senders[0].qp_sum = absl::nullopt; video_media_info.senders[0].content_type = 
VideoContentType::UNSPECIFIED; video_media_info.senders[0].encoder_implementation_name = ""; + video_media_info.senders[0].power_efficient_encoder = false; video_media_info.senders[0].send_frame_width = 200; video_media_info.senders[0].send_frame_height = 100; video_media_info.senders[0].framerate_sent = 10; video_media_info.senders[0].frames_sent = 5; video_media_info.senders[0].huge_frames_sent = 2; video_media_info.senders[0].active = false; + video_media_info.senders[0].scalability_mode = ScalabilityMode::kL3T3_KEY; video_media_info.aggregated_senders.push_back(video_media_info.senders[0]); RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; @@ -2859,7 +2856,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { auto stats_of_my_type = report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_my_type.size()); - auto stats_of_track_type = report->GetStatsOfType(); + auto stats_of_track_type = + report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_track_type.size()); RTCOutboundRTPStreamStats expected_video(stats_of_my_type[0]->id(), @@ -2897,6 +2895,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { expected_video.frames_sent = 5; expected_video.huge_frames_sent = 2; expected_video.active = false; + expected_video.power_efficient_encoder = false; + expected_video.scalability_mode = "L3T3_KEY"; // `expected_video.content_type` should be undefined. // `expected_video.qp_sum` should be undefined. // `expected_video.encoder_implementation` should be undefined. 
@@ -2914,6 +2914,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { video_media_info.senders[0].encoder_implementation_name = "libfooencoder"; video_media_info.aggregated_senders[0] = video_media_info.senders[0]; expected_video.encoder_implementation = "libfooencoder"; + video_media_info.senders[0].power_efficient_encoder = true; + expected_video.power_efficient_encoder = true; video_media_channel->SetStats(video_media_info); report = stats_->GetFreshStatsReport(); @@ -3179,6 +3181,7 @@ TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRTPStreamStats_Audio) { voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); voice_media_info.senders[0].local_stats[0].ssrc = 1; voice_media_info.senders[0].packets_sent = 2; + voice_media_info.senders[0].total_packet_send_delay = TimeDelta::Seconds(0.5); voice_media_info.senders[0].retransmitted_packets_sent = 20; voice_media_info.senders[0].payload_bytes_sent = 3; voice_media_info.senders[0].header_and_padding_bytes_sent = 4; @@ -3210,10 +3213,12 @@ TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRTPStreamStats_Audio) { expected_audio.ssrc = 1; expected_audio.media_type = "audio"; expected_audio.kind = "audio"; - expected_audio.track_id = IdForType(report.get()); + expected_audio.track_id = + IdForType(report.get()); expected_audio.transport_id = "TTransportName1"; expected_audio.codec_id = "COTTransportName1_42"; expected_audio.packets_sent = 2; + expected_audio.total_packet_send_delay = 0.5; expected_audio.retransmitted_packets_sent = 20; expected_audio.bytes_sent = 3; expected_audio.header_bytes_sent = 4; @@ -3555,6 +3560,32 @@ TEST_P(RTCStatsCollectorTestWithParamKind, } } +TEST_P(RTCStatsCollectorTestWithParamKind, + RTCRemoteInboundRtpStreamStatsRttMissingBeforeMeasurement) { + constexpr int64_t kReportBlockTimestampUtcUs = 123456789; + + RTCPReportBlock report_block; + // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the + // 
`source_ssrc`, "SSRC of the RTP packet sender". + report_block.source_ssrc = 12; + ReportBlockData report_block_data; // AddRoundTripTimeSample() not called. + report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs); + + AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, + absl::nullopt); + + rtc::scoped_refptr report = stats_->GetStatsReport(); + + std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12"; + ASSERT_TRUE(report->Get(remote_inbound_rtp_id)); + auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id) + ->cast_to(); + + EXPECT_TRUE(remote_inbound_rtp.round_trip_time_measurements.is_defined()); + EXPECT_EQ(0, *remote_inbound_rtp.round_trip_time_measurements); + EXPECT_FALSE(remote_inbound_rtp.round_trip_time.is_defined()); +} + TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsWithTimestampFromReportBlock) { const int64_t kReportBlockTimestampUtcUs = 123456789; @@ -3748,9 +3779,9 @@ TEST_F(RTCStatsCollectorTest, CollectEchoReturnLossFromTrackAudioProcessor) { rtc::scoped_refptr report = stats_->GetStatsReport(); - RTCMediaStreamTrackStats expected_local_audio_track_ssrc1( - IdForType(report.get()), report->timestamp_us(), - RTCMediaStreamTrackKind::kAudio); + DEPRECATED_RTCMediaStreamTrackStats expected_local_audio_track_ssrc1( + IdForType(report.get()), + report->timestamp_us(), RTCMediaStreamTrackKind::kAudio); expected_local_audio_track_ssrc1.track_identifier = local_audio_track->id(); expected_local_audio_track_ssrc1.media_source_id = "SA11"; // Attachment ID = SSRC + 10 @@ -3764,7 +3795,7 @@ TEST_F(RTCStatsCollectorTest, CollectEchoReturnLossFromTrackAudioProcessor) { << report->ToJson(); EXPECT_EQ(expected_local_audio_track_ssrc1, report->Get(expected_local_audio_track_ssrc1.id()) - ->cast_to()); + ->cast_to()); RTCAudioSourceStats expected_audio("SA11", report->timestamp_us()); expected_audio.track_identifier = "LocalAudioTrackID"; @@ -3870,8 +3901,8 @@ 
TEST_F(RTCStatsCollectorTest, StatsReportedOnZeroSsrc) { rtc::scoped_refptr report = stats_->GetStatsReport(); - std::vector track_stats = - report->GetStatsOfType(); + std::vector track_stats = + report->GetStatsOfType(); EXPECT_EQ(1U, track_stats.size()); std::vector rtp_stream_stats = @@ -3891,8 +3922,8 @@ TEST_F(RTCStatsCollectorTest, DoNotCrashOnSsrcChange) { // We do not generate any matching voice_sender_info stats. rtc::scoped_refptr report = stats_->GetStatsReport(); - std::vector track_stats = - report->GetStatsOfType(); + std::vector track_stats = + report->GetStatsOfType(); EXPECT_EQ(1U, track_stats.size()); } diff --git a/pc/rtc_stats_integrationtest.cc b/pc/rtc_stats_integrationtest.cc index aee94bc295..b5abad9d63 100644 --- a/pc/rtc_stats_integrationtest.cc +++ b/pc/rtc_stats_integrationtest.cc @@ -339,8 +339,8 @@ class RTCStatsReportVerifier { stats_types.insert(RTCIceCandidatePairStats::kType); stats_types.insert(RTCLocalIceCandidateStats::kType); stats_types.insert(RTCRemoteIceCandidateStats::kType); - stats_types.insert(RTCMediaStreamStats::kType); - stats_types.insert(RTCMediaStreamTrackStats::kType); + stats_types.insert(DEPRECATED_RTCMediaStreamStats::kType); + stats_types.insert(DEPRECATED_RTCMediaStreamTrackStats::kType); stats_types.insert(RTCPeerConnectionStats::kType); stats_types.insert(RTCInboundRTPStreamStats::kType); stats_types.insert(RTCOutboundRTPStreamStats::kType); @@ -380,12 +380,12 @@ class RTCStatsReportVerifier { } else if (stats.type() == RTCRemoteIceCandidateStats::kType) { verify_successful &= VerifyRTCRemoteIceCandidateStats( stats.cast_to()); - } else if (stats.type() == RTCMediaStreamStats::kType) { - verify_successful &= - VerifyRTCMediaStreamStats(stats.cast_to()); - } else if (stats.type() == RTCMediaStreamTrackStats::kType) { - verify_successful &= VerifyRTCMediaStreamTrackStats( - stats.cast_to()); + } else if (stats.type() == DEPRECATED_RTCMediaStreamStats::kType) { + verify_successful &= 
DEPRECATED_VerifyRTCMediaStreamStats( + stats.cast_to()); + } else if (stats.type() == DEPRECATED_RTCMediaStreamTrackStats::kType) { + verify_successful &= VerLegacyifyRTCMediaStreamTrackStats( + stats.cast_to()); } else if (stats.type() == RTCPeerConnectionStats::kType) { verify_successful &= VerifyRTCPeerConnectionStats( stats.cast_to()); @@ -516,6 +516,8 @@ class RTCStatsReportVerifier { verifier.TestMemberIsNonNegative(candidate_pair.responses_sent); verifier.TestMemberIsNonNegative( candidate_pair.consent_requests_sent); + verifier.TestMemberIsDefined(candidate_pair.last_packet_received_timestamp); + verifier.TestMemberIsDefined(candidate_pair.last_packet_sent_timestamp); return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -560,16 +562,17 @@ class RTCStatsReportVerifier { return VerifyRTCIceCandidateStats(remote_candidate); } - bool VerifyRTCMediaStreamStats(const RTCMediaStreamStats& media_stream) { + bool DEPRECATED_VerifyRTCMediaStreamStats( + const DEPRECATED_RTCMediaStreamStats& media_stream) { RTCStatsVerifier verifier(report_.get(), &media_stream); verifier.TestMemberIsDefined(media_stream.stream_identifier); - verifier.TestMemberIsIDReference(media_stream.track_ids, - RTCMediaStreamTrackStats::kType); + verifier.TestMemberIsIDReference( + media_stream.track_ids, DEPRECATED_RTCMediaStreamTrackStats::kType); return verifier.ExpectAllMembersSuccessfullyTested(); } - bool VerifyRTCMediaStreamTrackStats( - const RTCMediaStreamTrackStats& media_stream_track) { + bool VerLegacyifyRTCMediaStreamTrackStats( + const DEPRECATED_RTCMediaStreamTrackStats& media_stream_track) { RTCStatsVerifier verifier(report_.get(), &media_stream_track); verifier.TestMemberIsDefined(media_stream_track.track_identifier); verifier.TestMemberIsDefined(media_stream_track.remote_source); @@ -594,20 +597,6 @@ class RTCStatsReportVerifier { media_stream_track.frames_decoded); verifier.TestMemberIsNonNegative( media_stream_track.frames_dropped); - verifier.TestMemberIsNonNegative( 
- media_stream_track.total_frames_duration); - verifier.TestMemberIsNonNegative( - media_stream_track.sum_squared_frame_durations); - // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete - // them from "track". - verifier.TestMemberIsNonNegative( - media_stream_track.freeze_count); - verifier.TestMemberIsNonNegative( - media_stream_track.pause_count); - verifier.TestMemberIsNonNegative( - media_stream_track.total_freezes_duration); - verifier.TestMemberIsNonNegative( - media_stream_track.total_pauses_duration); } else { verifier.TestMemberIsIDReference(media_stream_track.media_source_id, RTCVideoSourceStats::kType); @@ -622,18 +611,6 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(media_stream_track.frames_received); verifier.TestMemberIsUndefined(media_stream_track.frames_decoded); verifier.TestMemberIsUndefined(media_stream_track.frames_dropped); - verifier.TestMemberIsUndefined( - media_stream_track.total_frames_duration); - verifier.TestMemberIsUndefined( - media_stream_track.sum_squared_frame_durations); - // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete - // them from "track". - verifier.TestMemberIsUndefined(media_stream_track.freeze_count); - verifier.TestMemberIsUndefined(media_stream_track.pause_count); - verifier.TestMemberIsUndefined( - media_stream_track.total_freezes_duration); - verifier.TestMemberIsUndefined( - media_stream_track.total_pauses_duration); } // Video-only members verifier.TestMemberIsNonNegative( @@ -656,16 +633,6 @@ class RTCStatsReportVerifier { media_stream_track.inserted_samples_for_deceleration); verifier.TestMemberIsUndefined( media_stream_track.removed_samples_for_acceleration); - // TODO(crbug.com/webrtc/14524): These metrics have been moved from - // "track" stats, delete them. 
- verifier.TestMemberIsUndefined(media_stream_track.jitter_buffer_flushes); - verifier.TestMemberIsUndefined( - media_stream_track.delayed_packet_outage_samples); - verifier.TestMemberIsUndefined( - media_stream_track.relative_packet_arrival_delay); - verifier.TestMemberIsUndefined(media_stream_track.interruption_count); - verifier.TestMemberIsUndefined( - media_stream_track.total_interruption_duration); } else { RTC_DCHECK_EQ(*media_stream_track.kind, RTCMediaStreamTrackKind::kAudio); // The type of the referenced media source depends on kind. @@ -693,18 +660,6 @@ class RTCStatsReportVerifier { media_stream_track.inserted_samples_for_deceleration); verifier.TestMemberIsNonNegative( media_stream_track.removed_samples_for_acceleration); - // TODO(crbug.com/webrtc/14524): These metrics have been moved from - // "track" stats, delete them. - verifier.TestMemberIsNonNegative( - media_stream_track.jitter_buffer_flushes); - verifier.TestMemberIsNonNegative( - media_stream_track.delayed_packet_outage_samples); - verifier.TestMemberIsNonNegative( - media_stream_track.relative_packet_arrival_delay); - verifier.TestMemberIsNonNegative( - media_stream_track.interruption_count); - verifier.TestMemberIsNonNegative( - media_stream_track.total_interruption_duration); } else { verifier.TestMemberIsIDReference(media_stream_track.media_source_id, RTCAudioSourceStats::kType); @@ -726,17 +681,6 @@ class RTCStatsReportVerifier { media_stream_track.inserted_samples_for_deceleration); verifier.TestMemberIsUndefined( media_stream_track.removed_samples_for_acceleration); - // TODO(crbug.com/webrtc/14524): These metrics have been moved from - // "track" stats, delete them. 
- verifier.TestMemberIsUndefined( - media_stream_track.jitter_buffer_flushes); - verifier.TestMemberIsUndefined( - media_stream_track.delayed_packet_outage_samples); - verifier.TestMemberIsUndefined( - media_stream_track.relative_packet_arrival_delay); - verifier.TestMemberIsUndefined(media_stream_track.interruption_count); - verifier.TestMemberIsUndefined( - media_stream_track.total_interruption_duration); } // Video-only members should be undefined verifier.TestMemberIsUndefined(media_stream_track.frame_width); @@ -746,9 +690,6 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(media_stream_track.frames_received); verifier.TestMemberIsUndefined(media_stream_track.frames_decoded); verifier.TestMemberIsUndefined(media_stream_track.frames_dropped); - verifier.TestMemberIsUndefined(media_stream_track.total_frames_duration); - verifier.TestMemberIsUndefined( - media_stream_track.sum_squared_frame_durations); // Audio-only members // TODO(hbos): `echo_return_loss` and `echo_return_loss_enhancement` are // flaky on msan bot (sometimes defined, sometimes undefined). Should the @@ -757,12 +698,6 @@ class RTCStatsReportVerifier { verifier.MarkMemberTested(media_stream_track.echo_return_loss, true); verifier.MarkMemberTested(media_stream_track.echo_return_loss_enhancement, true); - // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete - // them from "track". 
- verifier.TestMemberIsUndefined(media_stream_track.freeze_count); - verifier.TestMemberIsUndefined(media_stream_track.pause_count); - verifier.TestMemberIsUndefined(media_stream_track.total_freezes_duration); - verifier.TestMemberIsUndefined(media_stream_track.total_pauses_duration); } return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -786,8 +721,8 @@ class RTCStatsReportVerifier { if (stream.type() == RTCInboundRTPStreamStats::kType || stream.type() == RTCOutboundRTPStreamStats::kType) { verifier.TestMemberIsDefined(stream.media_type); - verifier.TestMemberIsIDReference(stream.track_id, - RTCMediaStreamTrackStats::kType); + verifier.TestMemberIsIDReference( + stream.track_id, DEPRECATED_RTCMediaStreamTrackStats::kType); } else { verifier.TestMemberIsUndefined(stream.media_type); verifier.TestMemberIsUndefined(stream.track_id); @@ -815,9 +750,15 @@ class RTCStatsReportVerifier { if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") { verifier.TestMemberIsNonNegative(inbound_stream.qp_sum); verifier.TestMemberIsDefined(inbound_stream.decoder_implementation); + verifier.TestMemberIsDefined(inbound_stream.power_efficient_decoder); + EXPECT_EQ(inbound_stream.power_efficient_decoder.exposure_criteria(), + StatExposureCriteria::kHardwareCapability); } else { verifier.TestMemberIsUndefined(inbound_stream.qp_sum); verifier.TestMemberIsUndefined(inbound_stream.decoder_implementation); + verifier.TestMemberIsUndefined(inbound_stream.power_efficient_decoder); + EXPECT_EQ(inbound_stream.power_efficient_decoder.exposure_criteria(), + StatExposureCriteria::kHardwareCapability); } verifier.TestMemberIsNonNegative(inbound_stream.packets_received); if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "audio") { @@ -997,6 +938,8 @@ class RTCStatsReportVerifier { verifier.TestMemberIsOptionalIDReference( outbound_stream.remote_id, RTCRemoteInboundRtpStreamStats::kType); verifier.TestMemberIsNonNegative(outbound_stream.packets_sent); + 
verifier.TestMemberIsNonNegative( + outbound_stream.total_packet_send_delay); verifier.TestMemberIsNonNegative( outbound_stream.retransmitted_packets_sent); verifier.TestMemberIsNonNegative(outbound_stream.bytes_sent); @@ -1012,8 +955,6 @@ class RTCStatsReportVerifier { outbound_stream.total_encode_time); verifier.TestMemberIsNonNegative( outbound_stream.total_encoded_bytes_target); - verifier.TestMemberIsNonNegative( - outbound_stream.total_packet_send_delay); verifier.TestMemberIsDefined(outbound_stream.quality_limitation_reason); verifier.TestMemberIsDefined( outbound_stream.quality_limitation_durations); @@ -1023,6 +964,9 @@ class RTCStatsReportVerifier { // this to be present. verifier.MarkMemberTested(outbound_stream.content_type, true); verifier.TestMemberIsDefined(outbound_stream.encoder_implementation); + verifier.TestMemberIsDefined(outbound_stream.power_efficient_encoder); + EXPECT_EQ(outbound_stream.power_efficient_encoder.exposure_criteria(), + StatExposureCriteria::kHardwareCapability); // Unless an implementation-specific amount of time has passed and at // least one frame has been encoded, undefined is reported. Because it // is hard to tell what is the case here, we treat FPS as optional. @@ -1040,14 +984,13 @@ class RTCStatsReportVerifier { verifier.TestMemberIsNonNegative( outbound_stream.huge_frames_sent); verifier.MarkMemberTested(outbound_stream.rid, true); + verifier.TestMemberIsDefined(outbound_stream.scalability_mode); } else { verifier.TestMemberIsUndefined(outbound_stream.frames_encoded); verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded); verifier.TestMemberIsUndefined(outbound_stream.total_encode_time); verifier.TestMemberIsUndefined( outbound_stream.total_encoded_bytes_target); - // TODO(https://crbug.com/webrtc/10635): Implement for audio as well. 
- verifier.TestMemberIsUndefined(outbound_stream.total_packet_send_delay); verifier.TestMemberIsUndefined(outbound_stream.quality_limitation_reason); verifier.TestMemberIsUndefined( outbound_stream.quality_limitation_durations); @@ -1056,12 +999,14 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(outbound_stream.content_type); // TODO(hbos): Implement for audio as well. verifier.TestMemberIsUndefined(outbound_stream.encoder_implementation); + verifier.TestMemberIsUndefined(outbound_stream.power_efficient_encoder); verifier.TestMemberIsUndefined(outbound_stream.rid); verifier.TestMemberIsUndefined(outbound_stream.frames_per_second); verifier.TestMemberIsUndefined(outbound_stream.frame_height); verifier.TestMemberIsUndefined(outbound_stream.frame_width); verifier.TestMemberIsUndefined(outbound_stream.frames_sent); verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent); + verifier.TestMemberIsUndefined(outbound_stream.scalability_mode); } return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -1215,7 +1160,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithSenderSelector) { // TODO(hbos): Include RTCRtpContributingSourceStats when implemented. RTCInboundRTPStreamStats::kType, RTCPeerConnectionStats::kType, - RTCMediaStreamStats::kType, + DEPRECATED_RTCMediaStreamStats::kType, RTCDataChannelStats::kType, }; RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats); @@ -1234,7 +1179,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithReceiverSelector) { // TODO(hbos): Include RTCRtpContributingSourceStats when implemented. 
RTCOutboundRTPStreamStats::kType, RTCPeerConnectionStats::kType, - RTCMediaStreamStats::kType, + DEPRECATED_RTCMediaStreamStats::kType, RTCDataChannelStats::kType, }; RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats); diff --git a/pc/rtc_stats_traversal.cc b/pc/rtc_stats_traversal.cc index 279488f135..b3f6155e3f 100644 --- a/pc/rtc_stats_traversal.cc +++ b/pc/rtc_stats_traversal.cc @@ -91,11 +91,13 @@ std::vector GetStatsReferencedIds(const RTCStats& stats) { const auto& local_or_remote_candidate = static_cast(stats); AddIdIfDefined(local_or_remote_candidate.transport_id, &neighbor_ids); - } else if (type == RTCMediaStreamStats::kType) { - const auto& stream = static_cast(stats); + } else if (type == DEPRECATED_RTCMediaStreamStats::kType) { + const auto& stream = + static_cast(stats); AddIdsIfDefined(stream.track_ids, &neighbor_ids); - } else if (type == RTCMediaStreamTrackStats::kType) { - const auto& track = static_cast(stats); + } else if (type == DEPRECATED_RTCMediaStreamTrackStats::kType) { + const auto& track = + static_cast(stats); AddIdIfDefined(track.media_source_id, &neighbor_ids); } else if (type == RTCPeerConnectionStats::kType) { // RTCPeerConnectionStats does not have any neighbor references. diff --git a/pc/rtp_receiver.h b/pc/rtp_receiver.h index 8c49f56b75..7622139f83 100644 --- a/pc/rtp_receiver.h +++ b/pc/rtp_receiver.h @@ -53,7 +53,8 @@ class RtpReceiverInternal : public RtpReceiverInterface { // * SetMediaChannel(nullptr) must be called before the media channel is // destroyed. // * This method must be invoked on the worker thread. - virtual void SetMediaChannel(cricket::MediaChannel* media_channel) = 0; + virtual void SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) = 0; // Configures the RtpReceiver with the underlying media channel, with the // given SSRC as the stream identifier. 
diff --git a/pc/rtp_sender.cc b/pc/rtp_sender.cc index cc9c02e283..af45029909 100644 --- a/pc/rtp_sender.cc +++ b/pc/rtp_sender.cc @@ -88,6 +88,46 @@ RtpParameters RestoreEncodingLayers( return result; } +class SignalingThreadCallback { + public: + SignalingThreadCallback(rtc::Thread* signaling_thread, + SetParametersCallback callback) + : signaling_thread_(signaling_thread), callback_(std::move(callback)) {} + SignalingThreadCallback(SignalingThreadCallback&& other) + : signaling_thread_(other.signaling_thread_), + callback_(std::move(other.callback_)) { + other.callback_ = nullptr; + } + + ~SignalingThreadCallback() { + if (callback_) { + Resolve(RTCError(RTCErrorType::INTERNAL_ERROR)); + + RTC_CHECK_NOTREACHED(); + } + } + + void operator()(const RTCError& error) { Resolve(error); } + + private: + void Resolve(const RTCError& error) { + if (!signaling_thread_->IsCurrent()) { + signaling_thread_->PostTask( + [callback = std::move(callback_), error]() mutable { + webrtc::InvokeSetParametersCallback(callback, error); + }); + callback_ = nullptr; + return; + } + + webrtc::InvokeSetParametersCallback(callback_, error); + callback_ = nullptr; + } + + rtc::Thread* signaling_thread_; + SetParametersCallback callback_; +}; + } // namespace // Returns true if any RtpParameters member that isn't implemented contains a @@ -146,7 +186,8 @@ void RtpSenderBase::SetEncoderSelectorOnChannel() { } } -void RtpSenderBase::SetMediaChannel(cricket::MediaChannel* media_channel) { +void RtpSenderBase::SetMediaChannel( + cricket::MediaSendChannelInterface* media_channel) { RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); media_channel_ = media_channel; @@ -189,34 +230,60 @@ RtpParameters RtpSenderBase::GetParameters() const { return result; } -RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { +void RtpSenderBase::SetParametersInternal(const RtpParameters& parameters, + SetParametersCallback callback, + bool blocking) { 
RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); if (UnimplementedRtpParameterHasValue(parameters)) { - LOG_AND_RETURN_ERROR( + RTCError error( RTCErrorType::UNSUPPORTED_PARAMETER, "Attempted to set an unimplemented parameter of RtpParameters."); + RTC_LOG(LS_ERROR) << error.message() << " (" + << ::webrtc::ToString(error.type()) << ")"; + webrtc::InvokeSetParametersCallback(callback, error); + return; } if (!media_channel_ || !ssrc_) { auto result = cricket::CheckRtpParametersInvalidModificationAndValues( - init_parameters_, parameters); + init_parameters_, parameters, video_codec_preferences_); if (result.ok()) { init_parameters_ = parameters; } - return result; + webrtc::InvokeSetParametersCallback(callback, result); + return; } - return worker_thread_->BlockingCall([&] { + auto task = [&, callback = std::move(callback), + parameters = std::move(parameters)]() mutable { RtpParameters rtp_parameters = parameters; + RtpParameters old_parameters = media_channel_->GetRtpSendParameters(ssrc_); if (!disabled_rids_.empty()) { // Need to add the inactive layers. 
- RtpParameters old_parameters = - media_channel_->GetRtpSendParameters(ssrc_); rtp_parameters = RestoreEncodingLayers(parameters, disabled_rids_, old_parameters.encodings); } - return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters); - }); + + RTCError result = cricket::CheckRtpParametersInvalidModificationAndValues( + old_parameters, rtp_parameters); + if (!result.ok()) { + webrtc::InvokeSetParametersCallback(callback, result); + return; + } + + result = CheckSVCParameters(rtp_parameters); + if (!result.ok()) { + webrtc::InvokeSetParametersCallback(callback, result); + return; + } + + media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters, + std::move(callback)); + }; + if (blocking) + worker_thread_->BlockingCall(task); + else + worker_thread_->PostTask(std::move(task)); } RTCError RtpSenderBase::SetParametersInternalWithAllLayers( @@ -231,7 +298,7 @@ RTCError RtpSenderBase::SetParametersInternalWithAllLayers( } if (!media_channel_ || !ssrc_) { auto result = cricket::CheckRtpParametersInvalidModificationAndValues( - init_parameters_, parameters); + init_parameters_, parameters, video_codec_preferences_); if (result.ok()) { init_parameters_ = parameters; } @@ -239,13 +306,12 @@ RTCError RtpSenderBase::SetParametersInternalWithAllLayers( } return worker_thread_->BlockingCall([&] { RtpParameters rtp_parameters = parameters; - return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters); + return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters, nullptr); }); } -RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { +RTCError RtpSenderBase::CheckSetParameters(const RtpParameters& parameters) { RTC_DCHECK_RUN_ON(signaling_thread_); - TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters"); if (is_transceiver_stopped_) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_STATE, @@ -255,10 +321,6 @@ RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "Cannot 
set parameters on a stopped sender."); } - if (stopped_) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, - "Cannot set parameters on a stopped sender."); - } if (!last_transaction_id_) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_STATE, @@ -272,11 +334,55 @@ RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { " the last value returned from getParameters()"); } - RTCError result = SetParametersInternal(parameters); + return RTCError::OK(); +} + +RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { + RTC_DCHECK_RUN_ON(signaling_thread_); + TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters"); + RTCError result = CheckSetParameters(parameters); + if (!result.ok()) + return result; + + // Some tests rely on working in single thread mode without a run loop and a + // blocking call is required to keep them working. The encoder configuration + // also involves another thread with an asynchronous task, thus we still do + // need to wait for the callback to be resolved this way. 
+ std::unique_ptr done_event = std::make_unique(); + SetParametersInternal( + parameters, + [done = done_event.get(), &result](RTCError error) { + result = error; + done->Set(); + }, + true); + done_event->Wait(rtc::Event::kForever); last_transaction_id_.reset(); return result; } +void RtpSenderBase::SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback) { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK(callback); + TRACE_EVENT0("webrtc", "RtpSenderBase::SetParametersAsync"); + RTCError result = CheckSetParameters(parameters); + if (!result.ok()) { + webrtc::InvokeSetParametersCallback(callback, result); + return; + } + + SetParametersInternal( + parameters, + SignalingThreadCallback( + signaling_thread_, + [this, callback = std::move(callback)](RTCError error) mutable { + last_transaction_id_.reset(); + webrtc::InvokeSetParametersCallback(callback, error); + }), + false); +} + void RtpSenderBase::SetStreams(const std::vector& stream_ids) { set_stream_ids(stream_ids); if (set_streams_observer_) @@ -363,7 +469,7 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { } current_parameters.degradation_preference = init_parameters_.degradation_preference; - media_channel_->SetRtpSendParameters(ssrc_, current_parameters); + media_channel_->SetRtpSendParameters(ssrc_, current_parameters, nullptr); init_parameters_.encodings.clear(); init_parameters_.degradation_preference = absl::nullopt; }); @@ -587,6 +693,14 @@ rtc::scoped_refptr AudioRtpSender::GetDtmfSender() const { return dtmf_sender_proxy_; } +RTCError AudioRtpSender::GenerateKeyFrame( + const std::vector& rids) { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DLOG(LS_ERROR) << "Tried to get generate a key frame for audio."; + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Generating key frames for audio is not supported."); +} + void AudioRtpSender::SetSend() { RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); @@ -677,6 +791,34 @@ rtc::scoped_refptr 
VideoRtpSender::GetDtmfSender() const { return nullptr; } +RTCError VideoRtpSender::GenerateKeyFrame( + const std::vector& rids) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (video_media_channel() && ssrc_ && !stopped_) { + auto parameters = GetParameters(); + for (const auto& rid : rids) { + if (rid.empty()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Attempted to specify an empty rid."); + } + if (!absl::c_any_of(parameters.encodings, + [&rid](const RtpEncodingParameters& parameters) { + return parameters.rid == rid; + })) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Attempted to specify a rid not configured."); + } + } + worker_thread_->PostTask([&, rids] { + video_media_channel()->GenerateSendKeyFrame(ssrc_, rids); + }); + } else { + RTC_LOG(LS_WARNING) << "Tried to generate key frame for sender that is " + "stopped or has no media channel."; + } + return RTCError::OK(); +} + void VideoRtpSender::SetSend() { RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); @@ -725,4 +867,20 @@ void VideoRtpSender::ClearSend() { [&] { video_media_channel()->SetVideoSend(ssrc_, nullptr, nullptr); }); } +RTCError VideoRtpSender::CheckSVCParameters(const RtpParameters& parameters) { + cricket::VideoCodec codec; + video_media_channel()->GetSendCodec(&codec); + + // Match the currently used codec against the codec preferences to gather + // the SVC capabilities. 
+ std::vector codecs; + for (const auto& codec_preference : video_codec_preferences_) { + if (codec.Matches(codec_preference)) { + codecs.push_back(codec_preference); + } + } + + return cricket::CheckScalabilityModeValues(parameters, codecs); +} + } // namespace webrtc diff --git a/pc/rtp_sender.h b/pc/rtp_sender.h index 33d613905b..29e5f16cfe 100644 --- a/pc/rtp_sender.h +++ b/pc/rtp_sender.h @@ -54,7 +54,8 @@ class RtpSenderInternal : public RtpSenderInterface { // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. // Must call SetMediaChannel(nullptr) before the media channel is destroyed. - virtual void SetMediaChannel(cricket::MediaChannel* media_channel) = 0; + virtual void SetMediaChannel( + cricket::MediaSendChannelInterface* media_channel) = 0; // Used to set the SSRC of the sender, once a local description has been set. // If `ssrc` is 0, this indiates that the sender should disconnect from the @@ -73,7 +74,9 @@ class RtpSenderInternal : public RtpSenderInterface { // `GetParameters` and `SetParameters` operate with a transactional model. // Allow access to get/set parameters without invalidating transaction id. virtual RtpParameters GetParametersInternal() const = 0; - virtual RTCError SetParametersInternal(const RtpParameters& parameters) = 0; + virtual void SetParametersInternal(const RtpParameters& parameters, + SetParametersCallback, + bool blocking) = 0; // GetParameters and SetParameters will remove deactivated simulcast layers // and restore them on SetParameters. 
This is probably a Bad Idea, but we @@ -82,6 +85,11 @@ class RtpSenderInternal : public RtpSenderInterface { virtual RTCError SetParametersInternalWithAllLayers( const RtpParameters& parameters) = 0; + // Additional checks that are specific to the Sender type + virtual RTCError CheckSVCParameters(const RtpParameters& parameters) { + return webrtc::RTCError::OK(); + } + // Returns an ID that changes every time SetTrack() is called, but // otherwise remains constant. Used to generate IDs for stats. // The special value zero means that no track is attached. @@ -93,6 +101,11 @@ class RtpSenderInternal : public RtpSenderInterface { const std::vector& rid) = 0; virtual void SetTransceiverAsStopped() = 0; + + // Used by the owning transceiver to inform the sender on the currently + // selected codecs. + virtual void SetVideoCodecPreferences( + std::vector codec_preferences) = 0; }; // Shared implementation for RtpSenderInternal interface. @@ -108,7 +121,8 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. // Must call SetMediaChannel(nullptr) before the media channel is destroyed. - void SetMediaChannel(cricket::MediaChannel* media_channel) override; + void SetMediaChannel( + cricket::MediaSendChannelInterface* media_channel) override; bool SetTrack(MediaStreamTrackInterface* track) override; rtc::scoped_refptr track() const override { @@ -120,11 +134,16 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { RtpParameters GetParameters() const override; RTCError SetParameters(const RtpParameters& parameters) override; + void SetParametersAsync(const RtpParameters& parameters, + SetParametersCallback callback) override; // `GetParameters` and `SetParameters` operate with a transactional model. // Allow access to get/set parameters without invalidating transaction id. 
RtpParameters GetParametersInternal() const override; - RTCError SetParametersInternal(const RtpParameters& parameters) override; + void SetParametersInternal(const RtpParameters& parameters, + SetParametersCallback callback = nullptr, + bool blocking = true) override; + RTCError CheckSetParameters(const RtpParameters& parameters); RtpParameters GetParametersInternalWithAllLayers() const override; RTCError SetParametersInternalWithAllLayers( const RtpParameters& parameters) override; @@ -203,6 +222,11 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { is_transceiver_stopped_ = true; } + void SetVideoCodecPreferences( + std::vector codec_preferences) override { + video_codec_preferences_ = codec_preferences; + } + protected: // If `set_streams_observer` is not null, it is invoked when SetStreams() // is called. `set_streams_observer` is not owned by this object. If not @@ -238,13 +262,14 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { std::vector stream_ids_; RtpParameters init_parameters_; + std::vector video_codec_preferences_; // TODO(tommi): `media_channel_` and several other member variables in this // class (ssrc_, stopped_, etc) are accessed from more than one thread without // a guard or lock. Internally there are also several Invoke()s that we could // remove since the upstream code may already be performing several operations // on the worker thread. 
- cricket::MediaChannel* media_channel_ = nullptr; + cricket::MediaSendChannelInterface* media_channel_ = nullptr; rtc::scoped_refptr track_; rtc::scoped_refptr dtls_transport_; @@ -335,6 +360,7 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { } rtc::scoped_refptr GetDtmfSender() const override; + RTCError GenerateKeyFrame(const std::vector& rids) override; protected: AudioRtpSender(rtc::Thread* worker_thread, @@ -352,8 +378,8 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { void RemoveTrackFromStats() override; private: - cricket::VoiceMediaChannel* voice_media_channel() { - return static_cast(media_channel_); + cricket::VoiceMediaSendChannelInterface* voice_media_channel() { + return media_channel_->AsVoiceSendChannel(); } rtc::scoped_refptr audio_track() const { return rtc::scoped_refptr( @@ -394,6 +420,9 @@ class VideoRtpSender : public RtpSenderBase { } rtc::scoped_refptr GetDtmfSender() const override; + RTCError GenerateKeyFrame(const std::vector& rids) override; + + RTCError CheckSVCParameters(const RtpParameters& parameters) override; protected: VideoRtpSender(rtc::Thread* worker_thread, @@ -407,8 +436,8 @@ class VideoRtpSender : public RtpSenderBase { void AttachTrack() override; private: - cricket::VideoMediaChannel* video_media_channel() { - return static_cast(media_channel_); + cricket::VideoMediaSendChannelInterface* video_media_channel() { + return media_channel_->AsVideoSendChannel(); } rtc::scoped_refptr video_track() const { return rtc::scoped_refptr( diff --git a/pc/rtp_sender_proxy.h b/pc/rtp_sender_proxy.h index 140b5ff97e..236ac10fa2 100644 --- a/pc/rtp_sender_proxy.h +++ b/pc/rtp_sender_proxy.h @@ -35,6 +35,10 @@ PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector, init_send_encodings) PROXY_CONSTMETHOD0(RtpParameters, GetParameters) PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&) +PROXY_METHOD2(void, + SetParametersAsync, + const RtpParameters&, + 
SetParametersCallback) PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetDtmfSender) PROXY_METHOD1(void, SetFrameEncryptor, @@ -48,6 +52,7 @@ PROXY_METHOD1(void, PROXY_METHOD1(void, SetEncoderSelector, std::unique_ptr) +PROXY_METHOD1(RTCError, GenerateKeyFrame, const std::vector&) END_PROXY_MAP(RtpSender) } // namespace webrtc diff --git a/pc/rtp_sender_receiver_unittest.cc b/pc/rtp_sender_receiver_unittest.cc index d66ee65f4c..a189e6517b 100644 --- a/pc/rtp_sender_receiver_unittest.cc +++ b/pc/rtp_sender_receiver_unittest.cc @@ -204,7 +204,7 @@ class RtpSenderReceiverTest ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); audio_rtp_sender_->SetStreams({local_stream_->id()}); - audio_rtp_sender_->SetMediaChannel(voice_media_channel()); + audio_rtp_sender_->SetMediaChannel(voice_media_channel()->AsSendChannel()); audio_rtp_sender_->SetSsrc(kAudioSsrc); VerifyVoiceChannelInput(); } @@ -212,7 +212,8 @@ class RtpSenderReceiverTest void CreateAudioRtpSenderWithNoTrack() { audio_rtp_sender_ = AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); - audio_rtp_sender_->SetMediaChannel(voice_media_channel()); + audio_rtp_sender_->SetMediaChannel( + voice_media_channel()->AsVoiceSendChannel()); } void CreateVideoRtpSender(uint32_t ssrc) { @@ -264,14 +265,16 @@ class RtpSenderReceiverTest ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_rtp_sender_->SetSsrc(ssrc); VerifyVideoChannelInput(ssrc); } void CreateVideoRtpSenderWithNoTrack() { video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + 
video_media_channel()->AsVideoSendChannel()); } void DestroyAudioRtpSender() { @@ -289,7 +292,8 @@ class RtpSenderReceiverTest audio_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kAudioTrackId, streams, /*is_unified_plan=*/true); - audio_rtp_receiver_->SetMediaChannel(voice_media_channel()); + audio_rtp_receiver_->SetMediaChannel( + voice_media_channel()->AsVoiceReceiveChannel()); audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc); audio_track_ = audio_rtp_receiver_->audio_track(); VerifyVoiceChannelOutput(); @@ -299,7 +303,8 @@ class RtpSenderReceiverTest std::vector> streams = {}) { video_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kVideoTrackId, streams); - video_rtp_receiver_->SetMediaChannel(video_media_channel()); + video_rtp_receiver_->SetMediaChannel( + video_media_channel()->AsVideoReceiveChannel()); video_rtp_receiver_->SetupMediaChannel(kVideoSsrc); video_track_ = video_rtp_receiver_->video_track(); VerifyVideoChannelOutput(); @@ -319,7 +324,8 @@ class RtpSenderReceiverTest video_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kVideoTrackId, streams); - video_rtp_receiver_->SetMediaChannel(video_media_channel()); + video_rtp_receiver_->SetMediaChannel( + video_media_channel()->AsVideoReceiveChannel()); video_rtp_receiver_->SetupMediaChannel(primary_ssrc); video_track_ = video_rtp_receiver_->video_track(); } @@ -689,15 +695,17 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) { TEST_F(RtpSenderReceiverTest, AudioRtpReceiverDelay) { CreateAudioRtpReceiver(); - VerifyRtpReceiverDelayBehaviour(voice_media_channel(), - audio_rtp_receiver_.get(), kAudioSsrc); + VerifyRtpReceiverDelayBehaviour( + voice_media_channel()->AsVoiceReceiveChannel(), audio_rtp_receiver_.get(), + kAudioSsrc); DestroyAudioRtpReceiver(); } TEST_F(RtpSenderReceiverTest, VideoRtpReceiverDelay) { CreateVideoRtpReceiver(); - VerifyRtpReceiverDelayBehaviour(video_media_channel(), - video_rtp_receiver_.get(), kVideoSsrc); + 
VerifyRtpReceiverDelayBehaviour( + video_media_channel()->AsVideoReceiveChannel(), video_rtp_receiver_.get(), + kVideoSsrc); DestroyVideoRtpReceiver(); } @@ -855,6 +863,20 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParameters) { DestroyAudioRtpSender(); } +TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersAsync) { + CreateAudioRtpSender(); + + RtpParameters params = audio_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + DestroyAudioRtpSender(); +} + TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) { audio_rtp_sender_ = AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); @@ -865,8 +887,34 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) { EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok()); params = audio_rtp_sender_->GetParameters(); - EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok()); EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); + EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok()); + + DestroyAudioRtpSender(); +} + +TEST_F(RtpSenderReceiverTest, + AudioSenderCanSetParametersAsyncBeforeNegotiation) { + audio_rtp_sender_ = + AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); + + absl::optional result; + RtpParameters params = audio_rtp_sender_->GetParameters(); + ASSERT_EQ(1u, params.encodings.size()); + params.encodings[0].max_bitrate_bps = 90000; + + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + params = audio_rtp_sender_->GetParameters(); + EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); + + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + 
run_loop_.Flush(); + EXPECT_TRUE(result->ok()); DestroyAudioRtpSender(); } @@ -896,7 +944,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) { cricket::StreamParams stream_params = cricket::CreateSimStreamParams("cname", ssrcs); voice_media_channel()->AddSendStream(stream_params); - audio_rtp_sender_->SetMediaChannel(voice_media_channel()); + audio_rtp_sender_->SetMediaChannel( + voice_media_channel()->AsVoiceSendChannel()); audio_rtp_sender_->SetSsrc(1); params = audio_rtp_sender_->GetParameters(); @@ -941,6 +990,25 @@ TEST_F(RtpSenderReceiverTest, DestroyAudioRtpSender(); } +TEST_F(RtpSenderReceiverTest, + AudioSenderSetParametersAsyncInvalidatesTransactionId) { + CreateAudioRtpSender(); + + RtpParameters params = audio_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + audio_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type()); + + DestroyAudioRtpSender(); +} + TEST_F(RtpSenderReceiverTest, AudioSenderDetectTransactionIdModification) { CreateAudioRtpSender(); @@ -1047,6 +1115,20 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParameters) { DestroyVideoRtpSender(); } +TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersAsync) { + CreateVideoRtpSender(); + + RtpParameters params = video_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + DestroyVideoRtpSender(); +} + TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) { video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, 
/*id=*/"", nullptr); @@ -1063,6 +1145,30 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) { DestroyVideoRtpSender(); } +TEST_F(RtpSenderReceiverTest, + VideoSenderCanSetParametersAsyncBeforeNegotiation) { + video_rtp_sender_ = + VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); + + absl::optional result; + RtpParameters params = video_rtp_sender_->GetParameters(); + ASSERT_EQ(1u, params.encodings.size()); + params.encodings[0].max_bitrate_bps = 90000; + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + params = video_rtp_sender_->GetParameters(); + EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + + DestroyVideoRtpSender(); +} + TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) { AddVideoTrack(false); @@ -1092,7 +1198,8 @@ TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) { cricket::StreamParams stream_params = cricket::CreateSimStreamParams("cname", ssrcs); video_media_channel()->AddSendStream(stream_params); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast); params = video_rtp_sender_->GetParameters(); @@ -1132,7 +1239,8 @@ TEST_F(RtpSenderReceiverTest, cricket::StreamParams stream_params = cricket::CreateSimStreamParams("cname", ssrcs); video_media_channel()->AddSendStream(stream_params); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast); params = video_rtp_sender_->GetParameters(); @@ -1175,7 +1283,8 @@ 
TEST_F(RtpSenderReceiverDeathTest, cricket::StreamParams stream_params = cricket::StreamParams::CreateLegacy(kVideoSsrc); video_media_channel()->AddSendStream(stream_params); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); EXPECT_DEATH(video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast), ""); } #endif @@ -1215,6 +1324,25 @@ TEST_F(RtpSenderReceiverTest, DestroyVideoRtpSender(); } +TEST_F(RtpSenderReceiverTest, + VideoSenderSetParametersAsyncInvalidatesTransactionId) { + CreateVideoRtpSender(); + + RtpParameters params = video_rtp_sender_->GetParameters(); + EXPECT_EQ(1u, params.encodings.size()); + absl::optional result; + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_TRUE(result->ok()); + video_rtp_sender_->SetParametersAsync( + params, [&result](webrtc::RTCError error) { result = error; }); + run_loop_.Flush(); + EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type()); + + DestroyVideoRtpSender(); +} + TEST_F(RtpSenderReceiverTest, VideoSenderDetectTransactionIdModification) { CreateVideoRtpSender(); @@ -1468,6 +1596,36 @@ TEST_F(RtpSenderReceiverTest, VideoReceiverCanGetParametersWithSimulcast) { DestroyVideoRtpReceiver(); } +TEST_F(RtpSenderReceiverTest, GenerateKeyFrameWithAudio) { + CreateAudioRtpSender(); + + auto error = audio_rtp_sender_->GenerateKeyFrame({}); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), RTCErrorType::UNSUPPORTED_OPERATION); + + DestroyAudioRtpSender(); +} + +TEST_F(RtpSenderReceiverTest, GenerateKeyFrameWithVideo) { + CreateVideoRtpSenderWithSimulcast({"1", "2", "3"}); + + auto error = video_rtp_sender_->GenerateKeyFrame({}); + EXPECT_TRUE(error.ok()); + + error = video_rtp_sender_->GenerateKeyFrame({"1"}); + EXPECT_TRUE(error.ok()); + + error = video_rtp_sender_->GenerateKeyFrame({""}); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), 
RTCErrorType::INVALID_PARAMETER); + + error = video_rtp_sender_->GenerateKeyFrame({"no-such-rid"}); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER); + + DestroyVideoRtpSender(); +} + // Test that makes sure that a video track content hint translates to the proper // value for sources that are not screencast. TEST_F(RtpSenderReceiverTest, PropagatesVideoTrackContentHint) { @@ -1541,7 +1699,8 @@ TEST_F(RtpSenderReceiverTest, ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); - video_rtp_sender_->SetMediaChannel(video_media_channel()); + video_rtp_sender_->SetMediaChannel( + video_media_channel()->AsVideoSendChannel()); video_track_->set_enabled(true); // Sender is not ready to send (no SSRC) so no option should have been set. @@ -1715,9 +1874,9 @@ TEST_F(RtpSenderReceiverTest, RtpParameters parameters = video_rtp_sender_->GetParameters(); RtpParameters new_parameters = video_rtp_sender_->GetParametersInternal(); new_parameters.encodings[0].active = false; - video_rtp_sender_->SetParametersInternal(new_parameters); + video_rtp_sender_->SetParametersInternal(new_parameters, nullptr, true); new_parameters.encodings[0].active = true; - video_rtp_sender_->SetParametersInternal(new_parameters); + video_rtp_sender_->SetParametersInternal(new_parameters, nullptr, true); parameters.encodings[0].active = false; EXPECT_TRUE(video_rtp_sender_->SetParameters(parameters).ok()); } diff --git a/pc/rtp_transceiver.cc b/pc/rtp_transceiver.cc index db49468c07..5f9e876b4c 100644 --- a/pc/rtp_transceiver.cc +++ b/pc/rtp_transceiver.cc @@ -107,6 +107,25 @@ RTCError VerifyCodecPreferences(const std::vector& codecs, return RTCError::OK(); } +// Matches the list of codecs as capabilities (potentially without SVC related +// information) to the list of send codecs and returns the list of codecs with +// all the SVC related information. 
+std::vector MatchCodecPreferences( + const std::vector& codecs, + const std::vector& send_codecs) { + std::vector result; + + for (const auto& codec_preference : codecs) { + for (const cricket::VideoCodec& send_codec : send_codecs) { + if (send_codec.MatchesCapability(codec_preference)) { + result.push_back(send_codec); + } + } + } + + return result; +} + TaskQueueBase* GetCurrentTaskQueueOrThread() { TaskQueueBase* current = TaskQueueBase::Current(); if (!current) @@ -142,6 +161,9 @@ RtpTransceiver::RtpTransceiver( RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO || media_type_ == cricket::MEDIA_TYPE_VIDEO); RTC_DCHECK_EQ(sender->media_type(), receiver->media_type()); + if (sender->media_type() == cricket::MEDIA_TYPE_VIDEO) + sender->internal()->SetVideoCodecPreferences( + media_engine()->video().send_codecs(false)); senders_.push_back(sender); receivers_.push_back(receiver); } @@ -317,13 +339,16 @@ void RtpTransceiver::PushNewMediaChannelAndDeleteChannel( } context()->worker_thread()->BlockingCall([&]() { // Push down the new media_channel, if any, otherwise clear it. - auto* media_channel = channel_ ? channel_->media_channel() : nullptr; + auto* media_send_channel = + channel_ ? channel_->media_send_channel() : nullptr; for (const auto& sender : senders_) { - sender->internal()->SetMediaChannel(media_channel); + sender->internal()->SetMediaChannel(media_send_channel); } + auto* media_receive_channel = + channel_ ? 
channel_->media_receive_channel() : nullptr; for (const auto& receiver : receivers_) { - receiver->internal()->SetMediaChannel(media_channel); + receiver->internal()->SetMediaChannel(media_receive_channel); } // Destroy the channel, if we had one, now _after_ updating the receivers @@ -342,6 +367,14 @@ void RtpTransceiver::AddSender( RTC_DCHECK(sender); RTC_DCHECK_EQ(media_type(), sender->media_type()); RTC_DCHECK(!absl::c_linear_search(senders_, sender)); + if (media_type() == cricket::MEDIA_TYPE_VIDEO) { + std::vector send_codecs = + media_engine()->video().send_codecs(false); + sender->internal()->SetVideoCodecPreferences( + codec_preferences_.empty() + ? send_codecs + : MatchCodecPreferences(codec_preferences_, send_codecs)); + } senders_.push_back(sender); } @@ -590,6 +623,9 @@ RTCError RtpTransceiver::SetCodecPreferences( // to codecs and abort these steps. if (codec_capabilities.empty()) { codec_preferences_.clear(); + if (media_type() == cricket::MEDIA_TYPE_VIDEO) + senders_.front()->internal()->SetVideoCodecPreferences( + media_engine()->video().send_codecs(false)); return RTCError::OK(); } @@ -612,6 +648,11 @@ RTCError RtpTransceiver::SetCodecPreferences( send_codecs = media_engine()->video().send_codecs(context()->use_rtx()); recv_codecs = media_engine()->video().recv_codecs(context()->use_rtx()); result = VerifyCodecPreferences(codecs, send_codecs, recv_codecs); + + if (result.ok()) { + senders_.front()->internal()->SetVideoCodecPreferences( + MatchCodecPreferences(codecs, send_codecs)); + } } if (result.ok()) { diff --git a/pc/rtp_transceiver.h b/pc/rtp_transceiver.h index 625d0a52c5..0844b349b6 100644 --- a/pc/rtp_transceiver.h +++ b/pc/rtp_transceiver.h @@ -47,7 +47,6 @@ #include "rtc_base/thread_annotations.h" namespace cricket { -class ChannelManager; class MediaEngineInterface; } diff --git a/pc/rtp_transceiver_unittest.cc b/pc/rtp_transceiver_unittest.cc index 7961747b64..a2f2c362dd 100644 --- a/pc/rtp_transceiver_unittest.cc +++ 
b/pc/rtp_transceiver_unittest.cc @@ -344,7 +344,8 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - EXPECT_CALL(*mock_channel, media_channel()).WillRepeatedly(Return(nullptr)); + EXPECT_CALL(*mock_channel, media_send_channel()) + .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true)); transceiver_->SetChannel(std::move(mock_channel), @@ -368,7 +369,8 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, ReturnsNegotiatedHdrExts) { EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - EXPECT_CALL(*mock_channel, media_channel()).WillRepeatedly(Return(nullptr)); + EXPECT_CALL(*mock_channel, media_send_channel()) + .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true)); diff --git a/pc/rtp_transmission_manager.cc b/pc/rtp_transmission_manager.cc index 09b0b73fbf..96b748b4b4 100644 --- a/pc/rtp_transmission_manager.cc +++ b/pc/rtp_transmission_manager.cc @@ -72,25 +72,48 @@ PeerConnectionObserver* RtpTransmissionManager::Observer() const { return observer_; } -cricket::VoiceMediaChannel* RtpTransmissionManager::voice_media_channel() - const { +cricket::VoiceMediaSendChannelInterface* +RtpTransmissionManager::voice_media_send_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); auto* voice_channel = GetAudioTransceiver()->internal()->channel(); if (voice_channel) { - return voice_channel->voice_media_channel(); + return voice_channel->voice_media_send_channel(); } else { return nullptr; } } 
-cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel() - const { +cricket::VideoMediaSendChannelInterface* +RtpTransmissionManager::video_media_send_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); auto* video_channel = GetVideoTransceiver()->internal()->channel(); if (video_channel) { - return video_channel->video_media_channel(); + return video_channel->video_media_send_channel(); + } else { + return nullptr; + } +} +cricket::VoiceMediaReceiveChannelInterface* +RtpTransmissionManager::voice_media_receive_channel() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto* voice_channel = GetAudioTransceiver()->internal()->channel(); + if (voice_channel) { + return voice_channel->voice_media_receive_channel(); + } else { + return nullptr; + } +} + +cricket::VideoMediaReceiveChannelInterface* +RtpTransmissionManager::video_media_receive_channel() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto* video_channel = GetVideoTransceiver()->internal()->channel(); + if (video_channel) { + return video_channel->video_media_receive_channel(); } else { return nullptr; } @@ -99,17 +122,20 @@ cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel() RTCErrorOr> RtpTransmissionManager::AddTrack( rtc::scoped_refptr track, - const std::vector& stream_ids) { + const std::vector& stream_ids, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); - return (IsUnifiedPlan() ? AddTrackUnifiedPlan(track, stream_ids) - : AddTrackPlanB(track, stream_ids)); + return (IsUnifiedPlan() + ? 
AddTrackUnifiedPlan(track, stream_ids, init_send_encodings) + : AddTrackPlanB(track, stream_ids, init_send_encodings)); } RTCErrorOr> RtpTransmissionManager::AddTrackPlanB( rtc::scoped_refptr track, - const std::vector& stream_ids) { + const std::vector& stream_ids, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); if (stream_ids.size() > 1u) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, @@ -125,9 +151,11 @@ RtpTransmissionManager::AddTrackPlanB( ? cricket::MEDIA_TYPE_AUDIO : cricket::MEDIA_TYPE_VIDEO); auto new_sender = - CreateSender(media_type, track->id(), track, adjusted_stream_ids, {}); + CreateSender(media_type, track->id(), track, adjusted_stream_ids, + init_send_encodings ? *init_send_encodings + : std::vector()); if (track->kind() == MediaStreamTrackInterface::kAudioKind) { - new_sender->internal()->SetMediaChannel(voice_media_channel()); + new_sender->internal()->SetMediaChannel(voice_media_send_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); const RtpSenderInfo* sender_info = FindSenderInfo(local_audio_sender_infos_, @@ -137,7 +165,7 @@ RtpTransmissionManager::AddTrackPlanB( } } else { RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind()); - new_sender->internal()->SetMediaChannel(video_media_channel()); + new_sender->internal()->SetMediaChannel(video_media_send_channel()); GetVideoTransceiver()->internal()->AddSender(new_sender); const RtpSenderInfo* sender_info = FindSenderInfo(local_video_sender_infos_, @@ -152,8 +180,10 @@ RtpTransmissionManager::AddTrackPlanB( RTCErrorOr> RtpTransmissionManager::AddTrackUnifiedPlan( rtc::scoped_refptr track, - const std::vector& stream_ids) { - auto transceiver = FindFirstTransceiverForAddedTrack(track); + const std::vector& stream_ids, + const std::vector* init_send_encodings) { + auto transceiver = + FindFirstTransceiverForAddedTrack(track, init_send_encodings); if (transceiver) { RTC_LOG(LS_INFO) << "Reusing an existing " << 
cricket::MediaTypeToString(transceiver->media_type()) @@ -187,7 +217,10 @@ RtpTransmissionManager::AddTrackUnifiedPlan( if (FindSenderById(sender_id)) { sender_id = rtc::CreateRandomUuid(); } - auto sender = CreateSender(media_type, sender_id, track, stream_ids, {}); + auto sender = CreateSender(media_type, sender_id, track, stream_ids, + init_send_encodings + ? *init_send_encodings + : std::vector()); auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); transceiver = CreateAndAddTransceiver(sender, receiver); transceiver->internal()->set_created_by_addtrack(true); @@ -280,9 +313,13 @@ RtpTransmissionManager::CreateAndAddTransceiver( rtc::scoped_refptr> RtpTransmissionManager::FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track) { + rtc::scoped_refptr track, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(track); + if (init_send_encodings != nullptr) { + return nullptr; + } for (auto transceiver : transceivers()->List()) { if (!transceiver->sender()->track() && cricket::MediaTypeToString(transceiver->media_type()) == @@ -375,7 +412,7 @@ void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track, auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), rtc::scoped_refptr(track), {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(voice_media_channel()); + new_sender->internal()->SetMediaChannel(voice_media_send_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); // If the sender has already been configured in SDP, we call SetSsrc, // which will connect the sender to the underlying transport. 
This can @@ -422,7 +459,7 @@ void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), rtc::scoped_refptr(track), {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(video_media_channel()); + new_sender->internal()->SetMediaChannel(video_media_send_channel()); GetVideoTransceiver()->internal()->AddSender(new_sender); const RtpSenderInfo* sender_info = FindSenderInfo(local_video_sender_infos_, stream->id(), track->id()); @@ -454,7 +491,7 @@ void RtpTransmissionManager::CreateAudioReceiver( // the constructor taking stream IDs instead. auto audio_receiver = rtc::make_ref_counted( worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan(), - voice_media_channel()); + voice_media_receive_channel()); if (remote_sender_info.sender_id == kDefaultAudioSenderId) { audio_receiver->SetupUnsignaledMediaChannel(); } else { @@ -483,7 +520,7 @@ void RtpTransmissionManager::CreateVideoReceiver( remote_sender_info.sender_id == kDefaultVideoSenderId ? absl::nullopt : absl::optional(remote_sender_info.first_ssrc), - video_media_channel()); + video_media_receive_channel()); auto receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), std::move(video_receiver)); diff --git a/pc/rtp_transmission_manager.h b/pc/rtp_transmission_manager.h index 90c3d8a99e..b41848c917 100644 --- a/pc/rtp_transmission_manager.h +++ b/pc/rtp_transmission_manager.h @@ -40,10 +40,6 @@ #include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" -namespace cricket { -class ChannelManager; -} - namespace rtc { class Thread; } @@ -95,7 +91,8 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // Add a new track, creating transceiver if required. RTCErrorOr> AddTrack( rtc::scoped_refptr track, - const std::vector& stream_ids); + const std::vector& stream_ids, + const std::vector* init_send_encodings); // Create a new RTP sender. 
Does not associate with a transceiver. rtc::scoped_refptr> @@ -121,7 +118,8 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // transceiver is available. rtc::scoped_refptr> FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track); + rtc::scoped_refptr track, + const std::vector* init_send_encodings); // Returns the list of senders currently associated with some // registered transceiver @@ -206,8 +204,12 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // Plan B helpers for getting the voice/video media channels for the single // audio/video transceiver, if it exists. - cricket::VoiceMediaChannel* voice_media_channel() const; - cricket::VideoMediaChannel* video_media_channel() const; + cricket::VoiceMediaSendChannelInterface* voice_media_send_channel() const; + cricket::VideoMediaSendChannelInterface* video_media_send_channel() const; + cricket::VoiceMediaReceiveChannelInterface* voice_media_receive_channel() + const; + cricket::VideoMediaReceiveChannelInterface* video_media_receive_channel() + const; private: rtc::Thread* signaling_thread() const { return context_->signaling_thread(); } @@ -220,11 +222,13 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // AddTrack implementation when Unified Plan is specified. RTCErrorOr> AddTrackUnifiedPlan( rtc::scoped_refptr track, - const std::vector& stream_ids); + const std::vector& stream_ids, + const std::vector* init_send_encodings); // AddTrack implementation when Plan B is specified. RTCErrorOr> AddTrackPlanB( rtc::scoped_refptr track, - const std::vector& stream_ids); + const std::vector& stream_ids, + const std::vector* init_send_encodings); // Create an RtpReceiver that sources an audio track. 
void CreateAudioReceiver(MediaStreamInterface* stream, diff --git a/pc/sdp_offer_answer.cc b/pc/sdp_offer_answer.cc index 0e2b8660b8..7fdcf0ced4 100644 --- a/pc/sdp_offer_answer.cc +++ b/pc/sdp_offer_answer.cc @@ -120,8 +120,7 @@ const char kSimulcastDisabled[] = "WebRTC.PeerConnection.Simulcast.Disabled"; static const int kRtcpCnameLength = 16; // The maximum length of the MID attribute. -// TODO(bugs.webrtc.org/12517) - reduce to 16 again. -static constexpr size_t kMidMaxSize = 32; +static constexpr size_t kMidMaxSize = 16; const char kDefaultStreamId[] = "default"; // NOTE: Duplicated in peer_connection.cc: @@ -409,25 +408,88 @@ bool VerifyIceUfragPwdPresent( RTCError ValidateMids(const cricket::SessionDescription& description) { std::set mids; - size_t max_length = 0; for (const cricket::ContentInfo& content : description.contents()) { if (content.name.empty()) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "A media section is missing a MID attribute."); } - max_length = std::max(max_length, content.name.size()); if (content.name.size() > kMidMaxSize) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "The MID attribute exceeds the maximum supported " - "length of 32 characters."); + "length of 16 characters."); } if (!mids.insert(content.name).second) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "Duplicate a=mid value '" + content.name + "'."); } } - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.PeerConnection.Mid.Size", max_length, 0, - 31, 32); + return RTCError::OK(); +} + +RTCError FindDuplicateCodecParameters( + const RtpCodecParameters codec_parameters, + std::map& payload_to_codec_parameters) { + auto existing_codec_parameters = + payload_to_codec_parameters.find(codec_parameters.payload_type); + if (existing_codec_parameters != payload_to_codec_parameters.end() && + codec_parameters != existing_codec_parameters->second) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a codec collision for " + 
"payload_type='" + + rtc::ToString(codec_parameters.payload_type) + + ". All codecs must share the same type, " + "encoding name, clock rate and parameters."); + } + payload_to_codec_parameters.insert( + std::make_pair(codec_parameters.payload_type, codec_parameters)); + return RTCError::OK(); +} + +RTCError ValidateBundledPayloadTypes( + const cricket::SessionDescription& description) { + // https://www.rfc-editor.org/rfc/rfc8843#name-payload-type-pt-value-reuse + // ... all codecs associated with the payload type number MUST share an + // identical codec configuration. This means that the codecs MUST share + // the same media type, encoding name, clock rate, and any parameter + // that can affect the codec configuration and packetization. + std::map payload_to_codec_parameters; + std::vector bundle_groups = + description.GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + for (const cricket::ContentGroup* bundle_group : bundle_groups) { + std::map payload_to_codec_parameters; + for (const std::string& content_name : bundle_group->content_names()) { + const cricket::MediaContentDescription* media_description = + description.GetContentDescriptionByName(content_name); + if (!media_description) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a MID='" + content_name + + "' matching no m= section."); + } + if (!media_description->has_codecs()) { + continue; + } + const auto type = media_description->type(); + if (type == cricket::MEDIA_TYPE_AUDIO) { + RTC_DCHECK(media_description->as_audio()); + for (const auto& c : media_description->as_audio()->codecs()) { + auto error = FindDuplicateCodecParameters( + c.ToCodecParameters(), payload_to_codec_parameters); + if (!error.ok()) { + return error; + } + } + } else if (type == cricket::MEDIA_TYPE_VIDEO) { + RTC_DCHECK(media_description->as_video()); + for (const auto& c : media_description->as_video()->codecs()) { + auto error = FindDuplicateCodecParameters( + c.ToCodecParameters(), 
payload_to_codec_parameters); + if (!error.ok()) { + return error; + } + } + } + } + } return RTCError::OK(); } @@ -3316,6 +3378,12 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutIceUfragPwd); } + // Validate bundle, payload types and that there are no collisions. + error = ValidateBundledPayloadTypes(*sdesc->description()); + // TODO(bugs.webrtc.org/14420): actually reject. + RTC_HISTOGRAM_BOOLEAN("WebRTC.PeerConnection.ValidBundledPayloadTypes", + error.ok()); + if (!pc_->ValidateBundleSettings(sdesc->description(), bundle_groups_by_mid)) { return RTCError(RTCErrorType::INVALID_PARAMETER, kBundleWithoutRtcpMux); diff --git a/pc/sdp_offer_answer.h b/pc/sdp_offer_answer.h index c493dc0229..2124ed8697 100644 --- a/pc/sdp_offer_answer.h +++ b/pc/sdp_offer_answer.h @@ -63,10 +63,6 @@ #include "rtc_base/unique_id_generator.h" #include "rtc_base/weak_ptr.h" -namespace cricket { -class ChannelManager; -} - namespace webrtc { // SdpOfferAnswerHandler is a component @@ -569,7 +565,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // ================================================================== // Access to pc_ variables - cricket::ChannelManager* channel_manager() const; cricket::MediaEngineInterface* media_engine() const; TransceiverList* transceivers(); const TransceiverList* transceivers() const; diff --git a/pc/sdp_offer_answer_unittest.cc b/pc/sdp_offer_answer_unittest.cc index 4f16de43ac..ecac7f8de8 100644 --- a/pc/sdp_offer_answer_unittest.cc +++ b/pc/sdp_offer_answer_unittest.cc @@ -114,4 +114,154 @@ TEST_F(SdpOfferAnswerTest, OnTrackReturnsProxiedObject) { transceiver->stopped(); } +TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsAudioVideo) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0 1\r\n" + "a=fingerprint:sha-1 " + 
"4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:1\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f\r\n"; + + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + EXPECT_METRIC_EQ( + 1, webrtc::metrics::NumEvents( + "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); +} + +TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsVideoFmtp) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0 1\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:1\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f\r\n"; + + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + EXPECT_METRIC_EQ( + 1, 
webrtc::metrics::NumEvents( + "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); +} + +TEST_F(SdpOfferAnswerTest, BundleCodecCollisionInDifferentBundlesAllowed) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + "a=group:BUNDLE 1\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:1\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f\r\n"; + + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + EXPECT_METRIC_EQ( + 0, webrtc::metrics::NumEvents( + "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); +} + +TEST_F(SdpOfferAnswerTest, LargeMidsAreRejected) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=rtpmap:111 VP8/90000\r\n" + "a=mid:01234567890123456\r\n"; + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + 
pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER); +} + } // namespace webrtc diff --git a/pc/session_description.h b/pc/session_description.h index a7259e1f1d..f68e044db2 100644 --- a/pc/session_description.h +++ b/pc/session_description.h @@ -95,45 +95,41 @@ class MediaContentDescription { // `protocol` is the expected media transport protocol, such as RTP/AVPF, // RTP/SAVPF or SCTP/DTLS. - virtual std::string protocol() const { return protocol_; } + std::string protocol() const { return protocol_; } virtual void set_protocol(absl::string_view protocol) { protocol_ = std::string(protocol); } - virtual webrtc::RtpTransceiverDirection direction() const { - return direction_; - } - virtual void set_direction(webrtc::RtpTransceiverDirection direction) { + webrtc::RtpTransceiverDirection direction() const { return direction_; } + void set_direction(webrtc::RtpTransceiverDirection direction) { direction_ = direction; } - virtual bool rtcp_mux() const { return rtcp_mux_; } - virtual void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; } + bool rtcp_mux() const { return rtcp_mux_; } + void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; } - virtual bool rtcp_reduced_size() const { return rtcp_reduced_size_; } - virtual void set_rtcp_reduced_size(bool reduced_size) { + bool rtcp_reduced_size() const { return rtcp_reduced_size_; } + void set_rtcp_reduced_size(bool reduced_size) { rtcp_reduced_size_ = reduced_size; } // Indicates support for the remote network estimate packet type. This // functionality is experimental and subject to change without notice. 
- virtual bool remote_estimate() const { return remote_estimate_; } - virtual void set_remote_estimate(bool remote_estimate) { + bool remote_estimate() const { return remote_estimate_; } + void set_remote_estimate(bool remote_estimate) { remote_estimate_ = remote_estimate; } - virtual int bandwidth() const { return bandwidth_; } - virtual void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; } - virtual std::string bandwidth_type() const { return bandwidth_type_; } - virtual void set_bandwidth_type(std::string bandwidth_type) { + int bandwidth() const { return bandwidth_; } + void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; } + std::string bandwidth_type() const { return bandwidth_type_; } + void set_bandwidth_type(std::string bandwidth_type) { bandwidth_type_ = bandwidth_type; } - virtual const std::vector& cryptos() const { return cryptos_; } - virtual void AddCrypto(const CryptoParams& params) { - cryptos_.push_back(params); - } - virtual void set_cryptos(const std::vector& cryptos) { + const std::vector& cryptos() const { return cryptos_; } + void AddCrypto(const CryptoParams& params) { cryptos_.push_back(params); } + void set_cryptos(const std::vector& cryptos) { cryptos_ = cryptos; } @@ -142,19 +138,18 @@ class MediaContentDescription { // are present. // Use RtpExtension::FindHeaderExtensionByUri for finding and // RtpExtension::DeduplicateHeaderExtensions for filtering. 
- virtual const RtpHeaderExtensions& rtp_header_extensions() const { + const RtpHeaderExtensions& rtp_header_extensions() const { return rtp_header_extensions_; } - virtual void set_rtp_header_extensions( - const RtpHeaderExtensions& extensions) { + void set_rtp_header_extensions(const RtpHeaderExtensions& extensions) { rtp_header_extensions_ = extensions; rtp_header_extensions_set_ = true; } - virtual void AddRtpHeaderExtension(const webrtc::RtpExtension& ext) { + void AddRtpHeaderExtension(const webrtc::RtpExtension& ext) { rtp_header_extensions_.push_back(ext); rtp_header_extensions_set_ = true; } - virtual void ClearRtpHeaderExtensions() { + void ClearRtpHeaderExtensions() { rtp_header_extensions_.clear(); rtp_header_extensions_set_ = true; } @@ -163,14 +158,12 @@ class MediaContentDescription { // signal them. For now we assume an empty list means no signaling, but // provide the ClearRtpHeaderExtensions method to allow "no support" to be // clearly indicated (i.e. when derived from other information). - virtual bool rtp_header_extensions_set() const { - return rtp_header_extensions_set_; - } - virtual const StreamParamsVec& streams() const { return send_streams_; } + bool rtp_header_extensions_set() const { return rtp_header_extensions_set_; } + const StreamParamsVec& streams() const { return send_streams_; } // TODO(pthatcher): Remove this by giving mediamessage.cc access // to MediaContentDescription - virtual StreamParamsVec& mutable_streams() { return send_streams_; } - virtual void AddStream(const StreamParams& stream) { + StreamParamsVec& mutable_streams() { return send_streams_; } + void AddStream(const StreamParams& stream) { send_streams_.push_back(stream); } // Legacy streams have an ssrc, but nothing else. 
@@ -183,37 +176,36 @@ class MediaContentDescription { AddStream(sp); } - virtual uint32_t first_ssrc() const { + uint32_t first_ssrc() const { if (send_streams_.empty()) { return 0; } return send_streams_[0].first_ssrc(); } - virtual bool has_ssrcs() const { + bool has_ssrcs() const { if (send_streams_.empty()) { return false; } return send_streams_[0].has_ssrcs(); } - virtual void set_conference_mode(bool enable) { conference_mode_ = enable; } - virtual bool conference_mode() const { return conference_mode_; } + void set_conference_mode(bool enable) { conference_mode_ = enable; } + bool conference_mode() const { return conference_mode_; } // https://tools.ietf.org/html/rfc4566#section-5.7 // May be present at the media or session level of SDP. If present at both // levels, the media-level attribute overwrites the session-level one. - virtual void set_connection_address(const rtc::SocketAddress& address) { + void set_connection_address(const rtc::SocketAddress& address) { connection_address_ = address; } - virtual const rtc::SocketAddress& connection_address() const { + const rtc::SocketAddress& connection_address() const { return connection_address_; } // Determines if it's allowed to mix one- and two-byte rtp header extensions // within the same rtp stream. enum ExtmapAllowMixed { kNo, kSession, kMedia }; - virtual void set_extmap_allow_mixed_enum( - ExtmapAllowMixed new_extmap_allow_mixed) { + void set_extmap_allow_mixed_enum(ExtmapAllowMixed new_extmap_allow_mixed) { if (new_extmap_allow_mixed == kMedia && extmap_allow_mixed_enum_ == kSession) { // Do not downgrade from session level to media level. 
@@ -221,27 +213,24 @@ class MediaContentDescription { } extmap_allow_mixed_enum_ = new_extmap_allow_mixed; } - virtual ExtmapAllowMixed extmap_allow_mixed_enum() const { + ExtmapAllowMixed extmap_allow_mixed_enum() const { return extmap_allow_mixed_enum_; } - virtual bool extmap_allow_mixed() const { - return extmap_allow_mixed_enum_ != kNo; - } + bool extmap_allow_mixed() const { return extmap_allow_mixed_enum_ != kNo; } // Simulcast functionality. - virtual bool HasSimulcast() const { return !simulcast_.empty(); } - virtual SimulcastDescription& simulcast_description() { return simulcast_; } - virtual const SimulcastDescription& simulcast_description() const { + bool HasSimulcast() const { return !simulcast_.empty(); } + SimulcastDescription& simulcast_description() { return simulcast_; } + const SimulcastDescription& simulcast_description() const { return simulcast_; } - virtual void set_simulcast_description( - const SimulcastDescription& simulcast) { + void set_simulcast_description(const SimulcastDescription& simulcast) { simulcast_ = simulcast; } - virtual const std::vector& receive_rids() const { + const std::vector& receive_rids() const { return receive_rids_; } - virtual void set_receive_rids(const std::vector& rids) { + void set_receive_rids(const std::vector& rids) { receive_rids_ = rids; } @@ -283,10 +272,10 @@ class MediaContentDescriptionImpl : public MediaContentDescription { typedef C CodecType; // Codecs should be in preference order (most preferred codec first). 
- virtual const std::vector& codecs() const { return codecs_; } - virtual void set_codecs(const std::vector& codecs) { codecs_ = codecs; } + const std::vector& codecs() const { return codecs_; } + void set_codecs(const std::vector& codecs) { codecs_ = codecs; } bool has_codecs() const override { return !codecs_.empty(); } - virtual bool HasCodec(int id) { + bool HasCodec(int id) { bool found = false; for (typename std::vector::iterator iter = codecs_.begin(); iter != codecs_.end(); ++iter) { @@ -297,8 +286,8 @@ class MediaContentDescriptionImpl : public MediaContentDescription { } return found; } - virtual void AddCodec(const C& codec) { codecs_.push_back(codec); } - virtual void AddOrReplaceCodec(const C& codec) { + void AddCodec(const C& codec) { codecs_.push_back(codec); } + void AddOrReplaceCodec(const C& codec) { for (typename std::vector::iterator iter = codecs_.begin(); iter != codecs_.end(); ++iter) { if (iter->id == codec.id) { @@ -308,7 +297,7 @@ class MediaContentDescriptionImpl : public MediaContentDescription { } AddCodec(codec); } - virtual void AddCodecs(const std::vector& codecs) { + void AddCodecs(const std::vector& codecs) { typename std::vector::const_iterator codec; for (codec = codecs.begin(); codec != codecs.end(); ++codec) { AddCodec(*codec); diff --git a/pc/test/fake_peer_connection_base.h b/pc/test/fake_peer_connection_base.h index e3ab72d242..ac70344d42 100644 --- a/pc/test/fake_peer_connection_base.h +++ b/pc/test/fake_peer_connection_base.h @@ -50,6 +50,13 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + RTCError RemoveTrackOrError( rtc::scoped_refptr sender) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); diff 
--git a/pc/test/fake_peer_connection_for_stats.h b/pc/test/fake_peer_connection_for_stats.h index d4ccfa30bd..ca7bf0bebc 100644 --- a/pc/test/fake_peer_connection_for_stats.h +++ b/pc/test/fake_peer_connection_for_stats.h @@ -167,6 +167,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { dependencies.network_thread = rtc::Thread::Current(); dependencies.worker_thread = rtc::Thread::Current(); dependencies.signaling_thread = rtc::Thread::Current(); + dependencies.media_engine = std::make_unique(); return dependencies; } diff --git a/pc/test/integration_test_helpers.cc b/pc/test/integration_test_helpers.cc index f6f38ad670..a014d02d75 100644 --- a/pc/test/integration_test_helpers.cc +++ b/pc/test/integration_test_helpers.cc @@ -46,7 +46,7 @@ void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc) { int FindFirstMediaStatsIndexByKind( const std::string& kind, - const std::vector& + const std::vector& media_stats_vec) { for (size_t i = 0; i < media_stats_vec.size(); i++) { if (media_stats_vec[i]->kind.ValueToString() == kind) { @@ -56,44 +56,37 @@ int FindFirstMediaStatsIndexByKind( return -1; } -TaskQueueMetronome::TaskQueueMetronome(TaskQueueFactory* factory, - TimeDelta tick_period) - : tick_period_(tick_period), - queue_(factory->CreateTaskQueue("MetronomeQueue", - TaskQueueFactory::Priority::HIGH)) { - tick_task_ = RepeatingTaskHandle::Start(queue_.Get(), [this] { - MutexLock lock(&mutex_); - for (auto* listener : listeners_) { - listener->OnTickTaskQueue()->PostTask([listener] { listener->OnTick(); }); - } - return tick_period_; - }); +TaskQueueMetronome::TaskQueueMetronome(TimeDelta tick_period) + : tick_period_(tick_period) { + sequence_checker_.Detach(); } TaskQueueMetronome::~TaskQueueMetronome() { - RTC_DCHECK(listeners_.empty()); - rtc::Event stop_event; - queue_.PostTask([this, &stop_event] { - tick_task_.Stop(); - stop_event.Set(); - }); - stop_event.Wait(TimeDelta::Seconds(1)); + RTC_DCHECK_RUN_ON(&sequence_checker_); } - 
-void TaskQueueMetronome::AddListener(TickListener* listener) { - MutexLock lock(&mutex_); - auto [it, inserted] = listeners_.insert(listener); - RTC_DCHECK(inserted); -} - -void TaskQueueMetronome::RemoveListener(TickListener* listener) { - MutexLock lock(&mutex_); - auto it = listeners_.find(listener); - RTC_DCHECK(it != listeners_.end()); - listeners_.erase(it); +void TaskQueueMetronome::RequestCallOnNextTick( + absl::AnyInvocable callback) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + callbacks_.push_back(std::move(callback)); + // Only schedule a tick callback for the first `callback` addition. + // Schedule on the current task queue to comply with RequestCallOnNextTick + // requirements. + if (callbacks_.size() == 1) { + TaskQueueBase::Current()->PostDelayedTask( + SafeTask(safety_.flag(), + [this] { + RTC_DCHECK_RUN_ON(&sequence_checker_); + std::vector> callbacks; + callbacks_.swap(callbacks); + for (auto& callback : callbacks) + std::move(callback)(); + }), + tick_period_); + } } TimeDelta TaskQueueMetronome::TickPeriod() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); return tick_period_; } diff --git a/pc/test/integration_test_helpers.h b/pc/test/integration_test_helpers.h index 1d9882ed81..64a06ebb7d 100644 --- a/pc/test/integration_test_helpers.h +++ b/pc/test/integration_test_helpers.h @@ -175,25 +175,23 @@ void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc); // metrics we're interested in are already available in "inbound-rtp". int FindFirstMediaStatsIndexByKind( const std::string& kind, - const std::vector& + const std::vector& media_stats_vec); class TaskQueueMetronome : public webrtc::Metronome { public: - TaskQueueMetronome(TaskQueueFactory* factory, TimeDelta tick_period); + explicit TaskQueueMetronome(TimeDelta tick_period); ~TaskQueueMetronome() override; // webrtc::Metronome implementation. 
- void AddListener(TickListener* listener) override; - void RemoveListener(TickListener* listener) override; + void RequestCallOnNextTick(absl::AnyInvocable callback) override; TimeDelta TickPeriod() const override; private: - Mutex mutex_; const TimeDelta tick_period_; - std::set listeners_ RTC_GUARDED_BY(mutex_); - RepeatingTaskHandle tick_task_; - rtc::TaskQueue queue_; + SequenceChecker sequence_checker_; + std::vector> callbacks_; + ScopedTaskSafetyDetached safety_; }; class SignalingMessageReceiver { @@ -775,8 +773,8 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, pc_factory_dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); pc_factory_dependencies.trials = std::make_unique(); - pc_factory_dependencies.metronome = std::make_unique( - pc_factory_dependencies.task_queue_factory.get(), TimeDelta::Millis(8)); + pc_factory_dependencies.metronome = + std::make_unique(TimeDelta::Millis(8)); cricket::MediaEngineDependencies media_deps; media_deps.task_queue_factory = pc_factory_dependencies.task_queue_factory.get(); diff --git a/pc/test/mock_channel_interface.h b/pc/test/mock_channel_interface.h index 97e873e724..273e4a19f0 100644 --- a/pc/test/mock_channel_interface.h +++ b/pc/test/mock_channel_interface.h @@ -26,8 +26,24 @@ class MockChannelInterface : public cricket::ChannelInterface { public: MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); MOCK_METHOD(MediaChannel*, media_channel, (), (const, override)); - MOCK_METHOD(VoiceMediaChannel*, voice_media_channel, (), (const, override)); - MOCK_METHOD(VideoMediaChannel*, video_media_channel, (), (const, override)); + MOCK_METHOD(MediaChannel*, media_send_channel, (), (const, override)); + MOCK_METHOD(VoiceMediaChannel*, + voice_media_send_channel, + (), + (const, override)); + MOCK_METHOD(VideoMediaChannel*, + video_media_send_channel, + (), + (const, override)); + MOCK_METHOD(MediaChannel*, media_receive_channel, (), (const, override)); + 
MOCK_METHOD(VoiceMediaChannel*, + voice_media_receive_channel, + (), + (const, override)); + MOCK_METHOD(VideoMediaChannel*, + video_media_receive_channel, + (), + (const, override)); MOCK_METHOD(absl::string_view, transport_name, (), (const, override)); MOCK_METHOD(const std::string&, mid, (), (const, override)); MOCK_METHOD(void, Enable, (bool), (override)); diff --git a/pc/test/mock_peer_connection_internal.h b/pc/test/mock_peer_connection_internal.h index 733ec41961..967f9b605e 100644 --- a/pc/test/mock_peer_connection_internal.h +++ b/pc/test/mock_peer_connection_internal.h @@ -42,6 +42,12 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { (rtc::scoped_refptr, const std::vector&), (override)); + MOCK_METHOD(RTCErrorOr>, + AddTrack, + (rtc::scoped_refptr, + const std::vector&, + const std::vector&), + (override)); MOCK_METHOD(RTCError, RemoveTrackOrError, (rtc::scoped_refptr), diff --git a/pc/test/mock_rtp_receiver_internal.h b/pc/test/mock_rtp_receiver_internal.h index 779dcdcf08..e2a81c0dd3 100644 --- a/pc/test/mock_rtp_receiver_internal.h +++ b/pc/test/mock_rtp_receiver_internal.h @@ -57,7 +57,10 @@ class MockRtpReceiverInternal : public RtpReceiverInternal { // RtpReceiverInternal methods. 
MOCK_METHOD(void, Stop, (), (override)); - MOCK_METHOD(void, SetMediaChannel, (cricket::MediaChannel*), (override)); + MOCK_METHOD(void, + SetMediaChannel, + (cricket::MediaReceiveChannelInterface*), + (override)); MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override)); MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override)); MOCK_METHOD(uint32_t, ssrc, (), (const, override)); diff --git a/pc/test/mock_rtp_sender_internal.h b/pc/test/mock_rtp_sender_internal.h index 5261d47b82..8ed0ede21b 100644 --- a/pc/test/mock_rtp_sender_internal.h +++ b/pc/test/mock_rtp_sender_internal.h @@ -52,14 +52,23 @@ class MockRtpSenderInternal : public RtpSenderInternal { (), (const, override)); MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); - MOCK_METHOD(RTCError, + MOCK_METHOD(void, + SetParametersAsync, + (const RtpParameters&, SetParametersCallback), + (override)); + MOCK_METHOD(void, SetParametersInternal, - (const RtpParameters&), + (const RtpParameters&, SetParametersCallback, bool blocking), (override)); MOCK_METHOD(RTCError, SetParametersInternalWithAllLayers, (const RtpParameters&), (override)); + MOCK_METHOD(RTCError, CheckSVCParameters, (const RtpParameters&), (override)); + MOCK_METHOD(void, + SetVideoCodecPreferences, + (std::vector), + (override)); MOCK_METHOD(rtc::scoped_refptr, GetDtmfSender, (), @@ -82,7 +91,7 @@ class MockRtpSenderInternal : public RtpSenderInternal { (override)); // RtpSenderInternal methods. 
- MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*)); + MOCK_METHOD1(SetMediaChannel, void(cricket::MediaSendChannelInterface*)); MOCK_METHOD1(SetSsrc, void(uint32_t)); MOCK_METHOD1(set_stream_ids, void(const std::vector&)); MOCK_METHOD1(SetStreams, void(const std::vector&)); diff --git a/pc/test/mock_voice_media_channel.h b/pc/test/mock_voice_media_channel.h index 444ca5aed6..5a9b8802dd 100644 --- a/pc/test/mock_voice_media_channel.h +++ b/pc/test/mock_voice_media_channel.h @@ -16,6 +16,7 @@ #include "api/call/audio_sink.h" #include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "rtc_base/gunit.h" #include "test/gmock.h" #include "test/gtest.h" @@ -29,7 +30,10 @@ class MockVoiceMediaChannel : public VoiceMediaChannel { explicit MockVoiceMediaChannel(webrtc::TaskQueueBase* network_thread) : VoiceMediaChannel(network_thread) {} - MOCK_METHOD(void, SetInterface, (NetworkInterface * iface), (override)); + MOCK_METHOD(void, + SetInterface, + (MediaChannelNetworkInterface * iface), + (override)); MOCK_METHOD(void, OnPacketReceived, (rtc::CopyOnWriteBuffer packet, int64_t packet_time_us), @@ -64,14 +68,15 @@ class MockVoiceMediaChannel : public VoiceMediaChannel { (uint32_t ssrc, rtc::scoped_refptr frame_decryptor), (override)); - MOCK_METHOD(void, SetVideoCodecSwitchingEnabled, (bool enabled), (override)); MOCK_METHOD(webrtc::RtpParameters, GetRtpSendParameters, (uint32_t ssrc), (const, override)); MOCK_METHOD(webrtc::RTCError, SetRtpSendParameters, - (uint32_t ssrc, const webrtc::RtpParameters& parameters), + (uint32_t ssrc, + const webrtc::RtpParameters& parameters, + webrtc::SetParametersCallback callback), (override)); MOCK_METHOD( void, diff --git a/pc/test/svc_e2e_tests.cc b/pc/test/svc_e2e_tests.cc index 264b990e1b..dea0763758 100644 --- a/pc/test/svc_e2e_tests.cc +++ b/pc/test/svc_e2e_tests.cc @@ -13,12 +13,16 @@ #include #include "api/media_stream_interface.h" +#include "api/stats/rtcstats_objects.h" #include 
"api/test/create_network_emulation_manager.h" #include "api/test/create_peer_connection_quality_test_frame_generator.h" #include "api/test/create_peerconnection_quality_test_fixture.h" #include "api/test/frame_generator_interface.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" @@ -38,26 +42,21 @@ namespace webrtc { namespace { -using PeerConfigurer = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::PeerConfigurer; -using RunParams = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::RunParams; -using VideoConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; -using ScreenShareConfig = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::ScreenShareConfig; -using VideoCodecConfig = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::VideoCodecConfig; -using EmulatedSFUConfig = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::EmulatedSFUConfig; using ::cricket::kAv1CodecName; using ::cricket::kH264CodecName; using ::cricket::kVp8CodecName; using ::cricket::kVp9CodecName; using ::testing::Combine; +using ::testing::Optional; using ::testing::UnitTest; using ::testing::Values; using ::testing::ValuesIn; +using ::webrtc::webrtc_pc_e2e::EmulatedSFUConfig; +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; +using ::webrtc::webrtc_pc_e2e::RunParams; +using ::webrtc::webrtc_pc_e2e::ScreenShareConfig; +using ::webrtc::webrtc_pc_e2e::VideoCodecConfig; +using ::webrtc::webrtc_pc_e2e::VideoConfig; std::unique_ptr CreateTestFixture(absl::string_view test_case_name, @@ -71,10 +70,14 @@ CreateTestFixture(absl::string_view test_case_name, 
auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture( std::string(test_case_name), time_controller, nullptr, std::move(video_quality_analyzer)); - fixture->AddPeer(network_links.first->network_dependencies(), - alice_configurer); - fixture->AddPeer(network_links.second->network_dependencies(), - bob_configurer); + auto alice = std::make_unique( + network_links.first->network_dependencies()); + auto bob = std::make_unique( + network_links.second->network_dependencies()); + alice_configurer(alice.get()); + bob_configurer(bob.get()); + fixture->AddPeer(std::move(alice)); + fixture->AddPeer(std::move(bob)); return fixture; } @@ -202,16 +205,32 @@ class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer { input_image); } + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override { + // Extract the scalability mode reported in the stats. + auto outbound_stats = report->GetStatsOfType(); + for (const auto& stat : outbound_stats) { + if (stat->scalability_mode.is_defined()) { + reported_scalability_mode_ = *stat->scalability_mode; + } + } + } + const SpatialTemporalLayerCounts& encoder_layers_seen() const { return encoder_layers_seen_; } const SpatialTemporalLayerCounts& decoder_layers_seen() const { return decoder_layers_seen_; } + const absl::optional reported_scalability_mode() const { + return reported_scalability_mode_; + } private: SpatialTemporalLayerCounts encoder_layers_seen_; SpatialTemporalLayerCounts decoder_layers_seen_; + absl::optional reported_scalability_mode_; }; MATCHER_P2(HasSpatialAndTemporalLayers, @@ -341,6 +360,8 @@ TEST_P(SvcTest, ScalabilityModeSupported) { SvcTestParameters().expected_spatial_layers, SvcTestParameters().expected_temporal_layers)); } + EXPECT_THAT(analyzer_ptr->reported_scalability_mode(), + Optional(SvcTestParameters().scalability_mode)); RTC_LOG(LS_INFO) << "Encoder layers seen: " << analyzer_ptr->encoder_layers_seen().size(); diff --git 
a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc index bed848345e..0ab5d56a22 100644 --- a/pc/video_rtp_receiver.cc +++ b/pc/video_rtp_receiver.cc @@ -252,7 +252,8 @@ void VideoRtpReceiver::SetJitterBufferMinimumDelay( media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); } -void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { +void VideoRtpReceiver::SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); @@ -260,7 +261,8 @@ void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { SetMediaChannel_w(media_channel); } -void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { +void VideoRtpReceiver::SetMediaChannel_w( + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); if (media_channel == media_channel_) return; @@ -275,12 +277,16 @@ void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { SetEncodedSinkEnabled(false); } - media_channel_ = static_cast(media_channel); + if (media_channel) { + media_channel_ = media_channel->AsVideoReceiveChannel(); + } else { + media_channel_ = nullptr; + } if (media_channel_) { if (saved_generate_keyframe_) { // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - media_channel_->GenerateKeyFrame(ssrc_.value_or(0)); + media_channel_->RequestRecvKeyFrame(ssrc_.value_or(0)); saved_generate_keyframe_ = false; } if (encoded_sink_enabled) { @@ -311,8 +317,9 @@ std::vector VideoRtpReceiver::GetSources() const { return media_channel_->GetSources(*ssrc_); } -void VideoRtpReceiver::SetupMediaChannel(absl::optional ssrc, - cricket::MediaChannel* media_channel) { +void VideoRtpReceiver::SetupMediaChannel( + absl::optional ssrc, + cricket::MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); 
RTC_DCHECK(media_channel); MediaSourceInterface::SourceState state = source_->state(); @@ -332,7 +339,7 @@ void VideoRtpReceiver::OnGenerateKeyFrame() { return; } // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - media_channel_->GenerateKeyFrame(ssrc_.value_or(0)); + media_channel_->RequestRecvKeyFrame(ssrc_.value_or(0)); // We need to remember to request generation of a new key frame if the media // channel changes, because there's no feedback whether the keyframe // generation has completed on the channel. diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h index 8b1f3c4140..086246daae 100644 --- a/pc/video_rtp_receiver.h +++ b/pc/video_rtp_receiver.h @@ -102,7 +102,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { void SetJitterBufferMinimumDelay( absl::optional delay_seconds) override; - void SetMediaChannel(cricket::MediaChannel* media_channel) override; + void SetMediaChannel( + cricket::MediaReceiveChannelInterface* media_channel) override; int AttachmentId() const override { return attachment_id_; } @@ -111,7 +112,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { // Combines SetMediaChannel, SetupMediaChannel and // SetupUnsignaledMediaChannel. 
void SetupMediaChannel(absl::optional ssrc, - cricket::MediaChannel* media_channel); + cricket::MediaReceiveChannelInterface* media_channel); private: void RestartMediaChannel(absl::optional ssrc) @@ -121,7 +122,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { RTC_RUN_ON(worker_thread_); void SetSink(rtc::VideoSinkInterface* sink) RTC_RUN_ON(worker_thread_); - void SetMediaChannel_w(cricket::MediaChannel* media_channel) + void SetMediaChannel_w(cricket::MediaReceiveChannelInterface* media_channel) RTC_RUN_ON(worker_thread_); // VideoRtpTrackSource::Callback @@ -148,8 +149,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { rtc::Thread* const worker_thread_; const std::string id_; - cricket::VideoMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = - nullptr; + cricket::VideoMediaReceiveChannelInterface* media_channel_ + RTC_GUARDED_BY(worker_thread_) = nullptr; absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); // `source_` is held here to be able to change the state of the source when // the VideoRtpReceiver is stopped. 
diff --git a/pc/video_rtp_receiver_unittest.cc b/pc/video_rtp_receiver_unittest.cc index 401987960c..3ec9a28295 100644 --- a/pc/video_rtp_receiver_unittest.cc +++ b/pc/video_rtp_receiver_unittest.cc @@ -49,7 +49,11 @@ class VideoRtpReceiverTest : public testing::Test { ClearRecordableEncodedFrameCallback, (uint32_t), (override)); - MOCK_METHOD(void, GenerateKeyFrame, (uint32_t), (override)); + MOCK_METHOD(void, RequestRecvKeyFrame, (uint32_t), (override)); + MOCK_METHOD(void, + GenerateSendKeyFrame, + (uint32_t, const std::vector&), + (override)); }; class MockVideoSink : public rtc::VideoSinkInterface { @@ -96,7 +100,7 @@ TEST_F(VideoRtpReceiverTest, SupportsEncodedOutput) { } TEST_F(VideoRtpReceiverTest, GeneratesKeyFrame) { - EXPECT_CALL(channel_, GenerateKeyFrame(0)); + EXPECT_CALL(channel_, RequestRecvKeyFrame(0)); Source()->GenerateKeyFrame(); } @@ -105,17 +109,17 @@ TEST_F(VideoRtpReceiverTest, // A channel switch without previous call to GenerateKeyFrame shouldn't // cause a call to happen on the new channel. MockVideoMediaChannel channel2(nullptr, cricket::VideoOptions()); - EXPECT_CALL(channel_, GenerateKeyFrame).Times(0); - EXPECT_CALL(channel2, GenerateKeyFrame).Times(0); + EXPECT_CALL(channel_, RequestRecvKeyFrame).Times(0); + EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(0); SetMediaChannel(&channel2); Mock::VerifyAndClearExpectations(&channel2); // Generate a key frame. When we switch channel next time, we will have to // re-generate it as we don't know if it was eventually received - EXPECT_CALL(channel2, GenerateKeyFrame).Times(1); + EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(1); Source()->GenerateKeyFrame(); MockVideoMediaChannel channel3(nullptr, cricket::VideoOptions()); - EXPECT_CALL(channel3, GenerateKeyFrame); + EXPECT_CALL(channel3, RequestRecvKeyFrame); SetMediaChannel(&channel3); // Switching to a new channel should now not cause calls to GenerateKeyFrame. 
diff --git a/pc/webrtc_sdp.cc b/pc/webrtc_sdp.cc index 39b16901a1..69fa62ca37 100644 --- a/pc/webrtc_sdp.cc +++ b/pc/webrtc_sdp.cc @@ -1940,7 +1940,7 @@ void BuildRtpmap(const MediaContentDescription* media_desc, if (GetMinValue(maxptimes, &min_maxptime)) { AddAttributeLine(kCodecParamMaxPTime, min_maxptime, message); } - RTC_DCHECK(min_maxptime > max_minptime); + RTC_DCHECK_GE(min_maxptime, max_minptime); // Populate the ptime attribute with the smallest ptime or the largest // minptime, whichever is the largest, for all codecs under the same m-line. int ptime = INT_MAX; @@ -2333,40 +2333,57 @@ static bool ParseMsidAttribute(absl::string_view line, std::vector* stream_ids, std::string* track_id, SdpParseError* error) { - // https://datatracker.ietf.org/doc/draft-ietf-mmusic-msid/16/ - // a=msid: + // https://datatracker.ietf.org/doc/rfc8830/ + // a=msid: // msid-value = msid-id [ SP msid-appdata ] // msid-id = 1*64token-char ; see RFC 4566 // msid-appdata = 1*64token-char ; see RFC 4566 - std::string field1; - std::string new_stream_id; - std::string new_track_id; - if (!rtc::tokenize_first(line.substr(kLinePrefixLength), - kSdpDelimiterSpaceChar, &field1, &new_track_id)) { - const size_t expected_fields = 2; - return ParseFailedExpectFieldNum(line, expected_fields, error); + // Note that JSEP stipulates not sending msid-appdata so + // a=msid: + // is supported for backward compatibility reasons only. 
+ std::vector fields; + size_t num_fields = rtc::tokenize(line.substr(kLinePrefixLength), + kSdpDelimiterSpaceChar, &fields); + if (num_fields < 1 || num_fields > 2) { + return ParseFailed(line, "Expected a stream ID and optionally a track ID", + error); } + if (num_fields == 1) { + if (line.back() == kSdpDelimiterSpaceChar) { + return ParseFailed(line, "Missing track ID in msid attribute.", error); + } + if (!track_id->empty()) { + fields.push_back(*track_id); + } else { + // Ending with an empty string track will cause a random track id + // to be generated later in the process. + fields.push_back(""); + } + } + RTC_DCHECK_EQ(fields.size(), 2); - if (new_track_id.empty()) { - return ParseFailed(line, "Missing track ID in msid attribute.", error); - } // All track ids should be the same within an m section in a Unified Plan SDP. - if (!track_id->empty() && new_track_id.compare(*track_id) != 0) { + if (!track_id->empty() && track_id->compare(fields[1]) != 0) { return ParseFailed( line, "Two different track IDs in msid attribute in one m= section", error); } - *track_id = new_track_id; + *track_id = fields[1]; // msid: - if (!GetValue(field1, kAttributeMsid, &new_stream_id, error)) { + std::string new_stream_id; + if (!GetValue(fields[0], kAttributeMsid, &new_stream_id, error)) { return false; } if (new_stream_id.empty()) { return ParseFailed(line, "Missing stream ID in msid attribute.", error); } // The special value "-" indicates "no MediaStream". - if (new_stream_id.compare(kNoStreamMsid) != 0) { + if (new_stream_id.compare(kNoStreamMsid) != 0 && + !absl::c_any_of(*stream_ids, + [&new_stream_id](const std::string& existing_stream_id) { + return new_stream_id == existing_stream_id; + })) { stream_ids->push_back(new_stream_id); } return true; @@ -3330,6 +3347,10 @@ bool ParseContent(absl::string_view message, // still create a track. 
This isn't done for data media types because // StreamParams aren't used for SCTP streams, and RTP data channels don't // support unsignaled SSRCs. + // If track id was not specified, create a random one. + if (track_id.empty()) { + track_id = rtc::CreateRandomString(8); + } CreateTrackWithNoSsrcs(stream_ids, track_id, send_rids, &tracks); } diff --git a/pc/webrtc_sdp_unittest.cc b/pc/webrtc_sdp_unittest.cc index 7880af0693..9f1cfc9c96 100644 --- a/pc/webrtc_sdp_unittest.cc +++ b/pc/webrtc_sdp_unittest.cc @@ -4055,11 +4055,54 @@ TEST_F(WebRtcSdpTest, DeserializeInvalidPortInCandidateAttribute) { EXPECT_FALSE(SdpDeserialize(kSdpWithInvalidCandidatePort, &jdesc_output)); } +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithStreamIdAndTrackId) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.id, "track_id"); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithEmptyStreamIdAndTrackId) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:- track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 0u); + EXPECT_EQ(stream.id, "track_id"); +} + // Test that "a=msid" with a 
missing track ID is rejected and doesn't crash. // Regression test for: // https://bugs.chromium.org/p/chromium/issues/detail?id=686405 TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingTrackId) { - static const char kSdpWithMissingTrackId[] = + std::string sdp = "v=0\r\n" "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" "s=-\r\n" @@ -4070,11 +4113,226 @@ TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingTrackId) { "a=msid:stream_id \r\n"; JsepSessionDescription jdesc_output(kDummyType); - EXPECT_FALSE(SdpDeserialize(kSdpWithMissingTrackId, &jdesc_output)); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutColon) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAttributes) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithTooManySpaces) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id bogus\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithDifferentTrackIds) { + 
std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n" + "a=msid:stream_id2 track_id2\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppData) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + // Track id is randomly generated. + EXPECT_NE(stream.id, ""); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataTwoStreams) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:stream_id2\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 2u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.stream_ids()[1], "stream_id2"); + // Track id is randomly generated. 
+ EXPECT_NE(stream.id, ""); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataDuplicate) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:stream_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // This is somewhat silly but accept it. Duplicates get filtered. + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + // Track id is randomly generated. + EXPECT_NE(stream.id, ""); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixed) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:stream_id2 track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // Mixing the syntax like this is not a good idea but we accept it + // and the result is the second track_id. + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 2u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.stream_ids()[1], "stream_id2"); + + // Track id is taken from second line. 
+ EXPECT_EQ(stream.id, "track_id"); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixed2) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n" + "a=msid:stream_id2\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // Mixing the syntax like this is not a good idea but we accept it + // and the result is the second track_id. + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 2u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.stream_ids()[1], "stream_id2"); + + // Track id is taken from first line. + EXPECT_EQ(stream.id, "track_id"); +} + +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixedNoStream) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id\r\n" + "a=msid:- track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + // This is somewhat undefined behavior but accept it and expect a single + // stream. 
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + auto stream = jdesc_output.description() + ->contents()[0] + .media_description() + ->streams()[0]; + ASSERT_EQ(stream.stream_ids().size(), 1u); + EXPECT_EQ(stream.stream_ids()[0], "stream_id"); + EXPECT_EQ(stream.id, "track_id"); } TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingStreamId) { - static const char kSdpWithMissingStreamId[] = + std::string sdp = "v=0\r\n" "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" "s=-\r\n" @@ -4085,7 +4343,7 @@ TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingStreamId) { "a=msid: track_id\r\n"; JsepSessionDescription jdesc_output(kDummyType); - EXPECT_FALSE(SdpDeserialize(kSdpWithMissingStreamId, &jdesc_output)); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); } // Tests that if both session-level address and media-level address exist, use diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn index b171b162bf..8ac6e4102c 100644 --- a/rtc_base/BUILD.gn +++ b/rtc_base/BUILD.gn @@ -314,7 +314,10 @@ rtc_library("platform_thread_types") { "platform_thread_types.cc", "platform_thread_types.h", ] - deps = [ ":macromagic" ] + deps = [ + ":checks", + ":macromagic", + ] } rtc_source_set("refcount") { @@ -961,6 +964,7 @@ rtc_library("threading") { "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/units:time_delta", + "../system_wrappers:field_trial", "synchronization:mutex", "system:no_unique_address", "system:rtc_export", @@ -1498,6 +1502,7 @@ if (rtc_include_tests) { ":timeutils", "../api/units:time_delta", "../system_wrappers", + "../test:field_trial", "../test:fileutils", "../test:test_main", "../test:test_support", @@ -1677,9 +1682,14 @@ if (rtc_include_tests) { "numerics/percentile_filter_unittest.cc", "numerics/running_statistics_unittest.cc", "numerics/sequence_number_util_unittest.cc", + "numerics/sequence_numbers_conformance_test.cc", ] deps = [ ":rtc_numerics", + ":strong_alias", + ":timeutils", + 
"../modules:module_api_public", + "../net/dcsctp/common:sequence_numbers", "../test:test_main", "../test:test_support", ] diff --git a/rtc_base/DEPS b/rtc_base/DEPS index 3a77b5502a..3882f5acb5 100644 --- a/rtc_base/DEPS +++ b/rtc_base/DEPS @@ -12,4 +12,7 @@ specific_include_rules = { "gunit\.h": [ "+testing/base/public/gunit.h" ], + "sequence_numbers_conformance_test\.cc": [ + "+net/dcsctp/common/sequence_numbers.h", + ], } diff --git a/rtc_base/async_udp_socket.cc b/rtc_base/async_udp_socket.cc index b4aefd6837..7e4b9378f6 100644 --- a/rtc_base/async_udp_socket.cc +++ b/rtc_base/async_udp_socket.cc @@ -19,10 +19,15 @@ #include "rtc_base/network/sent_packet.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/time_utils.h" +#include "system_wrappers/include/field_trial.h" namespace rtc { -static const int BUF_SIZE = 64 * 1024; +// Returns true if the client is in the experiment to get timestamps +// from the socket implementation. +static bool IsScmTimeStampExperimentEnabled() { + return webrtc::field_trial::IsEnabled("WebRTC-SCM-Timestamp"); +} AsyncUDPSocket* AsyncUDPSocket::Create(Socket* socket, const SocketAddress& bind_address) { @@ -43,18 +48,12 @@ AsyncUDPSocket* AsyncUDPSocket::Create(SocketFactory* factory, } AsyncUDPSocket::AsyncUDPSocket(Socket* socket) : socket_(socket) { - size_ = BUF_SIZE; - buf_ = new char[size_]; - + sequence_checker_.Detach(); // The socket should start out readable but not writable. 
socket_->SignalReadEvent.connect(this, &AsyncUDPSocket::OnReadEvent); socket_->SignalWriteEvent.connect(this, &AsyncUDPSocket::OnWriteEvent); } -AsyncUDPSocket::~AsyncUDPSocket() { - delete[] buf_; -} - SocketAddress AsyncUDPSocket::GetLocalAddress() const { return socket_->GetLocalAddress(); } @@ -112,10 +111,12 @@ void AsyncUDPSocket::SetError(int error) { void AsyncUDPSocket::OnReadEvent(Socket* socket) { RTC_DCHECK(socket_.get() == socket); + RTC_DCHECK_RUN_ON(&sequence_checker_); SocketAddress remote_addr; - int64_t timestamp; - int len = socket_->RecvFrom(buf_, size_, &remote_addr, ×tamp); + int64_t timestamp = -1; + int len = socket_->RecvFrom(buf_, BUF_SIZE, &remote_addr, ×tamp); + if (len < 0) { // An error here typically means we got an ICMP error in response to our // send datagram, indicating the remote address was unreachable. @@ -126,11 +127,21 @@ void AsyncUDPSocket::OnReadEvent(Socket* socket) { << "] receive failed with error " << socket_->GetError(); return; } + if (timestamp == -1) { + // Timestamp from socket is not available. + timestamp = TimeMicros(); + } else { + if (!socket_time_offset_) { + socket_time_offset_ = + IsScmTimeStampExperimentEnabled() ? TimeMicros() - timestamp : 0; + } + timestamp += *socket_time_offset_; + } // TODO: Make sure that we got all of the packet. // If we did not, then we should resize our buffer to be large enough. SignalReadPacket(this, buf_, static_cast(len), remote_addr, - (timestamp > -1 ? 
timestamp : TimeMicros())); + timestamp); } void AsyncUDPSocket::OnWriteEvent(Socket* socket) { diff --git a/rtc_base/async_udp_socket.h b/rtc_base/async_udp_socket.h index 5d738ffe94..b20281fd17 100644 --- a/rtc_base/async_udp_socket.h +++ b/rtc_base/async_udp_socket.h @@ -13,12 +13,16 @@ #include +#include #include +#include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_factory.h" +#include "rtc_base/thread_annotations.h" namespace rtc { @@ -36,7 +40,7 @@ class AsyncUDPSocket : public AsyncPacketSocket { static AsyncUDPSocket* Create(SocketFactory* factory, const SocketAddress& bind_address); explicit AsyncUDPSocket(Socket* socket); - ~AsyncUDPSocket() override; + ~AsyncUDPSocket() = default; SocketAddress GetLocalAddress() const override; SocketAddress GetRemoteAddress() const override; @@ -61,9 +65,11 @@ class AsyncUDPSocket : public AsyncPacketSocket { // Called when the underlying socket is ready to send. 
void OnWriteEvent(Socket* socket); + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; std::unique_ptr socket_; - char* buf_; - size_t size_; + static constexpr int BUF_SIZE = 64 * 1024; + char buf_[BUF_SIZE] RTC_GUARDED_BY(sequence_checker_); + absl::optional socket_time_offset_ RTC_GUARDED_BY(sequence_checker_); }; } // namespace rtc diff --git a/rtc_base/byte_order.h b/rtc_base/byte_order.h index ae1c6345ba..b8f8ae9f7a 100644 --- a/rtc_base/byte_order.h +++ b/rtc_base/byte_order.h @@ -13,6 +13,8 @@ #include +#include + #if defined(WEBRTC_POSIX) && !defined(__native_client__) #include #endif @@ -107,51 +109,69 @@ inline uint8_t Get8(const void* memory, size_t offset) { } inline void SetBE16(void* memory, uint16_t v) { - *static_cast(memory) = htobe16(v); + uint16_t val = htobe16(v); + memcpy(memory, &val, sizeof(val)); } inline void SetBE32(void* memory, uint32_t v) { - *static_cast(memory) = htobe32(v); + uint32_t val = htobe32(v); + memcpy(memory, &val, sizeof(val)); } inline void SetBE64(void* memory, uint64_t v) { - *static_cast(memory) = htobe64(v); + uint64_t val = htobe64(v); + memcpy(memory, &val, sizeof(val)); } inline uint16_t GetBE16(const void* memory) { - return be16toh(*static_cast(memory)); + uint16_t val; + memcpy(&val, memory, sizeof(val)); + return be16toh(val); } inline uint32_t GetBE32(const void* memory) { - return be32toh(*static_cast(memory)); + uint32_t val; + memcpy(&val, memory, sizeof(val)); + return be32toh(val); } inline uint64_t GetBE64(const void* memory) { - return be64toh(*static_cast(memory)); + uint64_t val; + memcpy(&val, memory, sizeof(val)); + return be64toh(val); } inline void SetLE16(void* memory, uint16_t v) { - *static_cast(memory) = htole16(v); + uint16_t val = htole16(v); + memcpy(memory, &val, sizeof(val)); } inline void SetLE32(void* memory, uint32_t v) { - *static_cast(memory) = htole32(v); + uint32_t val = htole32(v); + memcpy(memory, &val, sizeof(val)); } inline void SetLE64(void* memory, uint64_t v) 
{ - *static_cast(memory) = htole64(v); + uint64_t val = htole64(v); + memcpy(memory, &val, sizeof(val)); } inline uint16_t GetLE16(const void* memory) { - return le16toh(*static_cast(memory)); + uint16_t val; + memcpy(&val, memory, sizeof(val)); + return le16toh(val); } inline uint32_t GetLE32(const void* memory) { - return le32toh(*static_cast(memory)); + uint32_t val; + memcpy(&val, memory, sizeof(val)); + return le32toh(val); } inline uint64_t GetLE64(const void* memory) { - return le64toh(*static_cast(memory)); + uint64_t val; + memcpy(&val, memory, sizeof(val)); + return le64toh(val); } // Check if the current host is big endian. diff --git a/rtc_base/containers/flat_map.h b/rtc_base/containers/flat_map.h index 1dfae51655..d1f757f669 100644 --- a/rtc_base/containers/flat_map.h +++ b/rtc_base/containers/flat_map.h @@ -19,7 +19,7 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/containers/flat_tree.h" +#include "rtc_base/containers/flat_tree.h" // IWYU pragma: export namespace webrtc { diff --git a/rtc_base/containers/flat_set.h b/rtc_base/containers/flat_set.h index e088cc5314..355690b09d 100644 --- a/rtc_base/containers/flat_set.h +++ b/rtc_base/containers/flat_set.h @@ -16,7 +16,7 @@ #include #include -#include "rtc_base/containers/flat_tree.h" +#include "rtc_base/containers/flat_tree.h" // IWYU pragma: export #include "rtc_base/containers/identity.h" namespace webrtc { diff --git a/rtc_base/event.h b/rtc_base/event.h index 941497ca7b..12f6a7dca2 100644 --- a/rtc_base/event.h +++ b/rtc_base/event.h @@ -12,6 +12,7 @@ #define RTC_BASE_EVENT_H_ #include "api/units/time_delta.h" + #if defined(WEBRTC_WIN) #include #elif defined(WEBRTC_POSIX) @@ -20,8 +21,38 @@ #error "Must define either WEBRTC_WIN or WEBRTC_POSIX." 
#endif +#include "rtc_base/synchronization/yield_policy.h" + namespace rtc { +// RTC_DISALLOW_WAIT() utility +// +// Sets a stack-scoped flag that disallows use of `rtc::Event::Wait` by means +// of raising a DCHECK when a call to `rtc::Event::Wait()` is made.. +// This is useful to guard synchronization-free scopes against regressions. +// +// Example of what this would catch (`ScopeToProtect` calls `Foo`): +// +// void Foo(TaskQueue* tq) { +// Event event; +// tq->PostTask([&event]() { +// event.Set(); +// }); +// event.Wait(Event::kForever); // <- Will trigger a DCHECK. +// } +// +// void ScopeToProtect() { +// TaskQueue* tq = GetSomeTaskQueue(); +// RTC_DISALLOW_WAIT(); // Policy takes effect. +// Foo(tq); +// } +// +#if RTC_DCHECK_IS_ON +#define RTC_DISALLOW_WAIT() ScopedDisallowWait disallow_wait_##__LINE__ +#else +#define RTC_DISALLOW_WAIT() +#endif + class Event { public: // TODO(bugs.webrtc.org/14366): Consider removing this redundant alias. @@ -87,6 +118,20 @@ class ScopedAllowBaseSyncPrimitivesForTesting { ~ScopedAllowBaseSyncPrimitivesForTesting() {} }; +#if RTC_DCHECK_IS_ON +class ScopedDisallowWait { + public: + ScopedDisallowWait() = default; + + private: + class DisallowYieldHandler : public YieldInterface { + public: + void YieldExecution() override { RTC_DCHECK_NOTREACHED(); } + } handler_; + rtc::ScopedYieldPolicy policy{&handler_}; +}; +#endif + } // namespace rtc #endif // RTC_BASE_EVENT_H_ diff --git a/rtc_base/event_unittest.cc b/rtc_base/event_unittest.cc index e35373142e..17f50dc2d1 100644 --- a/rtc_base/event_unittest.cc +++ b/rtc_base/event_unittest.cc @@ -102,4 +102,14 @@ TEST(EventTest, DISABLED_PerformanceMultiThread) { thread.Stop(); } +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +// Tests that we crash if we attempt to call rtc::Event::Wait while we're +// not allowed to (as per `RTC_DISALLOW_WAIT()`). 
+TEST(EventTestDeathTest, DisallowEventWait) { + Event event; + RTC_DISALLOW_WAIT(); + EXPECT_DEATH(event.Wait(Event::kForever), ""); +} +#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + } // namespace rtc diff --git a/rtc_base/experiments/encoder_info_settings.cc b/rtc_base/experiments/encoder_info_settings.cc index 8af52d6646..5f0bf2d7ac 100644 --- a/rtc_base/experiments/encoder_info_settings.cc +++ b/rtc_base/experiments/encoder_info_settings.cc @@ -188,7 +188,7 @@ EncoderInfoSettings::EncoderInfoSettings(absl::string_view name) resolution_bitrate_limits_ = ToResolutionBitrateLimits(bitrate_limits.Get()); } -absl::optional EncoderInfoSettings::requested_resolution_alignment() +absl::optional EncoderInfoSettings::requested_resolution_alignment() const { if (requested_resolution_alignment_ && requested_resolution_alignment_.Value() < 1) { diff --git a/rtc_base/experiments/encoder_info_settings.h b/rtc_base/experiments/encoder_info_settings.h index d450697f47..f4227ed631 100644 --- a/rtc_base/experiments/encoder_info_settings.h +++ b/rtc_base/experiments/encoder_info_settings.h @@ -33,7 +33,7 @@ class EncoderInfoSettings { int max_bitrate_bps = 0; // The maximum bitrate. 
}; - absl::optional requested_resolution_alignment() const; + absl::optional requested_resolution_alignment() const; bool apply_alignment_to_all_simulcast_layers() const { return apply_alignment_to_all_simulcast_layers_.Get(); } @@ -62,7 +62,7 @@ class EncoderInfoSettings { explicit EncoderInfoSettings(absl::string_view name); private: - FieldTrialOptional requested_resolution_alignment_; + FieldTrialOptional requested_resolution_alignment_; FieldTrialFlag apply_alignment_to_all_simulcast_layers_; std::vector resolution_bitrate_limits_; }; diff --git a/rtc_base/experiments/encoder_info_settings_unittest.cc b/rtc_base/experiments/encoder_info_settings_unittest.cc index aabb68718c..929c777821 100644 --- a/rtc_base/experiments/encoder_info_settings_unittest.cc +++ b/rtc_base/experiments/encoder_info_settings_unittest.cc @@ -38,7 +38,7 @@ TEST(SimulcastEncoderAdapterSettingsTest, GetResolutionAlignment) { "requested_resolution_alignment:2/"); SimulcastEncoderAdapterEncoderInfoSettings settings; - EXPECT_EQ(2, settings.requested_resolution_alignment()); + EXPECT_EQ(2u, settings.requested_resolution_alignment()); EXPECT_FALSE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_TRUE(settings.resolution_bitrate_limits().empty()); } @@ -50,7 +50,7 @@ TEST(SimulcastEncoderAdapterSettingsTest, GetApplyAlignment) { "apply_alignment_to_all_simulcast_layers/"); SimulcastEncoderAdapterEncoderInfoSettings settings; - EXPECT_EQ(3, settings.requested_resolution_alignment()); + EXPECT_EQ(3u, settings.requested_resolution_alignment()); EXPECT_TRUE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_TRUE(settings.resolution_bitrate_limits().empty()); } @@ -94,9 +94,9 @@ TEST(EncoderSettingsTest, CommonSettingsUsedIfEncoderNameUnspecified) { "WebRTC-GetEncoderInfoOverride/requested_resolution_alignment:3/"); LibvpxVp8EncoderInfoSettings vp8_settings; - EXPECT_EQ(2, vp8_settings.requested_resolution_alignment()); + EXPECT_EQ(2u, 
vp8_settings.requested_resolution_alignment()); LibvpxVp9EncoderInfoSettings vp9_settings; - EXPECT_EQ(3, vp9_settings.requested_resolution_alignment()); + EXPECT_EQ(3u, vp9_settings.requested_resolution_alignment()); } } // namespace webrtc diff --git a/rtc_base/experiments/field_trial_parser_unittest.cc b/rtc_base/experiments/field_trial_parser_unittest.cc index 9916edee97..33d275a31c 100644 --- a/rtc_base/experiments/field_trial_parser_unittest.cc +++ b/rtc_base/experiments/field_trial_parser_unittest.cc @@ -18,7 +18,8 @@ namespace webrtc { namespace { -const char kDummyExperiment[] = "WebRTC-DummyExperiment"; + +constexpr char kDummyExperiment[] = "WebRTC-DummyExperiment"; struct DummyExperiment { FieldTrialFlag enabled = FieldTrialFlag("Enabled"); @@ -29,15 +30,15 @@ struct DummyExperiment { FieldTrialParameter hash = FieldTrialParameter("h", "a80"); + field_trial::FieldTrialsAllowedInScopeForTesting k{{kDummyExperiment}}; + + DummyExperiment() + : DummyExperiment(field_trial::FindFullName(kDummyExperiment)) {} + explicit DummyExperiment(absl::string_view field_trial) { ParseFieldTrial({&enabled, &factor, &retries, &size, &ping, &hash}, field_trial); } - DummyExperiment() { - std::string trial_string = field_trial::FindFullName(kDummyExperiment); - ParseFieldTrial({&enabled, &factor, &retries, &size, &ping, &hash}, - trial_string); - } }; enum class CustomEnum { diff --git a/rtc_base/experiments/rate_control_settings.cc b/rtc_base/experiments/rate_control_settings.cc index ea5f90ab39..84e7b1bcc1 100644 --- a/rtc_base/experiments/rate_control_settings.cc +++ b/rtc_base/experiments/rate_control_settings.cc @@ -68,7 +68,6 @@ std::unique_ptr VideoRateControlConfig::Parser() { "vp8_min_pixels", &vp8_min_pixels, // "trust_vp8", &trust_vp8, // "trust_vp9", &trust_vp9, // - "probe_max_allocation", &probe_max_allocation, // "bitrate_adjuster", &bitrate_adjuster, // "adjuster_use_headroom", &adjuster_use_headroom, // "vp8_s0_boost", &vp8_s0_boost, // @@ -173,10 +172,6 
@@ bool RateControlSettings::Vp8BaseHeavyTl3RateAllocation() const { return video_config_.vp8_base_heavy_tl3_alloc; } -bool RateControlSettings::TriggerProbeOnMaxAllocatedBitrateChange() const { - return video_config_.probe_max_allocation; -} - bool RateControlSettings::UseEncoderBitrateAdjuster() const { return video_config_.bitrate_adjuster; } diff --git a/rtc_base/experiments/rate_control_settings.h b/rtc_base/experiments/rate_control_settings.h index 6aff70a686..05e942d39f 100644 --- a/rtc_base/experiments/rate_control_settings.h +++ b/rtc_base/experiments/rate_control_settings.h @@ -38,7 +38,6 @@ struct VideoRateControlConfig { absl::optional vp8_min_pixels; bool trust_vp8 = true; bool trust_vp9 = true; - bool probe_max_allocation = true; bool bitrate_adjuster = true; bool adjuster_use_headroom = true; bool vp8_s0_boost = false; @@ -79,7 +78,6 @@ class RateControlSettings final { bool Vp8BaseHeavyTl3RateAllocation() const; - bool TriggerProbeOnMaxAllocatedBitrateChange() const; bool UseEncoderBitrateAdjuster() const; bool BitrateAdjusterCanUseNetworkHeadroom() const; diff --git a/rtc_base/experiments/rate_control_settings_unittest.cc b/rtc_base/experiments/rate_control_settings_unittest.cc index 79f19e15b5..91ebf531bd 100644 --- a/rtc_base/experiments/rate_control_settings_unittest.cc +++ b/rtc_base/experiments/rate_control_settings_unittest.cc @@ -172,16 +172,6 @@ TEST(RateControlSettingsTest, EXPECT_FALSE(settings_after.Vp8BaseHeavyTl3RateAllocation()); } -TEST(RateControlSettingsTest, TriggerProbeOnMaxAllocatedBitrateChange) { - EXPECT_TRUE(RateControlSettings::ParseFromFieldTrials() - .TriggerProbeOnMaxAllocatedBitrateChange()); - - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/probe_max_allocation:0/"); - EXPECT_FALSE(RateControlSettings::ParseFromFieldTrials() - .TriggerProbeOnMaxAllocatedBitrateChange()); -} - TEST(RateControlSettingsTest, UseEncoderBitrateAdjuster) { // Should be on by default. 
EXPECT_TRUE( diff --git a/rtc_base/memory/fifo_buffer.cc b/rtc_base/memory/fifo_buffer.cc index 116badd915..c159bc979f 100644 --- a/rtc_base/memory/fifo_buffer.cc +++ b/rtc_base/memory/fifo_buffer.cc @@ -49,23 +49,20 @@ StreamState FifoBuffer::GetState() const { return state_; } -StreamResult FifoBuffer::Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) { +StreamResult FifoBuffer::Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) { webrtc::MutexLock lock(&mutex_); const bool was_writable = data_length_ < buffer_length_; size_t copy = 0; - StreamResult result = ReadLocked(buffer, bytes, ©); + StreamResult result = ReadLocked(buffer.data(), buffer.size(), ©); if (result == SR_SUCCESS) { // If read was successful then adjust the read position and number of // bytes buffered. read_position_ = (read_position_ + copy) % buffer_length_; data_length_ -= copy; - if (bytes_read) { - *bytes_read = copy; - } + bytes_read = copy; // if we were full before, and now we're not, post an event if (!was_writable && copy > 0) { @@ -75,23 +72,19 @@ StreamResult FifoBuffer::Read(void* buffer, return result; } -StreamResult FifoBuffer::Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) { +StreamResult FifoBuffer::Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) { webrtc::MutexLock lock(&mutex_); const bool was_readable = (data_length_ > 0); size_t copy = 0; - StreamResult result = WriteLocked(buffer, bytes, ©); + StreamResult result = WriteLocked(buffer.data(), buffer.size(), ©); if (result == SR_SUCCESS) { // If write was successful then adjust the number of readable bytes. 
data_length_ += copy; - if (bytes_written) { - *bytes_written = copy; - } - + bytes_written = copy; // if we didn't have any data to read before, and now we do, post an event if (!was_readable && copy > 0) { PostEvent(SE_READ, 0); diff --git a/rtc_base/memory/fifo_buffer.h b/rtc_base/memory/fifo_buffer.h index aa3164f09a..a225c688ac 100644 --- a/rtc_base/memory/fifo_buffer.h +++ b/rtc_base/memory/fifo_buffer.h @@ -37,14 +37,12 @@ class FifoBuffer final : public StreamInterface { // StreamInterface methods StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) override; - StreamResult Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) override; + StreamResult Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) override; + StreamResult Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) override; void Close() override; // Seek to a byte offset from the beginning of the stream. 
Returns false if diff --git a/rtc_base/memory/fifo_buffer_unittest.cc b/rtc_base/memory/fifo_buffer_unittest.cc index 0e44bf2095..27eb8d8b45 100644 --- a/rtc_base/memory/fifo_buffer_unittest.cc +++ b/rtc_base/memory/fifo_buffer_unittest.cc @@ -19,8 +19,8 @@ namespace rtc { TEST(FifoBufferTest, TestAll) { rtc::AutoThread main_thread; const size_t kSize = 16; - const char in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV"; - char out[kSize * 2]; + const uint8_t in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV"; + uint8_t out[kSize * 2]; void* p; const void* q; size_t bytes; @@ -28,49 +28,58 @@ TEST(FifoBufferTest, TestAll) { // Test assumptions about base state EXPECT_EQ(SS_OPEN, buf.GetState()); - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + int error; + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_TRUE(nullptr != buf.GetWriteBuffer(&bytes)); EXPECT_EQ(kSize, bytes); buf.ConsumeWriteBuffer(0); // Try a full write - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); // Try a write that should block - EXPECT_EQ(SR_BLOCK, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); // Try a full read - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); // Try a read that should block - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Try a too-big write - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize * 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize * 2), bytes, error)); EXPECT_EQ(bytes, kSize); // Try a too-big read - EXPECT_EQ(SR_SUCCESS, buf.Read(out, 
kSize * 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize * 2), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); // Try some small writes and reads - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); @@ -82,22 +91,29 @@ TEST(FifoBufferTest, TestAll) { // XXXXWWWWWWWWXXXX 4567012345670123 // RRRRXXXXXXXXRRRR ....01234567.... // ....RRRRRRRR.... ................ 
- EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize * 3 / 4, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize * 3 / 4), bytes, error)); EXPECT_EQ(kSize * 3 / 4, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 4, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 4), bytes, error)); EXPECT_EQ(kSize / 4, bytes); EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); @@ -106,16 +122,16 @@ TEST(FifoBufferTest, TestAll) { buf.ConsumeWriteBuffer(0); // Try using GetReadData to do a full read - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(q, in, kSize)); buf.ConsumeReadData(kSize); - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, 
buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Try using GetReadData to do some small reads - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize, bytes); @@ -126,7 +142,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(q, in + kSize / 2, kSize / 2)); buf.ConsumeReadData(kSize / 2); - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Try using GetReadData in a wraparound case // WWWWWWWWWWWWWWWW 0123456789ABCDEF @@ -134,9 +150,11 @@ TEST(FifoBufferTest, TestAll) { // WWWWWWWW....XXXX 01234567....CDEF // ............RRRR 01234567........ // RRRRRRRR........ ................ - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize * 3 / 4, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize * 3 / 4), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize / 4, bytes); @@ -158,7 +176,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize, bytes); memcpy(p, in, kSize); buf.ConsumeWriteBuffer(kSize); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); @@ -173,7 +191,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); memcpy(p, in + kSize / 2, kSize / 2); buf.ConsumeWriteBuffer(kSize / 2); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize, &bytes, nullptr)); + 
EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); @@ -183,8 +201,10 @@ TEST(FifoBufferTest, TestAll) { // ........XXXXWWWW ........89AB0123 // WWWW....XXXXXXXX 4567....89AB0123 // RRRR....RRRRRRRR ................ - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize * 3 / 4, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize * 3 / 4), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); p = buf.GetWriteBuffer(&bytes); EXPECT_TRUE(nullptr != p); EXPECT_EQ(kSize / 4, bytes); @@ -195,22 +215,25 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); memcpy(p, in + kSize / 4, kSize / 4); buf.ConsumeWriteBuffer(kSize / 4); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize * 3 / 4, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize * 3 / 4), bytes, error)); EXPECT_EQ(kSize * 3 / 4, bytes); EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4)); EXPECT_EQ(0, memcmp(in, out + kSize / 4, kSize / 4)); // Check that the stream is now empty - EXPECT_EQ(SR_BLOCK, buf.Read(out, kSize, &bytes, nullptr)); + EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); // Write to the stream, close it, read the remaining bytes - EXPECT_EQ(SR_SUCCESS, buf.Write(in, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_SUCCESS, + buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); buf.Close(); EXPECT_EQ(SS_CLOSED, buf.GetState()); - EXPECT_EQ(SR_EOS, buf.Write(in, kSize / 2, &bytes, nullptr)); - EXPECT_EQ(SR_SUCCESS, buf.Read(out, kSize / 2, &bytes, nullptr)); + EXPECT_EQ(SR_EOS, buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, + buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_EOS, buf.Read(out, kSize / 
2, &bytes, nullptr)); + EXPECT_EQ(SR_EOS, buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); } TEST(FifoBufferTest, FullBufferCheck) { diff --git a/rtc_base/memory_stream.cc b/rtc_base/memory_stream.cc index cbd78ac14a..8ceab7aa9b 100644 --- a/rtc_base/memory_stream.cc +++ b/rtc_base/memory_stream.cc @@ -23,38 +23,37 @@ StreamState MemoryStream::GetState() const { return SS_OPEN; } -StreamResult MemoryStream::Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) { +StreamResult MemoryStream::Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) { if (seek_position_ >= data_length_) { return SR_EOS; } size_t available = data_length_ - seek_position_; - if (bytes > available) { + size_t bytes; + if (buffer.size() > available) { // Read partial buffer bytes = available; + } else { + bytes = buffer.size(); } - memcpy(buffer, &buffer_[seek_position_], bytes); + memcpy(buffer.data(), &buffer_[seek_position_], bytes); seek_position_ += bytes; - if (bytes_read) { - *bytes_read = bytes; - } + bytes_read = bytes; return SR_SUCCESS; } -StreamResult MemoryStream::Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) { +StreamResult MemoryStream::Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) { size_t available = buffer_length_ - seek_position_; if (0 == available) { // Increase buffer size to the larger of: // a) new position rounded up to next 256 bytes // b) double the previous length - size_t new_buffer_length = - std::max(((seek_position_ + bytes) | 0xFF) + 1, buffer_length_ * 2); - StreamResult result = DoReserve(new_buffer_length, error); + size_t new_buffer_length = std::max( + ((seek_position_ + buffer.size()) | 0xFF) + 1, buffer_length_ * 2); + StreamResult result = DoReserve(new_buffer_length, &error); if (SR_SUCCESS != result) { return result; } @@ -62,17 +61,16 @@ StreamResult MemoryStream::Write(const void* buffer, available = buffer_length_ - seek_position_; } + size_t bytes = 
buffer.size(); if (bytes > available) { bytes = available; } - memcpy(&buffer_[seek_position_], buffer, bytes); + memcpy(&buffer_[seek_position_], buffer.data(), bytes); seek_position_ += bytes; if (data_length_ < seek_position_) { data_length_ = seek_position_; } - if (bytes_written) { - *bytes_written = bytes; - } + bytes_written = bytes; return SR_SUCCESS; } diff --git a/rtc_base/memory_stream.h b/rtc_base/memory_stream.h index 7deb4b3006..07e07f0694 100644 --- a/rtc_base/memory_stream.h +++ b/rtc_base/memory_stream.h @@ -25,14 +25,12 @@ class MemoryStream final : public StreamInterface { ~MemoryStream() override; StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t bytes, - size_t* bytes_read, - int* error) override; - StreamResult Write(const void* buffer, - size_t bytes, - size_t* bytes_written, - int* error) override; + StreamResult Read(rtc::ArrayView buffer, + size_t& bytes_read, + int& error) override; + StreamResult Write(rtc::ArrayView buffer, + size_t& bytes_written, + int& error) override; void Close() override; bool GetSize(size_t* size) const; bool ReserveSize(size_t size); diff --git a/rtc_base/network.cc b/rtc_base/network.cc index 5ff8d143d9..bbcf690915 100644 --- a/rtc_base/network.cc +++ b/rtc_base/network.cc @@ -50,9 +50,11 @@ using ::webrtc::SafeTask; using ::webrtc::TimeDelta; // List of MAC addresses of known VPN (for windows). -constexpr uint8_t kVpns[2][6] = { - // Cisco AnyConnect. +constexpr uint8_t kVpns[3][6] = { + // Cisco AnyConnect SSL VPN Client. {0x0, 0x5, 0x9A, 0x3C, 0x7A, 0x0}, + // Cisco AnyConnect IPSEC VPN Client. + {0x0, 0x5, 0x9A, 0x3C, 0x78, 0x0}, // GlobalProtect Virtual Ethernet. 
{0x2, 0x50, 0x41, 0x0, 0x0, 0x1}, }; @@ -316,12 +318,22 @@ NetworkManagerBase::enumeration_permission() const { return enumeration_permission_; } +std::unique_ptr NetworkManagerBase::CreateNetwork( + absl::string_view name, + absl::string_view description, + const IPAddress& prefix, + int prefix_length, + AdapterType type) const { + return std::make_unique(name, description, prefix, prefix_length, + type, field_trials_.get()); +} + std::vector NetworkManagerBase::GetAnyAddressNetworks() { std::vector networks; if (!ipv4_any_address_network_) { const rtc::IPAddress ipv4_any_address(INADDR_ANY); - ipv4_any_address_network_ = std::make_unique( - "any", "any", ipv4_any_address, 0, ADAPTER_TYPE_ANY, field_trials_); + ipv4_any_address_network_ = + CreateNetwork("any", "any", ipv4_any_address, 0, ADAPTER_TYPE_ANY); ipv4_any_address_network_->set_default_local_address_provider(this); ipv4_any_address_network_->set_mdns_responder_provider(this); ipv4_any_address_network_->AddIP(ipv4_any_address); @@ -330,8 +342,8 @@ std::vector NetworkManagerBase::GetAnyAddressNetworks() { if (!ipv6_any_address_network_) { const rtc::IPAddress ipv6_any_address(in6addr_any); - ipv6_any_address_network_ = std::make_unique( - "any", "any", ipv6_any_address, 0, ADAPTER_TYPE_ANY, field_trials_); + ipv6_any_address_network_ = + CreateNetwork("any", "any", ipv6_any_address, 0, ADAPTER_TYPE_ANY); ipv6_any_address_network_->set_default_local_address_provider(this); ipv6_any_address_network_->set_mdns_responder_provider(this); ipv6_any_address_network_->AddIP(ipv6_any_address); @@ -531,14 +543,14 @@ bool NetworkManagerBase::IsVpnMacAddress( BasicNetworkManager::BasicNetworkManager( NetworkMonitorFactory* network_monitor_factory, SocketFactory* socket_factory, - const webrtc::FieldTrialsView* field_trials) - : field_trials_(field_trials), + const webrtc::FieldTrialsView* field_trials_view) + : NetworkManagerBase(field_trials_view), network_monitor_factory_(network_monitor_factory), 
socket_factory_(socket_factory), allow_mac_based_ipv6_( - field_trials_->IsEnabled("WebRTC-AllowMACBasedIPv6")), + field_trials()->IsEnabled("WebRTC-AllowMACBasedIPv6")), bind_using_ifname_( - !field_trials_->IsDisabled("WebRTC-BindUsingInterfaceName")) { + !field_trials()->IsDisabled("WebRTC-BindUsingInterfaceName")) { RTC_DCHECK(socket_factory_); } @@ -668,9 +680,8 @@ void BasicNetworkManager::ConvertIfAddrs( if_info.adapter_type = ADAPTER_TYPE_VPN; } - auto network = std::make_unique( - cursor->ifa_name, cursor->ifa_name, prefix, prefix_length, - if_info.adapter_type, field_trials_.get()); + auto network = CreateNetwork(cursor->ifa_name, cursor->ifa_name, prefix, + prefix_length, if_info.adapter_type); network->set_default_local_address_provider(this); network->set_scope_id(scope_id); network->AddIP(ip); @@ -855,12 +866,14 @@ bool BasicNetworkManager::CreateNetworks( reinterpret_cast( adapter_addrs->PhysicalAddress), adapter_addrs->PhysicalAddressLength))) { - underlying_type_for_vpn = adapter_type; + // With MAC-based detection we do not know the + // underlying adapter type. 
+ underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN; adapter_type = ADAPTER_TYPE_VPN; } - auto network = std::make_unique(name, description, prefix, - prefix_length, adapter_type); + auto network = CreateNetwork(name, description, prefix, prefix_length, + adapter_type); network->set_underlying_type_for_vpn(underlying_type_for_vpn); network->set_default_local_address_provider(this); network->set_mdns_responder_provider(this); @@ -965,7 +978,7 @@ void BasicNetworkManager::StartNetworkMonitor() { } if (!network_monitor_) { network_monitor_.reset( - network_monitor_factory_->CreateNetworkMonitor(*field_trials_)); + network_monitor_factory_->CreateNetworkMonitor(*field_trials())); if (!network_monitor_) { return; } diff --git a/rtc_base/network.h b/rtc_base/network.h index d82ddeed88..c7d73bff7a 100644 --- a/rtc_base/network.h +++ b/rtc_base/network.h @@ -231,9 +231,21 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { // refactoring of the interface GetNetworks method. const std::vector& GetNetworksInternal() const { return networks_; } + std::unique_ptr CreateNetwork(absl::string_view name, + absl::string_view description, + const IPAddress& prefix, + int prefix_length, + AdapterType type) const; + + const webrtc::FieldTrialsView* field_trials() const { + return field_trials_.get(); + } + private: friend class NetworkTest; - const webrtc::FieldTrialsView* field_trials_ = nullptr; + webrtc::AlwaysValidPointer + field_trials_; EnumerationPermission enumeration_permission_; std::vector networks_; @@ -348,10 +360,7 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, Thread* thread_ = nullptr; bool sent_first_update_ = true; int start_count_ = 0; - // Chromium create BasicNetworkManager() w/o field trials. 
- webrtc::AlwaysValidPointer - field_trials_; + std::vector network_ignore_list_; NetworkMonitorFactory* const network_monitor_factory_; SocketFactory* const socket_factory_; diff --git a/rtc_base/numerics/sequence_numbers_conformance_test.cc b/rtc_base/numerics/sequence_numbers_conformance_test.cc new file mode 100644 index 0000000000..8c5bc62e56 --- /dev/null +++ b/rtc_base/numerics/sequence_numbers_conformance_test.cc @@ -0,0 +1,202 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include + +#include "modules/include/module_common_types_public.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/strong_alias.h" +#include "rtc_base/time_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::Test; + +using dcsctp::UnwrappedSequenceNumber; +using Wrapped = webrtc::StrongAlias; +using TestSequence = UnwrappedSequenceNumber; + +template +class UnwrapperHelper; + +template <> +class UnwrapperHelper { + public: + int64_t Unwrap(uint32_t val) { + TestSequence s = unwrapper_.Unwrap(Wrapped(val)); + // UnwrappedSequenceNumber starts counting at 2^32. + constexpr int64_t kDcsctpUnwrapStart = int64_t{1} << 32; + return s.value() - kDcsctpUnwrapStart; + } + + private: + TestSequence::Unwrapper unwrapper_; +}; + +// MaxVal is the max of the wrapped space, ie MaxVal + 1 = 0 when wrapped. 
+template ::max()> +struct FixtureParams { + using Unwrapper = U; + static constexpr int64_t kMaxVal = MaxVal; +}; + +template +class UnwrapperConformanceFixture : public Test { + public: + static constexpr int64_t kMaxVal = F::kMaxVal; + static constexpr int64_t kMaxIncrease = kMaxVal / 2; + static constexpr int64_t kMaxBackwardsIncrease = kMaxVal - kMaxIncrease + 1; + + template + static constexpr bool UnwrapperIs() { + return std::is_same(); + } + + typename F::Unwrapper ref_unwrapper_; +}; + +TYPED_TEST_SUITE_P(UnwrapperConformanceFixture); + +TYPED_TEST_P(UnwrapperConformanceFixture, PositiveWrapAround) { + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(TestFixture::kMaxIncrease)); + EXPECT_EQ(2 * TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(2 * TestFixture::kMaxIncrease)); + // Now unwrapping 0 should wrap around to be kMaxVal + 1. + EXPECT_EQ(TestFixture::kMaxVal + 1, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(TestFixture::kMaxVal + 1 + TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(TestFixture::kMaxIncrease)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, NegativeUnwrap) { + using UnwrapperT = decltype(this->ref_unwrapper_); + // webrtc::TimestampUnwrapper known to not handle negative numbers. + // rtc::TimestampWrapAroundHandler does not wrap around correctly. + if constexpr (std::is_same() || + std::is_same()) { + return; + } + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + // Max backwards wrap is negative. + EXPECT_EQ(-TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(this->kMaxBackwardsIncrease)); + // Increase to a larger negative number. + EXPECT_EQ(-2, this->ref_unwrapper_.Unwrap(TestFixture::kMaxVal - 1)); + // Increase back positive. 
+ EXPECT_EQ(1, this->ref_unwrapper_.Unwrap(1)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, BackwardUnwrap) { + EXPECT_EQ(127, this->ref_unwrapper_.Unwrap(127)); + EXPECT_EQ(128, this->ref_unwrapper_.Unwrap(128)); + EXPECT_EQ(127, this->ref_unwrapper_.Unwrap(127)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, MultiplePositiveWrapArounds) { + using UnwrapperT = decltype(this->ref_unwrapper_); + // rtc::TimestampWrapAroundHandler does not wrap around correctly. + if constexpr (std::is_same()) { + return; + } + int64_t val = 0; + uint32_t wrapped_val = 0; + for (int i = 0; i < 16; ++i) { + EXPECT_EQ(val, this->ref_unwrapper_.Unwrap(wrapped_val)); + val += TestFixture::kMaxIncrease; + wrapped_val = + (wrapped_val + TestFixture::kMaxIncrease) % (TestFixture::kMaxVal + 1); + } +} + +TYPED_TEST_P(UnwrapperConformanceFixture, WrapBoundaries) { + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(TestFixture::kMaxIncrease, + this->ref_unwrapper_.Unwrap(TestFixture::kMaxIncrease)); + // Increases by more than TestFixture::kMaxIncrease which indicates a negative + // rollback. + EXPECT_EQ(0, this->ref_unwrapper_.Unwrap(0)); + EXPECT_EQ(10, this->ref_unwrapper_.Unwrap(10)); +} + +TYPED_TEST_P(UnwrapperConformanceFixture, MultipleNegativeWrapArounds) { + using UnwrapperT = decltype(this->ref_unwrapper_); + // webrtc::TimestampUnwrapper known to not handle negative numbers. + // webrtc::SequenceNumberUnwrapper can only wrap negative once. + // rtc::TimestampWrapAroundHandler does not wrap around correctly. 
+ if constexpr (std::is_same() || + std::is_same>() || + std::is_same()) { + return; + } + int64_t val = 0; + uint32_t wrapped_val = 0; + for (int i = 0; i < 16; ++i) { + EXPECT_EQ(val, this->ref_unwrapper_.Unwrap(wrapped_val)); + val -= TestFixture::kMaxIncrease; + wrapped_val = (wrapped_val + this->kMaxBackwardsIncrease) % + (TestFixture::kMaxVal + 1); + } +} + +REGISTER_TYPED_TEST_SUITE_P(UnwrapperConformanceFixture, + NegativeUnwrap, + PositiveWrapAround, + BackwardUnwrap, + WrapBoundaries, + MultiplePositiveWrapArounds, + MultipleNegativeWrapArounds); + +constexpr int64_t k15BitMax = (int64_t{1} << 15) - 1; +using UnwrapperTypes = ::testing::Types< + FixtureParams, + FixtureParams, + FixtureParams>, + FixtureParams>, + // SeqNumUnwrapper supports arbitrary limits. + FixtureParams, k15BitMax>>; + +class TestNames { + public: + template + static std::string GetName(int) { + if constexpr (std::is_same()) + return "TimestampWrapAroundHandler"; + if constexpr (std::is_same()) + return "TimestampUnwrapper"; + if constexpr (std::is_same>()) + return "SeqNumUnwrapper"; + if constexpr (std::is_same< + typename T::Unwrapper, + webrtc::SeqNumUnwrapper>()) + return "SeqNumUnwrapper15bit"; + if constexpr (std::is_same>()) + return "UnwrappedSequenceNumber"; + } +}; + +INSTANTIATE_TYPED_TEST_SUITE_P(UnwrapperConformanceTest, + UnwrapperConformanceFixture, + UnwrapperTypes, + TestNames); + +} // namespace +} // namespace webrtc diff --git a/rtc_base/openssl_stream_adapter.cc b/rtc_base/openssl_stream_adapter.cc index da484ad3bf..9fd8c8f395 100644 --- a/rtc_base/openssl_stream_adapter.cc +++ b/rtc_base/openssl_stream_adapter.cc @@ -28,6 +28,7 @@ #include #include +#include "api/array_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -215,7 +216,8 @@ static int stream_read(BIO* b, char* out, int outl) { BIO_clear_retry_flags(b); size_t read; int error; - StreamResult result = stream->Read(out, outl, &read, 
&error); + StreamResult result = stream->Read( + rtc::MakeArrayView(reinterpret_cast(out), outl), read, error); if (result == SR_SUCCESS) { return checked_cast(read); } else if (result == SR_BLOCK) { @@ -232,7 +234,9 @@ static int stream_write(BIO* b, const char* in, int inl) { BIO_clear_retry_flags(b); size_t written; int error; - StreamResult result = stream->Write(in, inl, &written, &error); + StreamResult result = stream->Write( + rtc::MakeArrayView(reinterpret_cast(in), inl), written, + error); if (result == SR_SUCCESS) { return checked_cast(written); } else if (result == SR_BLOCK) { @@ -390,9 +394,10 @@ std::string OpenSSLStreamAdapter::SslCipherSuiteToName(int cipher_suite) { } return SSL_CIPHER_standard_name(ssl_cipher); #else + const int openssl_cipher_id = 0x03000000L | cipher_suite; for (const SslCipherMapEntry* entry = kSslCipherMap; entry->rfc_name; ++entry) { - if (cipher_suite == static_cast(entry->openssl_id)) { + if (openssl_cipher_id == static_cast(entry->openssl_id)) { return entry->rfc_name; } } @@ -556,17 +561,15 @@ void OpenSSLStreamAdapter::SetInitialRetransmissionTimeout(int timeout_ms) { // // StreamInterface Implementation // - -StreamResult OpenSSLStreamAdapter::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { - RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data_len << ")"; +StreamResult OpenSSLStreamAdapter::Write(rtc::ArrayView data, + size_t& written, + int& error) { + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data.size() << ")"; switch (state_) { case SSL_NONE: // pass-through in clear text - return stream_->Write(data, data_len, written, error); + return stream_->Write(data, written, error); case SSL_WAIT: case SSL_CONNECTING: @@ -581,31 +584,26 @@ StreamResult OpenSSLStreamAdapter::Write(const void* data, case SSL_ERROR: case SSL_CLOSED: default: - if (error) { - *error = ssl_error_code_; - } + error = ssl_error_code_; return SR_ERROR; } // OpenSSL will return an error if we 
try to write zero bytes - if (data_len == 0) { - if (written) { - *written = 0; - } + if (data.size() == 0) { + written = 0; return SR_SUCCESS; } ssl_write_needs_read_ = false; - int code = SSL_write(ssl_, data, checked_cast(data_len)); + int code = SSL_write(ssl_, data.data(), checked_cast(data.size())); int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: RTC_DLOG(LS_VERBOSE) << " -- success"; RTC_DCHECK_GT(code, 0); - RTC_DCHECK_LE(code, data_len); - if (written) - *written = code; + RTC_DCHECK_LE(code, data.size()); + written = code; return SR_SUCCESS; case SSL_ERROR_WANT_READ: RTC_DLOG(LS_VERBOSE) << " -- error want read"; @@ -618,23 +616,20 @@ StreamResult OpenSSLStreamAdapter::Write(const void* data, case SSL_ERROR_ZERO_RETURN: default: Error("SSL_write", (ssl_error ? ssl_error : -1), 0, false); - if (error) { - *error = ssl_error_code_; - } + error = ssl_error_code_; return SR_ERROR; } // not reached } -StreamResult OpenSSLStreamAdapter::Read(void* data, - size_t data_len, - size_t* read, - int* error) { - RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data_len << ")"; +StreamResult OpenSSLStreamAdapter::Read(rtc::ArrayView data, + size_t& read, + int& error) { + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data.size() << ")"; switch (state_) { case SSL_NONE: // pass-through in clear text - return stream_->Read(data, data_len, read, error); + return stream_->Read(data, read, error); case SSL_WAIT: case SSL_CONNECTING: return SR_BLOCK; @@ -647,33 +642,27 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, return SR_EOS; case SSL_ERROR: default: - if (error) { - *error = ssl_error_code_; - } + error = ssl_error_code_; return SR_ERROR; } // Don't trust OpenSSL with zero byte reads - if (data_len == 0) { - if (read) { - *read = 0; - } + if (data.size() == 0) { + read = 0; return SR_SUCCESS; } ssl_read_needs_write_ = false; - const int code = SSL_read(ssl_, data, checked_cast(data_len)); + const int code = 
SSL_read(ssl_, data.data(), checked_cast(data.size())); const int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: RTC_DLOG(LS_VERBOSE) << " -- success"; RTC_DCHECK_GT(code, 0); - RTC_DCHECK_LE(code, data_len); - if (read) { - *read = code; - } + RTC_DCHECK_LE(code, data.size()); + read = code; if (ssl_mode_ == SSL_MODE_DTLS) { // Enforce atomic reads -- this is a short read @@ -682,9 +671,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, if (pending) { RTC_DLOG(LS_INFO) << " -- short DTLS read. flushing"; FlushInput(pending); - if (error) { - *error = SSE_MSG_TRUNC; - } + error = SSE_MSG_TRUNC; return SR_ERROR; } } @@ -702,9 +689,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, return SR_EOS; default: Error("SSL_read", (ssl_error ? ssl_error : -1), 0, false); - if (error) { - *error = ssl_error_code_; - } + error = ssl_error_code_; return SR_ERROR; } // not reached diff --git a/rtc_base/openssl_stream_adapter.h b/rtc_base/openssl_stream_adapter.h index 891f0e6193..aee8d36aad 100644 --- a/rtc_base/openssl_stream_adapter.h +++ b/rtc_base/openssl_stream_adapter.h @@ -95,14 +95,12 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { void SetMaxProtocolVersion(SSLProtocolVersion version) override; void SetInitialRetransmissionTimeout(int timeout_ms) override; - StreamResult Read(void* data, - size_t data_len, - size_t* read, - int* error) override; - StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; + StreamResult Read(rtc::ArrayView data, + size_t& read, + int& error) override; + StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) override; void Close() override; StreamState GetState() const override; diff --git a/rtc_base/physical_socket_server.cc b/rtc_base/physical_socket_server.cc index 154462ddc0..6a6ee5e82b 100644 --- a/rtc_base/physical_socket_server.cc +++ b/rtc_base/physical_socket_server.cc @@ -52,6 +52,7 @@ #include 
"rtc_base/null_socket_server.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" +#include "system_wrappers/include/field_trial.h" #if defined(WEBRTC_LINUX) #include @@ -118,6 +119,12 @@ class ScopedSetTrue { private: bool* value_; }; + +// Returns true if the the client is in the experiment to get timestamps +// from the socket implementation. +bool IsScmTimeStampExperimentEnabled() { + return webrtc::field_trial::IsEnabled("WebRTC-SCM-Timestamp"); +} } // namespace namespace rtc { @@ -127,7 +134,8 @@ PhysicalSocket::PhysicalSocket(PhysicalSocketServer* ss, SOCKET s) s_(s), error_(0), state_((s == INVALID_SOCKET) ? CS_CLOSED : CS_CONNECTED), - resolver_(nullptr) { + resolver_(nullptr), + read_scm_timestamp_experiment_(IsScmTimeStampExperimentEnabled()) { if (s_ != INVALID_SOCKET) { SetEnabledEvents(DE_READ | DE_WRITE); @@ -395,7 +403,7 @@ int PhysicalSocket::SendTo(const void* buffer, int PhysicalSocket::Recv(void* buffer, size_t length, int64_t* timestamp) { int received = - ::recv(s_, static_cast(buffer), static_cast(length), 0); + DoReadFromSocket(buffer, length, /*out_addr*/ nullptr, timestamp); if ((received == 0) && (length != 0)) { // Note: on graceful shutdown, recv can return 0. 
In this case, we // pretend it is blocking, and then signal close, so that simplifying @@ -407,9 +415,7 @@ int PhysicalSocket::Recv(void* buffer, size_t length, int64_t* timestamp) { SetError(EWOULDBLOCK); return SOCKET_ERROR; } - if (timestamp) { - *timestamp = GetSocketRecvTimestamp(s_); - } + UpdateLastError(); int error = GetError(); bool success = (received >= 0) || IsBlockingError(error); @@ -426,17 +432,8 @@ int PhysicalSocket::RecvFrom(void* buffer, size_t length, SocketAddress* out_addr, int64_t* timestamp) { - sockaddr_storage addr_storage; - socklen_t addr_len = sizeof(addr_storage); - sockaddr* addr = reinterpret_cast(&addr_storage); - int received = ::recvfrom(s_, static_cast(buffer), - static_cast(length), 0, addr, &addr_len); - if (timestamp) { - *timestamp = GetSocketRecvTimestamp(s_); - } + int received = DoReadFromSocket(buffer, length, out_addr, timestamp); UpdateLastError(); - if ((received >= 0) && (out_addr != nullptr)) - SocketAddressFromSockAddrStorage(addr_storage, out_addr); int error = GetError(); bool success = (received >= 0) || IsBlockingError(error); if (udp_ || success) { @@ -448,6 +445,84 @@ int PhysicalSocket::RecvFrom(void* buffer, return received; } +int PhysicalSocket::DoReadFromSocket(void* buffer, + size_t length, + SocketAddress* out_addr, + int64_t* timestamp) { + sockaddr_storage addr_storage; + socklen_t addr_len = sizeof(addr_storage); + sockaddr* addr = reinterpret_cast(&addr_storage); + +#if defined(WEBRTC_POSIX) + int received = 0; + if (read_scm_timestamp_experiment_) { + iovec iov = {.iov_base = buffer, .iov_len = length}; + msghdr msg = {.msg_iov = &iov, .msg_iovlen = 1}; + if (out_addr) { + out_addr->Clear(); + msg.msg_name = addr; + msg.msg_namelen = addr_len; + } + char control[CMSG_SPACE(sizeof(struct timeval))] = {}; + if (timestamp) { + *timestamp = -1; + msg.msg_control = &control; + msg.msg_controllen = sizeof(control); + } + received = ::recvmsg(s_, &msg, 0); + if (received <= 0) { + // An error occured or 
shut down. + return received; + } + if (timestamp) { + struct cmsghdr* cmsg; + for (cmsg = CMSG_FIRSTHDR(&msg); cmsg; cmsg = CMSG_NXTHDR(&msg, cmsg)) { + if (cmsg->cmsg_level != SOL_SOCKET) + continue; + if (cmsg->cmsg_type == SCM_TIMESTAMP) { + timeval* ts = reinterpret_cast(CMSG_DATA(cmsg)); + *timestamp = + rtc::kNumMicrosecsPerSec * static_cast(ts->tv_sec) + + static_cast(ts->tv_usec); + break; + } + } + } + if (out_addr) { + SocketAddressFromSockAddrStorage(addr_storage, out_addr); + } + } else { // !read_scm_timestamp_experiment_ + if (out_addr) { + received = ::recvfrom(s_, static_cast(buffer), + static_cast(length), 0, addr, &addr_len); + SocketAddressFromSockAddrStorage(addr_storage, out_addr); + } else { + received = + ::recv(s_, static_cast(buffer), static_cast(length), 0); + } + if (timestamp) { + *timestamp = GetSocketRecvTimestamp(s_); + } + } + return received; + +#else + int received = 0; + if (out_addr) { + received = ::recvfrom(s_, static_cast(buffer), + static_cast(length), 0, addr, &addr_len); + SocketAddressFromSockAddrStorage(addr_storage, out_addr); + } else { + received = + ::recv(s_, static_cast(buffer), static_cast(length), 0); + } + if (timestamp) { + *timestamp = -1; + } + return received; +#endif +} + int PhysicalSocket::Listen(int backlog) { int err = ::listen(s_, backlog); UpdateLastError(); @@ -643,7 +718,16 @@ bool SocketDispatcher::Initialize() { ioctlsocket(s_, FIONBIO, &argp); #elif defined(WEBRTC_POSIX) fcntl(s_, F_SETFL, fcntl(s_, F_GETFL, 0) | O_NONBLOCK); + if (IsScmTimeStampExperimentEnabled()) { + int value = 1; + // Attempt to get receive packet timestamp from the socket. + if (::setsockopt(s_, SOL_SOCKET, SO_TIMESTAMP, &value, sizeof(value)) != + 0) { + RTC_DLOG(LS_ERROR) << "::setsockopt failed. errno: " << LAST_SYSTEM_ERROR; + } + } #endif + #if defined(WEBRTC_IOS) // iOS may kill sockets when the app is moved to the background // (specifically, if the app doesn't use the "voip" UIBackgroundMode). 
When @@ -651,7 +735,9 @@ bool SocketDispatcher::Initialize() { // default will terminate the process, which we don't want. By specifying // this socket option, SIGPIPE will be disabled for the socket. int value = 1; - ::setsockopt(s_, SOL_SOCKET, SO_NOSIGPIPE, &value, sizeof(value)); + if (::setsockopt(s_, SOL_SOCKET, SO_NOSIGPIPE, &value, sizeof(value)) != 0) { + RTC_DLOG(LS_ERROR) << "::setsockopt failed. errno: " << LAST_SYSTEM_ERROR; + } #endif ss_->Add(this); return true; diff --git a/rtc_base/physical_socket_server.h b/rtc_base/physical_socket_server.h index f97271f422..5a3acbf84f 100644 --- a/rtc_base/physical_socket_server.h +++ b/rtc_base/physical_socket_server.h @@ -190,6 +190,11 @@ class PhysicalSocket : public Socket, public sigslot::has_slots<> { const struct sockaddr* dest_addr, socklen_t addrlen); + int DoReadFromSocket(void* buffer, + size_t length, + SocketAddress* out_addr, + int64_t* timestamp); + void OnResolveResult(AsyncResolverInterface* resolver); void UpdateLastError(); @@ -216,6 +221,7 @@ class PhysicalSocket : public Socket, public sigslot::has_slots<> { #endif private: + const bool read_scm_timestamp_experiment_; uint8_t enabled_events_ = 0; }; diff --git a/rtc_base/physical_socket_server_unittest.cc b/rtc_base/physical_socket_server_unittest.cc index 3da777a235..ba8ac85e6c 100644 --- a/rtc_base/physical_socket_server_unittest.cc +++ b/rtc_base/physical_socket_server_unittest.cc @@ -23,6 +23,7 @@ #include "rtc_base/socket_unittest.h" #include "rtc_base/test_utils.h" #include "rtc_base/thread.h" +#include "test/field_trial.h" #include "test/gtest.h" namespace rtc { @@ -460,8 +461,9 @@ TEST_F(PhysicalSocketTest, TestGetSetOptionsIPv6) { #if defined(WEBRTC_POSIX) -// We don't get recv timestamps on Mac. 
#if !defined(WEBRTC_MAC) +// We don't get recv timestamps on Mac without the experiment +// WebRTC-SCM-Timestamp TEST_F(PhysicalSocketTest, TestSocketRecvTimestampIPv4) { MAYBE_SKIP_IPV4; SocketTest::TestSocketRecvTimestampIPv4(); @@ -472,6 +474,17 @@ TEST_F(PhysicalSocketTest, TestSocketRecvTimestampIPv6) { } #endif +TEST_F(PhysicalSocketTest, TestSocketRecvTimestampIPv4ScmExperiment) { + MAYBE_SKIP_IPV4; + webrtc::test::ScopedFieldTrials trial("WebRTC-SCM-Timestamp/Enabled/"); + SocketTest::TestSocketRecvTimestampIPv4(); +} + +TEST_F(PhysicalSocketTest, TestSocketRecvTimestampIPv6ScmExperiment) { + webrtc::test::ScopedFieldTrials trial("WebRTC-SCM-Timestamp/Enabled/"); + SocketTest::TestSocketRecvTimestampIPv6(); +} + // Verify that if the socket was unable to be bound to a real network interface // (not loopback), Bind will return an error. TEST_F(PhysicalSocketTest, @@ -511,4 +524,15 @@ TEST_F(PhysicalSocketTest, #endif +TEST_F(PhysicalSocketTest, UdpSocketRecvTimestampUseRtcEpochIPv4ScmExperiment) { + MAYBE_SKIP_IPV4; + webrtc::test::ScopedFieldTrials trial("WebRTC-SCM-Timestamp/Enabled/"); + SocketTest::TestUdpSocketRecvTimestampUseRtcEpochIPv4(); +} + +TEST_F(PhysicalSocketTest, UdpSocketRecvTimestampUseRtcEpochIPv6ScmExperiment) { + webrtc::test::ScopedFieldTrials trial("WebRTC-SCM-Timestamp/Enabled/"); + SocketTest::TestUdpSocketRecvTimestampUseRtcEpochIPv6(); +} + } // namespace rtc diff --git a/rtc_base/platform_thread_types.cc b/rtc_base/platform_thread_types.cc index b0243b41dc..d64ea689bb 100644 --- a/rtc_base/platform_thread_types.cc +++ b/rtc_base/platform_thread_types.cc @@ -25,6 +25,13 @@ typedef HRESULT(WINAPI* RTC_SetThreadDescription)(HANDLE hThread, PCWSTR lpThreadDescription); #endif +#if defined(WEBRTC_FUCHSIA) +#include +#include + +#include "rtc_base/checks.h" +#endif + namespace rtc { PlatformThreadId CurrentThreadId() { @@ -109,6 +116,10 @@ void SetCurrentThreadName(const char* name) { prctl(PR_SET_NAME, reinterpret_cast(name)); // 
NOLINT #elif defined(WEBRTC_MAC) || defined(WEBRTC_IOS) pthread_setname_np(name); +#elif defined(WEBRTC_FUCHSIA) + zx_status_t status = zx_object_set_property(zx_thread_self(), ZX_PROP_NAME, + name, strlen(name)); + RTC_DCHECK_EQ(status, ZX_OK); #endif } diff --git a/rtc_base/socket_stream.cc b/rtc_base/socket_stream.cc index a526f0c0c8..5c993ea233 100644 --- a/rtc_base/socket_stream.cc +++ b/rtc_base/socket_stream.cc @@ -60,42 +60,36 @@ StreamState SocketStream::GetState() const { } } -StreamResult SocketStream::Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) { +StreamResult SocketStream::Read(rtc::ArrayView buffer, + size_t& read, + int& error) { RTC_DCHECK(socket_ != nullptr); - int result = socket_->Recv(buffer, buffer_len, nullptr); + int result = socket_->Recv(buffer.data(), buffer.size(), nullptr); if (result < 0) { if (socket_->IsBlocking()) return SR_BLOCK; - if (error) - *error = socket_->GetError(); + error = socket_->GetError(); return SR_ERROR; } - if ((result > 0) || (buffer_len == 0)) { - if (read) - *read = result; + if ((result > 0) || (buffer.size() == 0)) { + read = result; return SR_SUCCESS; } return SR_EOS; } -StreamResult SocketStream::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { +StreamResult SocketStream::Write(rtc::ArrayView data, + size_t& written, + int& error) { RTC_DCHECK(socket_ != nullptr); - int result = socket_->Send(data, data_len); + int result = socket_->Send(data.data(), data.size()); if (result < 0) { if (socket_->IsBlocking()) return SR_BLOCK; - if (error) - *error = socket_->GetError(); + error = socket_->GetError(); return SR_ERROR; } - if (written) - *written = result; + written = result; return SR_SUCCESS; } diff --git a/rtc_base/socket_stream.h b/rtc_base/socket_stream.h index f678f805d7..492cc42e96 100644 --- a/rtc_base/socket_stream.h +++ b/rtc_base/socket_stream.h @@ -36,15 +36,13 @@ class SocketStream : public StreamInterface, public sigslot::has_slots<> { 
StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override; + StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override; - StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; + StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) override; void Close() override; diff --git a/rtc_base/socket_unittest.cc b/rtc_base/socket_unittest.cc index ea6407ae6e..40da8c26cf 100644 --- a/rtc_base/socket_unittest.cc +++ b/rtc_base/socket_unittest.cc @@ -14,6 +14,8 @@ #include #include +#include +#include #include #include "absl/memory/memory.h" @@ -221,6 +223,15 @@ void SocketTest::TestSocketRecvTimestampIPv6() { SocketRecvTimestamp(kIPv6Loopback); } +void SocketTest::TestUdpSocketRecvTimestampUseRtcEpochIPv4() { + UdpSocketRecvTimestampUseRtcEpoch(kIPv4Loopback); +} + +void SocketTest::TestUdpSocketRecvTimestampUseRtcEpochIPv6() { + MAYBE_SKIP_IPV6; + UdpSocketRecvTimestampUseRtcEpoch(kIPv6Loopback); +} + // For unbound sockets, GetLocalAddress / GetRemoteAddress return AF_UNSPEC // values on Windows, but an empty address of the same family on Linux/MacOS X. bool IsUnspecOrEmptyIP(const IPAddress& address) { @@ -1070,25 +1081,31 @@ void SocketTest::GetSetOptionsInternal(const IPAddress& loopback) { } void SocketTest::SocketRecvTimestamp(const IPAddress& loopback) { + StreamSink sink; std::unique_ptr socket( socket_factory_->CreateSocket(loopback.family(), SOCK_DGRAM)); EXPECT_EQ(0, socket->Bind(SocketAddress(loopback, 0))); SocketAddress address = socket->GetLocalAddress(); + sink.Monitor(socket.get()); int64_t send_time_1 = TimeMicros(); socket->SendTo("foo", 3, address); + int64_t recv_timestamp_1; + // Wait until data is available. 
+ EXPECT_TRUE_WAIT(sink.Check(socket.get(), SSE_READ), kTimeout); char buffer[3]; - socket->RecvFrom(buffer, 3, nullptr, &recv_timestamp_1); - EXPECT_GT(recv_timestamp_1, -1); + ASSERT_GT(socket->RecvFrom(buffer, 3, nullptr, &recv_timestamp_1), 0); const int64_t kTimeBetweenPacketsMs = 100; Thread::SleepMs(kTimeBetweenPacketsMs); int64_t send_time_2 = TimeMicros(); socket->SendTo("bar", 3, address); + // Wait until data is available. + EXPECT_TRUE_WAIT(sink.Check(socket.get(), SSE_READ), kTimeout); int64_t recv_timestamp_2; - socket->RecvFrom(buffer, 3, nullptr, &recv_timestamp_2); + ASSERT_GT(socket->RecvFrom(buffer, 3, nullptr, &recv_timestamp_2), 0); int64_t system_time_diff = send_time_2 - send_time_1; int64_t recv_timestamp_diff = recv_timestamp_2 - recv_timestamp_1; @@ -1097,4 +1114,30 @@ void SocketTest::SocketRecvTimestamp(const IPAddress& loopback) { EXPECT_NEAR(system_time_diff, recv_timestamp_diff, 10000); } +void SocketTest::UdpSocketRecvTimestampUseRtcEpoch(const IPAddress& loopback) { + SocketAddress empty = EmptySocketAddressWithFamily(loopback.family()); + std::unique_ptr socket( + socket_factory_->CreateSocket(loopback.family(), SOCK_DGRAM)); + ASSERT_EQ(socket->Bind(SocketAddress(loopback, 0)), 0); + SocketAddress address = socket->GetLocalAddress(); + socket = nullptr; + + auto client1 = std::make_unique( + absl::WrapUnique(AsyncUDPSocket::Create(socket_factory_, address))); + auto client2 = std::make_unique( + absl::WrapUnique(AsyncUDPSocket::Create(socket_factory_, empty))); + + SocketAddress addr2; + client2->SendTo("foo", 3, address); + std::unique_ptr packet_1 = client1->NextPacket(10000); + ASSERT_TRUE(packet_1 != nullptr); + EXPECT_NEAR(packet_1->packet_time_us, rtc::TimeMicros(), 1000'000); + + Thread::SleepMs(100); + client2->SendTo("bar", 3, address); + std::unique_ptr packet_2 = client1->NextPacket(10000); + ASSERT_TRUE(packet_2 != nullptr); + EXPECT_GT(packet_2->packet_time_us, packet_1->packet_time_us); + 
EXPECT_NEAR(packet_2->packet_time_us, rtc::TimeMicros(), 1000'000); +} } // namespace rtc diff --git a/rtc_base/socket_unittest.h b/rtc_base/socket_unittest.h index 20ef003a80..db79be2eb8 100644 --- a/rtc_base/socket_unittest.h +++ b/rtc_base/socket_unittest.h @@ -62,6 +62,8 @@ class SocketTest : public ::testing::Test { void TestGetSetOptionsIPv6(); void TestSocketRecvTimestampIPv4(); void TestSocketRecvTimestampIPv6(); + void TestUdpSocketRecvTimestampUseRtcEpochIPv4(); + void TestUdpSocketRecvTimestampUseRtcEpochIPv6(); static const int kTimeout = 5000; // ms const IPAddress kIPv4Loopback; @@ -92,6 +94,7 @@ class SocketTest : public ::testing::Test { void UdpReadyToSend(const IPAddress& loopback); void GetSetOptionsInternal(const IPAddress& loopback); void SocketRecvTimestamp(const IPAddress& loopback); + void UdpSocketRecvTimestampUseRtcEpoch(const IPAddress& loopback); SocketFactory* socket_factory_; }; diff --git a/rtc_base/ssl_adapter_unittest.cc b/rtc_base/ssl_adapter_unittest.cc index cd63249190..2da59ddbb2 100644 --- a/rtc_base/ssl_adapter_unittest.cc +++ b/rtc_base/ssl_adapter_unittest.cc @@ -204,7 +204,9 @@ class SSLAdapterTestDummyServer : public sigslot::has_slots<> { int error; rtc::StreamResult r = ssl_stream_adapter_->Write( - message.data(), message.length(), &written, &error); + rtc::MakeArrayView(reinterpret_cast(message.data()), + message.size()), + written, error); if (r == rtc::SR_SUCCESS) { return written; } else { @@ -236,18 +238,19 @@ class SSLAdapterTestDummyServer : public sigslot::has_slots<> { void OnSSLStreamAdapterEvent(rtc::StreamInterface* stream, int sig, int err) { if (sig & rtc::SE_READ) { - char buffer[4096] = ""; + uint8_t buffer[4096] = ""; size_t read; int error; // Read data received from the client and store it in our internal // buffer. 
- rtc::StreamResult r = - stream->Read(buffer, sizeof(buffer) - 1, &read, &error); + rtc::StreamResult r = stream->Read(buffer, read, error); if (r == rtc::SR_SUCCESS) { buffer[read] = '\0'; - RTC_LOG(LS_INFO) << "Server received '" << buffer << "'"; - data_ += buffer; + // Here we assume that the buffer is interpretable as string. + char* buffer_as_char = reinterpret_cast(buffer); + RTC_LOG(LS_INFO) << "Server received '" << buffer_as_char << "'"; + data_ += buffer_as_char; } } } diff --git a/rtc_base/ssl_stream_adapter_unittest.cc b/rtc_base/ssl_stream_adapter_unittest.cc index 49cbbe0a02..fb909e7ea1 100644 --- a/rtc_base/ssl_stream_adapter_unittest.cc +++ b/rtc_base/ssl_stream_adapter_unittest.cc @@ -17,6 +17,7 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/buffer_queue.h" #include "rtc_base/checks.h" @@ -159,13 +160,12 @@ class SSLDummyStreamBase : public rtc::StreamInterface, rtc::StreamState GetState() const override { return rtc::SS_OPEN; } - rtc::StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override { + rtc::StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override { rtc::StreamResult r; - r = in_->Read(buffer, buffer_len, read, error); + r = in_->Read(buffer, read, error); if (r == rtc::SR_BLOCK) return rtc::SR_BLOCK; if (r == rtc::SR_EOS) @@ -201,17 +201,15 @@ class SSLDummyStreamBase : public rtc::StreamInterface, } // Write to the outgoing FifoBuffer - rtc::StreamResult WriteData(const void* data, - size_t data_len, - size_t* written, - int* error) { - return out_->Write(data, data_len, written, error); + rtc::StreamResult WriteData(rtc::ArrayView data, + size_t& written, + int& error) { + return out_->Write(data, written, error); } - rtc::StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; + rtc::StreamResult 
Write(rtc::ArrayView data, + size_t& written, + int& error) override; void Close() override { RTC_LOG(LS_INFO) << "Closing outbound stream"; @@ -254,12 +252,11 @@ class BufferQueueStream : public rtc::StreamInterface { rtc::StreamState GetState() const override { return rtc::SS_OPEN; } // Reading a buffer queue stream will either succeed or block. - rtc::StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override { + rtc::StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) override { const bool was_writable = buffer_.is_writable(); - if (!buffer_.ReadFront(buffer, buffer_len, read)) + if (!buffer_.ReadFront(buffer.data(), buffer.size(), &read)) return rtc::SR_BLOCK; if (!was_writable) @@ -269,12 +266,11 @@ class BufferQueueStream : public rtc::StreamInterface { } // Writing to a buffer queue stream will either succeed or block. - rtc::StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override { + rtc::StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) override { const bool was_readable = buffer_.is_readable(); - if (!buffer_.WriteBack(data, data_len, written)) + if (!buffer_.WriteBack(data.data(), data.size(), &written)) return rtc::SR_BLOCK; if (!was_readable) @@ -583,10 +579,11 @@ class SSLStreamAdapterTestBase : public ::testing::Test, // SS_OPENING and writes should return SR_BLOCK. 
EXPECT_EQ(rtc::SS_OPENING, client_ssl_->GetState()); EXPECT_EQ(rtc::SS_OPENING, server_ssl_->GetState()); - unsigned char packet[1]; + uint8_t packet[1]; size_t sent; - EXPECT_EQ(rtc::SR_BLOCK, client_ssl_->Write(&packet, 1, &sent, 0)); - EXPECT_EQ(rtc::SR_BLOCK, server_ssl_->Write(&packet, 1, &sent, 0)); + int error; + EXPECT_EQ(rtc::SR_BLOCK, client_ssl_->Write(packet, sent, error)); + EXPECT_EQ(rtc::SR_BLOCK, server_ssl_->Write(packet, sent, error)); // Collect both of the certificate digests; needs to be done before calling // SetPeerCertificateDigest as that may reset the identity. @@ -625,8 +622,10 @@ class SSLStreamAdapterTestBase : public ::testing::Test, EXPECT_EQ(rtc::SS_OPEN, client_ssl_->GetState()); // If the client sends a packet while the server still hasn't verified the // client identity, the server should continue to return SR_BLOCK. - EXPECT_EQ(rtc::SR_SUCCESS, client_ssl_->Write(&packet, 1, &sent, 0)); - EXPECT_EQ(rtc::SR_BLOCK, server_ssl_->Read(&packet, 1, 0, 0)); + int error; + EXPECT_EQ(rtc::SR_SUCCESS, client_ssl_->Write(packet, sent, error)); + size_t read; + EXPECT_EQ(rtc::SR_BLOCK, server_ssl_->Read(packet, read, error)); } else { EXPECT_EQ(rtc::SS_CLOSED, client_ssl_->GetState()); } @@ -646,17 +645,17 @@ class SSLStreamAdapterTestBase : public ::testing::Test, rtc::StreamResult DataWritten(SSLDummyStreamBase* from, const void* data, size_t data_len, - size_t* written, - int* error) { + size_t& written, + int& error) { // Randomly drop loss_ percent of packets if (rtc::CreateRandomId() % 100 < static_cast(loss_)) { RTC_LOG(LS_VERBOSE) << "Randomly dropping packet, size=" << data_len; - *written = data_len; + written = data_len; return rtc::SR_SUCCESS; } if (dtls_ && (data_len > mtu_)) { RTC_LOG(LS_VERBOSE) << "Dropping packet > mtu, size=" << data_len; - *written = data_len; + written = data_len; return rtc::SR_SUCCESS; } @@ -664,17 +663,19 @@ class SSLStreamAdapterTestBase : public ::testing::Test, // handshake packets and we damage the 
last byte to keep the header // intact but break the MAC. if (damage_ && (*static_cast(data) == 23)) { - std::vector buf(data_len); + std::vector buf(data_len); RTC_LOG(LS_VERBOSE) << "Damaging packet"; memcpy(&buf[0], data, data_len); buf[data_len - 1]++; - - return from->WriteData(&buf[0], data_len, written, error); + return from->WriteData(rtc::MakeArrayView(&buf[0], data_len), written, + error); } - return from->WriteData(data, data_len, written, error); + return from->WriteData( + rtc::MakeArrayView(reinterpret_cast(data), data_len), + written, error); } void SetDelay(int delay) { delay_ = delay; } @@ -809,8 +810,10 @@ class SSLStreamAdapterTestTLS send_stream_.ReserveSize(size); for (int i = 0; i < size; ++i) { - char ch = static_cast(i); - send_stream_.Write(&ch, 1, nullptr, nullptr); + uint8_t ch = static_cast(i); + size_t written; + int error; + send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); @@ -835,7 +838,7 @@ class SSLStreamAdapterTestTLS size_t position, tosend, size; rtc::StreamResult rv; size_t sent; - char block[kBlockSize]; + uint8_t block[kBlockSize]; send_stream_.GetSize(&size); if (!size) @@ -843,9 +846,10 @@ class SSLStreamAdapterTestTLS for (;;) { send_stream_.GetPosition(&position); - if (send_stream_.Read(block, sizeof(block), &tosend, nullptr) != - rtc::SR_EOS) { - rv = client_ssl_->Write(block, tosend, &sent, 0); + int dummy_error; + if (send_stream_.Read(block, tosend, dummy_error) != rtc::SR_EOS) { + int error; + rv = client_ssl_->Write(rtc::MakeArrayView(block, tosend), sent, error); if (rv == rtc::SR_SUCCESS) { send_stream_.SetPosition(position + sent); @@ -868,13 +872,13 @@ class SSLStreamAdapterTestTLS } void ReadData(rtc::StreamInterface* stream) override { - char buffer[1600]; + uint8_t buffer[1600]; size_t bread; int err2; rtc::StreamResult r; for (;;) { - r = stream->Read(buffer, sizeof(buffer), &bread, &err2); + r = stream->Read(buffer, bread, err2); if (r == rtc::SR_ERROR || r == 
rtc::SR_EOS) { // Unfortunately, errors are the way that the stream adapter @@ -888,8 +892,9 @@ class SSLStreamAdapterTestTLS ASSERT_EQ(rtc::SR_SUCCESS, r); RTC_LOG(LS_VERBOSE) << "Read " << bread; - - recv_stream_.Write(buffer, bread, nullptr, nullptr); + size_t written; + int error; + recv_stream_.Write(rtc::MakeArrayView(buffer, bread), written, error); } } @@ -927,7 +932,7 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { } void WriteData() override { - unsigned char* packet = new unsigned char[1600]; + uint8_t* packet = new uint8_t[1600]; while (sent_ < count_) { unsigned int rand_state = sent_; @@ -939,7 +944,9 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { } size_t sent; - rtc::StreamResult rv = client_ssl_->Write(packet, packet_size_, &sent, 0); + int error; + rtc::StreamResult rv = client_ssl_->Write( + rtc::MakeArrayView(packet, packet_size_), sent, error); if (rv == rtc::SR_SUCCESS) { RTC_LOG(LS_VERBOSE) << "Sent: " << sent_; sent_++; @@ -956,13 +963,13 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { } void ReadData(rtc::StreamInterface* stream) override { - unsigned char buffer[2000]; + uint8_t buffer[2000]; size_t bread; int err2; rtc::StreamResult r; for (;;) { - r = stream->Read(buffer, 2000, &bread, &err2); + r = stream->Read(buffer, bread, err2); if (r == rtc::SR_ERROR) { // Unfortunately, errors are the way that the stream adapter @@ -1034,22 +1041,22 @@ class SSLStreamAdapterTestDTLS : SSLStreamAdapterTestDTLSBase(cert_pem, private_key_pem) {} }; -rtc::StreamResult SSLDummyStreamBase::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { - RTC_LOG(LS_VERBOSE) << "Writing to loopback " << data_len; +rtc::StreamResult SSLDummyStreamBase::Write(rtc::ArrayView data, + size_t& written, + int& error) { + RTC_LOG(LS_VERBOSE) << "Writing to loopback " << data.size(); if (first_packet_) { first_packet_ = false; if (test_base_->GetLoseFirstPacket()) { - 
RTC_LOG(LS_INFO) << "Losing initial packet of length " << data_len; - *written = data_len; // Fake successful writing also to writer. + RTC_LOG(LS_INFO) << "Losing initial packet of length " << data.size(); + written = data.size(); // Fake successful writing also to writer. return rtc::SR_SUCCESS; } } - return test_base_->DataWritten(this, data, data_len, written, error); + return test_base_->DataWritten(this, data.data(), data.size(), written, + error); } class SSLStreamAdapterTestDTLSFromPEMStrings : public SSLStreamAdapterTestDTLS { @@ -1166,15 +1173,16 @@ TEST_P(SSLStreamAdapterTestTLS, ReadWriteAfterClose) { client_ssl_->Close(); rtc::StreamResult rv; - char block[kBlockSize]; + uint8_t block[kBlockSize]; size_t dummy; + int error; // It's an error to write after closed. - rv = client_ssl_->Write(block, sizeof(block), &dummy, nullptr); + rv = client_ssl_->Write(block, dummy, error); ASSERT_EQ(rtc::SR_ERROR, rv); // But after closed read gives you EOS. - rv = client_ssl_->Read(block, sizeof(block), &dummy, nullptr); + rv = client_ssl_->Read(block, dummy, error); ASSERT_EQ(rtc::SR_EOS, rv); } diff --git a/rtc_base/stream.cc b/rtc_base/stream.cc index e1aab8cc22..e6b74b49ac 100644 --- a/rtc_base/stream.cc +++ b/rtc_base/stream.cc @@ -15,6 +15,7 @@ #include #include +#include "api/array_view.h" #include "rtc_base/checks.h" #include "rtc_base/thread.h" @@ -31,8 +32,10 @@ StreamResult StreamInterface::WriteAll(const void* data, StreamResult result = SR_SUCCESS; size_t total_written = 0, current_written; while (total_written < data_len) { - result = Write(static_cast(data) + total_written, - data_len - total_written, ¤t_written, error); + result = Write(ArrayView( + reinterpret_cast(data) + total_written, + data_len - total_written), + current_written, *error); if (result != SR_SUCCESS) break; total_written += current_written; diff --git a/rtc_base/stream.h b/rtc_base/stream.h index 7a9a588733..e02349aed3 100644 --- a/rtc_base/stream.h +++ b/rtc_base/stream.h @@ 
-13,6 +13,7 @@ #include +#include "api/array_view.h" #include "rtc_base/buffer.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -68,14 +69,14 @@ class RTC_EXPORT StreamInterface { // block, or the stream is in SS_OPENING state. // SR_EOS: the end-of-stream has been reached, or the stream is in the // SS_CLOSED state. - virtual StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) = 0; - virtual StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) = 0; + + virtual StreamResult Read(rtc::ArrayView buffer, + size_t& read, + int& error) = 0; + virtual StreamResult Write(rtc::ArrayView data, + size_t& written, + int& error) = 0; + // Attempt to transition to the SS_CLOSED state. SE_CLOSE will not be // signalled as a result of this call. virtual void Close() = 0; @@ -104,10 +105,19 @@ class RTC_EXPORT StreamInterface { // unlike Write, the argument 'written' is always set, and may be non-zero // on results other than SR_SUCCESS. The remaining arguments have the // same semantics as Write. - StreamResult WriteAll(const void* data, - size_t data_len, - size_t* written, - int* error); + [[deprecated("Use version with ArrayView")]] StreamResult + WriteAll(const void* data, size_t data_len, size_t* written, int* error); + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + // TODO(bugs.webrc.org/14632): Remove pragmas and change underlying + // implementation when downstream code is converted. 
+ StreamResult WriteAll(ArrayView data, + size_t& written, + int& error) { + return WriteAll(data.data(), data.size(), &written, &error); + } +#pragma clang diagnostic pop protected: StreamInterface(); diff --git a/rtc_base/system_time.cc b/rtc_base/system_time.cc index d53d923148..058e6c2990 100644 --- a/rtc_base/system_time.cc +++ b/rtc_base/system_time.cc @@ -69,6 +69,10 @@ int64_t SystemTimeNanos() { #elif defined(WINUWP) ticks = WinUwpSystemTimeNanos(); #elif defined(WEBRTC_WIN) + // TODO(webrtc:14601): Fix the volatile increment instead of suppressing the + // warning. +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-volatile" static volatile LONG last_timegettime = 0; static volatile int64_t num_wrap_timegettime = 0; volatile LONG* last_timegettime_ptr = &last_timegettime; @@ -87,6 +91,7 @@ int64_t SystemTimeNanos() { // TODO(deadbeef): Calculate with nanosecond precision. Otherwise, we're // just wasting a multiply and divide when doing Time() on Windows. ticks = ticks * kNumNanosecsPerMillisec; +#pragma clang diagnostic pop #else #error Unsupported platform. #endif diff --git a/rtc_tools/BUILD.gn b/rtc_tools/BUILD.gn index d219f598a9..60e39f22a2 100644 --- a/rtc_tools/BUILD.gn +++ b/rtc_tools/BUILD.gn @@ -14,14 +14,11 @@ group("rtc_tools") { # This target shall build all targets in tools/. 
testonly = true - deps = [ - ":frame_analyzer", - ":video_file_reader", - ] + deps = [ ":video_file_reader" ] if (!build_with_chromium) { deps += [ + ":frame_analyzer", ":psnr_ssim_analyzer", - ":rgba_to_i420_converter", ":video_quality_analysis", ] } @@ -29,10 +26,7 @@ group("rtc_tools") { deps += [ ":chart_proto" ] } if (!build_with_chromium && rtc_include_tests) { - deps += [ - ":tools_unittests", - ":yuv_to_ivf_converter", - ] + deps += [ ":tools_unittests" ] } if (rtc_include_tests && rtc_enable_protobuf) { deps += [ @@ -124,40 +118,6 @@ rtc_library("video_quality_analysis") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } -# Abseil dependencies are not moved to the absl_deps field deliberately. -# If build_with_chromium is true, the absl_deps replaces the dependencies with -# the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags -# (and some others) because they cannot be used in Chromiums. Special exception -# for the "frame_analyzer" target in "third_party/abseil-cpp/absl.gni" allows -# it to be build in chromium. -rtc_executable("frame_analyzer") { - visibility = [ "*" ] - testonly = true - sources = [ "frame_analyzer/frame_analyzer.cc" ] - - deps = [ - ":video_file_reader", - ":video_file_writer", - ":video_quality_analysis", - "../api:make_ref_counted", - "../api:scoped_refptr", - "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", - "../api/test/metrics:global_metrics_logger_and_exporter", - "../api/test/metrics:metrics_exporter", - "../api/test/metrics:stdout_metrics_exporter", - "../rtc_base:stringutils", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/strings", - ] - - if (build_with_chromium) { - # When building from Chromium, WebRTC's metrics and field trial - # implementations need to be replaced by the Chromium ones. 
- deps += [ "//third_party/webrtc_overrides:webrtc_component" ] - } -} - # TODO(bugs.webrtc.org/11474): Enable this on win if needed. For now it # is only required for Linux and Android. if (!build_with_chromium && !build_with_mozilla && !is_win && !is_ios) { @@ -182,6 +142,44 @@ if (!is_component_build) { # (and some others) because they cannot be used in Chromiums. Special exception # for the "frame_analyzer" target in "third_party/abseil-cpp/absl.gni" allows # it to be build in chromium. + rtc_executable("frame_analyzer") { + visibility = [ "*" ] + testonly = true + sources = [ "frame_analyzer/frame_analyzer.cc" ] + + deps = [ + ":video_file_reader", + ":video_file_writer", + ":video_quality_analysis", + "../api:make_ref_counted", + "../api:scoped_refptr", + "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", + "../api/test/metrics:global_metrics_logger_and_exporter", + "../api/test/metrics:metrics_exporter", + "../api/test/metrics:stdout_metrics_exporter", + "../rtc_base:stringutils", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/strings", + ] + + if (build_with_chromium) { + # When building from Chromium, WebRTC's metrics and field trial + # implementations need to be replaced by the Chromium ones. + deps += [ "//third_party/webrtc_overrides:webrtc_component" ] + } + } + + # This target can be built from Chromium but it doesn't support + # is_component_build=true because it depends on WebRTC testonly code + # which is not part of //third_party/webrtc_overrides:webrtc_component. + + # Abseil dependencies are not moved to the absl_deps field deliberately. + # If build_with_chromium is true, the absl_deps replaces the dependencies with + # the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags + # (and some others) because they cannot be used in Chromiums. 
Special exception + # for the "rtp_generator" target in "third_party/abseil-cpp/absl.gni" allows + # it to be build in chromium. rtc_executable("rtp_generator") { visibility = [ "*" ] testonly = true @@ -241,7 +239,7 @@ if (!is_component_build) { # If build_with_chromium is true, the absl_deps replaces the dependencies with # the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags # (and some others) because they cannot be used in Chromiums. Special exception - # for the "frame_analyzer" target in "third_party/abseil-cpp/absl.gni" allows + # for the "video_replay" target in "third_party/abseil-cpp/absl.gni" allows # it to be build in chromium. rtc_executable("video_replay") { visibility = [ "*" ] @@ -338,24 +336,6 @@ if (!build_with_chromium) { ] } - rtc_executable("rgba_to_i420_converter") { - visibility = [ "*" ] - testonly = true - sources = [ - "converter/converter.cc", - "converter/converter.h", - "converter/rgba_to_i420_converter.cc", - ] - - deps = [ - "../common_video", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/flags:usage", - "//third_party/libyuv", - ] - } - if (rtc_enable_protobuf) { proto_library("chart_proto") { visibility = [ "*" ] @@ -437,41 +417,6 @@ if (!build_with_chromium) { if (rtc_include_tests) { if (!build_with_chromium) { - rtc_executable("yuv_to_ivf_converter") { - visibility = [ "*" ] - testonly = true - sources = [ "converter/yuv_to_ivf_converter.cc" ] - deps = [ - "../api:create_frame_generator", - "../api:frame_generator_api", - "../api/task_queue:default_task_queue_factory", - "../api/video:encoded_image", - "../api/video:video_frame", - "../api/video_codecs:video_codecs_api", - "../media:rtc_media_base", - "../modules/rtp_rtcp:rtp_rtcp_format", - "../modules/video_coding:video_codec_interface", - "../modules/video_coding:video_coding_utility", - "../modules/video_coding:webrtc_h264", - "../modules/video_coding:webrtc_vp8", - 
"../modules/video_coding:webrtc_vp9", - "../rtc_base:checks", - "../rtc_base:criticalsection", - "../rtc_base:logging", - "../rtc_base:rtc_event", - "../rtc_base:rtc_task_queue", - "../rtc_base/synchronization:mutex", - "../rtc_base/system:file_wrapper", - "../test:video_test_common", - "../test:video_test_support", - "//third_party/abseil-cpp/absl/debugging:failure_signal_handler", - "//third_party/abseil-cpp/absl/debugging:symbolize", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/strings", - ] - } - if (rtc_enable_protobuf) { rtc_executable("event_log_visualizer") { # TODO(bugs.webrtc.org/14248): Remove once usage of std::tmpnam diff --git a/rtc_tools/converter/converter.cc b/rtc_tools/converter/converter.cc deleted file mode 100644 index ca5eb26703..0000000000 --- a/rtc_tools/converter/converter.cc +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "rtc_tools/converter/converter.h" - -#include -#include - -#include -#include - -#include "third_party/libyuv/include/libyuv/compare.h" -#include "third_party/libyuv/include/libyuv/convert.h" - -#ifdef WIN32 -#define SEPARATOR '\\' -#define STAT _stat -#else -#define SEPARATOR '/' -#define STAT stat -#endif - -namespace webrtc { -namespace test { - -Converter::Converter(int width, int height) : width_(width), height_(height) {} - -bool Converter::ConvertRGBAToI420Video(std::string frames_dir, - std::string output_file_name, - bool delete_frames) { - FILE* output_file = fopen(output_file_name.c_str(), "wb"); - - // Open output file in append mode. - if (output_file == NULL) { - fprintf(stderr, "Couldn't open input file for reading: %s\n", - output_file_name.c_str()); - return false; - } - - int input_frame_size = InputFrameSize(); - uint8_t* rgba_buffer = new uint8_t[input_frame_size]; - int y_plane_size = YPlaneSize(); - uint8_t* dst_y = new uint8_t[y_plane_size]; - int u_plane_size = UPlaneSize(); - uint8_t* dst_u = new uint8_t[u_plane_size]; - int v_plane_size = VPlaneSize(); - uint8_t* dst_v = new uint8_t[v_plane_size]; - - int counter = 0; // Counter to form frame names. - bool success = false; // Is conversion successful. - - while (true) { - std::string file_name = FormFrameName(4, counter); - // Get full path file name. - std::string input_file_name = FindFullFileName(frames_dir, file_name); - - if (FileExists(input_file_name)) { - ++counter; // Update counter for the next round. - } else { - fprintf(stdout, "Reached end of frames list\n"); - break; - } - - // Read the RGBA frame into rgba_buffer. - ReadRGBAFrame(input_file_name.c_str(), input_frame_size, rgba_buffer); - - // Delete the input frame. - if (delete_frames) { - if (remove(input_file_name.c_str()) != 0) { - fprintf(stderr, "Cannot delete file %s\n", input_file_name.c_str()); - } - } - - // Convert to I420 frame. 
- libyuv::ABGRToI420(rgba_buffer, SrcStrideFrame(), dst_y, DstStrideY(), - dst_u, DstStrideU(), dst_v, DstStrideV(), width_, - height_); - - // Add the I420 frame to the YUV video file. - success = AddYUVToFile(dst_y, y_plane_size, dst_u, u_plane_size, dst_v, - v_plane_size, output_file); - - if (!success) { - fprintf(stderr, "LibYUV error during RGBA to I420 frame conversion\n"); - break; - } - } - - delete[] rgba_buffer; - delete[] dst_y; - delete[] dst_u; - delete[] dst_v; - - fclose(output_file); - - return success; -} - -bool Converter::AddYUVToFile(uint8_t* y_plane, - int y_plane_size, - uint8_t* u_plane, - int u_plane_size, - uint8_t* v_plane, - int v_plane_size, - FILE* output_file) { - bool success = AddYUVPlaneToFile(y_plane, y_plane_size, output_file) && - AddYUVPlaneToFile(u_plane, u_plane_size, output_file) && - AddYUVPlaneToFile(v_plane, v_plane_size, output_file); - return success; -} - -bool Converter::AddYUVPlaneToFile(uint8_t* yuv_plane, - int yuv_plane_size, - FILE* file) { - size_t bytes_written = fwrite(yuv_plane, 1, yuv_plane_size, file); - - if (bytes_written != static_cast(yuv_plane_size)) { - fprintf(stderr, - "Number of bytes written (%d) doesn't match size of y plane" - " (%d)\n", - static_cast(bytes_written), yuv_plane_size); - return false; - } - return true; -} - -bool Converter::ReadRGBAFrame(const char* input_file_name, - int input_frame_size, - unsigned char* buffer) { - FILE* input_file = fopen(input_file_name, "rb"); - if (input_file == NULL) { - fprintf(stderr, "Couldn't open input file for reading: %s\n", - input_file_name); - return false; - } - - size_t nbr_read = fread(buffer, 1, input_frame_size, input_file); - fclose(input_file); - - if (nbr_read != static_cast(input_frame_size)) { - fprintf(stderr, "Error reading from input file: %s\n", input_file_name); - return false; - } - - return true; -} - -std::string Converter::FindFullFileName(std::string dir_name, - std::string file_name) { - return dir_name + SEPARATOR + 
file_name; -} - -bool Converter::FileExists(std::string file_name_to_check) { - struct STAT file_info; - int result = STAT(file_name_to_check.c_str(), &file_info); - return (result == 0); -} - -std::string Converter::FormFrameName(int width, int number) { - std::stringstream tmp; - - // Zero-pad number to a string. - tmp << std::setfill('0') << std::setw(width) << number; - - return "frame_" + tmp.str(); -} - -} // namespace test -} // namespace webrtc diff --git a/rtc_tools/converter/converter.h b/rtc_tools/converter/converter.h deleted file mode 100644 index 976458cf88..0000000000 --- a/rtc_tools/converter/converter.h +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_TOOLS_CONVERTER_CONVERTER_H_ -#define RTC_TOOLS_CONVERTER_CONVERTER_H_ - -#include - -#include - -namespace webrtc { -namespace test { - -// Handles a conversion between a set of RGBA frames to a YUV (I420) video. -class Converter { - public: - Converter(int width, int height); - - // Converts RGBA to YUV video. If the delete_frames argument is true, the - // method will delete the input frames after conversion. - bool ConvertRGBAToI420Video(std::string frames_dir, - std::string output_file_name, - bool delete_frames); - - private: - int width_; // Width of the video (respectively of the RGBA frames). - int height_; // Height of the video (respectively of the RGBA frames). - - // Returns the size of the Y plane in bytes. - int YPlaneSize() const { return width_ * height_; } - - // Returns the size of the U plane in bytes. 
- int UPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); } - - // Returns the size of the V plane in bytes. - int VPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); } - - // Returns the number of bytes per row in the RGBA frame. - int SrcStrideFrame() const { return width_ * 4; } - - // Returns the number of bytes in the Y plane. - int DstStrideY() const { return width_; } - - // Returns the number of bytes in the U plane. - int DstStrideU() const { return (width_ + 1) / 2; } - - // Returns the number of bytes in the V plane. - int DstStrideV() const { return (width_ + 1) / 2; } - - // Returns the size in bytes of the input RGBA frames. - int InputFrameSize() const { return width_ * height_ * 4; } - - // Writes the Y, U and V (in this order) planes to the file, thus adding a - // raw YUV frame to the file. - bool AddYUVToFile(uint8_t* y_plane, - int y_plane_size, - uint8_t* u_plane, - int u_plane_size, - uint8_t* v_plane, - int v_plane_size, - FILE* output_file); - - // Adds the Y, U or V plane to the file. - bool AddYUVPlaneToFile(uint8_t* yuv_plane, int yuv_plane_size, FILE* file); - - // Reads a RGBA frame from input_file_name with input_frame_size size in bytes - // into the buffer. - bool ReadRGBAFrame(const char* input_file_name, - int input_frame_size, - unsigned char* buffer); - - // Finds the full path name of the file - concatenates the directory and file - // names. - std::string FindFullFileName(std::string dir_name, std::string file_name); - - // Checks if a file exists. - bool FileExists(std::string file_name_to_check); - - // Returns the name of the file in the form frame_, where is - // 4 zero padded (i.e. frame_0000, frame_0001, etc.). 
- std::string FormFrameName(int width, int number); -}; - -} // namespace test -} // namespace webrtc - -#endif // RTC_TOOLS_CONVERTER_CONVERTER_H_ diff --git a/rtc_tools/converter/rgba_to_i420_converter.cc b/rtc_tools/converter/rgba_to_i420_converter.cc deleted file mode 100644 index 6e186ae8fe..0000000000 --- a/rtc_tools/converter/rgba_to_i420_converter.cc +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include - -#include - -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "absl/flags/usage.h" -#include "rtc_tools/converter/converter.h" - -ABSL_FLAG(int, width, -1, "Width in pixels of the frames in the input file"); -ABSL_FLAG(int, height, -1, "Height in pixels of the frames in the input file"); -ABSL_FLAG(std::string, - frames_dir, - ".", - "The path to the directory where the frames reside"); -ABSL_FLAG(std::string, - output_file, - "output.yuv", - " The output file to which frames are written"); -ABSL_FLAG(bool, - delete_frames, - false, - " Whether or not to delete the input frames after the conversion"); - -/* - * A command-line tool based on libyuv to convert a set of RGBA files to a YUV - * video. - * Usage: - * rgba_to_i420_converter --frames_dir= - * --output_file= --width= - * --height= - */ -int main(int argc, char* argv[]) { - absl::SetProgramUsageMessage( - "Converts RGBA raw image files to I420 frames " - "for YUV.\n" - "Example usage:\n" - "./rgba_to_i420_converter --frames_dir=. 
" - "--output_file=output.yuv --width=320 " - "--height=240\n" - "IMPORTANT: If you pass the --delete_frames " - "command line parameter, the tool will delete " - "the input frames after conversion.\n"); - absl::ParseCommandLine(argc, argv); - - int width = absl::GetFlag(FLAGS_width); - int height = absl::GetFlag(FLAGS_height); - - if (width <= 0 || height <= 0) { - fprintf(stderr, "Error: width or height cannot be <= 0!\n"); - return -1; - } - - bool del_frames = absl::GetFlag(FLAGS_delete_frames); - - webrtc::test::Converter converter(width, height); - bool success = converter.ConvertRGBAToI420Video( - absl::GetFlag(FLAGS_frames_dir), absl::GetFlag(FLAGS_output_file), - del_frames); - - if (success) { - fprintf(stdout, "Successful conversion of RGBA frames to YUV video!\n"); - return 0; - } else { - fprintf(stdout, "Unsuccessful conversion of RGBA frames to YUV video!\n"); - return -1; - } -} diff --git a/rtc_tools/converter/yuv_to_ivf_converter.cc b/rtc_tools/converter/yuv_to_ivf_converter.cc deleted file mode 100644 index 7c2c2ad1e0..0000000000 --- a/rtc_tools/converter/yuv_to_ivf_converter.cc +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include "absl/debugging/failure_signal_handler.h" -#include "absl/debugging/symbolize.h" -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "absl/strings/match.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/test/create_frame_generator.h" -#include "api/test/frame_generator_interface.h" -#include "api/video/encoded_image.h" -#include "api/video/video_codec_type.h" -#include "api/video_codecs/video_codec.h" -#include "api/video_codecs/video_encoder.h" -#include "media/base/media_constants.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/codecs/vp9/include/vp9.h" -#include "modules/video_coding/include/video_error_codes.h" -#include "modules/video_coding/utility/ivf_file_writer.h" -#include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "rtc_base/logging.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/system/file_wrapper.h" -#include "rtc_base/task_queue.h" -#include "test/testsupport/frame_reader.h" -#include "test/video_codec_settings.h" - -#if defined(WEBRTC_USE_H264) -#include "modules/video_coding/codecs/h264/include/h264.h" -#endif - -ABSL_FLAG(std::string, input, "", "Input YUV file to convert to IVF"); -ABSL_FLAG(int, width, 0, "Input frame width"); -ABSL_FLAG(int, height, 0, "Input frame height"); -ABSL_FLAG(std::string, codec, cricket::kVp8CodecName, "Codec to use"); -ABSL_FLAG(std::string, output, "", "Output IVF file"); - -namespace webrtc { -namespace test { -namespace { - -constexpr int kMaxFramerate = 30; -// We use very big value here to ensure that codec won't hit any limits. 
-constexpr uint32_t kBitrateBps = 100000000; -constexpr int kKeyFrameIntervalMs = 30000; -constexpr TimeDelta kMaxFrameEncodeWaitTimeout = TimeDelta::Seconds(2); -constexpr int kFrameLogInterval = 100; -static const VideoEncoder::Capabilities kCapabilities(false); - -class IvfFileWriterEncodedCallback : public EncodedImageCallback { - public: - IvfFileWriterEncodedCallback(const std::string& file_name, - VideoCodecType video_codec_type, - int expected_frames_count) - : file_writer_( - IvfFileWriter::Wrap(FileWrapper::OpenWriteOnly(file_name), 0)), - video_codec_type_(video_codec_type), - expected_frames_count_(expected_frames_count) { - RTC_CHECK(file_writer_.get()); - } - ~IvfFileWriterEncodedCallback() { RTC_CHECK(file_writer_->Close()); } - - Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { - RTC_CHECK(file_writer_->WriteFrame(encoded_image, video_codec_type_)); - - MutexLock lock(&lock_); - received_frames_count_++; - RTC_CHECK_LE(received_frames_count_, expected_frames_count_); - if (received_frames_count_ % kFrameLogInterval == 0) { - RTC_LOG(LS_INFO) << received_frames_count_ << " out of " - << expected_frames_count_ << " frames written"; - } - next_frame_written_.Set(); - return Result(Result::Error::OK); - } - - void WaitNextFrameWritten(TimeDelta timeout) { - RTC_CHECK(next_frame_written_.Wait(timeout)); - next_frame_written_.Reset(); - } - - private: - std::unique_ptr file_writer_; - const VideoCodecType video_codec_type_; - const int expected_frames_count_; - - Mutex lock_; - int received_frames_count_ RTC_GUARDED_BY(lock_) = 0; - rtc::Event next_frame_written_; -}; - -class Encoder { - public: - Encoder(int width, - int height, - int frames_count, - const std::string& output_file_name, - VideoCodecType video_codec_type, - std::unique_ptr video_encoder) - : video_encoder_(std::move(video_encoder)), - task_queue_(CreateDefaultTaskQueueFactory()->CreateTaskQueue( - "Encoder", - 
TaskQueueFactory::Priority::HIGH)) { - ivf_writer_callback_ = std::make_unique( - output_file_name, video_codec_type, frames_count); - - task_queue_.PostTask([width, height, video_codec_type, this]() { - VideoCodec codec_settings; - CodecSettings(video_codec_type, &codec_settings); - codec_settings.width = width; - codec_settings.height = height; - codec_settings.maxFramerate = kMaxFramerate; - codec_settings.startBitrate = kBitrateBps; - codec_settings.minBitrate = kBitrateBps; - codec_settings.maxBitrate = kBitrateBps; - codec_settings.SetFrameDropEnabled(false); - switch (video_codec_type) { - case VideoCodecType::kVideoCodecVP8: { - VideoCodecVP8* vp8_settings = codec_settings.VP8(); - vp8_settings->keyFrameInterval = kKeyFrameIntervalMs; - vp8_settings->denoisingOn = false; - } break; - case VideoCodecType::kVideoCodecVP9: { - VideoCodecVP9* vp9_settings = codec_settings.VP9(); - vp9_settings->denoisingOn = false; - vp9_settings->keyFrameInterval = kKeyFrameIntervalMs; - vp9_settings->automaticResizeOn = false; - } break; - case VideoCodecType::kVideoCodecH264: { - VideoCodecH264* h264_settings = codec_settings.H264(); - h264_settings->keyFrameInterval = kKeyFrameIntervalMs; - } break; - default: - RTC_CHECK(false) << "Unsupported codec type"; - } - VideoBitrateAllocation bitrate_allocation; - bitrate_allocation.SetBitrate(0, 0, kBitrateBps); - - video_encoder_->RegisterEncodeCompleteCallback( - ivf_writer_callback_.get()); - RTC_CHECK_EQ( - WEBRTC_VIDEO_CODEC_OK, - video_encoder_->InitEncode( - &codec_settings, - VideoEncoder::Settings(kCapabilities, /*number_of_cores=*/4, - /*max_payload_size=*/0))); - video_encoder_->SetRates(VideoEncoder::RateControlParameters( - bitrate_allocation, - static_cast(codec_settings.maxFramerate))); - }); - } - - void Encode(const VideoFrame& frame) { - task_queue_.PostTask([frame, this]() { - RTC_CHECK_EQ(WEBRTC_VIDEO_CODEC_OK, - video_encoder_->Encode(frame, nullptr)); - }); - } - - void WaitNextFrameWritten(TimeDelta 
timeout) { - ivf_writer_callback_->WaitNextFrameWritten(timeout); - } - - private: - std::unique_ptr video_encoder_; - std::unique_ptr ivf_writer_callback_; - - rtc::TaskQueue task_queue_; -}; - -int GetFrameCount(std::string yuv_file_name, int width, int height) { - std::unique_ptr yuv_reader = - std::make_unique(std::move(yuv_file_name), width, - height); - RTC_CHECK(yuv_reader->Init()); - int frames_count = yuv_reader->NumberOfFrames(); - yuv_reader->Close(); - return frames_count; -} - -VideoFrame BuildFrame(FrameGeneratorInterface::VideoFrameData frame_data, - uint32_t rtp_timestamp) { - return VideoFrame::Builder() - .set_video_frame_buffer(frame_data.buffer) - .set_update_rect(frame_data.update_rect) - .set_timestamp_rtp(rtp_timestamp) - .build(); -} - -void WriteVideoFile(std::string input_file_name, - int width, - int height, - std::string output_file_name, - VideoCodecType video_codec_type, - std::unique_ptr video_encoder) { - int frames_count = GetFrameCount(input_file_name, width, height); - - std::unique_ptr frame_generator = - CreateFromYuvFileFrameGenerator({input_file_name}, width, height, - /*frame_repeat_count=*/1); - - Encoder encoder(width, height, frames_count, output_file_name, - video_codec_type, std::move(video_encoder)); - - uint32_t last_frame_timestamp = 0; - - for (int i = 0; i < frames_count; ++i) { - const uint32_t timestamp = - last_frame_timestamp + kVideoPayloadTypeFrequency / kMaxFramerate; - VideoFrame frame = BuildFrame(frame_generator->NextFrame(), timestamp); - - last_frame_timestamp = timestamp; - - encoder.Encode(frame); - encoder.WaitNextFrameWritten(kMaxFrameEncodeWaitTimeout); - - if ((i + 1) % kFrameLogInterval == 0) { - RTC_LOG(LS_INFO) << i + 1 << " out of " << frames_count - << " frames are sent for encoding"; - } - } - RTC_LOG(LS_INFO) << "All " << frames_count << " frame are sent for encoding"; -} - -} // namespace -} // namespace test -} // namespace webrtc - -int main(int argc, char* argv[]) { - // Initialize the 
symbolizer to get a human-readable stack trace. - absl::InitializeSymbolizer(argv[0]); - - absl::FailureSignalHandlerOptions options; - absl::InstallFailureSignalHandler(options); - - absl::ParseCommandLine(argc, argv); - - std::string codec_name = absl::GetFlag(FLAGS_codec); - std::string input_file_name = absl::GetFlag(FLAGS_input); - std::string output_file_name = absl::GetFlag(FLAGS_output); - int width = absl::GetFlag(FLAGS_width); - int height = absl::GetFlag(FLAGS_height); - RTC_CHECK_NE(input_file_name, "") << "--input is required"; - RTC_CHECK_NE(output_file_name, "") << "--output is required"; - RTC_CHECK_GT(width, 0) << "width must be greater then 0"; - RTC_CHECK_GT(height, 0) << "height must be greater then 0"; - if (absl::EqualsIgnoreCase(codec_name, cricket::kVp8CodecName)) { - webrtc::test::WriteVideoFile( - input_file_name, width, height, output_file_name, - webrtc::VideoCodecType::kVideoCodecVP8, webrtc::VP8Encoder::Create()); - return 0; - } - if (absl::EqualsIgnoreCase(codec_name, cricket::kVp9CodecName)) { - webrtc::test::WriteVideoFile( - input_file_name, width, height, output_file_name, - webrtc::VideoCodecType::kVideoCodecVP9, webrtc::VP9Encoder::Create()); - return 0; - } -#if defined(WEBRTC_USE_H264) - if (absl::EqualsIgnoreCase(codec_name, cricket::kH264CodecName)) { - webrtc::test::WriteVideoFile( - input_file_name, width, height, output_file_name, - webrtc::VideoCodecType::kVideoCodecH264, - webrtc::H264Encoder::Create( - cricket::VideoCodec(cricket::kH264CodecName))); - return 0; - } -#endif - RTC_CHECK(false) << "Unsupported codec: " << codec_name; - return 1; -} diff --git a/rtc_tools/video_replay.cc b/rtc_tools/video_replay.cc index 173439af12..b829ab519b 100644 --- a/rtc_tools/video_replay.cc +++ b/rtc_tools/video_replay.cc @@ -159,6 +159,13 @@ ABSL_FLAG(bool, disable_preview, false, "Disable decoded video preview."); ABSL_FLAG(bool, disable_decoding, false, "Disable video decoding."); +ABSL_FLAG(int, + extend_run_time_duration, + 
0, + "Extends the run time of the receiving client after the last RTP " + "packet has been delivered. Typically useful to let the last few " + "frames be decoded and rendered. Duration given in seconds."); + namespace { bool ValidatePayloadType(int32_t payload_type) { return payload_type > 0 && payload_type <= 127; @@ -605,9 +612,10 @@ class RtpReplayer final { } } } - // One more call to SleepOrAdvanceTime is required to process the last + // Note that even when `extend_run_time_duration` is zero + // `SleepOrAdvanceTime` should still be called in order to process the last // delivered packet when running in simulated time. - SleepOrAdvanceTime(0); + SleepOrAdvanceTime(absl::GetFlag(FLAGS_extend_run_time_duration) * 1000); fprintf(stderr, "num_packets: %d\n", num_packets); @@ -672,6 +680,7 @@ int main(int argc, char* argv[]) { RTC_CHECK(ValidateRtpHeaderExtensionId( absl::GetFlag(FLAGS_transmission_offset_id))); RTC_CHECK(ValidateInputFilenameNotEmpty(absl::GetFlag(FLAGS_input_file))); + RTC_CHECK_GE(absl::GetFlag(FLAGS_extend_run_time_duration), 0); rtc::ThreadManager::Instance()->WrapCurrentThread(); webrtc::test::RunTest(webrtc::RtpReplay); diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index e74c41a096..3e876b2636 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -1062,11 +1062,13 @@ if (is_ios || is_mac) { ":videorendereradapter_objc", ":videosource_objc", ":videotoolbox_objc", + "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:rtc_event_log_output_file", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api/audio_codecs:audio_codecs_api", "../api/audio_codecs:builtin_audio_decoder_factory", diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 901b6f6a67..fb51a9c5e8 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -528,7 +528,6 @@ if (is_android) { deps = [ ":base_java", ":dav1d_java", - ":libaom_av1_encoder_java", 
":libvpx_vp8_java", ":libvpx_vp9_java", ":video_api_java", @@ -769,10 +768,12 @@ if (current_os == "linux" || is_android) { ":native_api_stacktrace", "..:media_constraints", "../../api:callfactory_api", + "../../api:dtmf_sender_interface", "../../api:libjingle_peerconnection_api", "../../api:media_stream_interface", "../../api:rtc_event_log_output_file", "../../api:rtp_parameters", + "../../api:rtp_sender_interface", "../../api:turn_customizer", "../../api/crypto:options", "../../api/rtc_event_log:rtc_event_log_factory", @@ -889,9 +890,20 @@ if (current_os == "linux" || is_android) { rtc_library("swcodecs_jni") { visibility = [ "*" ] allow_poison = [ "software_video_codecs" ] + sources = [ + "src/jni/software_video_decoder_factory.cc", + "src/jni/software_video_encoder_factory.cc", + ] deps = [ + ":base_jni", + ":generated_swcodecs_jni", ":libvpx_vp8_jni", ":libvpx_vp9_jni", + ":native_api_jni", + ":video_jni", + "../../api/video_codecs:builtin_video_decoder_factory", + "../../api/video_codecs:builtin_video_encoder_factory", + "../../api/video_codecs:video_codecs_api", ] } @@ -1357,6 +1369,16 @@ if (current_os == "linux" || is_android) { jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h" } + generate_jni("generated_swcodecs_jni") { + sources = [ + "api/org/webrtc/SoftwareVideoDecoderFactory.java", + "api/org/webrtc/SoftwareVideoEncoderFactory.java", + ] + + namespace = "webrtc::jni" + jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h" + } + generate_jni("generated_peerconnection_jni") { sources = [ "api/org/webrtc/AddIceObserver.java", @@ -1475,6 +1497,8 @@ if (is_android) { "instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java", "instrumentationtests/src/org/webrtc/RtpSenderTest.java", "instrumentationtests/src/org/webrtc/RtpTransceiverTest.java", + "instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java", + "instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java", 
"instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java", "instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java", "instrumentationtests/src/org/webrtc/TestConstants.java", diff --git a/sdk/android/api/org/webrtc/Camera2Enumerator.java b/sdk/android/api/org/webrtc/Camera2Enumerator.java index 8a0ce4a7bc..456d8cd060 100644 --- a/sdk/android/api/org/webrtc/Camera2Enumerator.java +++ b/sdk/android/api/org/webrtc/Camera2Enumerator.java @@ -13,13 +13,13 @@ package org.webrtc; import android.content.Context; import android.graphics.Rect; import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.params.StreamConfigurationMap; import android.os.Build; import android.os.SystemClock; -import android.util.AndroidException; import android.util.Range; import androidx.annotation.Nullable; import java.util.ArrayList; @@ -50,10 +50,7 @@ public class Camera2Enumerator implements CameraEnumerator { public String[] getDeviceNames() { try { return cameraManager.getCameraIdList(); - // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a - // catch statement with an Exception from a newer API, even if the code is never executed. 
- // https://code.google.com/p/android/issues/detail?id=209129 - } catch (/* CameraAccessException */ AndroidException e) { + } catch (CameraAccessException e) { Logging.e(TAG, "Camera access exception", e); return new String[] {}; } @@ -92,10 +89,7 @@ public class Camera2Enumerator implements CameraEnumerator { private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) { try { return cameraManager.getCameraCharacteristics(deviceName); - // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a - // catch statement with an Exception from a newer API, even if the code is never executed. - // https://code.google.com/p/android/issues/detail?id=209129 - } catch (/* CameraAccessException */ AndroidException e) { + } catch (CameraAccessException | RuntimeException e) { Logging.e(TAG, "Camera access exception", e); return null; } @@ -115,10 +109,7 @@ public class Camera2Enumerator implements CameraEnumerator { return false; } } - // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a - // catch statement with an Exception from a newer API, even if the code is never executed. 
- // https://code.google.com/p/android/issues/detail?id=209129 - } catch (/* CameraAccessException */ AndroidException | RuntimeException e) { + } catch (CameraAccessException | RuntimeException e) { Logging.e(TAG, "Failed to check if camera2 is supported", e); return false; } diff --git a/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java b/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java index a894c0d6b1..a6f24c2858 100644 --- a/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java +++ b/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java @@ -104,7 +104,10 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver implements Netwo @Override public void onAvailable(Network network) { - Logging.d(TAG, "Network becomes available: " + network.toString()); + Logging.d(TAG, + "Network" + + " handle: " + networkToNetId(network) + + " becomes available: " + network.toString()); synchronized (availableNetworks) { availableNetworks.add(network); @@ -116,7 +119,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver implements Netwo public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) { // A capabilities change may indicate the ConnectionType has changed, // so forward the new NetworkInformation along to the observer. - Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString()); + Logging.d(TAG, + "handle: " + networkToNetId(network) + + " capabilities changed: " + networkCapabilities.toString()); onNetworkChanged(network); } @@ -127,7 +132,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver implements Netwo // // linkProperties.toString() has PII that cannot be redacted // very reliably, so do not include in log. 
- Logging.d(TAG, "link properties changed"); + Logging.d(TAG, "handle: " + networkToNetId(network) + " link properties changed"); onNetworkChanged(network); } @@ -135,13 +140,18 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver implements Netwo public void onLosing(Network network, int maxMsToLive) { // Tell the network is going to lose in MaxMsToLive milliseconds. // We may use this signal later. - Logging.d( - TAG, "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms"); + Logging.d(TAG, + "Network" + + " handle: " + networkToNetId(network) + ", " + network.toString() + + " is about to lose in " + maxMsToLive + "ms"); } @Override public void onLost(Network network) { - Logging.d(TAG, "Network " + network.toString() + " is disconnected"); + Logging.d(TAG, + "Network" + + " handle: " + networkToNetId(network) + ", " + network.toString() + + " is disconnected"); synchronized (availableNetworks) { availableNetworks.remove(network); @@ -789,6 +799,8 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver implements Netwo case ConnectivityManager.TYPE_BLUETOOTH: return NetworkChangeDetector.ConnectionType.CONNECTION_BLUETOOTH; case ConnectivityManager.TYPE_MOBILE: + case ConnectivityManager.TYPE_MOBILE_DUN: + case ConnectivityManager.TYPE_MOBILE_HIPRI: // Use information from TelephonyManager to classify the connection. switch (networkSubtype) { case TelephonyManager.NETWORK_TYPE_GPRS: diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java index 724f70468d..e334a3fd9d 100644 --- a/sdk/android/api/org/webrtc/PeerConnection.java +++ b/sdk/android/api/org/webrtc/PeerConnection.java @@ -524,7 +524,6 @@ public class PeerConnection { // These values will be overridden by MediaStream constraints if deprecated constraints-based // create peerconnection interface is used. 
- public boolean disableIpv6; public boolean enableDscp; public boolean enableCpuOveruseDetection; public boolean suspendBelowMinBitrate; @@ -604,7 +603,6 @@ public class PeerConnection { stableWritableConnectionPingIntervalMs = null; disableIPv6OnWifi = false; maxIPv6Networks = 5; - disableIpv6 = false; enableDscp = false; enableCpuOveruseDetection = true; suspendBelowMinBitrate = false; @@ -769,11 +767,6 @@ public class PeerConnection { return turnCustomizer; } - @CalledByNative("RTCConfiguration") - boolean getDisableIpv6() { - return disableIpv6; - } - @CalledByNative("RTCConfiguration") boolean getEnableDscp() { return enableDscp; @@ -1176,6 +1169,22 @@ public class PeerConnection { nativeNewGetStats(callback); } + /** + * Gets stats using the new stats collection API, see webrtc/api/stats/. These + * will replace old stats collection API when the new API has matured enough. + */ + public void getStats(RtpSender sender, RTCStatsCollectorCallback callback) { + nativeNewGetStatsSender(sender.getNativeRtpSender(), callback); + } + + /** + * Gets stats using the new stats collection API, see webrtc/api/stats/. These + * will replace old stats collection API when the new API has matured enough. + */ + public void getStats(RtpReceiver receiver, RTCStatsCollectorCallback callback) { + nativeNewGetStatsReceiver(receiver.getNativeRtpReceiver(), callback); + } + /** * Limits the bandwidth allocated for all RTP streams sent by this * PeerConnection. Pass null to leave a value unchanged. 
@@ -1310,6 +1319,8 @@ public class PeerConnection { private native void nativeRemoveLocalStream(long stream); private native boolean nativeOldGetStats(StatsObserver observer, long nativeTrack); private native void nativeNewGetStats(RTCStatsCollectorCallback callback); + private native void nativeNewGetStatsSender(long sender, RTCStatsCollectorCallback callback); + private native void nativeNewGetStatsReceiver(long receiver, RTCStatsCollectorCallback callback); private native RtpSender nativeCreateSender(String kind, String stream_id); private native List nativeGetSenders(); private native List nativeGetReceivers(); diff --git a/sdk/android/api/org/webrtc/RtpReceiver.java b/sdk/android/api/org/webrtc/RtpReceiver.java index a5710f92e3..c3cff3dd31 100644 --- a/sdk/android/api/org/webrtc/RtpReceiver.java +++ b/sdk/android/api/org/webrtc/RtpReceiver.java @@ -49,6 +49,12 @@ public class RtpReceiver { return nativeGetId(nativeRtpReceiver); } + /** Returns a pointer to webrtc::RtpReceiverInterface. */ + long getNativeRtpReceiver() { + checkRtpReceiverExists(); + return nativeRtpReceiver; + } + @CalledByNative public void dispose() { checkRtpReceiverExists(); diff --git a/sdk/android/api/org/webrtc/RtpSender.java b/sdk/android/api/org/webrtc/RtpSender.java index b78bbf6b3c..2d0bc6c1e1 100644 --- a/sdk/android/api/org/webrtc/RtpSender.java +++ b/sdk/android/api/org/webrtc/RtpSender.java @@ -12,6 +12,7 @@ package org.webrtc; import androidx.annotation.Nullable; import java.util.List; +import org.webrtc.MediaStreamTrack; /** Java wrapper for a C++ RtpSenderInterface. */ public class RtpSender { @@ -27,8 +28,12 @@ public class RtpSender { long nativeTrack = nativeGetTrack(nativeRtpSender); cachedTrack = MediaStreamTrack.createMediaStreamTrack(nativeTrack); - long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender); - dtmfSender = (nativeDtmfSender != 0) ? 
new DtmfSender(nativeDtmfSender) : null; + if (nativeGetMediaType(nativeRtpSender).equalsIgnoreCase(MediaStreamTrack.AUDIO_TRACK_KIND)) { + long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender); + dtmfSender = (nativeDtmfSender != 0) ? new DtmfSender(nativeDtmfSender) : null; + } else { + dtmfSender = null; + } } /** @@ -143,4 +148,6 @@ public class RtpSender { private static native String nativeGetId(long rtpSender); private static native void nativeSetFrameEncryptor(long rtpSender, long nativeFrameEncryptor); + + private static native String nativeGetMediaType(long rtpSender); }; diff --git a/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java index abbd522146..2ac42e834e 100644 --- a/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java +++ b/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java @@ -11,45 +11,43 @@ package org.webrtc; import androidx.annotation.Nullable; -import java.util.ArrayList; -import java.util.HashMap; +import java.util.Arrays; import java.util.List; public class SoftwareVideoDecoderFactory implements VideoDecoderFactory { + private static final String TAG = "SoftwareVideoDecoderFactory"; + + private final long nativeFactory; + + public SoftwareVideoDecoderFactory() { + this.nativeFactory = nativeCreateFactory(); + } + @Nullable @Override - public VideoDecoder createDecoder(VideoCodecInfo codecInfo) { - String codecName = codecInfo.getName(); - - if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP8.name())) { - return new LibvpxVp8Decoder(); - } - if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP9.name()) - && LibvpxVp9Decoder.nativeIsSupported()) { - return new LibvpxVp9Decoder(); - } - if (codecName.equalsIgnoreCase(VideoCodecMimeType.AV1.name())) { - return new Dav1dDecoder(); + public VideoDecoder createDecoder(VideoCodecInfo info) { + long nativeDecoder = nativeCreateDecoder(nativeFactory, info); + if (nativeDecoder == 0) { + Logging.w(TAG, 
"Trying to create decoder for unsupported format. " + info); + return null; } - return null; + return new WrappedNativeVideoDecoder() { + @Override + public long createNativeVideoDecoder() { + return nativeDecoder; + } + }; } @Override public VideoCodecInfo[] getSupportedCodecs() { - return supportedCodecs(); + return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]); } - static VideoCodecInfo[] supportedCodecs() { - List codecs = new ArrayList(); + private static native long nativeCreateFactory(); - codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP8.name(), new HashMap<>())); - if (LibvpxVp9Decoder.nativeIsSupported()) { - codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP9.name(), new HashMap<>())); - } + private static native long nativeCreateDecoder(long factory, VideoCodecInfo videoCodecInfo); - codecs.add(new VideoCodecInfo(VideoCodecMimeType.AV1.name(), new HashMap<>())); - - return codecs.toArray(new VideoCodecInfo[codecs.size()]); - } + private static native List nativeGetSupportedCodecs(long factory); } diff --git a/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java index c4ac229071..7f4c457b97 100644 --- a/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java +++ b/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java @@ -11,44 +11,48 @@ package org.webrtc; import androidx.annotation.Nullable; -import java.util.ArrayList; -import java.util.HashMap; +import java.util.Arrays; import java.util.List; public class SoftwareVideoEncoderFactory implements VideoEncoderFactory { + private static final String TAG = "SoftwareVideoEncoderFactory"; + + private final long nativeFactory; + + public SoftwareVideoEncoderFactory() { + this.nativeFactory = nativeCreateFactory(); + } + @Nullable @Override - public VideoEncoder createEncoder(VideoCodecInfo codecInfo) { - String codecName = codecInfo.getName(); - - if 
(codecName.equalsIgnoreCase(VideoCodecMimeType.VP8.name())) { - return new LibvpxVp8Encoder(); - } - if (codecName.equalsIgnoreCase(VideoCodecMimeType.VP9.name()) - && LibvpxVp9Encoder.nativeIsSupported()) { - return new LibvpxVp9Encoder(); - } - if (codecName.equalsIgnoreCase(VideoCodecMimeType.AV1.name())) { - return new LibaomAv1Encoder(); + public VideoEncoder createEncoder(VideoCodecInfo info) { + long nativeEncoder = nativeCreateEncoder(nativeFactory, info); + if (nativeEncoder == 0) { + Logging.w(TAG, "Trying to create encoder for unsupported format. " + info); + return null; } - return null; + return new WrappedNativeVideoEncoder() { + @Override + public long createNativeVideoEncoder() { + return nativeEncoder; + } + + @Override + public boolean isHardwareEncoder() { + return false; + } + }; } @Override public VideoCodecInfo[] getSupportedCodecs() { - return supportedCodecs(); + return nativeGetSupportedCodecs(nativeFactory).toArray(new VideoCodecInfo[0]); } - static VideoCodecInfo[] supportedCodecs() { - List codecs = new ArrayList(); + private static native long nativeCreateFactory(); - codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP8.name(), new HashMap<>())); - if (LibvpxVp9Encoder.nativeIsSupported()) { - codecs.add(new VideoCodecInfo(VideoCodecMimeType.VP9.name(), new HashMap<>())); - } - codecs.add(new VideoCodecInfo(VideoCodecMimeType.AV1.name(), new HashMap<>())); + private static native long nativeCreateEncoder(long factory, VideoCodecInfo videoCodecInfo); - return codecs.toArray(new VideoCodecInfo[codecs.size()]); - } + private static native List nativeGetSupportedCodecs(long factory); } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java index 9721cbd818..fe608c794e 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java +++ 
b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java @@ -22,27 +22,10 @@ import org.junit.Test; /** Unit tests for {@link DefaultVideoEncoderFactory}. */ public class DefaultVideoEncoderFactoryTest { static class CustomHardwareVideoEncoderFactory implements VideoEncoderFactory { - private ArrayList codecs = new ArrayList<>(); + private VideoCodecInfo supportedCodec; - public CustomHardwareVideoEncoderFactory(boolean includeVP8, boolean includeH264High) { - if (includeVP8) { - codecs.add(new VideoCodecInfo("VP8", new HashMap<>())); - } - codecs.add(new VideoCodecInfo("VP9", new HashMap<>())); - - HashMap baselineParams = new HashMap(); - baselineParams.put("profile-level-id", "42e01f"); - baselineParams.put("level-asymmetry-allowed", "1"); - baselineParams.put("packetization-mode", "1"); - codecs.add(new VideoCodecInfo("H264", baselineParams)); - - if (includeH264High) { - HashMap highParams = new HashMap(); - highParams.put("profile-level-id", "640c1f"); - highParams.put("level-asymmetry-allowed", "1"); - highParams.put("packetization-mode", "1"); - codecs.add(new VideoCodecInfo("H264", highParams)); - } + public CustomHardwareVideoEncoderFactory(VideoCodecInfo supportedCodec) { + this.supportedCodec = supportedCodec; } @Override @@ -52,7 +35,7 @@ public class DefaultVideoEncoderFactoryTest { @Override public VideoCodecInfo[] getSupportedCodecs() { - return codecs.toArray(new VideoCodecInfo[codecs.size()]); + return new VideoCodecInfo[] {supportedCodec}; } } @@ -63,47 +46,32 @@ public class DefaultVideoEncoderFactoryTest { @SmallTest @Test - public void testGetSupportedCodecsWithHardwareH264HighProfile() { - VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, true); - DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory); - VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs(); - assertEquals(5, videoCodecs.length); - assertEquals("VP8", videoCodecs[0].name); - assertEquals("VP9", 
videoCodecs[1].name); - assertEquals("AV1", videoCodecs[2].name); - assertEquals("H264", videoCodecs[3].name); - assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id")); - assertEquals("H264", videoCodecs[4].name); - assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id")); + public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { + VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>()); + VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); + DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); + VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); + assertEquals(3, supportedCodecs.length); + assertEquals("VP8", supportedCodecs[0].name); + assertEquals("AV1", supportedCodecs[1].name); + assertEquals("VP9", supportedCodecs[2].name); } @SmallTest @Test - public void testGetSupportedCodecsWithoutHardwareH264HighProfile() { - VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, false); - DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory); - VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs(); - assertEquals(4, videoCodecs.length); - assertEquals("VP8", videoCodecs[0].name); - assertEquals("VP9", videoCodecs[1].name); - assertEquals("AV1", videoCodecs[2].name); - assertEquals("H264", videoCodecs[3].name); - assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id")); - } - - @SmallTest - @Test - public void testGetSupportedCodecsWithoutHardwareVP8() { - VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(false, true); - DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory); - VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs(); - assertEquals(5, videoCodecs.length); - assertEquals("VP8", videoCodecs[0].name); - assertEquals("VP9", videoCodecs[1].name); - assertEquals("AV1", videoCodecs[2].name); - assertEquals("H264", 
videoCodecs[3].name); - assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id")); - assertEquals("H264", videoCodecs[4].name); - assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id")); + public void getSupportedCodecs_hwVp8WithDifferentParams_twoVp8() { + VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap() { + { put("param", "value"); } + }); + VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); + DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); + VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); + assertEquals(4, supportedCodecs.length); + assertEquals("VP8", supportedCodecs[0].name); + assertEquals("AV1", supportedCodecs[1].name); + assertEquals("VP9", supportedCodecs[2].name); + assertEquals("VP8", supportedCodecs[3].name); + assertEquals(1, supportedCodecs[3].params.size()); + assertEquals("value", supportedCodecs[3].params.get("param")); } } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java b/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java index f71bd36063..d763ff2190 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java @@ -931,6 +931,23 @@ public class PeerConnectionEndToEndTest { assertTrue(offeringPC.setBitrate(100000, 5000000, 500000000)); assertFalse(offeringPC.setBitrate(3, 2, 1)); + // Test getStats by Sender interface + offeringExpectations.expectNewStatsCallback(); + offeringPC.getStats(videoSender, offeringExpectations); + assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS)); + + // Test getStats by Receiver interface + RtpReceiver videoReceiver = null; + for (RtpReceiver receiver : answeringPC.getReceivers()) { + if (receiver.track().kind().equals("video")) { + videoReceiver = 
receiver; + } + } + assertNotNull(videoReceiver); + answeringExpectations.expectNewStatsCallback(); + answeringPC.getStats(videoReceiver, answeringExpectations); + assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS)); + // Free the Java-land objects and collect them. shutdownPC(offeringPC, offeringExpectations); offeringPC = null; diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java new file mode 100644 index 0000000000..8a5d9788ee --- /dev/null +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java @@ -0,0 +1,62 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import static com.google.common.truth.Truth.assertThat; + +import androidx.annotation.Nullable; +import androidx.test.filters.SmallTest; +import java.util.HashMap; +import org.junit.Before; +import org.junit.Test; + +/** Unit tests for {@link SoftwareVideoDecoderFactory}. 
*/ +public class SoftwareVideoDecoderFactoryTest { + @Before + public void setUp() { + NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY); + } + + @SmallTest + @Test + public void getSupportedCodecs_returnsDefaultCodecs() { + VideoDecoderFactory factory = new SoftwareVideoDecoderFactory(); + VideoCodecInfo[] codecs = factory.getSupportedCodecs(); + assertThat(codecs.length).isEqualTo(6); + assertThat(codecs[0].name).isEqualTo("VP8"); + assertThat(codecs[1].name).isEqualTo("VP9"); + assertThat(codecs[2].name).isEqualTo("VP9"); + assertThat(codecs[3].name).isEqualTo("VP9"); + assertThat(codecs[4].name).isEqualTo("AV1"); + assertThat(codecs[5].name).isEqualTo("AV1"); + } + + @SmallTest + @Test + public void createDecoder_supportedCodec_returnsNotNull() { + VideoDecoderFactory factory = new SoftwareVideoDecoderFactory(); + VideoCodecInfo[] codecs = factory.getSupportedCodecs(); + assertThat(codecs.length).isGreaterThan(0); + for (VideoCodecInfo codec : codecs) { + VideoDecoder decoder = factory.createDecoder(codec); + assertThat(decoder).isNotNull(); + } + } + + @SmallTest + @Test + public void createDecoder_unsupportedCodec_returnsNull() { + VideoDecoderFactory factory = new SoftwareVideoDecoderFactory(); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoDecoder decoder = factory.createDecoder(codec); + assertThat(decoder).isNull(); + } +} diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java new file mode 100644 index 0000000000..696b423cde --- /dev/null +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java @@ -0,0 +1,59 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import static com.google.common.truth.Truth.assertThat; + +import androidx.annotation.Nullable; +import androidx.test.filters.SmallTest; +import java.util.HashMap; +import org.junit.Before; +import org.junit.Test; + +/** Unit tests for {@link SoftwareVideoEncoderFactory}. */ +public class SoftwareVideoEncoderFactoryTest { + @Before + public void setUp() { + NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY); + } + + @SmallTest + @Test + public void getSupportedCodecs_returnsDefaultCodecs() { + VideoEncoderFactory factory = new SoftwareVideoEncoderFactory(); + VideoCodecInfo[] codecs = factory.getSupportedCodecs(); + assertThat(codecs.length).isEqualTo(3); + assertThat(codecs[0].name).isEqualTo("VP8"); + assertThat(codecs[1].name).isEqualTo("AV1"); + assertThat(codecs[2].name).isEqualTo("VP9"); + } + + @SmallTest + @Test + public void createEncoder_supportedCodec_returnsNotNull() { + VideoEncoderFactory factory = new SoftwareVideoEncoderFactory(); + VideoCodecInfo[] codecs = factory.getSupportedCodecs(); + assertThat(codecs.length).isGreaterThan(0); + for (VideoCodecInfo codec : codecs) { + VideoEncoder encoder = factory.createEncoder(codec); + assertThat(encoder).isNotNull(); + } + } + + @SmallTest + @Test + public void createEncoder_unsupportedCodec_returnsNull() { + VideoEncoderFactory factory = new SoftwareVideoEncoderFactory(); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoEncoder encoder = factory.createEncoder(codec); + assertThat(encoder).isNull(); + } +} diff --git 
a/sdk/android/native_unittests/android_network_monitor_unittest.cc b/sdk/android/native_unittests/android_network_monitor_unittest.cc index 9aec62d630..f47e8ffb1a 100644 --- a/sdk/android/native_unittests/android_network_monitor_unittest.cc +++ b/sdk/android/native_unittests/android_network_monitor_unittest.cc @@ -25,6 +25,10 @@ static const uint32_t kTestIpv4Address = 0xC0A80011; // 192.168.0.17 static const char kTestIpv6Address1[] = "2a00:8a00:a000:1190:0000:0001:000:252"; static const char kTestIpv6Address2[] = "2a00:8a00:a000:1190:0000:0002:000:253"; +static const char kTestIfName1[] = "testlan0"; +static const char kTestIfName1V4[] = "v4-testlan0"; +static const char kTestIfName2[] = "testnet0"; + jni::NetworkInformation CreateNetworkInformation( const std::string& interface_name, jni::NetworkHandle network_handle, @@ -76,7 +80,7 @@ TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIpv4Address) { jni::NetworkHandle ipv4_handle = 100; rtc::IPAddress ipv4_address(kTestIpv4Address); jni::NetworkInformation net_info = - CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address); + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); std::vector net_infos(1, net_info); network_monitor_->SetNetworkInfos(net_infos); @@ -93,9 +97,9 @@ TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingFullIpv6Address) { rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2); // Set up an IPv6 network. 
jni::NetworkInformation net_info = - CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1); + CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address1); std::vector net_infos(1, net_info); - network_monitor_->SetNetworkInfos(net_infos); + network_monitor_->OnNetworkConnected_n(net_info); auto network_handle1 = network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address1, ""); @@ -119,9 +123,9 @@ TEST_F(AndroidNetworkMonitorTest, rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2); // Set up an IPv6 network. jni::NetworkInformation net_info = - CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1); + CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address1); std::vector net_infos(1, net_info); - network_monitor_->SetNetworkInfos(net_infos); + network_monitor_->OnNetworkConnected_n(net_info); auto network_handle1 = network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address1, ""); @@ -142,9 +146,9 @@ TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIfName) { // Set up an IPv6 network. jni::NetworkInformation net_info = - CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1); + CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address1); std::vector net_infos(1, net_info); - network_monitor_->SetNetworkInfos(net_infos); + network_monitor_->OnNetworkConnected_n(net_info); rtc::IPAddress ipv4_address(kTestIpv4Address); @@ -154,7 +158,7 @@ TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIfName) { // Search using ip address AND if_name (for typical ipv4 over ipv6 tunnel). 
auto network_handle2 = network_monitor_->FindNetworkHandleFromAddressOrName( - ipv4_address, "v4-wlan0"); + ipv4_address, kTestIfName1V4); ASSERT_FALSE(network_handle1.has_value()); ASSERT_TRUE(network_handle2.has_value()); @@ -167,14 +171,14 @@ TEST_F(AndroidNetworkMonitorTest, TestUnderlyingVpnType) { jni::NetworkHandle ipv4_handle = 100; rtc::IPAddress ipv4_address(kTestIpv4Address); jni::NetworkInformation net_info = - CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address); + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); net_info.type = jni::NETWORK_VPN; net_info.underlying_type_for_vpn = jni::NETWORK_WIFI; - network_monitor_->SetNetworkInfos({net_info}); + network_monitor_->OnNetworkConnected_n(net_info); - EXPECT_EQ( - rtc::ADAPTER_TYPE_WIFI, - network_monitor_->GetInterfaceInfo("v4-wlan0").underlying_type_for_vpn); + EXPECT_EQ(rtc::ADAPTER_TYPE_WIFI, + network_monitor_->GetInterfaceInfo(kTestIfName1V4) + .underlying_type_for_vpn); } // Verify that Disconnect makes interface unavailable. 
@@ -184,25 +188,26 @@ TEST_F(AndroidNetworkMonitorTest, Disconnect) { jni::NetworkHandle ipv4_handle = 100; rtc::IPAddress ipv4_address(kTestIpv4Address); jni::NetworkInformation net_info = - CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address); + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); net_info.type = jni::NETWORK_WIFI; - network_monitor_->SetNetworkInfos({net_info}); + network_monitor_->OnNetworkConnected_n(net_info); - EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available); - EXPECT_TRUE(network_monitor_ - ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0") - .has_value()); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type, + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_TRUE( + network_monitor_ + ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4) + .has_value()); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_WIFI); // Check that values are reset on disconnect(). 
Disconnect(ipv4_handle); - EXPECT_FALSE(network_monitor_->GetInterfaceInfo("wlan0").available); + EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); EXPECT_FALSE( network_monitor_ - ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0") + ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4) .has_value()); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type, + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_UNKNOWN); } @@ -213,25 +218,26 @@ TEST_F(AndroidNetworkMonitorTest, Reset) { jni::NetworkHandle ipv4_handle = 100; rtc::IPAddress ipv4_address(kTestIpv4Address); jni::NetworkInformation net_info = - CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address); + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); net_info.type = jni::NETWORK_WIFI; - network_monitor_->SetNetworkInfos({net_info}); + network_monitor_->OnNetworkConnected_n(net_info); - EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available); - EXPECT_TRUE(network_monitor_ - ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0") - .has_value()); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type, + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_TRUE( + network_monitor_ + ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4) + .has_value()); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_WIFI); // Check that values are reset on Stop(). 
network_monitor_->Stop(); - EXPECT_FALSE(network_monitor_->GetInterfaceInfo("wlan0").available); + EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); EXPECT_FALSE( network_monitor_ - ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0") + ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4) .has_value()); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type, + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_UNKNOWN); } @@ -241,20 +247,21 @@ TEST_F(AndroidNetworkMonitorTest, DuplicateIfname) { jni::NetworkHandle ipv4_handle = 100; rtc::IPAddress ipv4_address(kTestIpv4Address); jni::NetworkInformation net_info1 = - CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address); + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); net_info1.type = jni::NETWORK_WIFI; jni::NetworkHandle ipv6_handle = 101; rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1); jni::NetworkInformation net_info2 = - CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address); + CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address); net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR; - network_monitor_->SetNetworkInfos({net_info1, net_info2}); + network_monitor_->OnNetworkConnected_n(net_info1); + network_monitor_->OnNetworkConnected_n(net_info2); // The last added. - EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type, + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_CELLULAR); // But both IP addresses are still searchable. 
@@ -272,27 +279,28 @@ TEST_F(AndroidNetworkMonitorTest, DuplicateIfnameDisconnectOwner) { jni::NetworkHandle ipv4_handle = 100; rtc::IPAddress ipv4_address(kTestIpv4Address); jni::NetworkInformation net_info1 = - CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address); + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); net_info1.type = jni::NETWORK_WIFI; jni::NetworkHandle ipv6_handle = 101; rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1); jni::NetworkInformation net_info2 = - CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address); + CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address); net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR; - network_monitor_->SetNetworkInfos({net_info1, net_info2}); + network_monitor_->OnNetworkConnected_n(net_info1); + network_monitor_->OnNetworkConnected_n(net_info2); // The last added. - EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type, + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_CELLULAR); Disconnect(ipv6_handle); // We should now find ipv4_handle. 
- EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type, + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_WIFI); } @@ -302,29 +310,67 @@ TEST_F(AndroidNetworkMonitorTest, DuplicateIfnameDisconnectNonOwner) { jni::NetworkHandle ipv4_handle = 100; rtc::IPAddress ipv4_address(kTestIpv4Address); jni::NetworkInformation net_info1 = - CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address); + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); net_info1.type = jni::NETWORK_WIFI; jni::NetworkHandle ipv6_handle = 101; rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1); jni::NetworkInformation net_info2 = - CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address); + CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address); net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR; - network_monitor_->SetNetworkInfos({net_info1, net_info2}); + network_monitor_->OnNetworkConnected_n(net_info1); + network_monitor_->OnNetworkConnected_n(net_info2); // The last added. - EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("wlan0").adapter_type, + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1).adapter_type, rtc::ADAPTER_TYPE_CELLULAR); Disconnect(ipv4_handle); // We should still find ipv6 network. 
- EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available); - EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type, + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type, rtc::ADAPTER_TYPE_CELLULAR); } +TEST_F(AndroidNetworkMonitorTest, ReconnectWithoutDisconnect) { + network_monitor_->Start(); + + jni::NetworkHandle ipv4_handle = 100; + rtc::IPAddress ipv4_address(kTestIpv4Address); + jni::NetworkInformation net_info1 = + CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address); + net_info1.type = jni::NETWORK_WIFI; + + rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1); + jni::NetworkInformation net_info2 = + CreateNetworkInformation(kTestIfName2, ipv4_handle, ipv6_address); + net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR; + + network_monitor_->OnNetworkConnected_n(net_info1); + network_monitor_->OnNetworkConnected_n(net_info2); + + // Only last one should still be there! + EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName2).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName2).adapter_type, + rtc::ADAPTER_TYPE_CELLULAR); + + EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1).adapter_type, + rtc::ADAPTER_TYPE_UNKNOWN); + + Disconnect(ipv4_handle); + + // Should be empty! 
+ EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName2).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName2).adapter_type, + rtc::ADAPTER_TYPE_UNKNOWN); + EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName1).available); + EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1).adapter_type, + rtc::ADAPTER_TYPE_UNKNOWN); +} + } // namespace test } // namespace webrtc diff --git a/sdk/android/src/jni/android_network_monitor.cc b/sdk/android/src/jni/android_network_monitor.cc index bb2e0d22fb..1d9101d27c 100644 --- a/sdk/android/src/jni/android_network_monitor.cc +++ b/sdk/android/src/jni/android_network_monitor.cc @@ -416,6 +416,25 @@ void AndroidNetworkMonitor::OnNetworkConnected_n( const NetworkInformation& network_info) { RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString(); + + // We speculate that OnNetworkConnected_n can be called with the same handle + // and different if_names. Handle this as if the network was first + // disconnected. + auto iter = network_info_by_handle_.find(network_info.handle); + if (iter != network_info_by_handle_.end()) { + // Remove old if_name for this handle if they don't match. 
+ if (network_info.interface_name != iter->second.interface_name) { + RTC_LOG(LS_INFO) << "Network" + << " handle " << network_info.handle + << " change if_name from: " + << iter->second.interface_name + << " to: " << network_info.interface_name; + RTC_DCHECK(network_handle_by_if_name_[iter->second.interface_name] == + network_info.handle); + network_handle_by_if_name_.erase(iter->second.interface_name); + } + } + network_info_by_handle_[network_info.handle] = network_info; for (const rtc::IPAddress& address : network_info.ip_addresses) { network_handle_by_address_[address] = network_info.handle; @@ -431,7 +450,6 @@ AndroidNetworkMonitor::FindNetworkHandleFromAddressOrName( const rtc::IPAddress& ip_address, absl::string_view if_name) const { RTC_DCHECK_RUN_ON(network_thread_); - RTC_LOG(LS_INFO) << "Find network handle."; if (find_network_handle_without_ipv6_temporary_part_) { for (auto const& iter : network_info_by_handle_) { const std::vector& addresses = iter.second.ip_addresses; @@ -484,12 +502,13 @@ void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) { return; } - for (const rtc::IPAddress& address : iter->second.ip_addresses) { + const auto& network_info = iter->second; + for (const rtc::IPAddress& address : network_info.ip_addresses) { network_handle_by_address_.erase(address); } // We've discovered that the if_name is not always unique, - // i.e it can be several network conencted with same if_name. + // i.e. there can be several networks connected with the same if_name. // // This is handled the following way, // 1) OnNetworkConnected_n overwrites any previous "owner" of an interface @@ -501,7 +520,7 @@ void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) { // network_handle_by_if_name_. // Check if we are registered as "owner" of if_name.
- const auto& if_name = iter->second.interface_name; + const auto& if_name = network_info.interface_name; auto iter2 = network_handle_by_if_name_.find(if_name); RTC_DCHECK(iter2 != network_handle_by_if_name_.end()); if (iter2 != network_handle_by_if_name_.end() && iter2->second == handle) { diff --git a/sdk/android/src/jni/android_network_monitor.h b/sdk/android/src/jni/android_network_monitor.h index d0aad5ea76..f94650be1f 100644 --- a/sdk/android/src/jni/android_network_monitor.h +++ b/sdk/android/src/jni/android_network_monitor.h @@ -114,6 +114,9 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface { const JavaRef& j_connection_type, jint preference); + // Visible for testing. + void OnNetworkConnected_n(const NetworkInformation& network_info); + // Visible for testing. absl::optional FindNetworkHandleFromAddressOrName( const rtc::IPAddress& address, @@ -121,7 +124,6 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface { private: void reset(); - void OnNetworkConnected_n(const NetworkInformation& network_info); void OnNetworkDisconnected_n(NetworkHandle network_handle); void OnNetworkPreference_n(NetworkType type, rtc::NetworkPreference preference); diff --git a/sdk/android/src/jni/android_video_track_source.cc b/sdk/android/src/jni/android_video_track_source.cc index 4f3152dc6f..d553af2445 100644 --- a/sdk/android/src/jni/android_video_track_source.cc +++ b/sdk/android/src/jni/android_video_track_source.cc @@ -65,8 +65,6 @@ void AndroidVideoTrackSource::SetState(JNIEnv* env, if (rtc::Thread::Current() == signaling_thread_) { FireOnChanged(); } else { - // TODO(sakal): Is this even necessary, does FireOnChanged have to be - // called from signaling thread? 
signaling_thread_->PostTask([this] { FireOnChanged(); }); } } diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc index 03321a53c1..9983ae7df2 100644 --- a/sdk/android/src/jni/pc/peer_connection.cc +++ b/sdk/android/src/jni/pc/peer_connection.cc @@ -252,8 +252,6 @@ void JavaToNativeRTCConfiguration( rtc_config->turn_customizer = GetNativeTurnCustomizer(jni, j_turn_customizer); - rtc_config->disable_ipv6 = - Java_RTCConfiguration_getDisableIpv6(jni, j_rtc_config); rtc_config->media_config.enable_dscp = Java_RTCConfiguration_getEnableDscp(jni, j_rtc_config); rtc_config->media_config.video.enable_cpu_adaptation = @@ -842,6 +840,32 @@ static void JNI_PeerConnection_NewGetStats( ExtractNativePC(jni, j_pc)->GetStats(callback.get()); } +static void JNI_PeerConnection_NewGetStatsSender( + JNIEnv* jni, + const JavaParamRef& j_pc, + jlong native_sender, + const JavaParamRef& j_callback) { + auto callback = + rtc::make_ref_counted(jni, j_callback); + ExtractNativePC(jni, j_pc)->GetStats( + rtc::scoped_refptr( + reinterpret_cast(native_sender)), + rtc::scoped_refptr(callback.get())); +} + +static void JNI_PeerConnection_NewGetStatsReceiver( + JNIEnv* jni, + const JavaParamRef& j_pc, + jlong native_receiver, + const JavaParamRef& j_callback) { + auto callback = + rtc::make_ref_counted(jni, j_callback); + ExtractNativePC(jni, j_pc)->GetStats( + rtc::scoped_refptr( + reinterpret_cast(native_receiver)), + rtc::scoped_refptr(callback.get())); +} + static jboolean JNI_PeerConnection_SetBitrate( JNIEnv* jni, const JavaParamRef& j_pc, diff --git a/sdk/android/src/jni/pc/rtp_sender.cc b/sdk/android/src/jni/pc/rtp_sender.cc index 233a353654..fc83862958 100644 --- a/sdk/android/src/jni/pc/rtp_sender.cc +++ b/sdk/android/src/jni/pc/rtp_sender.cc @@ -110,5 +110,15 @@ static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni, j_frame_encryptor_pointer))); } +static ScopedJavaLocalRef JNI_RtpSender_GetMediaType( + JNIEnv* jni, + jlong 
j_rtp_sender_pointer) { + cricket::MediaType media_type = + reinterpret_cast(j_rtp_sender_pointer)->media_type(); + return media_type == cricket::MEDIA_TYPE_AUDIO + ? NativeToJavaString(jni, "audio") + : NativeToJavaString(jni, "video"); +} + } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/software_video_decoder_factory.cc b/sdk/android/src/jni/software_video_decoder_factory.cc new file mode 100644 index 0000000000..151bf5f205 --- /dev/null +++ b/sdk/android/src/jni/software_video_decoder_factory.cc @@ -0,0 +1,53 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/video_decoder.h" +#include "sdk/android/generated_swcodecs_jni/SoftwareVideoDecoderFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_codec_info.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_SoftwareVideoDecoderFactory_CreateFactory(JNIEnv* env) { + return webrtc::NativeToJavaPointer( + CreateBuiltinVideoDecoderFactory().release()); +} + +static jlong JNI_SoftwareVideoDecoderFactory_CreateDecoder( + JNIEnv* env, + jlong j_factory, + const webrtc::JavaParamRef& j_video_codec_info) { + auto* const native_factory = + reinterpret_cast(j_factory); + const auto video_format = + webrtc::jni::VideoCodecInfoToSdpVideoFormat(env, j_video_codec_info); + + auto decoder = native_factory->CreateVideoDecoder(video_format); + if (decoder == nullptr) { + return 0; + } + return webrtc::NativeToJavaPointer(decoder.release()); +} + +static 
webrtc::ScopedJavaLocalRef +JNI_SoftwareVideoDecoderFactory_GetSupportedCodecs(JNIEnv* env, + jlong j_factory) { + auto* const native_factory = + reinterpret_cast(j_factory); + + return webrtc::NativeToJavaList(env, native_factory->GetSupportedFormats(), + &webrtc::jni::SdpVideoFormatToVideoCodecInfo); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/software_video_encoder_factory.cc b/sdk/android/src/jni/software_video_encoder_factory.cc new file mode 100644 index 0000000000..4b86960198 --- /dev/null +++ b/sdk/android/src/jni/software_video_encoder_factory.cc @@ -0,0 +1,53 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/builtin_video_encoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "sdk/android/generated_swcodecs_jni/SoftwareVideoEncoderFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_codec_info.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_SoftwareVideoEncoderFactory_CreateFactory(JNIEnv* env) { + return webrtc::NativeToJavaPointer( + CreateBuiltinVideoEncoderFactory().release()); +} + +static jlong JNI_SoftwareVideoEncoderFactory_CreateEncoder( + JNIEnv* env, + jlong j_factory, + const webrtc::JavaParamRef& j_video_codec_info) { + auto* const native_factory = + reinterpret_cast(j_factory); + const auto video_format = + webrtc::jni::VideoCodecInfoToSdpVideoFormat(env, j_video_codec_info); + + auto encoder = native_factory->CreateVideoEncoder(video_format); + if (encoder == nullptr) { + return 0; + } + return webrtc::NativeToJavaPointer(encoder.release()); +} + +static webrtc::ScopedJavaLocalRef +JNI_SoftwareVideoEncoderFactory_GetSupportedCodecs(JNIEnv* env, + jlong j_factory) { + auto* const native_factory = + reinterpret_cast(j_factory); + + return webrtc::NativeToJavaList(env, native_factory->GetSupportedFormats(), + &webrtc::jni::SdpVideoFormatToVideoCodecInfo); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/media_constraints.cc b/sdk/media_constraints.cc index c77bf88929..bbb46edaae 100644 --- a/sdk/media_constraints.cc +++ b/sdk/media_constraints.cc @@ -115,7 +115,6 @@ const char MediaConstraints::kUseRtpMux[] = "googUseRtpMUX"; // Below constraints should be used during PeerConnection construction. // Google-specific constraint keys. 
const char MediaConstraints::kEnableDscp[] = "googDscp"; -const char MediaConstraints::kEnableIPv6[] = "googIPv6"; const char MediaConstraints::kEnableVideoSuspendBelowMinBitrate[] = "googSuspendBelowMinBitrate"; const char MediaConstraints::kCombinedAudioVideoBwe[] = @@ -151,11 +150,6 @@ void CopyConstraintsIntoRtcConfiguration( return; } - bool enable_ipv6; - if (FindConstraint(constraints, MediaConstraints::kEnableIPv6, &enable_ipv6, - nullptr)) { - configuration->disable_ipv6 = !enable_ipv6; - } FindConstraint(constraints, MediaConstraints::kEnableDscp, &configuration->media_config.enable_dscp, nullptr); FindConstraint(constraints, MediaConstraints::kCpuOveruseDetection, diff --git a/sdk/media_constraints_unittest.cc b/sdk/media_constraints_unittest.cc index 478ba98a2d..2d25da03e7 100644 --- a/sdk/media_constraints_unittest.cc +++ b/sdk/media_constraints_unittest.cc @@ -20,8 +20,7 @@ namespace { // plus audio_jitter_buffer_max_packets. bool Matches(const PeerConnectionInterface::RTCConfiguration& a, const PeerConnectionInterface::RTCConfiguration& b) { - return a.disable_ipv6 == b.disable_ipv6 && - a.audio_jitter_buffer_max_packets == + return a.audio_jitter_buffer_max_packets == b.audio_jitter_buffer_max_packets && a.screencast_min_bitrate == b.screencast_min_bitrate && a.combined_audio_video_bwe == b.combined_audio_video_bwe && @@ -36,18 +35,6 @@ TEST(MediaConstraints, CopyConstraintsIntoRtcConfiguration) { CopyConstraintsIntoRtcConfiguration(&constraints_empty, &configuration); EXPECT_TRUE(Matches(old_configuration, configuration)); - const MediaConstraints constraits_enable_ipv6( - {MediaConstraints::Constraint(MediaConstraints::kEnableIPv6, "true")}, - {}); - CopyConstraintsIntoRtcConfiguration(&constraits_enable_ipv6, &configuration); - EXPECT_FALSE(configuration.disable_ipv6); - const MediaConstraints constraints_disable_ipv6( - {MediaConstraints::Constraint(MediaConstraints::kEnableIPv6, "false")}, - {}); - 
CopyConstraintsIntoRtcConfiguration(&constraints_disable_ipv6, - &configuration); - EXPECT_TRUE(configuration.disable_ipv6); - const MediaConstraints constraints_screencast( {MediaConstraints::Constraint(MediaConstraints::kScreencastMinBitrate, "27")}, diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index 345bf179bc..1b0d14baf1 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -97,11 +97,6 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) RTCCandidateNetworkPolicy candidateNetworkPolicy; @property(nonatomic, assign) RTCContinualGatheringPolicy continualGatheringPolicy; -/** If set to YES, don't gather IPv6 ICE candidates. - * Default is NO. - */ -@property(nonatomic, assign) BOOL disableIPV6; - /** If set to YES, don't gather IPv6 ICE candidates on Wi-Fi. * Only intended to be used on specific devices. Certain phones disable IPv6 * when the screen is turned off and it would be better to just disable the diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index 5139cc27bf..4331718348 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -32,7 +32,6 @@ @synthesize tcpCandidatePolicy = _tcpCandidatePolicy; @synthesize candidateNetworkPolicy = _candidateNetworkPolicy; @synthesize continualGatheringPolicy = _continualGatheringPolicy; -@synthesize disableIPV6 = _disableIPV6; @synthesize disableIPV6OnWiFi = _disableIPV6OnWiFi; @synthesize maxIPv6Networks = _maxIPv6Networks; @synthesize disableLinkLocalNetworks = _disableLinkLocalNetworks; @@ -104,9 +103,7 @@ candidateNetworkPolicyForNativePolicy:config.candidate_network_policy]; webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy = config.continual_gathering_policy; - _continualGatheringPolicy = - [[self class] 
continualGatheringPolicyForNativePolicy:nativePolicy]; - _disableIPV6 = config.disable_ipv6; + _continualGatheringPolicy = [[self class] continualGatheringPolicyForNativePolicy:nativePolicy]; _disableIPV6OnWiFi = config.disable_ipv6_on_wifi; _maxIPv6Networks = config.max_ipv6_networks; _disableLinkLocalNetworks = config.disable_link_local_networks; @@ -191,7 +188,6 @@ _shouldSurfaceIceCandidatesOnIceTransportTypeChanged, _iceCheckMinInterval, _disableLinkLocalNetworks, - _disableIPV6, _disableIPV6OnWiFi, _maxIPv6Networks, _activeResetSrtpParams, @@ -221,9 +217,8 @@ [[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy]; nativeConfig->candidate_network_policy = [[self class] nativeCandidateNetworkPolicyForPolicy:_candidateNetworkPolicy]; - nativeConfig->continual_gathering_policy = [[self class] - nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy]; - nativeConfig->disable_ipv6 = _disableIPV6; + nativeConfig->continual_gathering_policy = + [[self class] nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy]; nativeConfig->disable_ipv6_on_wifi = _disableIPV6OnWiFi; nativeConfig->max_ipv6_networks = _maxIPv6Networks; nativeConfig->disable_link_local_networks = _disableLinkLocalNetworks; diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.h b/sdk/objc/api/peerconnection/RTCFieldTrials.h index 1f290d8a66..3e8fcc8075 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.h +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.h @@ -13,9 +13,7 @@ #import "RTCMacros.h" /** The only valid value for the following if set is kRTCFieldTrialEnabledValue. 
*/ -RTC_EXTERN NSString * const kRTCFieldTrialAudioForceNoTWCCKey; -RTC_EXTERN NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey; -RTC_EXTERN NSString * const kRTCFieldTrialSendSideBweWithOverheadKey; +RTC_EXTERN NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey; RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03AdvertisedKey; RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03Key; RTC_EXTERN NSString * const kRTCFieldTrialH264HighProfileKey; diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 852aeeec84..193da9e4f7 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -16,9 +16,7 @@ #include "system_wrappers/include/field_trial.h" -NSString * const kRTCFieldTrialAudioForceNoTWCCKey = @"WebRTC-Audio-ForceNoTWCC"; -NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; -NSString * const kRTCFieldTrialSendSideBweWithOverheadKey = @"WebRTC-SendSideBwe-WithOverhead"; +NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h index faa7962821..7374b2b72f 100644 --- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h +++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h @@ -10,7 +10,7 @@ #import "RTCLegacyStatsReport.h" -#include "api/stats_types.h" +#include "api/legacy_stats_types.h" NS_ASSUME_NONNULL_BEGIN diff --git a/stats/rtc_stats.cc b/stats/rtc_stats.cc index 375e1f75c0..ae352fa170 100644 --- a/stats/rtc_stats.cc +++ b/stats/rtc_stats.cc @@ -187,12 +187,12 @@ 
RTCStats::MembersOfThisObjectAndAncestors(size_t additional_capacity) const { } \ template <> \ std::string RTCStatsMember::ValueToString() const { \ - RTC_DCHECK(is_defined_); \ + RTC_DCHECK(value_.has_value()); \ return to_str; \ } \ template <> \ std::string RTCStatsMember::ValueToJson() const { \ - RTC_DCHECK(is_defined_); \ + RTC_DCHECK(value_.has_value()); \ return to_json; \ } \ template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCStatsMember @@ -201,139 +201,144 @@ WEBRTC_DEFINE_RTCSTATSMEMBER(bool, kBool, false, false, - rtc::ToString(value_), - rtc::ToString(value_)); + rtc::ToString(*value_), + rtc::ToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(int32_t, kInt32, false, false, - rtc::ToString(value_), - rtc::ToString(value_)); + rtc::ToString(*value_), + rtc::ToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(uint32_t, kUint32, false, false, - rtc::ToString(value_), - rtc::ToString(value_)); + rtc::ToString(*value_), + rtc::ToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(int64_t, kInt64, false, false, - rtc::ToString(value_), - ToStringAsDouble(value_)); + rtc::ToString(*value_), + ToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(uint64_t, kUint64, false, false, - rtc::ToString(value_), - ToStringAsDouble(value_)); + rtc::ToString(*value_), + ToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(double, kDouble, false, false, - rtc::ToString(value_), - ToStringAsDouble(value_)); -WEBRTC_DEFINE_RTCSTATSMEMBER(std::string, kString, false, true, value_, value_); + rtc::ToString(*value_), + ToStringAsDouble(*value_)); +WEBRTC_DEFINE_RTCSTATSMEMBER(std::string, + kString, + false, + true, + *value_, + *value_); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceBool, true, false, - VectorToString(value_), - VectorToString(value_)); + VectorToString(*value_), + VectorToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceInt32, true, false, - VectorToString(value_), - VectorToString(value_)); + VectorToString(*value_), + 
VectorToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceUint32, true, false, - VectorToString(value_), - VectorToString(value_)); + VectorToString(*value_), + VectorToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceInt64, true, false, - VectorToString(value_), - VectorToStringAsDouble(value_)); + VectorToString(*value_), + VectorToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceUint64, true, false, - VectorToString(value_), - VectorToStringAsDouble(value_)); + VectorToString(*value_), + VectorToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceDouble, true, false, - VectorToString(value_), - VectorToStringAsDouble(value_)); + VectorToString(*value_), + VectorToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceString, true, false, - VectorOfStringsToString(value_), - VectorOfStringsToString(value_)); + VectorOfStringsToString(*value_), + VectorOfStringsToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(rtc_stats_internal::MapStringUint64, kMapStringUint64, false, false, - MapToString(value_), - MapToStringAsDouble(value_)); + MapToString(*value_), + MapToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(rtc_stats_internal::MapStringDouble, kMapStringDouble, false, false, - MapToString(value_), - MapToStringAsDouble(value_)); + MapToString(*value_), + MapToStringAsDouble(*value_)); // Restricted members that expose hardware capabilites. 
-template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) 
RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; -template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCRestrictedStatsMember, StatExposureCriteria::kHardwareCapability>; diff --git a/stats/rtcstats_objects.cc b/stats/rtcstats_objects.cc index c57df4e672..de8f65da7d 100644 --- a/stats/rtcstats_objects.cc +++ b/stats/rtcstats_objects.cc @@ -185,7 +185,9 @@ WEBRTC_RTCSTATS_IMPL(RTCIceCandidatePairStats, RTCStats, "candidate-pair", &responses_sent, &consent_requests_sent, &packets_discarded_on_send, - &bytes_discarded_on_send) + &bytes_discarded_on_send, + &last_packet_received_timestamp, + &last_packet_sent_timestamp) // clang-format on RTCIceCandidatePairStats::RTCIceCandidatePairStats(const std::string& id, @@ -216,7 +218,9 @@ RTCIceCandidatePairStats::RTCIceCandidatePairStats(std::string&& id, responses_sent("responsesSent"), consent_requests_sent("consentRequestsSent"), packets_discarded_on_send("packetsDiscardedOnSend"), - bytes_discarded_on_send("bytesDiscardedOnSend") {} + bytes_discarded_on_send("bytesDiscardedOnSend"), + last_packet_received_timestamp("lastPacketReceivedTimestamp"), + last_packet_sent_timestamp("lastPacketSentTimestamp") {} RTCIceCandidatePairStats::RTCIceCandidatePairStats( const RTCIceCandidatePairStats& other) = default; @@ -315,27 +319,30 @@ const char* RTCRemoteIceCandidateStats::type() const { } // clang-format off -WEBRTC_RTCSTATS_IMPL(RTCMediaStreamStats, RTCStats, "stream", +WEBRTC_RTCSTATS_IMPL(DEPRECATED_RTCMediaStreamStats, RTCStats, "stream", &stream_identifier, &track_ids) // clang-format on 
-RTCMediaStreamStats::RTCMediaStreamStats(const std::string& id, - int64_t timestamp_us) - : RTCMediaStreamStats(std::string(id), timestamp_us) {} +DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats( + const std::string& id, + int64_t timestamp_us) + : DEPRECATED_RTCMediaStreamStats(std::string(id), timestamp_us) {} -RTCMediaStreamStats::RTCMediaStreamStats(std::string&& id, int64_t timestamp_us) +DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats( + std::string&& id, + int64_t timestamp_us) : RTCStats(std::move(id), timestamp_us), stream_identifier("streamIdentifier"), track_ids("trackIds") {} -RTCMediaStreamStats::RTCMediaStreamStats(const RTCMediaStreamStats& other) = - default; +DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats( + const DEPRECATED_RTCMediaStreamStats& other) = default; -RTCMediaStreamStats::~RTCMediaStreamStats() {} +DEPRECATED_RTCMediaStreamStats::~DEPRECATED_RTCMediaStreamStats() {} // clang-format off -WEBRTC_RTCSTATS_IMPL(RTCMediaStreamTrackStats, RTCStats, "track", +WEBRTC_RTCSTATS_IMPL(DEPRECATED_RTCMediaStreamTrackStats, RTCStats, "track", &track_identifier, &media_source_id, &remote_source, @@ -362,28 +369,20 @@ WEBRTC_RTCSTATS_IMPL(RTCMediaStreamTrackStats, RTCStats, "track", &silent_concealed_samples, &concealment_events, &inserted_samples_for_deceleration, - &removed_samples_for_acceleration, - &jitter_buffer_flushes, - &delayed_packet_outage_samples, - &relative_packet_arrival_delay, - &interruption_count, - &total_interruption_duration, - &total_frames_duration, - &sum_squared_frame_durations, - &freeze_count, - &pause_count, - &total_freezes_duration, - &total_pauses_duration) + &removed_samples_for_acceleration) // clang-format on -RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(const std::string& id, - int64_t timestamp_us, - const char* kind) - : RTCMediaStreamTrackStats(std::string(id), timestamp_us, kind) {} +DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats( + 
const std::string& id, + int64_t timestamp_us, + const char* kind) + : DEPRECATED_RTCMediaStreamTrackStats(std::string(id), timestamp_us, kind) { +} -RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(std::string&& id, - int64_t timestamp_us, - const char* kind) +DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats( + std::string&& id, + int64_t timestamp_us, + const char* kind) : RTCStats(std::move(id), timestamp_us), track_identifier("trackIdentifier"), media_source_id("mediaSourceId"), @@ -411,33 +410,15 @@ RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(std::string&& id, silent_concealed_samples("silentConcealedSamples"), concealment_events("concealmentEvents"), inserted_samples_for_deceleration("insertedSamplesForDeceleration"), - removed_samples_for_acceleration("removedSamplesForAcceleration"), - jitter_buffer_flushes( - "jitterBufferFlushes", - {NonStandardGroupId::kRtcAudioJitterBufferMaxPackets}), - delayed_packet_outage_samples( - "delayedPacketOutageSamples", - {NonStandardGroupId::kRtcAudioJitterBufferMaxPackets, - NonStandardGroupId::kRtcStatsRelativePacketArrivalDelay}), - relative_packet_arrival_delay( - "relativePacketArrivalDelay", - {NonStandardGroupId::kRtcStatsRelativePacketArrivalDelay}), - interruption_count("interruptionCount"), - total_interruption_duration("totalInterruptionDuration"), - total_frames_duration("totalFramesDuration"), - sum_squared_frame_durations("sumOfSquaredFramesDuration"), - freeze_count("freezeCount"), - pause_count("pauseCount"), - total_freezes_duration("totalFreezesDuration"), - total_pauses_duration("totalPausesDuration") { + removed_samples_for_acceleration("removedSamplesForAcceleration") { RTC_DCHECK(kind == RTCMediaStreamTrackKind::kAudio || kind == RTCMediaStreamTrackKind::kVideo); } -RTCMediaStreamTrackStats::RTCMediaStreamTrackStats( - const RTCMediaStreamTrackStats& other) = default; +DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats( + const 
DEPRECATED_RTCMediaStreamTrackStats& other) = default; -RTCMediaStreamTrackStats::~RTCMediaStreamTrackStats() {} +DEPRECATED_RTCMediaStreamTrackStats::~DEPRECATED_RTCMediaStreamTrackStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL(RTCPeerConnectionStats, RTCStats, "peer-connection", @@ -582,6 +563,7 @@ WEBRTC_RTCSTATS_IMPL( &nack_count, &qp_sum, &goog_timing_frame_info, + &power_efficient_decoder, &jitter_buffer_flushes, &delayed_packet_outage_samples, &relative_packet_arrival_delay, @@ -646,6 +628,7 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id, nack_count("nackCount"), qp_sum("qpSum"), goog_timing_frame_info("googTimingFrameInfo"), + power_efficient_decoder("powerEfficientDecoder"), jitter_buffer_flushes( "jitterBufferFlushes", {NonStandardGroupId::kRtcAudioJitterBufferMaxPackets}), @@ -696,7 +679,9 @@ WEBRTC_RTCSTATS_IMPL( &pli_count, &nack_count, &qp_sum, - &active) + &active, + &power_efficient_encoder, + &scalability_mode) // clang-format on RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(const std::string& id, @@ -736,7 +721,9 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id, pli_count("pliCount"), nack_count("nackCount"), qp_sum("qpSum"), - active("active") {} + active("active"), + power_efficient_encoder("powerEfficientEncoder"), + scalability_mode("scalabilityMode") {} RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats( const RTCOutboundRTPStreamStats& other) = default; diff --git a/system_wrappers/BUILD.gn b/system_wrappers/BUILD.gn index c979a6aae6..a6bcc9c28e 100644 --- a/system_wrappers/BUILD.gn +++ b/system_wrappers/BUILD.gn @@ -87,11 +87,16 @@ rtc_library("field_trial") { defines = [ "WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT" ] } deps = [ + "../experiments:registered_field_trials", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:stringutils", + "../rtc_base/containers:flat_set", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + 
"//third_party/abseil-cpp/absl/strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("metrics") { diff --git a/system_wrappers/include/field_trial.h b/system_wrappers/include/field_trial.h index eb57f2908e..8d0ad258c1 100644 --- a/system_wrappers/include/field_trial.h +++ b/system_wrappers/include/field_trial.h @@ -14,6 +14,7 @@ #include #include "absl/strings/string_view.h" +#include "rtc_base/containers/flat_set.h" // Field trials allow webrtc clients (such as Chrome) to turn on feature code // in binaries out in the field and gather information with that. @@ -97,6 +98,18 @@ bool FieldTrialsStringIsValid(absl::string_view trials_string); std::string MergeFieldTrialsStrings(absl::string_view first, absl::string_view second); +// This helper allows to temporary "register" a field trial within the current +// scope. This is only useful for tests that use the global field trial string, +// otherwise you can use `webrtc::FieldTrialsRegistry`. +// +// If you want to isolate changes to the global field trial string itself within +// the current scope you should use `webrtc::test::ScopedFieldTrials`. +class FieldTrialsAllowedInScopeForTesting { + public: + explicit FieldTrialsAllowedInScopeForTesting(flat_set keys); + ~FieldTrialsAllowedInScopeForTesting(); +}; + } // namespace field_trial } // namespace webrtc diff --git a/system_wrappers/source/DEPS b/system_wrappers/source/DEPS new file mode 100644 index 0000000000..ac7f5a234f --- /dev/null +++ b/system_wrappers/source/DEPS @@ -0,0 +1,6 @@ +specific_include_rules = { + # TODO(bugs.webrtc.org/10335): Remove rule when global string is removed. 
+ "field_trial\.cc": [ + "+experiments/registered_field_trials.h", + ], +} diff --git a/system_wrappers/source/field_trial.cc b/system_wrappers/source/field_trial.cc index f83876be03..8f15b4eb7a 100644 --- a/system_wrappers/source/field_trial.cc +++ b/system_wrappers/source/field_trial.cc @@ -13,9 +13,13 @@ #include #include +#include +#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" +#include "experiments/registered_field_trials.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" #include "rtc_base/logging.h" #include "rtc_base/string_encode.h" @@ -27,7 +31,14 @@ namespace field_trial { static const char* trials_init_string = NULL; namespace { + constexpr char kPersistentStringSeparator = '/'; + +flat_set& TestKeys() { + static auto* test_keys = new flat_set(); + return *test_keys; +} + // Validates the given field trial string. // E.g.: // "WebRTC-experimentFoo/Enabled/WebRTC-experimentBar/Enabled100kbps/" @@ -67,6 +78,7 @@ bool FieldTrialsStringIsValidInternal(const absl::string_view trials) { return true; } + } // namespace bool FieldTrialsStringIsValid(absl::string_view trials_string) { @@ -104,6 +116,12 @@ std::string MergeFieldTrialsStrings(absl::string_view first, #ifndef WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT std::string FindFullName(absl::string_view name) { +#if WEBRTC_STRICT_FIELD_TRIALS + RTC_DCHECK(absl::c_linear_search(kRegisteredFieldTrials, name) || + TestKeys().contains(name)) + << name << " is not registered."; +#endif + if (trials_init_string == NULL) return std::string(); @@ -150,5 +168,14 @@ const char* GetFieldTrialString() { return trials_init_string; } +FieldTrialsAllowedInScopeForTesting::FieldTrialsAllowedInScopeForTesting( + flat_set keys) { + TestKeys() = std::move(keys); +} + +FieldTrialsAllowedInScopeForTesting::~FieldTrialsAllowedInScopeForTesting() { + TestKeys().clear(); +} + } // namespace field_trial } // namespace webrtc diff --git a/test/BUILD.gn b/test/BUILD.gn index 
8d226d9c72..524e3a421e 100644 --- a/test/BUILD.gn +++ b/test/BUILD.gn @@ -255,7 +255,7 @@ rtc_library("explicit_key_value_config") { ] deps = [ - "../api:field_trials_view", + "../api:field_trials_registry", "../rtc_base:checks", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ] @@ -271,7 +271,7 @@ rtc_library("scoped_key_value_config") { deps = [ ":field_trial", - "../api:field_trials_view", + "../api:field_trials_registry", "../rtc_base:checks", "../system_wrappers:field_trial", ] @@ -332,6 +332,7 @@ if (is_ios) { "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metrics_exporter", + "../api/test/metrics:metrics_set_proto_file_exporter", "../api/test/metrics:print_result_proxy_metrics_exporter", "../api/test/metrics:stdout_metrics_exporter", "../sdk:helpers_objc", @@ -444,6 +445,7 @@ rtc_library("video_test_support") { "../api:sequence_checker", "../api/test/video:video_frame_writer", "../api/video:encoded_image", + "../api/video:resolution", "../api/video:video_frame", "../api/video_codecs:video_codecs_api", "../common_video", @@ -518,6 +520,7 @@ if (rtc_include_tests && !build_with_chromium) { "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metrics_exporter", + "../api/test/metrics:metrics_set_proto_file_exporter", "../api/test/metrics:print_result_proxy_metrics_exporter", "../api/test/metrics:stdout_metrics_exporter", "../rtc_base", @@ -653,6 +656,7 @@ if (rtc_include_tests && !build_with_chromium) { "../rtc_base/synchronization:mutex", "../rtc_base/system:file_wrapper", "pc/e2e:e2e_unittests", + "pc/e2e/analyzer/video:video_analyzer_unittests", "peer_scenario/tests", "scenario:scenario_unittests", "time_controller:time_controller", @@ -1084,6 +1088,7 @@ if (is_mac) { "OpenGL.framework", "CoreVideo.framework", ] + defines = [ "GL_SILENCE_DEPRECATION" ] } } @@ 
-1119,6 +1124,9 @@ rtc_library("test_renderer_generic") { "gl/gl_renderer.h", ] } + if (is_mac) { + defines = [ "GL_SILENCE_DEPRECATION" ] + } if ((is_linux || is_chromeos) && rtc_use_x11) { sources += [ diff --git a/test/OWNERS b/test/OWNERS index 9938fac846..a1bd812244 100644 --- a/test/OWNERS +++ b/test/OWNERS @@ -3,3 +3,5 @@ srte@webrtc.org stefan@webrtc.org titovartem@webrtc.org landrey@webrtc.org +mbonadei@webrtc.org +jleconte@webrtc.org diff --git a/test/call_test.cc b/test/call_test.cc index 7e7c9bb674..156b8a7f9e 100644 --- a/test/call_test.cc +++ b/test/call_test.cc @@ -591,8 +591,11 @@ void CallTest::Start() { } void CallTest::StartVideoStreams() { - for (VideoSendStream* video_send_stream : video_send_streams_) - video_send_stream->Start(); + for (size_t i = 0; i < video_send_streams_.size(); ++i) { + std::vector active_rtp_streams( + video_send_configs_[i].rtp.ssrcs.size(), true); + video_send_streams_[i]->StartPerRtpStream(active_rtp_streams); + } for (VideoReceiveStreamInterface* video_recv_stream : video_receive_streams_) video_recv_stream->Start(); } diff --git a/test/explicit_key_value_config.cc b/test/explicit_key_value_config.cc index c9e5ac1c28..90690c0514 100644 --- a/test/explicit_key_value_config.cc +++ b/test/explicit_key_value_config.cc @@ -11,7 +11,6 @@ #include "test/explicit_key_value_config.h" #include "absl/strings/string_view.h" -#include "api/field_trials_view.h" #include "rtc_base/checks.h" namespace webrtc { @@ -46,7 +45,7 @@ ExplicitKeyValueConfig::ExplicitKeyValueConfig(absl::string_view s) { RTC_CHECK_EQ(field_start, s.size()); } -std::string ExplicitKeyValueConfig::Lookup(absl::string_view key) const { +std::string ExplicitKeyValueConfig::GetValue(absl::string_view key) const { auto it = key_value_map_.find(key); if (it != key_value_map_.end()) return it->second; diff --git a/test/explicit_key_value_config.h b/test/explicit_key_value_config.h index 5685c13604..f14a10432c 100644 --- a/test/explicit_key_value_config.h +++ 
b/test/explicit_key_value_config.h @@ -16,17 +16,18 @@ #include #include "absl/strings/string_view.h" -#include "api/field_trials_view.h" +#include "api/field_trials_registry.h" namespace webrtc { namespace test { -class ExplicitKeyValueConfig : public FieldTrialsView { +class ExplicitKeyValueConfig : public FieldTrialsRegistry { public: explicit ExplicitKeyValueConfig(absl::string_view s); - std::string Lookup(absl::string_view key) const override; private: + std::string GetValue(absl::string_view key) const override; + // Unlike std::less, std::less<> is transparent and allows // heterogeneous lookup directly with absl::string_view. std::map> key_value_map_; diff --git a/test/fake_decoder.cc b/test/fake_decoder.cc index b71f960c9a..53fce37de1 100644 --- a/test/fake_decoder.cc +++ b/test/fake_decoder.cc @@ -95,7 +95,7 @@ const char* FakeDecoder::kImplementationName = "fake_decoder"; VideoDecoder::DecoderInfo FakeDecoder::GetDecoderInfo() const { DecoderInfo info; info.implementation_name = kImplementationName; - info.is_hardware_accelerated = false; + info.is_hardware_accelerated = true; return info; } const char* FakeDecoder::ImplementationName() const { diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc index 5752a41fda..bfc72c123d 100644 --- a/test/fake_encoder.cc +++ b/test/fake_encoder.cc @@ -275,6 +275,7 @@ const char* FakeEncoder::kImplementationName = "fake_encoder"; VideoEncoder::EncoderInfo FakeEncoder::GetEncoderInfo() const { EncoderInfo info; info.implementation_name = kImplementationName; + info.is_hardware_accelerated = true; MutexLock lock(&mutex_); for (int sid = 0; sid < config_.numberOfSimulcastStreams; ++sid) { int number_of_temporal_layers = diff --git a/test/fuzzers/BUILD.gn b/test/fuzzers/BUILD.gn index 9896e07563..fd67372506 100644 --- a/test/fuzzers/BUILD.gn +++ b/test/fuzzers/BUILD.gn @@ -269,22 +269,6 @@ webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") { ] } -webrtc_fuzzer_test("audio_decoder_isac_fuzzer") { - sources = [ 
"audio_decoder_isac_fuzzer.cc" ] - deps = [ - ":audio_decoder_fuzzer", - "../../modules/audio_coding:isac", - ] -} - -webrtc_fuzzer_test("audio_decoder_isacfix_fuzzer") { - sources = [ "audio_decoder_isacfix_fuzzer.cc" ] - deps = [ - ":audio_decoder_fuzzer", - "../../modules/audio_coding:isac_fix", - ] -} - webrtc_fuzzer_test("audio_decoder_opus_fuzzer") { sources = [ "audio_decoder_opus_fuzzer.cc" ] deps = [ @@ -350,24 +334,6 @@ webrtc_fuzzer_test("audio_encoder_opus_fuzzer") { ] } -webrtc_fuzzer_test("audio_encoder_isac_fixed_fuzzer") { - sources = [ "audio_encoder_isac_fixed_fuzzer.cc" ] - deps = [ - ":audio_encoder_fuzzer", - "../../api/audio_codecs/isac:audio_encoder_isac_fix", - "../../rtc_base:checks", - ] -} - -webrtc_fuzzer_test("audio_encoder_isac_float_fuzzer") { - sources = [ "audio_encoder_isac_float_fuzzer.cc" ] - deps = [ - ":audio_encoder_fuzzer", - "../../api/audio_codecs/isac:audio_encoder_isac_float", - "../../rtc_base:checks", - ] -} - webrtc_fuzzer_test("turn_unwrap_fuzzer") { sources = [ "turn_unwrap_fuzzer.cc" ] deps = [ diff --git a/test/fuzzers/audio_decoder_isac_fuzzer.cc b/test/fuzzers/audio_decoder_isac_fuzzer.cc deleted file mode 100644 index 96fa75d6ce..0000000000 --- a/test/fuzzers/audio_decoder_isac_fuzzer.cc +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" -#include "test/fuzzers/audio_decoder_fuzzer.h" - -namespace webrtc { -void FuzzOneInput(const uint8_t* data, size_t size) { - if (size > 20000) { - return; - } - const int sample_rate_hz = size % 2 == 0 ? 
16000 : 32000; // 16 or 32 kHz. - static const size_t kAllocatedOuputSizeSamples = 32000 / 10; // 100 ms. - int16_t output[kAllocatedOuputSizeSamples]; - AudioDecoderIsacFloatImpl::Config c; - c.sample_rate_hz = sample_rate_hz; - AudioDecoderIsacFloatImpl dec(c); - FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec, - sample_rate_hz, sizeof(output), output); -} -} // namespace webrtc diff --git a/test/fuzzers/audio_decoder_isacfix_fuzzer.cc b/test/fuzzers/audio_decoder_isacfix_fuzzer.cc deleted file mode 100644 index 08aa69feb4..0000000000 --- a/test/fuzzers/audio_decoder_isacfix_fuzzer.cc +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" -#include "test/fuzzers/audio_decoder_fuzzer.h" - -namespace webrtc { -void FuzzOneInput(const uint8_t* data, size_t size) { - if (size > 20000) { - return; - } - static const int kSampleRateHz = 16000; - static const size_t kAllocatedOuputSizeSamples = 16000 / 10; // 100 ms. 
- int16_t output[kAllocatedOuputSizeSamples]; - AudioDecoderIsacFixImpl::Config c; - c.sample_rate_hz = kSampleRateHz; - AudioDecoderIsacFixImpl dec(c); - FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec, - kSampleRateHz, sizeof(output), output); -} -} // namespace webrtc diff --git a/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc b/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc deleted file mode 100644 index 5357dc1b3e..0000000000 --- a/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" -#include "rtc_base/checks.h" -#include "test/fuzzers/audio_encoder_fuzzer.h" - -namespace webrtc { - -void FuzzOneInput(const uint8_t* data, size_t size) { - AudioEncoderIsacFix::Config config; - RTC_CHECK(config.IsOk()); - constexpr int kPayloadType = 100; - FuzzAudioEncoder( - /*data_view=*/{data, size}, - /*encoder=*/AudioEncoderIsacFix::MakeAudioEncoder(config, kPayloadType)); -} - -} // namespace webrtc diff --git a/test/fuzzers/audio_encoder_isac_float_fuzzer.cc b/test/fuzzers/audio_encoder_isac_float_fuzzer.cc deleted file mode 100644 index f9e2e0206d..0000000000 --- a/test/fuzzers/audio_encoder_isac_float_fuzzer.cc +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/isac/audio_encoder_isac_float.h" -#include "rtc_base/checks.h" -#include "test/fuzzers/audio_encoder_fuzzer.h" - -namespace webrtc { - -void FuzzOneInput(const uint8_t* data, size_t size) { - AudioEncoderIsacFloat::Config config; - config.sample_rate_hz = 16000; - RTC_CHECK(config.IsOk()); - constexpr int kPayloadType = 100; - FuzzAudioEncoder(/*data_view=*/{data, size}, - /*encoder=*/AudioEncoderIsacFloat::MakeAudioEncoder( - config, kPayloadType)); -} - -} // namespace webrtc diff --git a/test/fuzzers/audio_processing_sample_rate_fuzzer.cc b/test/fuzzers/audio_processing_sample_rate_fuzzer.cc index 825303d31a..ca3946988c 100644 --- a/test/fuzzers/audio_processing_sample_rate_fuzzer.cc +++ b/test/fuzzers/audio_processing_sample_rate_fuzzer.cc @@ -13,8 +13,6 @@ #include #include -#include "api/audio/audio_frame.h" -#include "modules/audio_processing/include/audio_frame_proxies.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "rtc_base/checks.h" @@ -23,13 +21,14 @@ namespace webrtc { namespace { constexpr int kMaxNumChannels = 2; -constexpr int kMaxSamplesPerChannel = - AudioFrame::kMaxDataSizeSamples / kMaxNumChannels; +// APM supported max rate is 384000 Hz, using a limit slightly above lets the +// fuzzer exercise the handling of too high rates. 
+constexpr int kMaxSampleRateHz = 400000; +constexpr int kMaxSamplesPerChannel = kMaxSampleRateHz / 100; void GenerateFloatFrame(test::FuzzDataHelper& fuzz_data, int input_rate, int num_channels, - bool is_capture, float* const* float_frames) { const int samples_per_input_channel = AudioProcessing::GetFrameSize(input_rate); @@ -45,20 +44,16 @@ void GenerateFloatFrame(test::FuzzDataHelper& fuzz_data, void GenerateFixedFrame(test::FuzzDataHelper& fuzz_data, int input_rate, int num_channels, - AudioFrame& fixed_frame) { + int16_t* fixed_frames) { const int samples_per_input_channel = AudioProcessing::GetFrameSize(input_rate); - fixed_frame.samples_per_channel_ = samples_per_input_channel; - fixed_frame.sample_rate_hz_ = input_rate; - fixed_frame.num_channels_ = num_channels; - RTC_DCHECK_LE(samples_per_input_channel * num_channels, - AudioFrame::kMaxDataSizeSamples); + RTC_DCHECK_LE(samples_per_input_channel, kMaxSamplesPerChannel); // Write interleaved samples. for (int ch = 0; ch < num_channels; ++ch) { const int16_t channel_value = fuzz_data.ReadOrDefaultValue(0); for (int i = ch; i < samples_per_input_channel * num_channels; i += num_channels) { - fixed_frame.mutable_data()[i] = channel_value; + fixed_frames[i] = channel_value; } } } @@ -103,7 +98,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { .Create(); RTC_DCHECK(apm); - AudioFrame fixed_frame; + std::array fixed_frame; std::array, kMaxNumChannels> float_frames; std::array float_frame_ptrs; @@ -112,12 +107,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) { } float* const* ptr_to_float_frames = &float_frame_ptrs[0]; - // These are all the sample rates logged by UMA metric - // WebAudio.AudioContext.HardwareSampleRate. - constexpr int kSampleRatesHz[] = {8000, 11025, 16000, 22050, 24000, - 32000, 44100, 46875, 48000, 88200, - 96000, 176400, 192000, 352800, 384000}; - // Choose whether to fuzz the float or int16_t interfaces of APM. 
const bool is_float = fuzz_data.ReadOrDefaultValue(true); @@ -126,18 +115,19 @@ void FuzzOneInput(const uint8_t* data, size_t size) { // iteration. while (fuzz_data.CanReadBytes(1)) { // Decide input/output rate for this iteration. - const int input_rate = fuzz_data.SelectOneOf(kSampleRatesHz); - const int output_rate = fuzz_data.SelectOneOf(kSampleRatesHz); + const int input_rate = static_cast( + fuzz_data.ReadOrDefaultValue(8000) % kMaxSampleRateHz); + const int output_rate = static_cast( + fuzz_data.ReadOrDefaultValue(8000) % kMaxSampleRateHz); const int num_channels = fuzz_data.ReadOrDefaultValue(true) ? 2 : 1; // Since render and capture calls have slightly different reinitialization // procedures, we let the fuzzer choose the order. const bool is_capture = fuzz_data.ReadOrDefaultValue(true); - // Fill the arrays with audio samples from the data. int apm_return_code = AudioProcessing::Error::kNoError; if (is_float) { - GenerateFloatFrame(fuzz_data, input_rate, num_channels, is_capture, + GenerateFloatFrame(fuzz_data, input_rate, num_channels, ptr_to_float_frames); if (is_capture) { @@ -149,20 +139,23 @@ void FuzzOneInput(const uint8_t* data, size_t size) { ptr_to_float_frames, StreamConfig(input_rate, num_channels), StreamConfig(output_rate, num_channels), ptr_to_float_frames); } - RTC_DCHECK_EQ(apm_return_code, AudioProcessing::kNoError); } else { - GenerateFixedFrame(fuzz_data, input_rate, num_channels, fixed_frame); + GenerateFixedFrame(fuzz_data, input_rate, num_channels, + fixed_frame.data()); if (is_capture) { - apm_return_code = ProcessAudioFrame(apm.get(), &fixed_frame); + apm_return_code = apm->ProcessStream( + fixed_frame.data(), StreamConfig(input_rate, num_channels), + StreamConfig(output_rate, num_channels), fixed_frame.data()); } else { - apm_return_code = ProcessReverseAudioFrame(apm.get(), &fixed_frame); + apm_return_code = apm->ProcessReverseStream( + fixed_frame.data(), StreamConfig(input_rate, num_channels), + StreamConfig(output_rate, 
num_channels), fixed_frame.data()); } - // The AudioFrame interface does not allow non-native sample rates, but it - // should not crash. - RTC_DCHECK(apm_return_code == AudioProcessing::kNoError || - apm_return_code == AudioProcessing::kBadSampleRateError); } + // APM may flag an error on unsupported audio formats, but should not crash. + RTC_DCHECK(apm_return_code == AudioProcessing::kNoError || + apm_return_code == AudioProcessing::kBadSampleRateError); } } diff --git a/test/ios/test_support.h b/test/ios/test_support.h index 2699923e09..5ac731393f 100644 --- a/test/ios/test_support.h +++ b/test/ios/test_support.h @@ -27,6 +27,7 @@ void InitTestSuite(int (*test_suite)(void), char* argv[], bool save_chartjson_result, bool export_perf_results_new_api, + std::string webrtc_test_metrics_output_path, absl::optional> metrics_to_plot); // Returns true if unittests should be run by the XCTest runnner. diff --git a/test/ios/test_support.mm b/test/ios/test_support.mm index 1c79682375..d3c9ee0c74 100644 --- a/test/ios/test_support.mm +++ b/test/ios/test_support.mm @@ -13,6 +13,7 @@ #include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_set_proto_file_exporter.h" #include "api/test/metrics/print_result_proxy_metrics_exporter.h" #include "api/test/metrics/stdout_metrics_exporter.h" #include "test/ios/coverage_util_ios.h" @@ -44,6 +45,7 @@ static int g_argc; static char **g_argv; static bool g_write_perf_output; static bool g_export_perf_results_new_api; +static std::string g_webrtc_test_metrics_output_path; static absl::optional g_is_xctest; static absl::optional> g_metrics_to_plot; @@ -95,14 +97,14 @@ static absl::optional> g_metrics_to_plot; int exitStatus = g_test_suite(); + NSArray *outputDirectories = + NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); std::vector> exporters; if 
(g_export_perf_results_new_api) { exporters.push_back(std::make_unique()); if (g_write_perf_output) { // Stores data into a proto file under the app's document directory. NSString *fileName = @"perftest-output.pb"; - NSArray *outputDirectories = - NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); if ([outputDirectories count] != 0) { NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; @@ -110,6 +112,18 @@ static absl::optional> g_metrics_to_plot; [NSString stdStringForString:outputPath])); } } + if (!g_webrtc_test_metrics_output_path.empty()) { + RTC_CHECK_EQ(g_webrtc_test_metrics_output_path.find('/'), std::string::npos) + << "On iOS, --webrtc_test_metrics_output_path must only be a file name."; + if ([outputDirectories count] != 0) { + NSString *fileName = [NSString stringWithCString:g_webrtc_test_metrics_output_path.c_str() + encoding:[NSString defaultCStringEncoding]]; + NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; + exporters.push_back(std::make_unique( + webrtc::test::MetricsSetProtoFileExporter::Options( + [NSString stdStringForString:outputPath]))); + } + } } else { exporters.push_back(std::make_unique()); } @@ -118,8 +132,6 @@ static absl::optional> g_metrics_to_plot; if (g_write_perf_output) { // Stores data into a proto file under the app's document directory. 
NSString *fileName = @"perftest-output.pb"; - NSArray *outputDirectories = - NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); if ([outputDirectories count] != 0) { NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; @@ -167,12 +179,14 @@ void InitTestSuite(int (*test_suite)(void), char *argv[], bool write_perf_output, bool export_perf_results_new_api, + std::string webrtc_test_metrics_output_path, absl::optional> metrics_to_plot) { g_test_suite = test_suite; g_argc = argc; g_argv = argv; g_write_perf_output = write_perf_output; g_export_perf_results_new_api = export_perf_results_new_api; + g_webrtc_test_metrics_output_path = webrtc_test_metrics_output_path; g_metrics_to_plot = std::move(metrics_to_plot); } diff --git a/test/network/BUILD.gn b/test/network/BUILD.gn index 71cf2d79f3..379f6048cd 100644 --- a/test/network/BUILD.gn +++ b/test/network/BUILD.gn @@ -76,6 +76,7 @@ rtc_library("emulated_network") { "../../rtc_base:threading", "../../rtc_base/memory:always_valid_pointer", "../../rtc_base/synchronization:mutex", + "../../rtc_base/system:no_unique_address", "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", "../../test:scoped_key_value_config", diff --git a/test/network/cross_traffic_unittest.cc b/test/network/cross_traffic_unittest.cc index 0c1bb46302..36aff67bb2 100644 --- a/test/network/cross_traffic_unittest.cc +++ b/test/network/cross_traffic_unittest.cc @@ -53,6 +53,7 @@ struct TrafficCounterFixture { /*id=*/1, rtc::IPAddress(kTestIpAddress), EmulatedEndpointConfig(), + EmulatedNetworkStatsGatheringMode::kDefault, }, /*is_enabled=*/true, &task_queue_, &clock}; }; @@ -124,7 +125,8 @@ TEST(CrossTrafficTest, RandomWalkCrossTraffic) { } TEST(TcpMessageRouteTest, DeliveredOnLossyNetwork) { - NetworkEmulationManagerImpl net(TimeMode::kSimulated); + NetworkEmulationManagerImpl net(TimeMode::kSimulated, + EmulatedNetworkStatsGatheringMode::kDefault); BuiltInNetworkBehaviorConfig 
send; // 800 kbps means that the 100 kB message would be delivered in ca 1 second // under ideal conditions and no overhead. diff --git a/test/network/emulated_network_manager.cc b/test/network/emulated_network_manager.cc index 5bc3c094cb..fa4037e5db 100644 --- a/test/network/emulated_network_manager.cc +++ b/test/network/emulated_network_manager.cc @@ -85,8 +85,7 @@ void EmulatedNetworkManager::StopUpdating() { } void EmulatedNetworkManager::GetStats( - std::function)> stats_callback) - const { + std::function stats_callback) const { task_queue_->PostTask([stats_callback, this]() { stats_callback(endpoints_container_->GetStats()); }); diff --git a/test/network/emulated_network_manager.h b/test/network/emulated_network_manager.h index 29d2b0943f..fb4ee1ee85 100644 --- a/test/network/emulated_network_manager.h +++ b/test/network/emulated_network_manager.h @@ -58,8 +58,8 @@ class EmulatedNetworkManager : public rtc::NetworkManagerBase, std::vector endpoints() const override { return endpoints_container_->GetEndpoints(); } - void GetStats(std::function)> - stats_callback) const override; + void GetStats( + std::function stats_callback) const override; private: void UpdateNetworksOnce(); diff --git a/test/network/feedback_generator.cc b/test/network/feedback_generator.cc index 68fbcc02a5..e339fd87b0 100644 --- a/test/network/feedback_generator.cc +++ b/test/network/feedback_generator.cc @@ -18,7 +18,7 @@ namespace webrtc { FeedbackGeneratorImpl::FeedbackGeneratorImpl( FeedbackGeneratorImpl::Config config) : conf_(config), - net_(TimeMode::kSimulated), + net_(TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault), send_link_{new SimulatedNetwork(conf_.send_link)}, ret_link_{new SimulatedNetwork(conf_.return_link)}, route_(this, diff --git a/test/network/network_emulation.cc b/test/network/network_emulation.cc index 4cd2fda1d0..f1c9ca80dd 100644 --- a/test/network/network_emulation.cc +++ b/test/network/network_emulation.cc @@ -17,212 +17,204 @@ #include 
"absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" +#include "api/sequence_checker.h" #include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" #include "api/units/data_size.h" +#include "api/units/time_delta.h" #include "rtc_base/logging.h" namespace webrtc { +namespace { -DataRate EmulatedNetworkOutgoingStatsImpl::AverageSendRate() const { - RTC_DCHECK_GE(packets_sent_, 2); - RTC_DCHECK(first_packet_sent_time_.IsFinite()); - RTC_DCHECK(last_packet_sent_time_.IsFinite()); - return (bytes_sent_ - first_sent_packet_size_) / - (last_packet_sent_time_ - first_packet_sent_time_); -} - -DataRate EmulatedNetworkIncomingStatsImpl::AverageReceiveRate() const { - RTC_DCHECK_GE(packets_received_, 2); - RTC_DCHECK(first_packet_received_time_.IsFinite()); - RTC_DCHECK(last_packet_received_time_.IsFinite()); - return (bytes_received_ - first_received_packet_size_) / - (last_packet_received_time_ - first_packet_received_time_); -} - -std::map> -EmulatedNetworkStatsImpl::OutgoingStatsPerDestination() const { - std::map> out; - for (const auto& entry : outgoing_stats_per_destination_) { - out.emplace(entry.first, std::make_unique( - *entry.second)); - } - return out; -} - -std::map> -EmulatedNetworkStatsImpl::IncomingStatsPerSource() const { - std::map> out; - for (const auto& entry : incoming_stats_per_source_) { - out.emplace(entry.first, std::make_unique( - *entry.second)); - } - return out; -} - -std::unique_ptr -EmulatedNetworkStatsImpl::GetOverallOutgoingStats() const { - EmulatedNetworkOutgoingStatsBuilder builder; - for (const auto& entry : outgoing_stats_per_destination_) { - builder.AddOutgoingStats(*entry.second); +EmulatedNetworkOutgoingStats GetOverallOutgoingStats( + const std::map& + outgoing_stats, + EmulatedNetworkStatsGatheringMode mode) { + EmulatedNetworkOutgoingStatsBuilder builder(mode); + for (const auto& entry : outgoing_stats) { + builder.AddOutgoingStats(entry.second); } 
return builder.Build(); } -std::unique_ptr -EmulatedNetworkStatsImpl::GetOverallIncomingStats() const { - EmulatedNetworkIncomingStatsBuilder builder; - for (const auto& entry : incoming_stats_per_source_) { - builder.AddIncomingStats(*entry.second); +EmulatedNetworkIncomingStats GetOverallIncomingStats( + const std::map& + incoming_stats, + EmulatedNetworkStatsGatheringMode mode) { + EmulatedNetworkIncomingStatsBuilder builder(mode); + for (const auto& entry : incoming_stats) { + builder.AddIncomingStats(entry.second); } return builder.Build(); } -EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder() { +} // namespace + +EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { sequence_checker_.Detach(); } -void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent( - Timestamp sent_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { +void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent(Timestamp sent_time, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_CHECK_GE(packet_size, DataSize::Zero()); - if (first_packet_sent_time_.IsInfinite()) { - first_packet_sent_time_ = sent_time; - first_sent_packet_size_ = packet_size; + if (stats_.first_packet_sent_time.IsInfinite()) { + stats_.first_packet_sent_time = sent_time; + stats_.first_sent_packet_size = packet_size; } - last_packet_sent_time_ = sent_time; - packets_sent_++; - bytes_sent_ += packet_size; - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { - sent_packets_size_counter_.AddSample(packet_size.bytes()); + stats_.last_packet_sent_time = sent_time; + stats_.packets_sent++; + stats_.bytes_sent += packet_size; + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { + stats_.sent_packets_size.AddSample(packet_size.bytes()); } } void EmulatedNetworkOutgoingStatsBuilder::AddOutgoingStats( 
const EmulatedNetworkOutgoingStats& stats) { RTC_DCHECK_RUN_ON(&sequence_checker_); - packets_sent_ += stats.PacketsSent(); - bytes_sent_ += stats.BytesSent(); - sent_packets_size_counter_.AddSamples(stats.SentPacketsSizeCounter()); - if (first_packet_sent_time_ > stats.FirstPacketSentTime()) { - first_packet_sent_time_ = stats.FirstPacketSentTime(); - first_sent_packet_size_ = stats.FirstSentPacketSize(); + stats_.packets_sent += stats.packets_sent; + stats_.bytes_sent += stats.bytes_sent; + stats_.sent_packets_size.AddSamples(stats.sent_packets_size); + if (stats_.first_packet_sent_time > stats.first_packet_sent_time) { + stats_.first_packet_sent_time = stats.first_packet_sent_time; + stats_.first_sent_packet_size = stats.first_sent_packet_size; } - if (last_packet_sent_time_ < stats.LastPacketSentTime()) { - last_packet_sent_time_ = stats.LastPacketSentTime(); + if (stats_.last_packet_sent_time < stats.last_packet_sent_time) { + stats_.last_packet_sent_time = stats.last_packet_sent_time; } } -std::unique_ptr -EmulatedNetworkOutgoingStatsBuilder::Build() const { +EmulatedNetworkOutgoingStats EmulatedNetworkOutgoingStatsBuilder::Build() + const { RTC_DCHECK_RUN_ON(&sequence_checker_); - return std::make_unique( - packets_sent_, bytes_sent_, sent_packets_size_counter_, - first_sent_packet_size_, first_packet_sent_time_, last_packet_sent_time_); + return stats_; } -EmulatedNetworkIncomingStatsBuilder::EmulatedNetworkIncomingStatsBuilder() { +EmulatedNetworkIncomingStatsBuilder::EmulatedNetworkIncomingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { sequence_checker_.Detach(); } void EmulatedNetworkIncomingStatsBuilder::OnPacketDropped( - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); - packets_dropped_++; - bytes_dropped_ += packet_size; - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) 
{ - dropped_packets_size_counter_.AddSample(packet_size.bytes()); + stats_.packets_discarded_no_receiver++; + stats_.bytes_discarded_no_receiver += packet_size; + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { + stats_.packets_discarded_no_receiver_size.AddSample(packet_size.bytes()); } } void EmulatedNetworkIncomingStatsBuilder::OnPacketReceived( Timestamp received_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_CHECK_GE(packet_size, DataSize::Zero()); - if (first_packet_received_time_.IsInfinite()) { - first_packet_received_time_ = received_time; - first_received_packet_size_ = packet_size; + if (stats_.first_packet_received_time.IsInfinite()) { + stats_.first_packet_received_time = received_time; + stats_.first_received_packet_size = packet_size; } - last_packet_received_time_ = received_time; - packets_received_++; - bytes_received_ += packet_size; - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { - received_packets_size_counter_.AddSample(packet_size.bytes()); + stats_.last_packet_received_time = received_time; + stats_.packets_received++; + stats_.bytes_received += packet_size; + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { + stats_.received_packets_size.AddSample(packet_size.bytes()); } } void EmulatedNetworkIncomingStatsBuilder::AddIncomingStats( const EmulatedNetworkIncomingStats& stats) { RTC_DCHECK_RUN_ON(&sequence_checker_); - packets_received_ += stats.PacketsReceived(); - bytes_received_ += stats.BytesReceived(); - received_packets_size_counter_.AddSamples(stats.ReceivedPacketsSizeCounter()); - packets_dropped_ += stats.PacketsDropped(); - bytes_dropped_ += stats.BytesDropped(); - dropped_packets_size_counter_.AddSamples(stats.DroppedPacketsSizeCounter()); - if (first_packet_received_time_ > stats.FirstPacketReceivedTime()) { - first_packet_received_time_ = 
stats.FirstPacketReceivedTime(); - first_received_packet_size_ = stats.FirstReceivedPacketSize(); + stats_.packets_received += stats.packets_received; + stats_.bytes_received += stats.bytes_received; + stats_.received_packets_size.AddSamples(stats.received_packets_size); + stats_.packets_discarded_no_receiver += stats.packets_discarded_no_receiver; + stats_.bytes_discarded_no_receiver += stats.bytes_discarded_no_receiver; + stats_.packets_discarded_no_receiver_size.AddSamples( + stats.packets_discarded_no_receiver_size); + if (stats_.first_packet_received_time > stats.first_packet_received_time) { + stats_.first_packet_received_time = stats.first_packet_received_time; + stats_.first_received_packet_size = stats.first_received_packet_size; } - if (last_packet_received_time_ < stats.LastPacketReceivedTime()) { - last_packet_received_time_ = stats.LastPacketReceivedTime(); + if (stats_.last_packet_received_time < stats.last_packet_received_time) { + stats_.last_packet_received_time = stats.last_packet_received_time; } } -std::unique_ptr -EmulatedNetworkIncomingStatsBuilder::Build() const { +EmulatedNetworkIncomingStats EmulatedNetworkIncomingStatsBuilder::Build() + const { RTC_DCHECK_RUN_ON(&sequence_checker_); - return std::make_unique( - packets_received_, bytes_received_, received_packets_size_counter_, - packets_dropped_, bytes_dropped_, dropped_packets_size_counter_, - first_received_packet_size_, first_packet_received_time_, - last_packet_received_time_); + return stats_; } -EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder() { +EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { sequence_checker_.Detach(); } EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder( - rtc::IPAddress local_ip) { + rtc::IPAddress local_ip, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : stats_gathering_mode_(stats_gathering_mode) { 
local_addresses_.push_back(local_ip); sequence_checker_.Detach(); } -void EmulatedNetworkStatsBuilder::OnPacketSent( - Timestamp queued_time, - Timestamp sent_time, - rtc::IPAddress destination_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { +void EmulatedNetworkStatsBuilder::OnPacketSent(Timestamp queued_time, + Timestamp sent_time, + rtc::IPAddress destination_ip, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); - if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { sent_packets_queue_wait_time_us_.AddSample((sent_time - queued_time).us()); } - outgoing_stats_per_destination_[destination_ip].OnPacketSent( - sent_time, packet_size, mode); + auto it = outgoing_stats_per_destination_.find(destination_ip); + if (it == outgoing_stats_per_destination_.end()) { + outgoing_stats_per_destination_ + .emplace(destination_ip, + std::make_unique( + stats_gathering_mode_)) + .first->second->OnPacketSent(sent_time, packet_size); + } else { + it->second->OnPacketSent(sent_time, packet_size); + } } -void EmulatedNetworkStatsBuilder::OnPacketDropped( - rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode) { +void EmulatedNetworkStatsBuilder::OnPacketDropped(rtc::IPAddress source_ip, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); - incoming_stats_per_source_[source_ip].OnPacketDropped(packet_size, mode); + auto it = incoming_stats_per_source_.find(source_ip); + if (it == incoming_stats_per_source_.end()) { + incoming_stats_per_source_ + .emplace(source_ip, + std::make_unique( + stats_gathering_mode_)) + .first->second->OnPacketDropped(packet_size); + } else { + it->second->OnPacketDropped(packet_size); + } } -void EmulatedNetworkStatsBuilder::OnPacketReceived( - Timestamp received_time, - rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode 
mode) { +void EmulatedNetworkStatsBuilder::OnPacketReceived(Timestamp received_time, + rtc::IPAddress source_ip, + DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); - incoming_stats_per_source_[source_ip].OnPacketReceived(received_time, - packet_size, mode); + auto it = incoming_stats_per_source_.find(source_ip); + if (it == incoming_stats_per_source_.end()) { + incoming_stats_per_source_ + .emplace(source_ip, + std::make_unique( + stats_gathering_mode_)) + .first->second->OnPacketReceived(received_time, packet_size); + } else { + it->second->OnPacketReceived(received_time, packet_size); + } } void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats( @@ -230,45 +222,91 @@ void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats( RTC_DCHECK_RUN_ON(&sequence_checker_); // Append IPs from other endpoints stats to the builder. - for (const rtc::IPAddress& addr : stats.LocalAddresses()) { + for (const rtc::IPAddress& addr : stats.local_addresses) { local_addresses_.push_back(addr); } sent_packets_queue_wait_time_us_.AddSamples( - stats.SentPacketsQueueWaitTimeUs()); + stats.sent_packets_queue_wait_time_us); // Add outgoing stats from other endpoints to the builder. - const std::map> - outgoing_stats_per_destination = stats.OutgoingStatsPerDestination(); - for (const auto& entry : outgoing_stats_per_destination) { - outgoing_stats_per_destination_[entry.first].AddOutgoingStats( - *entry.second); + for (const auto& entry : stats.outgoing_stats_per_destination) { + auto it = outgoing_stats_per_destination_.find(entry.first); + if (it == outgoing_stats_per_destination_.end()) { + outgoing_stats_per_destination_ + .emplace(entry.first, + std::make_unique( + stats_gathering_mode_)) + .first->second->AddOutgoingStats(entry.second); + } else { + it->second->AddOutgoingStats(entry.second); + } } // Add incoming stats from other endpoints to the builder. 
- const std::map> - incoming_stats_per_source = stats.IncomingStatsPerSource(); - for (const auto& entry : incoming_stats_per_source) { - incoming_stats_per_source_[entry.first].AddIncomingStats(*entry.second); + for (const auto& entry : stats.incoming_stats_per_source) { + auto it = incoming_stats_per_source_.find(entry.first); + if (it == incoming_stats_per_source_.end()) { + incoming_stats_per_source_ + .emplace(entry.first, + std::make_unique( + stats_gathering_mode_)) + .first->second->AddIncomingStats(entry.second); + } else { + it->second->AddIncomingStats(entry.second); + } } } -std::unique_ptr EmulatedNetworkStatsBuilder::Build() - const { +EmulatedNetworkStats EmulatedNetworkStatsBuilder::Build() const { RTC_DCHECK_RUN_ON(&sequence_checker_); - std::map> - outgoing_stats; + std::map outgoing_stats; for (const auto& entry : outgoing_stats_per_destination_) { - outgoing_stats.emplace(entry.first, entry.second.Build()); + outgoing_stats.emplace(entry.first, entry.second->Build()); } - std::map> - incoming_stats; + std::map incoming_stats; for (const auto& entry : incoming_stats_per_source_) { - incoming_stats.emplace(entry.first, entry.second.Build()); + incoming_stats.emplace(entry.first, entry.second->Build()); } - return std::make_unique( - local_addresses_, sent_packets_queue_wait_time_us_, - std::move(outgoing_stats), std::move(incoming_stats)); + return EmulatedNetworkStats{ + .local_addresses = local_addresses_, + .overall_outgoing_stats = + GetOverallOutgoingStats(outgoing_stats, stats_gathering_mode_), + .overall_incoming_stats = + GetOverallIncomingStats(incoming_stats, stats_gathering_mode_), + .outgoing_stats_per_destination = std::move(outgoing_stats), + .incoming_stats_per_source = std::move(incoming_stats), + .sent_packets_queue_wait_time_us = sent_packets_queue_wait_time_us_}; +} + +EmulatedNetworkNodeStatsBuilder::EmulatedNetworkNodeStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : 
stats_gathering_mode_(stats_gathering_mode) { + sequence_checker_.Detach(); +} + +void EmulatedNetworkNodeStatsBuilder::AddPacketTransportTime( + TimeDelta time, + size_t packet_size) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { + stats_.packet_transport_time.AddSample(time.ms()); + stats_.size_to_packet_transport_time.AddSample(packet_size / + time.ms()); + } +} + +void EmulatedNetworkNodeStatsBuilder::AddEmulatedNetworkNodeStats( + const EmulatedNetworkNodeStats& stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + stats_.packet_transport_time.AddSamples(stats.packet_transport_time); + stats_.size_to_packet_transport_time.AddSamples( + stats.size_to_packet_transport_time); +} + +EmulatedNetworkNodeStats EmulatedNetworkNodeStatsBuilder::Build() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return stats_; } void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { @@ -279,7 +317,10 @@ void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { bool sent = network_behavior_->EnqueuePacket(PacketInFlightInfo( packet.ip_packet_size(), packet.arrival_time.us(), packet_id)); if (sent) { - packets_.emplace_back(StoredPacket{packet_id, std::move(packet), false}); + packets_.emplace_back(StoredPacket{.id = packet_id, + .sent_time = clock_->CurrentTime(), + .packet = std::move(packet), + .removed = false}); } if (process_task_.Running()) return; @@ -308,6 +349,11 @@ void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { }); } +EmulatedNetworkNodeStats LinkEmulation::stats() const { + RTC_DCHECK_RUN_ON(task_queue_); + return stats_builder_.Build(); +} + void LinkEmulation::Process(Timestamp at_time) { std::vector delivery_infos = network_behavior_->DequeueDeliverablePackets(at_time.us()); @@ -322,6 +368,9 @@ void LinkEmulation::Process(Timestamp at_time) { RTC_CHECK(packet); RTC_DCHECK(!packet->removed); packet->removed = true; + stats_builder_.AddPacketTransportTime( + 
clock_->CurrentTime() - packet->sent_time, + packet->packet.ip_packet_size()); if (delivery_info.receive_time_us != PacketDeliveryInfo::kNotReceived) { packet->packet.arrival_time = @@ -411,14 +460,23 @@ void NetworkRouterNode::SetFilter( EmulatedNetworkNode::EmulatedNetworkNode( Clock* clock, rtc::TaskQueue* task_queue, - std::unique_ptr network_behavior) + std::unique_ptr network_behavior, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : router_(task_queue), - link_(clock, task_queue, std::move(network_behavior), &router_) {} + link_(clock, + task_queue, + std::move(network_behavior), + &router_, + stats_gathering_mode) {} void EmulatedNetworkNode::OnPacketReceived(EmulatedIpPacket packet) { link_.OnPacketReceived(std::move(packet)); } +EmulatedNetworkNodeStats EmulatedNetworkNode::stats() const { + return link_.stats(); +} + void EmulatedNetworkNode::CreateRoute( const rtc::IPAddress& receiver_ip, std::vector nodes, @@ -437,12 +495,14 @@ void EmulatedNetworkNode::ClearRoute(const rtc::IPAddress& receiver_ip, EmulatedNetworkNode::~EmulatedNetworkNode() = default; -EmulatedEndpointImpl::Options::Options(uint64_t id, - const rtc::IPAddress& ip, - const EmulatedEndpointConfig& config) +EmulatedEndpointImpl::Options::Options( + uint64_t id, + const rtc::IPAddress& ip, + const EmulatedEndpointConfig& config, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : id(id), ip(ip), - stats_gathering_mode(config.stats_gathering_mode), + stats_gathering_mode(stats_gathering_mode), type(config.type), allow_send_packet_with_different_source_ip( config.allow_send_packet_with_different_source_ip), @@ -460,7 +520,7 @@ EmulatedEndpointImpl::EmulatedEndpointImpl(const Options& options, task_queue_(task_queue), router_(task_queue_), next_port_(kFirstEphemeralPort), - stats_builder_(options_.ip) { + stats_builder_(options_.ip, options_.stats_gathering_mode) { constexpr int kIPv4NetworkPrefixLength = 24; constexpr int kIPv6NetworkPrefixLength = 64; @@ -499,8 +559,7 @@ 
void EmulatedEndpointImpl::SendPacket(const rtc::SocketAddress& from, RTC_DCHECK_RUN_ON(task_queue_); stats_builder_.OnPacketSent(packet.arrival_time, clock_->CurrentTime(), packet.to.ipaddr(), - DataSize::Bytes(packet.ip_packet_size()), - options_.stats_gathering_mode); + DataSize::Bytes(packet.ip_packet_size())); if (packet.to.ipaddr() == options_.ip) { OnPacketReceived(std::move(packet)); @@ -606,8 +665,7 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { } MutexLock lock(&receiver_lock_); stats_builder_.OnPacketReceived(clock_->CurrentTime(), packet.from.ipaddr(), - DataSize::Bytes(packet.ip_packet_size()), - options_.stats_gathering_mode); + DataSize::Bytes(packet.ip_packet_size())); auto it = port_to_receiver_.find(packet.to.port()); if (it == port_to_receiver_.end()) { if (default_receiver_.has_value()) { @@ -619,10 +677,10 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { // process: one peer closed connection, second still sending data. RTC_LOG(LS_INFO) << "Drop packet: no receiver registered in " << options_.log_name << "; id=" << options_.id - << " on port " << packet.to.port(); + << " on port " << packet.to.port() + << ". 
Packet source: " << packet.from.ToString(); stats_builder_.OnPacketDropped(packet.from.ipaddr(), - DataSize::Bytes(packet.ip_packet_size()), - options_.stats_gathering_mode); + DataSize::Bytes(packet.ip_packet_size())); return; } // Endpoint holds lock during packet processing to ensure that a call to @@ -652,15 +710,11 @@ bool EmulatedEndpointImpl::Enabled() const { return is_enabled_; } -std::unique_ptr EmulatedEndpointImpl::stats() const { +EmulatedNetworkStats EmulatedEndpointImpl::stats() const { RTC_DCHECK_RUN_ON(task_queue_); return stats_builder_.Build(); } -EndpointsContainer::EndpointsContainer( - const std::vector& endpoints) - : endpoints_(endpoints) {} - EmulatedEndpointImpl* EndpointsContainer::LookupByLocalAddress( const rtc::IPAddress& local_ip) const { for (auto* endpoint : endpoints_) { @@ -672,6 +726,11 @@ EmulatedEndpointImpl* EndpointsContainer::LookupByLocalAddress( RTC_CHECK(false) << "No network found for address" << local_ip.ToString(); } +EndpointsContainer::EndpointsContainer( + const std::vector& endpoints, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) + : endpoints_(endpoints), stats_gathering_mode_(stats_gathering_mode) {} + bool EndpointsContainer::HasEndpoint(EmulatedEndpointImpl* endpoint) const { for (auto* e : endpoints_) { if (e->GetId() == endpoint->GetId()) { @@ -697,10 +756,10 @@ std::vector EndpointsContainer::GetEndpoints() const { return std::vector(endpoints_.begin(), endpoints_.end()); } -std::unique_ptr EndpointsContainer::GetStats() const { - EmulatedNetworkStatsBuilder stats_builder; +EmulatedNetworkStats EndpointsContainer::GetStats() const { + EmulatedNetworkStatsBuilder stats_builder(stats_gathering_mode_); for (auto* endpoint : endpoints_) { - stats_builder.AddEmulatedNetworkStats(*endpoint->stats()); + stats_builder.AddEmulatedNetworkStats(endpoint->stats()); } return stats_builder.Build(); } diff --git a/test/network/network_emulation.h b/test/network/network_emulation.h index 61dc468661..dffabafa7c 
100644 --- a/test/network/network_emulation.h +++ b/test/network/network_emulation.h @@ -23,14 +23,17 @@ #include "api/array_view.h" #include "api/numerics/samples_stats_counter.h" #include "api/sequence_checker.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/socket_address.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -38,372 +41,127 @@ namespace webrtc { -// This class is immutable and so thread safe. -class EmulatedNetworkOutgoingStatsImpl final - : public EmulatedNetworkOutgoingStats { - public: - EmulatedNetworkOutgoingStatsImpl( - int64_t packets_sent, - DataSize bytes_sent, - SamplesStatsCounter sent_packets_size_counter, - DataSize first_sent_packet_size, - Timestamp first_packet_sent_time, - Timestamp last_packet_sent_time) - : packets_sent_(packets_sent), - bytes_sent_(bytes_sent), - sent_packets_size_counter_(std::move(sent_packets_size_counter)), - first_sent_packet_size_(first_sent_packet_size), - first_packet_sent_time_(first_packet_sent_time), - last_packet_sent_time_(last_packet_sent_time) {} - explicit EmulatedNetworkOutgoingStatsImpl( - const EmulatedNetworkOutgoingStats& stats) - : packets_sent_(stats.PacketsSent()), - bytes_sent_(stats.BytesSent()), - sent_packets_size_counter_(stats.SentPacketsSizeCounter()), - first_sent_packet_size_(stats.FirstSentPacketSize()), - first_packet_sent_time_(stats.FirstPacketSentTime()), - last_packet_sent_time_(stats.LastPacketSentTime()) {} - ~EmulatedNetworkOutgoingStatsImpl() override = default; - - int64_t PacketsSent() 
const override { return packets_sent_; } - - DataSize BytesSent() const override { return bytes_sent_; } - - const SamplesStatsCounter& SentPacketsSizeCounter() const override { - return sent_packets_size_counter_; - } - - DataSize FirstSentPacketSize() const override { - return first_sent_packet_size_; - } - - Timestamp FirstPacketSentTime() const override { - return first_packet_sent_time_; - } - - Timestamp LastPacketSentTime() const override { - return last_packet_sent_time_; - } - - DataRate AverageSendRate() const override; - - private: - const int64_t packets_sent_; - const DataSize bytes_sent_; - const SamplesStatsCounter sent_packets_size_counter_; - const DataSize first_sent_packet_size_; - const Timestamp first_packet_sent_time_; - const Timestamp last_packet_sent_time_; -}; - -// This class is immutable and so thread safe. -class EmulatedNetworkIncomingStatsImpl final - : public EmulatedNetworkIncomingStats { - public: - EmulatedNetworkIncomingStatsImpl( - int64_t packets_received, - DataSize bytes_received, - SamplesStatsCounter received_packets_size_counter, - int64_t packets_dropped, - DataSize bytes_dropped, - SamplesStatsCounter dropped_packets_size_counter, - DataSize first_received_packet_size, - Timestamp first_packet_received_time, - Timestamp last_packet_received_time) - : packets_received_(packets_received), - bytes_received_(bytes_received), - received_packets_size_counter_(received_packets_size_counter), - packets_dropped_(packets_dropped), - bytes_dropped_(bytes_dropped), - dropped_packets_size_counter_(dropped_packets_size_counter), - first_received_packet_size_(first_received_packet_size), - first_packet_received_time_(first_packet_received_time), - last_packet_received_time_(last_packet_received_time) {} - explicit EmulatedNetworkIncomingStatsImpl( - const EmulatedNetworkIncomingStats& stats) - : packets_received_(stats.PacketsReceived()), - bytes_received_(stats.BytesReceived()), - 
received_packets_size_counter_(stats.ReceivedPacketsSizeCounter()), - packets_dropped_(stats.PacketsDropped()), - bytes_dropped_(stats.BytesDropped()), - dropped_packets_size_counter_(stats.DroppedPacketsSizeCounter()), - first_received_packet_size_(stats.FirstReceivedPacketSize()), - first_packet_received_time_(stats.FirstPacketReceivedTime()), - last_packet_received_time_(stats.LastPacketReceivedTime()) {} - ~EmulatedNetworkIncomingStatsImpl() override = default; - - int64_t PacketsReceived() const override { return packets_received_; } - - DataSize BytesReceived() const override { return bytes_received_; } - - const SamplesStatsCounter& ReceivedPacketsSizeCounter() const override { - return received_packets_size_counter_; - } - - int64_t PacketsDropped() const override { return packets_dropped_; } - - DataSize BytesDropped() const override { return bytes_dropped_; } - - const SamplesStatsCounter& DroppedPacketsSizeCounter() const override { - return dropped_packets_size_counter_; - } - - DataSize FirstReceivedPacketSize() const override { - return first_received_packet_size_; - } - - Timestamp FirstPacketReceivedTime() const override { - return first_packet_received_time_; - } - - Timestamp LastPacketReceivedTime() const override { - return last_packet_received_time_; - } - - DataRate AverageReceiveRate() const override; - - private: - const int64_t packets_received_; - const DataSize bytes_received_; - const SamplesStatsCounter received_packets_size_counter_; - const int64_t packets_dropped_; - const DataSize bytes_dropped_; - const SamplesStatsCounter dropped_packets_size_counter_; - const DataSize first_received_packet_size_; - const Timestamp first_packet_received_time_; - const Timestamp last_packet_received_time_; -}; - -// This class is immutable and so is thread safe. 
-class EmulatedNetworkStatsImpl final : public EmulatedNetworkStats { - public: - EmulatedNetworkStatsImpl( - std::vector local_addresses, - SamplesStatsCounter sent_packets_queue_wait_time_us, - std::map> - outgoing_stats_per_destination, - std::map> - incoming_stats_per_source) - : local_addresses_(std::move(local_addresses)), - sent_packets_queue_wait_time_us_(sent_packets_queue_wait_time_us), - outgoing_stats_per_destination_( - std::move(outgoing_stats_per_destination)), - incoming_stats_per_source_(std::move(incoming_stats_per_source)), - overall_outgoing_stats_(GetOverallOutgoingStats()), - overall_incoming_stats_(GetOverallIncomingStats()) {} - ~EmulatedNetworkStatsImpl() override = default; - - std::vector LocalAddresses() const override { - return local_addresses_; - } - - int64_t PacketsSent() const override { - return overall_outgoing_stats_->PacketsSent(); - } - - DataSize BytesSent() const override { - return overall_outgoing_stats_->BytesSent(); - } - - const SamplesStatsCounter& SentPacketsSizeCounter() const override { - return overall_outgoing_stats_->SentPacketsSizeCounter(); - } - - const SamplesStatsCounter& SentPacketsQueueWaitTimeUs() const override { - return sent_packets_queue_wait_time_us_; - } - - DataSize FirstSentPacketSize() const override { - return overall_outgoing_stats_->FirstSentPacketSize(); - } - - Timestamp FirstPacketSentTime() const override { - return overall_outgoing_stats_->FirstPacketSentTime(); - } - - Timestamp LastPacketSentTime() const override { - return overall_outgoing_stats_->LastPacketSentTime(); - } - - DataRate AverageSendRate() const override { - return overall_outgoing_stats_->AverageSendRate(); - } - - int64_t PacketsReceived() const override { - return overall_incoming_stats_->PacketsReceived(); - } - - DataSize BytesReceived() const override { - return overall_incoming_stats_->BytesReceived(); - } - - const SamplesStatsCounter& ReceivedPacketsSizeCounter() const override { - return 
overall_incoming_stats_->ReceivedPacketsSizeCounter(); - } - - int64_t PacketsDropped() const override { - return overall_incoming_stats_->PacketsDropped(); - } - - DataSize BytesDropped() const override { - return overall_incoming_stats_->BytesDropped(); - } - - const SamplesStatsCounter& DroppedPacketsSizeCounter() const override { - return overall_incoming_stats_->DroppedPacketsSizeCounter(); - } - - DataSize FirstReceivedPacketSize() const override { - return overall_incoming_stats_->FirstReceivedPacketSize(); - } - - Timestamp FirstPacketReceivedTime() const override { - return overall_incoming_stats_->FirstPacketReceivedTime(); - } - - Timestamp LastPacketReceivedTime() const override { - return overall_incoming_stats_->LastPacketReceivedTime(); - } - - DataRate AverageReceiveRate() const override { - return overall_incoming_stats_->AverageReceiveRate(); - } - - std::map> - OutgoingStatsPerDestination() const override; - - std::map> - IncomingStatsPerSource() const override; - - private: - std::unique_ptr GetOverallOutgoingStats() const; - std::unique_ptr GetOverallIncomingStats() const; - - const std::vector local_addresses_; - const SamplesStatsCounter sent_packets_queue_wait_time_us_; - const std::map> - outgoing_stats_per_destination_; - const std::map> - incoming_stats_per_source_; - const std::unique_ptr overall_outgoing_stats_; - const std::unique_ptr overall_incoming_stats_; -}; - +// All methods of EmulatedNetworkOutgoingStatsBuilder have to be used on a +// single thread. It may be created on another thread. 
class EmulatedNetworkOutgoingStatsBuilder { public: - EmulatedNetworkOutgoingStatsBuilder(); + explicit EmulatedNetworkOutgoingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); - void OnPacketSent(Timestamp sent_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketSent(Timestamp sent_time, DataSize packet_size); void AddOutgoingStats(const EmulatedNetworkOutgoingStats& stats); - std::unique_ptr Build() const; + EmulatedNetworkOutgoingStats Build() const; private: - SequenceChecker sequence_checker_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; - int64_t packets_sent_ RTC_GUARDED_BY(sequence_checker_) = 0; - DataSize bytes_sent_ RTC_GUARDED_BY(sequence_checker_) = DataSize::Zero(); - SamplesStatsCounter sent_packets_size_counter_ - RTC_GUARDED_BY(sequence_checker_); - DataSize first_sent_packet_size_ RTC_GUARDED_BY(sequence_checker_) = - DataSize::Zero(); - Timestamp first_packet_sent_time_ RTC_GUARDED_BY(sequence_checker_) = - Timestamp::PlusInfinity(); - Timestamp last_packet_sent_time_ RTC_GUARDED_BY(sequence_checker_) = - Timestamp::MinusInfinity(); + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + EmulatedNetworkOutgoingStats stats_ RTC_GUARDED_BY(sequence_checker_); }; +// All methods of EmulatedNetworkIncomingStatsBuilder have to be used on a +// single thread. It may be created on another thread. 
class EmulatedNetworkIncomingStatsBuilder { public: - EmulatedNetworkIncomingStatsBuilder(); + explicit EmulatedNetworkIncomingStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); - void OnPacketDropped(DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketDropped(DataSize packet_size); - void OnPacketReceived(Timestamp received_time, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketReceived(Timestamp received_time, DataSize packet_size); // Adds stats collected from another endpoints to the builder. void AddIncomingStats(const EmulatedNetworkIncomingStats& stats); - std::unique_ptr Build() const; + EmulatedNetworkIncomingStats Build() const; private: - SequenceChecker sequence_checker_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; - int64_t packets_received_ RTC_GUARDED_BY(sequence_checker_) = 0; - DataSize bytes_received_ RTC_GUARDED_BY(sequence_checker_) = DataSize::Zero(); - SamplesStatsCounter received_packets_size_counter_ - RTC_GUARDED_BY(sequence_checker_); - int64_t packets_dropped_ RTC_GUARDED_BY(sequence_checker_) = 0; - DataSize bytes_dropped_ RTC_GUARDED_BY(sequence_checker_) = DataSize::Zero(); - SamplesStatsCounter dropped_packets_size_counter_ - RTC_GUARDED_BY(sequence_checker_); - DataSize first_received_packet_size_ RTC_GUARDED_BY(sequence_checker_) = - DataSize::Zero(); - Timestamp first_packet_received_time_ RTC_GUARDED_BY(sequence_checker_) = - Timestamp::PlusInfinity(); - Timestamp last_packet_received_time_ RTC_GUARDED_BY(sequence_checker_) = - Timestamp::MinusInfinity(); + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + EmulatedNetworkIncomingStats stats_ RTC_GUARDED_BY(sequence_checker_); }; // All methods of EmulatedNetworkStatsBuilder have to be used on a single // thread. It may be created on another thread. 
class EmulatedNetworkStatsBuilder { public: - EmulatedNetworkStatsBuilder(); - explicit EmulatedNetworkStatsBuilder(rtc::IPAddress local_ip); + explicit EmulatedNetworkStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); + explicit EmulatedNetworkStatsBuilder( + rtc::IPAddress local_ip, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); void OnPacketSent(Timestamp queued_time, Timestamp sent_time, rtc::IPAddress destination_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + DataSize packet_size); - void OnPacketDropped(rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + void OnPacketDropped(rtc::IPAddress source_ip, DataSize packet_size); void OnPacketReceived(Timestamp received_time, rtc::IPAddress source_ip, - DataSize packet_size, - EmulatedEndpointConfig::StatsGatheringMode mode); + DataSize packet_size); void AddEmulatedNetworkStats(const EmulatedNetworkStats& stats); - std::unique_ptr Build() const; + EmulatedNetworkStats Build() const; private: - SequenceChecker sequence_checker_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; std::vector local_addresses_ RTC_GUARDED_BY(sequence_checker_); SamplesStatsCounter sent_packets_queue_wait_time_us_; - std::map + std::map> outgoing_stats_per_destination_ RTC_GUARDED_BY(sequence_checker_); - std::map + std::map> incoming_stats_per_source_ RTC_GUARDED_BY(sequence_checker_); }; +// All methods of EmulatedNetworkNodeStatsBuilder have to be used on a +// single thread. It may be created on another thread. 
+class EmulatedNetworkNodeStatsBuilder { + public: + explicit EmulatedNetworkNodeStatsBuilder( + EmulatedNetworkStatsGatheringMode stats_gathering_mode); + + void AddPacketTransportTime(TimeDelta time, size_t packet_size); + + void AddEmulatedNetworkNodeStats(const EmulatedNetworkNodeStats& stats); + + EmulatedNetworkNodeStats Build() const; + + private: + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; + + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + EmulatedNetworkNodeStats stats_ RTC_GUARDED_BY(sequence_checker_); +}; + class LinkEmulation : public EmulatedNetworkReceiverInterface { public: LinkEmulation(Clock* clock, rtc::TaskQueue* task_queue, std::unique_ptr network_behavior, - EmulatedNetworkReceiverInterface* receiver) + EmulatedNetworkReceiverInterface* receiver, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : clock_(clock), task_queue_(task_queue), network_behavior_(std::move(network_behavior)), - receiver_(receiver) {} + receiver_(receiver), + stats_builder_(stats_gathering_mode) {} void OnPacketReceived(EmulatedIpPacket packet) override; + EmulatedNetworkNodeStats stats() const; + private: struct StoredPacket { uint64_t id; + Timestamp sent_time; EmulatedIpPacket packet; bool removed; }; @@ -414,9 +172,12 @@ class LinkEmulation : public EmulatedNetworkReceiverInterface { const std::unique_ptr network_behavior_ RTC_GUARDED_BY(task_queue_); EmulatedNetworkReceiverInterface* const receiver_; + RepeatingTaskHandle process_task_ RTC_GUARDED_BY(task_queue_); std::deque packets_ RTC_GUARDED_BY(task_queue_); uint64_t next_packet_id_ RTC_GUARDED_BY(task_queue_) = 1; + + EmulatedNetworkNodeStatsBuilder stats_builder_ RTC_GUARDED_BY(task_queue_); }; // Represents a component responsible for routing packets based on their IP @@ -464,7 +225,8 @@ class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface { EmulatedNetworkNode( Clock* clock, rtc::TaskQueue* task_queue, - std::unique_ptr network_behavior); + 
std::unique_ptr network_behavior, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); ~EmulatedNetworkNode() override; EmulatedNetworkNode(const EmulatedNetworkNode&) = delete; @@ -474,6 +236,7 @@ class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface { LinkEmulation* link() { return &link_; } NetworkRouterNode* router() { return &router_; } + EmulatedNetworkNodeStats stats() const; // Creates a route for the given receiver_ip over all the given nodes to the // given receiver. @@ -497,13 +260,14 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { struct Options { Options(uint64_t id, const rtc::IPAddress& ip, - const EmulatedEndpointConfig& config); + const EmulatedEndpointConfig& config, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); // TODO(titovartem) check if we can remove id. uint64_t id; // Endpoint local IP address. rtc::IPAddress ip; - EmulatedEndpointConfig::StatsGatheringMode stats_gathering_mode; + EmulatedNetworkStatsGatheringMode stats_gathering_mode; rtc::AdapterType type; // Allow endpoint to send packets specifying source IP address different to // the current endpoint IP address. If false endpoint will crash if attempt @@ -555,7 +319,7 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { const rtc::Network& network() const { return *network_.get(); } - std::unique_ptr stats() const; + EmulatedNetworkStats stats() const; private: struct ReceiverBinding { @@ -572,7 +336,7 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { uint16_t NextPort() RTC_EXCLUSIVE_LOCKS_REQUIRED(receiver_lock_); Mutex receiver_lock_; - SequenceChecker enabled_state_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker enabled_state_checker_; const Options options_; bool is_enabled_ RTC_GUARDED_BY(enabled_state_checker_); @@ -612,8 +376,8 @@ class EmulatedRoute { // This object is immutable and so thread safe. 
class EndpointsContainer { public: - explicit EndpointsContainer( - const std::vector& endpoints); + EndpointsContainer(const std::vector& endpoints, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); EmulatedEndpointImpl* LookupByLocalAddress( const rtc::IPAddress& local_ip) const; @@ -622,10 +386,11 @@ class EndpointsContainer { // returned rtc::Network objects. std::vector> GetEnabledNetworks() const; std::vector GetEndpoints() const; - std::unique_ptr GetStats() const; + EmulatedNetworkStats GetStats() const; private: const std::vector endpoints_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; }; template diff --git a/test/network/network_emulation_manager.cc b/test/network/network_emulation_manager.cc index a02b5f415c..97c0bc1ba8 100644 --- a/test/network/network_emulation_manager.cc +++ b/test/network/network_emulation_manager.cc @@ -16,7 +16,6 @@ #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "call/simulated_network.h" -#include "rtc_base/fake_network.h" #include "test/network/emulated_turn_server.h" #include "test/network/traffic_route.h" #include "test/time_controller/real_time_controller.h" @@ -45,8 +44,11 @@ std::unique_ptr CreateTimeController(TimeMode mode) { } } // namespace -NetworkEmulationManagerImpl::NetworkEmulationManagerImpl(TimeMode mode) +NetworkEmulationManagerImpl::NetworkEmulationManagerImpl( + TimeMode mode, + EmulatedNetworkStatsGatheringMode stats_gathering_mode) : time_mode_(mode), + stats_gathering_mode_(stats_gathering_mode), time_controller_(CreateTimeController(mode)), clock_(time_controller_->GetClock()), next_node_id_(1), @@ -74,7 +76,7 @@ EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode( EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode( std::unique_ptr network_behavior) { auto node = std::make_unique( - clock_, &task_queue_, std::move(network_behavior)); + clock_, &task_queue_, std::move(network_behavior), stats_gathering_mode_); 
EmulatedNetworkNode* out = node.get(); task_queue_.PostTask([this, node = std::move(node)]() mutable { network_nodes_.push_back(std::move(node)); @@ -107,7 +109,8 @@ EmulatedEndpointImpl* NetworkEmulationManagerImpl::CreateEndpoint( bool res = used_ip_addresses_.insert(*ip).second; RTC_CHECK(res) << "IP=" << ip->ToString() << " already in use"; auto node = std::make_unique( - EmulatedEndpointImpl::Options(next_node_id_++, *ip, config), + EmulatedEndpointImpl::Options(next_node_id_++, *ip, config, + stats_gathering_mode_), config.start_as_enabled, &task_queue_, clock_); EmulatedEndpointImpl* out = node.get(); endpoints_.push_back(std::move(node)); @@ -279,8 +282,8 @@ NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( for (EmulatedEndpoint* endpoint : endpoints) { endpoint_impls.push_back(static_cast(endpoint)); } - auto endpoints_container = - std::make_unique(endpoint_impls); + auto endpoints_container = std::make_unique( + endpoint_impls, stats_gathering_mode_); auto network_manager = std::make_unique( time_controller_.get(), &task_queue_, endpoints_container.get()); for (auto* endpoint : endpoints) { @@ -302,20 +305,34 @@ NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( void NetworkEmulationManagerImpl::GetStats( rtc::ArrayView endpoints, - std::function)> stats_callback) { - task_queue_.PostTask([endpoints, stats_callback]() { - EmulatedNetworkStatsBuilder stats_builder; + std::function stats_callback) { + task_queue_.PostTask([endpoints, stats_callback, + stats_gathering_mode = stats_gathering_mode_]() { + EmulatedNetworkStatsBuilder stats_builder(stats_gathering_mode); for (auto* endpoint : endpoints) { // It's safe to cast here because EmulatedEndpointImpl can be the only // implementation of EmulatedEndpoint, because only it has access to // EmulatedEndpoint constructor. 
auto endpoint_impl = static_cast(endpoint); - stats_builder.AddEmulatedNetworkStats(*endpoint_impl->stats()); + stats_builder.AddEmulatedNetworkStats(endpoint_impl->stats()); } stats_callback(stats_builder.Build()); }); } +void NetworkEmulationManagerImpl::GetStats( + rtc::ArrayView nodes, + std::function stats_callback) { + task_queue_.PostTask( + [nodes, stats_callback, stats_gathering_mode = stats_gathering_mode_]() { + EmulatedNetworkNodeStatsBuilder stats_builder(stats_gathering_mode); + for (auto* node : nodes) { + stats_builder.AddEmulatedNetworkNodeStats(node->stats()); + } + stats_callback(stats_builder.Build()); + }); +} + absl::optional NetworkEmulationManagerImpl::GetNextIPv4Address() { uint32_t addresses_count = kMaxIPv4Address - kMinIPv4Address; diff --git a/test/network/network_emulation_manager.h b/test/network/network_emulation_manager.h index 449441a3c1..29debca693 100644 --- a/test/network/network_emulation_manager.h +++ b/test/network/network_emulation_manager.h @@ -23,16 +23,12 @@ #include "api/test/time_controller.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/logging.h" -#include "rtc_base/network.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/thread.h" #include "system_wrappers/include/clock.h" #include "test/network/cross_traffic.h" #include "test/network/emulated_network_manager.h" #include "test/network/emulated_turn_server.h" -#include "test/network/fake_network_socket_server.h" #include "test/network/network_emulation.h" namespace webrtc { @@ -40,7 +36,9 @@ namespace test { class NetworkEmulationManagerImpl : public NetworkEmulationManager { public: - explicit NetworkEmulationManagerImpl(TimeMode mode); + NetworkEmulationManagerImpl( + TimeMode mode, + EmulatedNetworkStatsGatheringMode stats_gathering_mode); ~NetworkEmulationManagerImpl(); EmulatedNetworkNode* CreateEmulatedNode(BuiltInNetworkBehaviorConfig config, @@ -81,9 
+79,13 @@ class NetworkEmulationManagerImpl : public NetworkEmulationManager { EmulatedNetworkManagerInterface* CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) override; - void GetStats(rtc::ArrayView endpoints, - std::function)> - stats_callback) override; + void GetStats( + rtc::ArrayView endpoints, + std::function stats_callback) override; + + void GetStats( + rtc::ArrayView nodes, + std::function stats_callback) override; TimeController* time_controller() override { return time_controller_.get(); } @@ -101,6 +103,7 @@ class NetworkEmulationManagerImpl : public NetworkEmulationManager { absl::optional GetNextIPv4Address(); const TimeMode time_mode_; + const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; const std::unique_ptr time_controller_; Clock* const clock_; int next_node_id_; diff --git a/test/network/network_emulation_pc_unittest.cc b/test/network/network_emulation_pc_unittest.cc index 0519dd816d..51a45a8234 100644 --- a/test/network/network_emulation_pc_unittest.cc +++ b/test/network/network_emulation_pc_unittest.cc @@ -118,7 +118,8 @@ TEST(NetworkEmulationManagerPCTest, Run) { signaling_thread->Start(); // Setup emulated network - NetworkEmulationManagerImpl emulation(TimeMode::kRealTime); + NetworkEmulationManagerImpl emulation( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -209,7 +210,8 @@ TEST(NetworkEmulationManagerPCTest, RunTURN) { signaling_thread->Start(); // Setup emulated network - NetworkEmulationManagerImpl emulation(TimeMode::kRealTime); + NetworkEmulationManagerImpl emulation( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); diff --git a/test/network/network_emulation_unittest.cc b/test/network/network_emulation_unittest.cc index 
591cc2c473..2e67a5a00a 100644 --- a/test/network/network_emulation_unittest.cc +++ b/test/network/network_emulation_unittest.cc @@ -142,7 +142,8 @@ class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test { MockReceiver r_e1_e3_; MockReceiver r_e3_e1_; - NetworkEmulationManagerImpl emulation_{TimeMode::kRealTime}; + NetworkEmulationManagerImpl emulation_{ + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault}; EmulatedEndpoint* e1_; EmulatedEndpoint* e2_; EmulatedEndpoint* e3_; @@ -159,7 +160,8 @@ EmulatedNetworkNode* CreateEmulatedNodeWithDefaultBuiltInConfig( using ::testing::_; TEST(NetworkEmulationManagerTest, GeneratedIpv4AddressDoesNotCollide) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); std::set ips; EmulatedEndpointConfig config; config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv4; @@ -172,7 +174,8 @@ TEST(NetworkEmulationManagerTest, GeneratedIpv4AddressDoesNotCollide) { } TEST(NetworkEmulationManagerTest, GeneratedIpv6AddressDoesNotCollide) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); std::set ips; EmulatedEndpointConfig config; config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv6; @@ -185,7 +188,8 @@ TEST(NetworkEmulationManagerTest, GeneratedIpv6AddressDoesNotCollide) { } TEST(NetworkEmulationManagerTest, Run) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -251,82 +255,84 @@ TEST(NetworkEmulationManagerTest, Run) { const int64_t 
single_packet_size = data.size() + kOverheadIpv4Udp; std::atomic received_stats_count{0}; - nt1->GetStats([&](std::unique_ptr st) { - EXPECT_EQ(st->PacketsSent(), 2000l); - EXPECT_EQ(st->BytesSent().bytes(), single_packet_size * 2000l); - EXPECT_THAT(st->LocalAddresses(), + nt1->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.PacketsSent(), 2000l); + EXPECT_EQ(st.BytesSent().bytes(), single_packet_size * 2000l); + EXPECT_THAT(st.local_addresses, ElementsAreArray({alice_endpoint->GetPeerLocalAddress()})); - EXPECT_EQ(st->PacketsReceived(), 2000l); - EXPECT_EQ(st->BytesReceived().bytes(), single_packet_size * 2000l); - EXPECT_EQ(st->PacketsDropped(), 0l); - EXPECT_EQ(st->BytesDropped().bytes(), 0l); + EXPECT_EQ(st.PacketsReceived(), 2000l); + EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l); + EXPECT_EQ(st.PacketsDiscardedNoReceiver(), 0l); + EXPECT_EQ(st.BytesDiscardedNoReceiver().bytes(), 0l); rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); - std::map> - source_st = st->IncomingStatsPerSource(); + std::map source_st = + st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); - EXPECT_EQ(source_st.at(bob_ip)->PacketsReceived(), 2000l); - EXPECT_EQ(source_st.at(bob_ip)->BytesReceived().bytes(), + EXPECT_EQ(source_st.at(bob_ip).packets_received, 2000l); + EXPECT_EQ(source_st.at(bob_ip).bytes_received.bytes(), single_packet_size * 2000l); - EXPECT_EQ(source_st.at(bob_ip)->PacketsDropped(), 0l); - EXPECT_EQ(source_st.at(bob_ip)->BytesDropped().bytes(), 0l); + EXPECT_EQ(source_st.at(bob_ip).packets_discarded_no_receiver, 0l); + EXPECT_EQ(source_st.at(bob_ip).bytes_discarded_no_receiver.bytes(), 0l); - std::map> - dest_st = st->OutgoingStatsPerDestination(); + std::map dest_st = + st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); - EXPECT_EQ(dest_st.at(bob_ip)->PacketsSent(), 2000l); - EXPECT_EQ(dest_st.at(bob_ip)->BytesSent().bytes(), + EXPECT_EQ(dest_st.at(bob_ip).packets_sent, 2000l); + 
EXPECT_EQ(dest_st.at(bob_ip).bytes_sent.bytes(), single_packet_size * 2000l); // No debug stats are collected by default. - EXPECT_TRUE(st->SentPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->SentPacketsQueueWaitTimeUs().IsEmpty()); - EXPECT_TRUE(st->ReceivedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->DroppedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(dest_st.at(bob_ip)->SentPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(source_st.at(bob_ip)->ReceivedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(source_st.at(bob_ip)->DroppedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.sent_packets_queue_wait_time_us.IsEmpty()); + EXPECT_TRUE(st.ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); + EXPECT_TRUE(dest_st.at(bob_ip).sent_packets_size.IsEmpty()); + EXPECT_TRUE(source_st.at(bob_ip).received_packets_size.IsEmpty()); + EXPECT_TRUE( + source_st.at(bob_ip).packets_discarded_no_receiver_size.IsEmpty()); received_stats_count++; }); - nt2->GetStats([&](std::unique_ptr st) { - EXPECT_EQ(st->PacketsSent(), 2000l); - EXPECT_EQ(st->BytesSent().bytes(), single_packet_size * 2000l); - EXPECT_THAT(st->LocalAddresses(), + nt2->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.PacketsSent(), 2000l); + EXPECT_EQ(st.BytesSent().bytes(), single_packet_size * 2000l); + EXPECT_THAT(st.local_addresses, ElementsAreArray({bob_endpoint->GetPeerLocalAddress()})); - EXPECT_EQ(st->PacketsReceived(), 2000l); - EXPECT_EQ(st->BytesReceived().bytes(), single_packet_size * 2000l); - EXPECT_EQ(st->PacketsDropped(), 0l); - EXPECT_EQ(st->BytesDropped().bytes(), 0l); - EXPECT_GT(st->FirstReceivedPacketSize(), DataSize::Zero()); - EXPECT_TRUE(st->FirstPacketReceivedTime().IsFinite()); - EXPECT_TRUE(st->LastPacketReceivedTime().IsFinite()); + EXPECT_EQ(st.PacketsReceived(), 2000l); + EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l); + 
EXPECT_EQ(st.PacketsDiscardedNoReceiver(), 0l); + EXPECT_EQ(st.BytesDiscardedNoReceiver().bytes(), 0l); + EXPECT_GT(st.FirstReceivedPacketSize(), DataSize::Zero()); + EXPECT_TRUE(st.FirstPacketReceivedTime().IsFinite()); + EXPECT_TRUE(st.LastPacketReceivedTime().IsFinite()); rtc::IPAddress alice_ip = alice_endpoint->GetPeerLocalAddress(); - std::map> - source_st = st->IncomingStatsPerSource(); + std::map source_st = + st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); - EXPECT_EQ(source_st.at(alice_ip)->PacketsReceived(), 2000l); - EXPECT_EQ(source_st.at(alice_ip)->BytesReceived().bytes(), + EXPECT_EQ(source_st.at(alice_ip).packets_received, 2000l); + EXPECT_EQ(source_st.at(alice_ip).bytes_received.bytes(), single_packet_size * 2000l); - EXPECT_EQ(source_st.at(alice_ip)->PacketsDropped(), 0l); - EXPECT_EQ(source_st.at(alice_ip)->BytesDropped().bytes(), 0l); + EXPECT_EQ(source_st.at(alice_ip).packets_discarded_no_receiver, 0l); + EXPECT_EQ(source_st.at(alice_ip).bytes_discarded_no_receiver.bytes(), 0l); - std::map> - dest_st = st->OutgoingStatsPerDestination(); + std::map dest_st = + st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); - EXPECT_EQ(dest_st.at(alice_ip)->PacketsSent(), 2000l); - EXPECT_EQ(dest_st.at(alice_ip)->BytesSent().bytes(), + EXPECT_EQ(dest_st.at(alice_ip).packets_sent, 2000l); + EXPECT_EQ(dest_st.at(alice_ip).bytes_sent.bytes(), single_packet_size * 2000l); // No debug stats are collected by default. 
- EXPECT_TRUE(st->SentPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->SentPacketsQueueWaitTimeUs().IsEmpty()); - EXPECT_TRUE(st->ReceivedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(st->DroppedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(dest_st.at(alice_ip)->SentPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(source_st.at(alice_ip)->ReceivedPacketsSizeCounter().IsEmpty()); - EXPECT_TRUE(source_st.at(alice_ip)->DroppedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.sent_packets_queue_wait_time_us.IsEmpty()); + EXPECT_TRUE(st.ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); + EXPECT_TRUE(dest_st.at(alice_ip).sent_packets_size.IsEmpty()); + EXPECT_TRUE(source_st.at(alice_ip).received_packets_size.IsEmpty()); + EXPECT_TRUE( + source_st.at(alice_ip).packets_discarded_no_receiver_size.IsEmpty()); received_stats_count++; }); @@ -336,17 +342,15 @@ TEST(NetworkEmulationManagerTest, Run) { } TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDebug); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); EmulatedNetworkNode* bob_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); - EmulatedEndpointConfig debug_config; - debug_config.stats_gathering_mode = - EmulatedEndpointConfig::StatsGatheringMode::kDebug; EmulatedEndpoint* alice_endpoint = - network_manager.CreateEndpoint(debug_config); + network_manager.CreateEndpoint(EmulatedEndpointConfig()); EmulatedEndpoint* bob_endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); network_manager.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint); @@ -405,31 +409,30 @@ 
TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { const int64_t single_packet_size = data.size() + kOverheadIpv4Udp; std::atomic received_stats_count{0}; - nt1->GetStats([&](std::unique_ptr st) { + nt1->GetStats([&](EmulatedNetworkStats st) { rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); - std::map> - source_st = st->IncomingStatsPerSource(); + std::map source_st = + st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); - std::map> - dest_st = st->OutgoingStatsPerDestination(); + std::map dest_st = + st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); // No debug stats are collected by default. - EXPECT_EQ(st->SentPacketsSizeCounter().NumSamples(), 2000l); - EXPECT_EQ(st->ReceivedPacketsSizeCounter().GetAverage(), + EXPECT_EQ(st.SentPacketsSizeCounter().NumSamples(), 2000l); + EXPECT_EQ(st.ReceivedPacketsSizeCounter().GetAverage(), single_packet_size); + EXPECT_EQ(st.sent_packets_queue_wait_time_us.NumSamples(), 2000l); + EXPECT_LT(st.sent_packets_queue_wait_time_us.GetMax(), 1); + EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty()); + EXPECT_EQ(dest_st.at(bob_ip).sent_packets_size.NumSamples(), 2000l); + EXPECT_EQ(dest_st.at(bob_ip).sent_packets_size.GetAverage(), single_packet_size); - EXPECT_EQ(st->SentPacketsQueueWaitTimeUs().NumSamples(), 2000l); - EXPECT_LT(st->SentPacketsQueueWaitTimeUs().GetMax(), 1); - EXPECT_TRUE(st->DroppedPacketsSizeCounter().IsEmpty()); - EXPECT_EQ(dest_st.at(bob_ip)->SentPacketsSizeCounter().NumSamples(), 2000l); - EXPECT_EQ(dest_st.at(bob_ip)->SentPacketsSizeCounter().GetAverage(), + EXPECT_EQ(source_st.at(bob_ip).received_packets_size.NumSamples(), 2000l); + EXPECT_EQ(source_st.at(bob_ip).received_packets_size.GetAverage(), single_packet_size); - EXPECT_EQ(source_st.at(bob_ip)->ReceivedPacketsSizeCounter().NumSamples(), - 2000l); - EXPECT_EQ(source_st.at(bob_ip)->ReceivedPacketsSizeCounter().GetAverage(), - single_packet_size); - 
EXPECT_TRUE(source_st.at(bob_ip)->DroppedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE( + source_st.at(bob_ip).packets_discarded_no_receiver_size.IsEmpty()); received_stats_count++; }); @@ -439,7 +442,8 @@ TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { } TEST(NetworkEmulationManagerTest, ThroughputStats) { - NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -489,7 +493,7 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { SendTask(t1, [&] { s1->Connect(a2); }); SendTask(t2, [&] { s2->Connect(a1); }); - // Send 11 packets, totalizing 1 second between the first and the last. + // Send 11 packets, totalizing 1 second between the first and the last-> const int kNumPacketsSent = 11; const TimeDelta kDelay = TimeDelta::Millis(100); for (int i = 0; i < kNumPacketsSent; i++) { @@ -499,14 +503,14 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { } std::atomic received_stats_count{0}; - nt1->GetStats([&](std::unique_ptr st) { - EXPECT_EQ(st->PacketsSent(), kNumPacketsSent); - EXPECT_EQ(st->BytesSent().bytes(), kSinglePacketSize * kNumPacketsSent); + nt1->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.PacketsSent(), kNumPacketsSent); + EXPECT_EQ(st.BytesSent().bytes(), kSinglePacketSize * kNumPacketsSent); const double tolerance = 0.95; // Accept 5% tolerance for timing. 
- EXPECT_GE(st->LastPacketSentTime() - st->FirstPacketSentTime(), + EXPECT_GE(st.LastPacketSentTime() - st.FirstPacketSentTime(), (kNumPacketsSent - 1) * kDelay * tolerance); - EXPECT_GT(st->AverageSendRate().bps(), 0); + EXPECT_GT(st.AverageSendRate().bps(), 0); received_stats_count++; }); @@ -569,7 +573,8 @@ TEST_F(NetworkEmulationManagerThreeNodesRoutingTest, } TEST(NetworkEmulationManagerTest, EndpointLoopback) { - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); auto endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); MockReceiver receiver; @@ -585,7 +590,8 @@ TEST(NetworkEmulationManagerTest, EndpointLoopback) { TEST(NetworkEmulationManagerTest, EndpointCanSendWithDifferentSourceIp) { constexpr uint32_t kEndpointIp = 0xC0A80011; // 192.168.0.17 constexpr uint32_t kSourceIp = 0xC0A80012; // 192.168.0.18 - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); EmulatedEndpointConfig endpoint_config; endpoint_config.ip = rtc::IPAddress(kEndpointIp); endpoint_config.allow_send_packet_with_different_source_ip = true; @@ -605,7 +611,8 @@ TEST(NetworkEmulationManagerTest, EndpointCanReceiveWithDifferentDestIpThroughDefaultRoute) { constexpr uint32_t kDestEndpointIp = 0xC0A80011; // 192.168.0.17 constexpr uint32_t kDestIp = 0xC0A80012; // 192.168.0.18 - NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + NetworkEmulationManagerImpl network_manager( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); auto sender_endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); EmulatedEndpointConfig endpoint_config; @@ -628,7 +635,8 @@ TEST(NetworkEmulationManagerTest, } TEST(NetworkEmulationManagerTURNTest, GetIceServerConfig) { - 
NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); + NetworkEmulationManagerImpl network_manager( + TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); auto turn = network_manager.CreateTURNServer(EmulatedTURNServerConfig()); EXPECT_GT(turn->GetIceServerConfig().username.size(), 0u); @@ -639,7 +647,8 @@ TEST(NetworkEmulationManagerTURNTest, GetIceServerConfig) { } TEST(NetworkEmulationManagerTURNTest, ClientTraffic) { - NetworkEmulationManagerImpl emulation(TimeMode::kSimulated); + NetworkEmulationManagerImpl emulation( + TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); auto* ep = emulation.CreateEndpoint(EmulatedEndpointConfig()); auto* turn = emulation.CreateTURNServer(EmulatedTURNServerConfig()); auto* node = CreateEmulatedNodeWithDefaultBuiltInConfig(&emulation); diff --git a/test/pc/e2e/BUILD.gn b/test/pc/e2e/BUILD.gn index 83a1b59090..2b9a69afc5 100644 --- a/test/pc/e2e/BUILD.gn +++ b/test/pc/e2e/BUILD.gn @@ -8,40 +8,20 @@ import("../../../webrtc.gni") -rtc_library("video_dumping") { +rtc_library("metric_metadata_keys") { testonly = true - sources = [ - "analyzer/video/video_dumping.cc", - "analyzer/video/video_dumping.h", - ] - deps = [ - "../..:video_test_support", - "../../../api/test/video:video_frame_writer", - "../../../api/video:video_frame", - "../../../rtc_base:checks", - "../../../rtc_base:logging", - "../../../system_wrappers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + sources = [ "metric_metadata_keys.h" ] } if (!build_with_chromium) { group("e2e") { testonly = true - deps = [ - ":encoded_image_data_injector_api", - ":example_video_quality_analyzer", - ":quality_analyzing_video_decoder", - ":quality_analyzing_video_encoder", - ":single_process_encoded_image_data_injector", - ":video_frame_tracking_id_injector", - ] + deps = [ ":metric_metadata_keys" ] if (rtc_include_tests) { deps += [ ":peerconnection_quality_test", ":test_peer", - ":video_quality_analyzer_injection_helper", ] 
} } @@ -51,246 +31,46 @@ if (!build_with_chromium) { testonly = true deps = [ - ":default_video_quality_analyzer_frames_comparator_test", - ":default_video_quality_analyzer_metric_names_test", - ":default_video_quality_analyzer_stream_state_test", - ":default_video_quality_analyzer_test", - ":multi_reader_queue_test", - ":names_collection_test", ":peer_connection_e2e_smoke_test", ":peer_connection_quality_test_metric_names_test", - ":simulcast_dummy_buffer_helper_test", - ":single_process_encoded_image_data_injector_unittest", + ":peer_connection_quality_test_test", + ":stats_based_network_quality_metrics_reporter_test", ":stats_poller_test", - ":video_dumping_test", - ":video_frame_tracking_id_injector_unittest", ] } } - rtc_library("peer_connection_quality_test_params") { - visibility = [ "*" ] - testonly = true - sources = [ "peer_connection_quality_test_params.h" ] - - deps = [ - "../../../api:callfactory_api", - "../../../api:fec_controller_api", - "../../../api:field_trials_view", - "../../../api:libjingle_peerconnection_api", - "../../../api:packet_socket_factory", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api/audio:audio_mixer_api", - "../../../api/rtc_event_log", - "../../../api/task_queue", - "../../../api/transport:network_control", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/audio_processing:api", - "../../../p2p:rtc_p2p", - "../../../rtc_base", - "../../../rtc_base:threading", - ] - } - - rtc_library("encoded_image_data_injector_api") { - visibility = [ "*" ] - testonly = true - sources = [ "analyzer/video/encoded_image_data_injector.h" ] - - deps = [ "../../../api/video:encoded_image" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("single_process_encoded_image_data_injector") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/single_process_encoded_image_data_injector.cc", - "analyzer/video/single_process_encoded_image_data_injector.h", 
- ] - - deps = [ - ":encoded_image_data_injector_api", - "../../../api/video:encoded_image", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] - } - - rtc_library("video_frame_tracking_id_injector") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/video_frame_tracking_id_injector.cc", - "analyzer/video/video_frame_tracking_id_injector.h", - ] - - deps = [ - ":encoded_image_data_injector_api", - "../../../api/video:encoded_image", - "../../../rtc_base:checks", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] - } - - rtc_library("simulcast_dummy_buffer_helper") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/simulcast_dummy_buffer_helper.cc", - "analyzer/video/simulcast_dummy_buffer_helper.h", - ] - deps = [ "../../../api/video:video_frame" ] - } - - rtc_library("simulcast_dummy_buffer_helper_test") { - testonly = true - sources = [ "analyzer/video/simulcast_dummy_buffer_helper_test.cc" ] - deps = [ - ":simulcast_dummy_buffer_helper", - "../..:test_support", - "../../../api/video:video_frame", - "../../../rtc_base:random", - ] - } - - rtc_library("quality_analyzing_video_decoder") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/quality_analyzing_video_decoder.cc", - "analyzer/video/quality_analyzing_video_decoder.h", - ] - deps = [ - ":encoded_image_data_injector_api", - ":simulcast_dummy_buffer_helper", - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/video_coding:video_codec_interface", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - 
"//third_party/abseil-cpp/absl/types:optional", - ] - } - - rtc_library("quality_analyzing_video_encoder") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/quality_analyzing_video_encoder.cc", - "analyzer/video/quality_analyzing_video_encoder.h", - ] - deps = [ - ":encoded_image_data_injector_api", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/video_coding:video_codec_interface", - "../../../modules/video_coding/svc:scalability_mode_util", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } - if (rtc_include_tests) { - rtc_library("video_dumping_test") { - testonly = true - sources = [ "analyzer/video/video_dumping_test.cc" ] - deps = [ - ":video_dumping", - "../..:fileutils", - "../..:test_support", - "../..:video_test_support", - "../../../api:scoped_refptr", - "../../../api/video:video_frame", - "../../../rtc_base:random", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("video_quality_analyzer_injection_helper") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/video_quality_analyzer_injection_helper.cc", - "analyzer/video/video_quality_analyzer_injection_helper.h", - ] - deps = [ - ":encoded_image_data_injector_api", - ":quality_analyzing_video_decoder", - ":quality_analyzing_video_encoder", - ":simulcast_dummy_buffer_helper", - ":video_dumping", - "../..:fixed_fps_video_frame_writer_adapter", - "../..:test_renderer", - "../../../api:array_view", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:stats_observer_interface", - "../../../api:video_quality_analyzer_api", - 
"../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../api/video_codecs:video_codecs_api", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base:stringutils", - "../../../rtc_base/synchronization:mutex", - "../../../system_wrappers", - "../../../test:video_test_common", - "../../../test:video_test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - ] - } - rtc_library("echo_emulation") { - visibility = [ "*" ] testonly = true sources = [ "echo/echo_emulation.cc", "echo/echo_emulation.h", ] deps = [ - "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/test/pclf:media_configuration", "../../../modules/audio_device:audio_device_impl", "../../../rtc_base:swap_queue", ] } rtc_library("test_peer") { - visibility = [ "*" ] testonly = true sources = [ "test_peer.cc", "test_peer.h", ] deps = [ - ":peer_configurer", - ":peer_connection_quality_test_params", ":stats_provider", "../../../api:frame_generator_api", "../../../api:function_view", "../../../api:libjingle_peerconnection_api", - "../../../api:peer_connection_quality_test_fixture_api", "../../../api:scoped_refptr", "../../../api:sequence_checker", "../../../api/task_queue:pending_task_safety_flag", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", "../../../modules/audio_processing:api", "../../../pc:peerconnection_wrapper", "../../../rtc_base:logging", @@ -305,7 +85,6 @@ if (!build_with_chromium) { } rtc_library("test_peer_factory") { - visibility = [ "*" ] testonly = true sources = [ "test_peer_factory.cc", @@ -313,17 +92,15 @@ if (!build_with_chromium) { ] deps = [ ":echo_emulation", - ":peer_configurer", - ":peer_connection_quality_test_params", - ":quality_analyzing_video_encoder", ":test_peer", - 
":video_quality_analyzer_injection_helper", "../..:copy_to_file_audio_capturer", "../../../api:create_time_controller", - "../../../api:peer_connection_quality_test_fixture_api", "../../../api:time_controller", "../../../api/rtc_event_log:rtc_event_log_factory", "../../../api/task_queue:default_task_queue_factory", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", "../../../api/transport:field_trial_based_config", "../../../api/video_codecs:builtin_video_decoder_factory", "../../../api/video_codecs:builtin_video_encoder_factory", @@ -334,6 +111,8 @@ if (!build_with_chromium) { "../../../p2p:rtc_p2p", "../../../rtc_base:rtc_task_queue", "../../../rtc_base:threading", + "analyzer/video:quality_analyzing_video_encoder", + "analyzer/video:video_quality_analyzer_injection_helper", ] absl_deps = [ "//third_party/abseil-cpp/absl/memory", @@ -342,7 +121,6 @@ if (!build_with_chromium) { } rtc_library("media_helper") { - visibility = [ "*" ] testonly = true sources = [ "media/media_helper.cc", @@ -350,55 +128,44 @@ if (!build_with_chromium) { "media/test_video_capturer_video_track_source.h", ] deps = [ - ":peer_configurer", ":test_peer", - ":video_quality_analyzer_injection_helper", "../..:fileutils", "../..:platform_video_capturer", "../..:video_test_common", "../../../api:create_frame_generator", "../../../api:frame_generator_api", "../../../api:media_stream_interface", - "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:peer_configurer", "../../../api/video:video_frame", "../../../pc:session_description", "../../../pc:video_track_source", + "analyzer/video:video_quality_analyzer_injection_helper", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ] } - rtc_library("peer_configurer") { + rtc_library("peer_params_preprocessor") { visibility = [ "*" ] testonly = true sources = [ - 
"peer_configurer.cc", - "peer_configurer.h", + "peer_params_preprocessor.cc", + "peer_params_preprocessor.h", ] deps = [ - ":peer_connection_quality_test_params", "../..:fileutils", - "../../../api:callfactory_api", - "../../../api:create_peer_connection_quality_test_frame_generator", - "../../../api:fec_controller_api", - "../../../api:packet_socket_factory", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api/audio:audio_mixer_api", - "../../../api/rtc_event_log", - "../../../api/task_queue", - "../../../api/transport:network_control", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/audio_processing:api", + "../../../api:peer_network_dependencies", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", "../../../modules/video_coding/svc:scalability_mode_util", "../../../modules/video_coding/svc:scalability_structures", - "../../../rtc_base", "../../../rtc_base:macromagic", - "../../../rtc_base:threading", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("test_activities_executor") { - visibility = [ "*" ] testonly = true sources = [ "test_activities_executor.cc", @@ -423,7 +190,6 @@ if (!build_with_chromium) { } rtc_library("peerconnection_quality_test") { - visibility = [ "*" ] testonly = true sources = [ @@ -434,19 +200,14 @@ if (!build_with_chromium) { ":analyzer_helper", ":cross_media_metrics_reporter", ":default_audio_quality_analyzer", - ":default_video_quality_analyzer", ":media_helper", - ":peer_configurer", - ":peer_connection_quality_test_params", + ":metric_metadata_keys", + ":peer_params_preprocessor", ":sdp_changer", - ":single_process_encoded_image_data_injector", ":stats_poller", ":test_activities_executor", ":test_peer", ":test_peer_factory", - ":video_frame_tracking_id_injector", - ":video_quality_analyzer_injection_helper", - ":video_quality_metrics_reporter", "../..:field_trial", 
"../..:fileutils", "../..:perf_test", @@ -462,7 +223,9 @@ if (!build_with_chromium) { "../../../api/task_queue", "../../../api/test/metrics:metric", "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", "../../../api/units:time_delta", "../../../api/units:timestamp", "../../../pc:pc_test_utils", @@ -477,35 +240,15 @@ if (!build_with_chromium) { "../../../rtc_base/synchronization:mutex", "../../../system_wrappers", "../../../system_wrappers:field_trial", + "analyzer/video:default_video_quality_analyzer", + "analyzer/video:single_process_encoded_image_data_injector", + "analyzer/video:video_frame_tracking_id_injector", + "analyzer/video:video_quality_analyzer_injection_helper", + "analyzer/video:video_quality_metrics_reporter", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("single_process_encoded_image_data_injector_unittest") { - testonly = true - sources = [ - "analyzer/video/single_process_encoded_image_data_injector_unittest.cc", - ] - deps = [ - ":single_process_encoded_image_data_injector", - "../../../api/video:encoded_image", - "../../../rtc_base:buffer", - "../../../test:test_support", - ] - } - - rtc_library("video_frame_tracking_id_injector_unittest") { - testonly = true - sources = - [ "analyzer/video/video_frame_tracking_id_injector_unittest.cc" ] - deps = [ - ":video_frame_tracking_id_injector", - "../../../api/video:encoded_image", - "../../../rtc_base:buffer", - "../../../test:test_support", - ] - } - peer_connection_e2e_smoke_test_resources = [ "../../../resources/pc_quality_smoke_test_alice_source.wav", "../../../resources/pc_quality_smoke_test_bob_source.wav", @@ -524,8 +267,6 @@ if (!build_with_chromium) { sources = [ "peer_connection_e2e_smoke_test.cc" ] deps = [ ":default_audio_quality_analyzer", - 
":default_video_quality_analyzer", - ":default_video_quality_analyzer_shared", ":network_quality_metrics_reporter", ":stats_based_network_quality_metrics_reporter", "../../../api:callfactory_api", @@ -541,6 +282,9 @@ if (!build_with_chromium) { "../../../api/audio_codecs:builtin_audio_decoder_factory", "../../../api/audio_codecs:builtin_audio_encoder_factory", "../../../api/test/metrics:global_metrics_logger_and_exporter", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", "../../../api/video_codecs:builtin_video_decoder_factory", "../../../api/video_codecs:builtin_video_encoder_factory", "../../../call:simulated_network", @@ -557,6 +301,8 @@ if (!build_with_chromium) { "../../../test:field_trial", "../../../test:fileutils", "../../../test:test_support", + "analyzer/video:default_video_quality_analyzer", + "analyzer/video:default_video_quality_analyzer_shared", ] data = peer_connection_e2e_smoke_test_resources if (is_mac || is_ios) { @@ -580,21 +326,67 @@ if (!build_with_chromium) { "../../../api:network_emulation_manager_api", "../../../api:peer_connection_quality_test_fixture_api", "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", "../../../api/test/metrics:stdout_metrics_exporter", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", "../../../api/units:time_delta", ] } + rtc_library("stats_based_network_quality_metrics_reporter_test") { + testonly = true + sources = [ "stats_based_network_quality_metrics_reporter_test.cc" ] + deps = [ + ":metric_metadata_keys", + ":peerconnection_quality_test", + ":stats_based_network_quality_metrics_reporter", + "../..:test_support", + "../../../api:array_view", + "../../../api:create_network_emulation_manager", + "../../../api:create_peer_connection_quality_test_frame_generator", + 
"../../../api:network_emulation_manager_api", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/test/metrics:metrics_logger", + "../../../api/test/metrics:stdout_metrics_exporter", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", + "../../../api/units:time_delta", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + + rtc_library("peer_connection_quality_test_test") { + testonly = true + sources = [ "peer_connection_quality_test_test.cc" ] + deps = [ + ":peerconnection_quality_test", + "../..:fileutils", + "../..:test_support", + "../..:video_test_support", + "../../../api:create_network_emulation_manager", + "../../../api:network_emulation_manager_api", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/test/metrics:global_metrics_logger_and_exporter", + "../../../api/test/pclf:media_configuration", + "../../../api/test/pclf:media_quality_test_params", + "../../../api/test/pclf:peer_configurer", + "../../../api/units:time_delta", + "../../../rtc_base:timeutils", + ] + } + rtc_library("stats_provider") { - visibility = [ "*" ] testonly = true sources = [ "stats_provider.h" ] deps = [ "../../../api:rtc_stats_api" ] } rtc_library("stats_poller") { - visibility = [ "*" ] testonly = true sources = [ "stats_poller.cc", @@ -621,104 +413,9 @@ if (!build_with_chromium) { "../../../api:rtc_stats_api", ] } - - rtc_library("default_video_quality_analyzer_test") { - testonly = true - sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ] - deps = [ - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_shared", - "../..:test_support", - "../../../api:create_frame_generator", - "../../../api:rtp_packet_info", - "../../../api/test/metrics:global_metrics_logger_and_exporter", - "../../../api/video:encoded_image", - 
"../../../api/video:video_frame", - "../../../common_video", - "../../../modules/rtp_rtcp:rtp_rtcp_format", - "../../../rtc_base:stringutils", - "../../../rtc_tools:video_quality_analysis", - "../../../system_wrappers", - ] - } - - rtc_library("default_video_quality_analyzer_metric_names_test") { - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_metric_names_test.cc", - ] - deps = [ - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_shared", - "../..:test_support", - "../../../api:create_frame_generator", - "../../../api:rtp_packet_info", - "../../../api/test/metrics:metric", - "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", - "../../../api/test/metrics:stdout_metrics_exporter", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../common_video", - "../../../rtc_base:stringutils", - "../../../rtc_tools:video_quality_analysis", - "../../../system_wrappers", - ] - } - - rtc_library("default_video_quality_analyzer_frames_comparator_test") { - testonly = true - sources = [ "analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc" ] - deps = [ - ":default_video_quality_analyzer_internal", - ":default_video_quality_analyzer_shared", - "../..:test_support", - "../../../api:create_frame_generator", - "../../../api/units:timestamp", - "../../../rtc_base:stringutils", - "../../../system_wrappers", - ] - } - - rtc_library("names_collection_test") { - testonly = true - sources = [ "analyzer/video/names_collection_test.cc" ] - deps = [ - ":default_video_quality_analyzer_internal", - "../..:test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - - rtc_library("multi_reader_queue_test") { - testonly = true - sources = [ "analyzer/video/multi_reader_queue_test.cc" ] - deps = [ - ":multi_reader_queue", - "../../../test:test_support", - ] - 
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("default_video_quality_analyzer_stream_state_test") { - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_stream_state_test.cc", - ] - deps = [ - ":default_video_quality_analyzer_internal", - "../../../api/units:timestamp", - "../../../test:test_support", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } } rtc_library("analyzer_helper") { - visibility = [ "*" ] sources = [ "analyzer_helper.cc", "analyzer_helper.h", @@ -728,11 +425,13 @@ if (!build_with_chromium) { "../../../api:track_id_stream_info_map", "../../../rtc_base:macromagic", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] } rtc_library("default_audio_quality_analyzer") { - visibility = [ "*" ] testonly = true sources = [ "analyzer/audio/default_audio_quality_analyzer.cc", @@ -740,6 +439,7 @@ if (!build_with_chromium) { ] deps = [ + ":metric_metadata_keys", "../..:perf_test", "../../../api:audio_quality_analyzer_api", "../../../api:rtc_stats_api", @@ -748,7 +448,6 @@ if (!build_with_chromium) { "../../../api/numerics", "../../../api/test/metrics:metric", "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", "../../../api/units:time_delta", "../../../api/units:timestamp", "../../../rtc_base:checks", @@ -760,180 +459,7 @@ if (!build_with_chromium) { absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("example_video_quality_analyzer") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/example_video_quality_analyzer.cc", - "analyzer/video/example_video_quality_analyzer.h", - ] - - deps = [ - "../../../api:array_view", - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - 
"../../../api/video:video_rtp_headers", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base/synchronization:mutex", - ] - } - - rtc_library("video_quality_metrics_reporter") { - visibility = [ "*" ] - - testonly = true - sources = [ - "analyzer/video/video_quality_metrics_reporter.cc", - "analyzer/video/video_quality_metrics_reporter.h", - ] - deps = [ - ":metric_metadata_keys", - "../..:perf_test", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:rtc_stats_api", - "../../../api:track_id_stream_info_map", - "../../../api/numerics", - "../../../api/test/metrics:metric", - "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", - "../../../api/units:data_rate", - "../../../api/units:data_size", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "../../../rtc_base:rtc_numerics", - "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } - - rtc_library("metric_metadata_keys") { - visibility = [ "*" ] - testonly = true - sources = [ "metric_metadata_keys.h" ] - } - - rtc_library("default_video_quality_analyzer") { - visibility = [ "*" ] - - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer.cc", - "analyzer/video/default_video_quality_analyzer.h", - ] - - deps = [ - ":default_video_quality_analyzer_internal", - ":default_video_quality_analyzer_shared", - ":metric_metadata_keys", - "../..:perf_test", - "../../../api:array_view", - "../../../api:video_quality_analyzer_api", - "../../../api/numerics", - "../../../api/test/metrics:metric", - "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", - "../../../api/units:data_size", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../api/video:encoded_image", - 
"../../../api/video:video_frame", - "../../../api/video:video_frame_type", - "../../../api/video:video_rtp_headers", - "../../../common_video", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base:macromagic", - "../../../rtc_base:platform_thread", - "../../../rtc_base:rtc_event", - "../../../rtc_base:rtc_numerics", - "../../../rtc_base:stringutils", - "../../../rtc_base:timeutils", - "../../../rtc_base/synchronization:mutex", - "../../../rtc_tools:video_quality_analysis", - "../../../system_wrappers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - # This target contains implementation details of DefaultVideoQualityAnalyzer, - # so headers exported by it shouldn't be used in other places. - rtc_library("default_video_quality_analyzer_internal") { - visibility = [ - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_frames_comparator_test", - ":default_video_quality_analyzer_stream_state_test", - ":names_collection_test", - ] - - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_cpu_measurer.cc", - "analyzer/video/default_video_quality_analyzer_cpu_measurer.h", - "analyzer/video/default_video_quality_analyzer_frame_in_flight.cc", - "analyzer/video/default_video_quality_analyzer_frame_in_flight.h", - "analyzer/video/default_video_quality_analyzer_frames_comparator.cc", - "analyzer/video/default_video_quality_analyzer_frames_comparator.h", - "analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc", - "analyzer/video/default_video_quality_analyzer_internal_shared_objects.h", - "analyzer/video/default_video_quality_analyzer_stream_state.cc", - "analyzer/video/default_video_quality_analyzer_stream_state.h", - "analyzer/video/names_collection.cc", - "analyzer/video/names_collection.h", - ] - - deps = [ - ":default_video_quality_analyzer_shared", - ":metric_metadata_keys", - ":multi_reader_queue", - 
"../../../api:array_view", - "../../../api:scoped_refptr", - "../../../api/numerics:numerics", - "../../../api/units:data_size", - "../../../api/units:timestamp", - "../../../api/video:video_frame", - "../../../api/video:video_frame_type", - "../../../common_video", - "../../../rtc_base:checks", - "../../../rtc_base:platform_thread", - "../../../rtc_base:rtc_base_tests_utils", - "../../../rtc_base:rtc_event", - "../../../rtc_base:stringutils", - "../../../rtc_base:timeutils", - "../../../rtc_base/synchronization:mutex", - "../../../rtc_tools:video_quality_analysis", - "../../../system_wrappers:system_wrappers", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - - rtc_library("default_video_quality_analyzer_shared") { - visibility = [ "*" ] - - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer_shared_objects.cc", - "analyzer/video/default_video_quality_analyzer_shared_objects.h", - ] - - deps = [ - "../../../api/numerics:numerics", - "../../../api/units:timestamp", - "../../../rtc_base:checks", - "../../../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - rtc_library("network_quality_metrics_reporter") { - visibility = [ "*" ] testonly = true sources = [ "network_quality_metrics_reporter.cc", @@ -947,7 +473,6 @@ if (!build_with_chromium) { "../../../api:track_id_stream_info_map", "../../../api/test/metrics:metric", "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", "../../../api/units:data_size", "../../../rtc_base:checks", "../../../rtc_base:criticalsection", @@ -959,23 +484,23 @@ if (!build_with_chromium) { } rtc_library("stats_based_network_quality_metrics_reporter") { - visibility = [ "*" ] testonly = true sources = [ "stats_based_network_quality_metrics_reporter.cc", "stats_based_network_quality_metrics_reporter.h", ] deps = [ + ":metric_metadata_keys", 
"../..:perf_test", "../../../api:array_view", "../../../api:network_emulation_manager_api", "../../../api:peer_connection_quality_test_fixture_api", "../../../api:rtc_stats_api", "../../../api:scoped_refptr", + "../../../api:sequence_checker", "../../../api/numerics", "../../../api/test/metrics:metric", "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", "../../../api/test/network_emulation", "../../../api/units:data_rate", "../../../api/units:data_size", @@ -986,19 +511,20 @@ if (!build_with_chromium) { "../../../rtc_base:rtc_event", "../../../rtc_base:stringutils", "../../../rtc_base/synchronization:mutex", + "../../../rtc_base/system:no_unique_address", "../../../system_wrappers:field_trial", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("cross_media_metrics_reporter") { - visibility = [ "*" ] testonly = true sources = [ "cross_media_metrics_reporter.cc", "cross_media_metrics_reporter.h", ] deps = [ + ":metric_metadata_keys", "../..:perf_test", "../../../api:network_emulation_manager_api", "../../../api:peer_connection_quality_test_fixture_api", @@ -1007,7 +533,6 @@ if (!build_with_chromium) { "../../../api/numerics", "../../../api/test/metrics:metric", "../../../api/test/metrics:metrics_logger", - "../../../api/test/metrics:metrics_logger_and_exporter", "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:criticalsection", @@ -1023,7 +548,6 @@ if (!build_with_chromium) { } rtc_library("sdp_changer") { - visibility = [ "*" ] testonly = true sources = [ "sdp/sdp_changer.cc", @@ -1032,8 +556,8 @@ if (!build_with_chromium) { deps = [ "../../../api:array_view", "../../../api:libjingle_peerconnection_api", - "../../../api:peer_connection_quality_test_fixture_api", "../../../api:rtp_parameters", + "../../../api/test/pclf:media_configuration", "../../../media:rtc_media_base", "../../../p2p:rtc_p2p", "../../../pc:sdp_utils", @@ -1047,12 +571,4 @@ if 
(!build_with_chromium) { "//third_party/abseil-cpp/absl/types:optional", ] } - - rtc_library("multi_reader_queue") { - visibility = [ "*" ] - testonly = true - sources = [ "analyzer/video/multi_reader_queue.h" ] - deps = [ "../../../rtc_base:checks" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } } diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc index 4a0061ed13..98d0c533c2 100644 --- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc @@ -13,8 +13,10 @@ #include "api/stats/rtc_stats.h" #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/metric.h" +#include "api/test/track_id_stream_info_map.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "test/pc/e2e/metric_metadata_keys.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -62,11 +64,12 @@ void DefaultAudioQualityAnalyzer::OnStatsReports( sample.jitter_buffer_emitted_count = stat->jitter_buffer_emitted_count.ValueOrDefault(0ul); - const std::string stream_label = std::string( - analyzer_helper_->GetStreamLabelFromTrackId(*stat->track_identifier)); + TrackIdStreamInfoMap::StreamInfo stream_info = + analyzer_helper_->GetStreamInfoFromTrackId(*stat->track_identifier); MutexLock lock(&lock_); - StatsSample prev_sample = last_stats_sample_[stream_label]; + stream_info_.emplace(stream_info.stream_label, stream_info); + StatsSample prev_sample = last_stats_sample_[stream_info.stream_label]; RTC_CHECK_GE(sample.total_samples_received, prev_sample.total_samples_received); double total_samples_diff = static_cast( @@ -75,7 +78,8 @@ void DefaultAudioQualityAnalyzer::OnStatsReports( return; } - AudioStreamStats& audio_stream_stats = streams_stats_[stream_label]; + AudioStreamStats& audio_stream_stats = + streams_stats_[stream_info.stream_label]; audio_stream_stats.expand_rate.AddSample( 
(sample.concealed_samples - prev_sample.concealed_samples) / total_samples_diff); @@ -113,7 +117,7 @@ void DefaultAudioQualityAnalyzer::OnStatsReports( jitter_buffer_emitted_count_diff); } - last_stats_sample_[stream_label] = sample; + last_stats_sample_[stream_info.stream_label] = sample; } } @@ -125,27 +129,39 @@ std::string DefaultAudioQualityAnalyzer::GetTestCaseName( void DefaultAudioQualityAnalyzer::Stop() { MutexLock lock(&lock_); for (auto& item : streams_stats_) { + const TrackIdStreamInfoMap::StreamInfo& stream_info = + stream_info_[item.first]; + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + std::map metric_metadata{ + {MetricMetadataKey::kAudioStreamMetadataKey, item.first}, + {MetricMetadataKey::kPeerMetadataKey, stream_info.receiver_peer}, + {MetricMetadataKey::kReceiverMetadataKey, stream_info.receiver_peer}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; + metrics_logger_->LogMetric("expand_rate", GetTestCaseName(item.first), item.second.expand_rate, Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter); + ImprovementDirection::kSmallerIsBetter, + metric_metadata); metrics_logger_->LogMetric("accelerate_rate", GetTestCaseName(item.first), item.second.accelerate_rate, Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter); + ImprovementDirection::kSmallerIsBetter, + metric_metadata); metrics_logger_->LogMetric("preemptive_rate", GetTestCaseName(item.first), item.second.preemptive_rate, Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter); - metrics_logger_->LogMetric("speech_expand_rate", - GetTestCaseName(item.first), - item.second.speech_expand_rate, Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter); + ImprovementDirection::kSmallerIsBetter, + metric_metadata); + metrics_logger_->LogMetric( + "speech_expand_rate", GetTestCaseName(item.first), + item.second.speech_expand_rate, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, metric_metadata); 
metrics_logger_->LogMetric( "average_jitter_buffer_delay_ms", GetTestCaseName(item.first), item.second.average_jitter_buffer_delay_ms, Unit::kMilliseconds, - ImprovementDirection::kNeitherIsBetter); + ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogMetric( "preferred_buffer_size_ms", GetTestCaseName(item.first), item.second.preferred_buffer_size_ms, Unit::kMilliseconds, - ImprovementDirection::kNeitherIsBetter); + ImprovementDirection::kNeitherIsBetter, metric_metadata); } } diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h index f4084468d7..9e427afed8 100644 --- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h +++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h @@ -70,6 +70,8 @@ class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface { mutable Mutex lock_; std::map streams_stats_ RTC_GUARDED_BY(lock_); + std::map stream_info_ + RTC_GUARDED_BY(lock_); std::map last_stats_sample_ RTC_GUARDED_BY(lock_); }; diff --git a/test/pc/e2e/analyzer/video/BUILD.gn b/test/pc/e2e/analyzer/video/BUILD.gn new file mode 100644 index 0000000000..cbb4c078f3 --- /dev/null +++ b/test/pc/e2e/analyzer/video/BUILD.gn @@ -0,0 +1,573 @@ +# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../../../webrtc.gni") + +if (!build_with_chromium) { + group("video_analyzer") { + testonly = true + + deps = [ + ":analyzing_video_sinks_helper", + ":default_video_quality_analyzer_internal", + ":encoded_image_data_injector_api", + ":example_video_quality_analyzer", + ":multi_reader_queue", + ":quality_analyzing_video_decoder", + ":quality_analyzing_video_encoder", + ":simulcast_dummy_buffer_helper", + ":single_process_encoded_image_data_injector", + ":video_dumping", + ":video_frame_tracking_id_injector", + ":video_quality_metrics_reporter", + ] + if (rtc_include_tests) { + deps += [ + ":analyzing_video_sink", + ":video_quality_analyzer_injection_helper", + ] + } + } + + if (rtc_include_tests) { + group("video_analyzer_unittests") { + testonly = true + + deps = [ + ":analyzing_video_sink_test", + ":analyzing_video_sinks_helper_test", + ":default_video_quality_analyzer_frames_comparator_test", + ":default_video_quality_analyzer_metric_names_test", + ":default_video_quality_analyzer_stream_state_test", + ":default_video_quality_analyzer_test", + ":multi_reader_queue_test", + ":names_collection_test", + ":simulcast_dummy_buffer_helper_test", + ":single_process_encoded_image_data_injector_unittest", + ":video_dumping_test", + ":video_frame_tracking_id_injector_unittest", + ] + } + } +} + +rtc_library("video_dumping") { + testonly = true + sources = [ + "video_dumping.cc", + "video_dumping.h", + ] + deps = [ + "../../../..:video_test_support", + "../../../../../api/test/video:video_frame_writer", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:logging", + "../../../../../system_wrappers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("encoded_image_data_injector_api") { + testonly = true + sources = [ "encoded_image_data_injector.h" ] + + deps = [ "../../../../../api/video:encoded_image" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + 
+rtc_library("single_process_encoded_image_data_injector") { + testonly = true + sources = [ + "single_process_encoded_image_data_injector.cc", + "single_process_encoded_image_data_injector.h", + ] + + deps = [ + ":encoded_image_data_injector_api", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:checks", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +} + +rtc_library("video_frame_tracking_id_injector") { + testonly = true + sources = [ + "video_frame_tracking_id_injector.cc", + "video_frame_tracking_id_injector.h", + ] + + deps = [ + ":encoded_image_data_injector_api", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:checks", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +} + +rtc_library("simulcast_dummy_buffer_helper") { + testonly = true + sources = [ + "simulcast_dummy_buffer_helper.cc", + "simulcast_dummy_buffer_helper.h", + ] + deps = [ "../../../../../api/video:video_frame" ] +} + +rtc_library("quality_analyzing_video_decoder") { + testonly = true + sources = [ + "quality_analyzing_video_decoder.cc", + "quality_analyzing_video_decoder.h", + ] + deps = [ + ":encoded_image_data_injector_api", + ":simulcast_dummy_buffer_helper", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../modules/video_coding:video_codec_interface", + "../../../../../rtc_base:logging", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("quality_analyzing_video_encoder") { + testonly = true + sources = [ + "quality_analyzing_video_encoder.cc", + "quality_analyzing_video_encoder.h", + ] + deps = [ + ":encoded_image_data_injector_api", + 
"../../../../../api:video_quality_analyzer_api", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../modules/video_coding:video_codec_interface", + "../../../../../modules/video_coding/svc:scalability_mode_util", + "../../../../../rtc_base:logging", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("analyzing_video_sinks_helper") { + testonly = true + sources = [ + "analyzing_video_sinks_helper.cc", + "analyzing_video_sinks_helper.h", + ] + deps = [ + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/test/video:video_frame_writer", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("example_video_quality_analyzer") { + testonly = true + sources = [ + "example_video_quality_analyzer.cc", + "example_video_quality_analyzer.h", + ] + + deps = [ + "../../../../../api:array_view", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:logging", + "../../../../../rtc_base/synchronization:mutex", + ] +} + +# This target contains implementation details of DefaultVideoQualityAnalyzer, +# so headers exported by it shouldn't be used in other places. 
+rtc_library("default_video_quality_analyzer_internal") { + visibility = [ + ":default_video_quality_analyzer", + ":default_video_quality_analyzer_frames_comparator_test", + ":default_video_quality_analyzer_stream_state_test", + ":names_collection_test", + ":video_analyzer", + ] + + testonly = true + sources = [ + "default_video_quality_analyzer_cpu_measurer.cc", + "default_video_quality_analyzer_cpu_measurer.h", + "default_video_quality_analyzer_frame_in_flight.cc", + "default_video_quality_analyzer_frame_in_flight.h", + "default_video_quality_analyzer_frames_comparator.cc", + "default_video_quality_analyzer_frames_comparator.h", + "default_video_quality_analyzer_internal_shared_objects.cc", + "default_video_quality_analyzer_internal_shared_objects.h", + "default_video_quality_analyzer_stream_state.cc", + "default_video_quality_analyzer_stream_state.h", + "names_collection.cc", + "names_collection.h", + ] + + deps = [ + ":default_video_quality_analyzer_shared", + ":multi_reader_queue", + "../..:metric_metadata_keys", + "../../../../../api:array_view", + "../../../../../api:scoped_refptr", + "../../../../../api/numerics", + "../../../../../api/units:data_size", + "../../../../../api/units:timestamp", + "../../../../../api/video:video_frame", + "../../../../../api/video:video_frame_type", + "../../../../../common_video", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:platform_thread", + "../../../../../rtc_base:rtc_base_tests_utils", + "../../../../../rtc_base:rtc_event", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base:timeutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("multi_reader_queue") { + testonly = true + sources = [ "multi_reader_queue.h" ] + deps = [ "../../../../../rtc_base:checks" 
] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("video_quality_metrics_reporter") { + testonly = true + sources = [ + "video_quality_metrics_reporter.cc", + "video_quality_metrics_reporter.h", + ] + deps = [ + "../..:metric_metadata_keys", + "../../../../../api:peer_connection_quality_test_fixture_api", + "../../../../../api:rtc_stats_api", + "../../../../../api:track_id_stream_info_map", + "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/units:data_rate", + "../../../../../api/units:data_size", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../rtc_base:checks", + "../../../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("default_video_quality_analyzer") { + testonly = true + sources = [ + "default_video_quality_analyzer.cc", + "default_video_quality_analyzer.h", + ] + + deps = [ + ":default_video_quality_analyzer_internal", + ":default_video_quality_analyzer_shared", + "../..:metric_metadata_keys", + "../../../../../api:array_view", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/units:data_size", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("default_video_quality_analyzer_shared") { + testonly = true + sources = [ + 
"default_video_quality_analyzer_shared_objects.cc", + "default_video_quality_analyzer_shared_objects.h", + ] + + deps = [ + "../../../../../api/numerics", + "../../../../../api/units:timestamp", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:stringutils", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("analyzing_video_sink") { + testonly = true + sources = [ + "analyzing_video_sink.cc", + "analyzing_video_sink.h", + ] + deps = [ + ":analyzing_video_sinks_helper", + ":simulcast_dummy_buffer_helper", + ":video_dumping", + "../../../..:fixed_fps_video_frame_writer_adapter", + "../../../..:test_renderer", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/numerics", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/test/video:video_frame_writer", + "../../../../../api/units:timestamp", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory:memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("video_quality_analyzer_injection_helper") { + testonly = true + sources = [ + "video_quality_analyzer_injection_helper.cc", + "video_quality_analyzer_injection_helper.h", + ] + deps = [ + ":analyzing_video_sink", + ":analyzing_video_sinks_helper", + ":encoded_image_data_injector_api", + ":quality_analyzing_video_decoder", + ":quality_analyzing_video_encoder", + ":simulcast_dummy_buffer_helper", + ":video_dumping", + "../../../..:fixed_fps_video_frame_writer_adapter", + "../../../..:test_renderer", + "../../../..:video_test_common", + "../../../..:video_test_support", + "../../../../../api:array_view", + "../../../../../api:stats_observer_interface", + 
"../../../../../api:video_quality_analyzer_api", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + ] +} + +if (rtc_include_tests) { + rtc_library("simulcast_dummy_buffer_helper_test") { + testonly = true + sources = [ "simulcast_dummy_buffer_helper_test.cc" ] + deps = [ + ":simulcast_dummy_buffer_helper", + "../../../..:test_support", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:random", + ] + } + + rtc_library("analyzing_video_sink_test") { + testonly = true + sources = [ "analyzing_video_sink_test.cc" ] + deps = [ + ":analyzing_video_sink", + ":example_video_quality_analyzer", + "../../../..:fileutils", + "../../../..:test_support", + "../../../..:video_test_support", + "../../../../../api:create_frame_generator", + "../../../../../api:frame_generator_api", + "../../../../../api:scoped_refptr", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../api/video:video_frame", + "../../../../../common_video", + "../../../../../rtc_base:timeutils", + "../../../../../system_wrappers", + "../../../../time_controller", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + + rtc_library("analyzing_video_sinks_helper_test") { + testonly = true + sources = [ "analyzing_video_sinks_helper_test.cc" ] + deps = [ + ":analyzing_video_sinks_helper", + "../../../..:test_support", + "../../../../../api/test/pclf:media_configuration", + ] + absl_deps = [ 
"//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("default_video_quality_analyzer_frames_comparator_test") { + testonly = true + sources = [ "default_video_quality_analyzer_frames_comparator_test.cc" ] + deps = [ + ":default_video_quality_analyzer_internal", + ":default_video_quality_analyzer_shared", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api/units:timestamp", + "../../../../../rtc_base:stringutils", + "../../../../../system_wrappers", + ] + } + + rtc_library("names_collection_test") { + testonly = true + sources = [ "names_collection_test.cc" ] + deps = [ + ":default_video_quality_analyzer_internal", + "../../../..:test_support", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + + rtc_library("multi_reader_queue_test") { + testonly = true + sources = [ "multi_reader_queue_test.cc" ] + deps = [ + ":multi_reader_queue", + "../../../..:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("default_video_quality_analyzer_stream_state_test") { + testonly = true + sources = [ "default_video_quality_analyzer_stream_state_test.cc" ] + deps = [ + ":default_video_quality_analyzer_internal", + "../../../..:test_support", + "../../../../../api/units:timestamp", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("default_video_quality_analyzer_test") { + testonly = true + sources = [ "default_video_quality_analyzer_test.cc" ] + deps = [ + ":default_video_quality_analyzer", + ":default_video_quality_analyzer_shared", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api:rtp_packet_info", + "../../../../../api/test/metrics:global_metrics_logger_and_exporter", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../common_video", + 
"../../../../../rtc_base:stringutils", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + ] + } + + rtc_library("default_video_quality_analyzer_metric_names_test") { + testonly = true + sources = [ "default_video_quality_analyzer_metric_names_test.cc" ] + deps = [ + ":default_video_quality_analyzer", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api:rtp_packet_info", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/test/metrics:stdout_metrics_exporter", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../common_video", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + ] + } + + rtc_library("video_dumping_test") { + testonly = true + sources = [ "video_dumping_test.cc" ] + deps = [ + ":video_dumping", + "../../../..:fileutils", + "../../../..:test_support", + "../../../..:video_test_support", + "../../../../../api:scoped_refptr", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:random", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_library("single_process_encoded_image_data_injector_unittest") { + testonly = true + sources = [ "single_process_encoded_image_data_injector_unittest.cc" ] + deps = [ + ":single_process_encoded_image_data_injector", + "../../../..:test_support", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:buffer", + ] + } + + rtc_library("video_frame_tracking_id_injector_unittest") { + testonly = true + sources = [ "video_frame_tracking_id_injector_unittest.cc" ] + deps = [ + ":video_frame_tracking_id_injector", + "../../../..:test_support", + "../../../../../api/video:encoded_image", + "../../../../../rtc_base:buffer", + ] + } +} diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sink.cc 
b/test/pc/e2e/analyzer/video/analyzing_video_sink.cc new file mode 100644 index 0000000000..fb221e6797 --- /dev/null +++ b/test/pc/e2e/analyzer/video/analyzing_video_sink.cc @@ -0,0 +1,220 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h" + +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/video/video_frame_writer.h" +#include "api/units/timestamp.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" +#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h" +#include "test/pc/e2e/analyzer/video/video_dumping.h" +#include "test/testsupport/fixed_fps_video_frame_writer_adapter.h" +#include "test/video_renderer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +AnalyzingVideoSink::AnalyzingVideoSink(absl::string_view peer_name, + Clock* clock, + VideoQualityAnalyzerInterface& analyzer, + AnalyzingVideoSinksHelper& sinks_helper, + const VideoSubscription& subscription, + bool report_infra_stats) + : peer_name_(peer_name), + report_infra_stats_(report_infra_stats), + clock_(clock), + analyzer_(&analyzer), + sinks_helper_(&sinks_helper), + subscription_(subscription) {} + +void AnalyzingVideoSink::UpdateSubscription( + const VideoSubscription& subscription) { + // For peers with changed resolutions we need to close current writers and + // open new ones. 
This is done by removing existing sinks, which will force + // creation of the new sinks when next frame will be received. + std::set writers_to_close; + { + MutexLock lock(&mutex_); + subscription_ = subscription; + for (auto it = stream_sinks_.cbegin(); it != stream_sinks_.cend();) { + absl::optional new_requested_resolution = + subscription_.GetResolutionForPeer(it->second.sender_peer_name); + if (!new_requested_resolution.has_value() || + (*new_requested_resolution != it->second.resolution)) { + RTC_LOG(LS_INFO) << peer_name_ << ": Subscribed resolution for stream " + << it->first << " from " << it->second.sender_peer_name + << " was updated from " + << it->second.resolution.ToString() << " to " + << new_requested_resolution->ToString() + << ". Repopulating all video sinks and recreating " + << "requested video writers"; + writers_to_close.insert(it->second.video_frame_writer); + it = stream_sinks_.erase(it); + } else { + ++it; + } + } + } + sinks_helper_->CloseAndRemoveVideoWriters(writers_to_close); +} + +void AnalyzingVideoSink::OnFrame(const VideoFrame& frame) { + if (IsDummyFrame(frame)) { + // This is dummy frame, so we don't need to process it further. + return; + } + + if (frame.id() == VideoFrame::kNotSetId) { + // If frame ID is unknown we can't get required render resolution, so pass + // to the analyzer in the actual resolution of the frame. 
+ AnalyzeFrame(frame); + } else { + std::string stream_label = analyzer_->GetStreamLabel(frame.id()); + MutexLock lock(&mutex_); + Timestamp processing_started = clock_->CurrentTime(); + SinksDescriptor* sinks_descriptor = PopulateSinks(stream_label); + RTC_CHECK(sinks_descriptor != nullptr); + + VideoFrame scaled_frame = + ScaleVideoFrame(frame, sinks_descriptor->resolution); + AnalyzeFrame(scaled_frame); + for (auto& sink : sinks_descriptor->sinks) { + sink->OnFrame(scaled_frame); + } + Timestamp processing_finished = clock_->CurrentTime(); + + if (report_infra_stats_) { + stats_.analyzing_sink_processing_time_ms.AddSample( + (processing_finished - processing_started).ms()); + } + } +} + +AnalyzingVideoSink::Stats AnalyzingVideoSink::stats() const { + MutexLock lock(&mutex_); + return stats_; +} + +VideoFrame AnalyzingVideoSink::ScaleVideoFrame( + const VideoFrame& frame, + const VideoResolution& required_resolution) { + Timestamp processing_started = clock_->CurrentTime(); + if (required_resolution.width() == static_cast(frame.width()) && + required_resolution.height() == static_cast(frame.height())) { + if (report_infra_stats_) { + stats_.scaling_tims_ms.AddSample( + (clock_->CurrentTime() - processing_started).ms()); + } + return frame; + } + + // We allow some difference in the aspect ration because when decoder + // downscales video stream it may round up some dimensions to make them even, + // ex: 960x540 -> 480x270 -> 240x136 instead of 240x135. 
+ RTC_CHECK_LE(std::abs(static_cast(required_resolution.width()) / + required_resolution.height() - + static_cast(frame.width()) / frame.height()), + 0.1) + << peer_name_ + << ": Received frame has too different aspect ratio compared to " + << "requested video resolution: required resolution=" + << required_resolution.ToString() + << "; actual resolution=" << frame.width() << "x" << frame.height(); + + rtc::scoped_refptr scaled_buffer(I420Buffer::Create( + required_resolution.width(), required_resolution.height())); + scaled_buffer->ScaleFrom(*frame.video_frame_buffer()->ToI420()); + + VideoFrame scaled_frame = frame; + scaled_frame.set_video_frame_buffer(scaled_buffer); + if (report_infra_stats_) { + stats_.scaling_tims_ms.AddSample( + (clock_->CurrentTime() - processing_started).ms()); + } + return scaled_frame; +} + +void AnalyzingVideoSink::AnalyzeFrame(const VideoFrame& frame) { + VideoFrame frame_copy = frame; + frame_copy.set_video_frame_buffer( + I420Buffer::Copy(*frame.video_frame_buffer()->ToI420())); + analyzer_->OnFrameRendered(peer_name_, frame_copy); +} + +AnalyzingVideoSink::SinksDescriptor* AnalyzingVideoSink::PopulateSinks( + absl::string_view stream_label) { + // Fast pass: sinks already exists. 
+ auto sinks_it = stream_sinks_.find(std::string(stream_label)); + if (sinks_it != stream_sinks_.end()) { + return &sinks_it->second; + } + + // Slow pass: we need to create and save sinks + absl::optional> peer_and_config = + sinks_helper_->GetPeerAndConfig(stream_label); + RTC_CHECK(peer_and_config.has_value()) + << "No video config for stream " << stream_label; + const std::string& sender_peer_name = peer_and_config->first; + const VideoConfig& config = peer_and_config->second; + + absl::optional resolution = + subscription_.GetResolutionForPeer(sender_peer_name); + if (!resolution.has_value()) { + RTC_LOG(LS_ERROR) << peer_name_ << " received stream " << stream_label + << " from " << sender_peer_name + << " for which they were not subscribed"; + resolution = config.GetResolution(); + } + if (!resolution->IsRegular()) { + RTC_LOG(LS_ERROR) << peer_name_ << " received stream " << stream_label + << " from " << sender_peer_name + << " for which resolution wasn't resolved"; + resolution = config.GetResolution(); + } + + RTC_CHECK(resolution.has_value()); + + SinksDescriptor sinks_descriptor(sender_peer_name, *resolution); + if (config.output_dump_options.has_value()) { + std::unique_ptr writer = + config.output_dump_options->CreateOutputDumpVideoFrameWriter( + stream_label, peer_name_, *resolution); + if (config.output_dump_use_fixed_framerate) { + writer = std::make_unique( + resolution->fps(), clock_, std::move(writer)); + } + sinks_descriptor.sinks.push_back(std::make_unique( + writer.get(), config.output_dump_options->sampling_modulo())); + sinks_descriptor.video_frame_writer = + sinks_helper_->AddVideoWriter(std::move(writer)); + } + if (config.show_on_screen) { + sinks_descriptor.sinks.push_back( + absl::WrapUnique(test::VideoRenderer::Create( + (*config.stream_label + "-render").c_str(), resolution->width(), + resolution->height()))); + } + return &stream_sinks_.emplace(stream_label, std::move(sinks_descriptor)) + .first->second; +} + +} // namespace 
webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sink.h b/test/pc/e2e/analyzer/video/analyzing_video_sink.h new file mode 100644 index 0000000000..1834bbe469 --- /dev/null +++ b/test/pc/e2e/analyzer/video/analyzing_video_sink.h @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINK_H_ +#define TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINK_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/video/video_frame_writer.h" +#include "api/test/video_quality_analyzer_interface.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" +#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// A sink to inject video quality analyzer as a sink into WebRTC. +class AnalyzingVideoSink : public rtc::VideoSinkInterface { + public: + struct Stats { + // Time required to scale video frame to the requested rendered resolution. + // Collected only for frames with ID set and iff `report_infra_stats` is + // true. + SamplesStatsCounter scaling_tims_ms; + // Time required to process single video frame. Collected only for frames + // with ID set and iff `report_infra_stats` is true. 
+ SamplesStatsCounter analyzing_sink_processing_time_ms; + }; + + AnalyzingVideoSink(absl::string_view peer_name, + Clock* clock, + VideoQualityAnalyzerInterface& analyzer, + AnalyzingVideoSinksHelper& sinks_helper, + const VideoSubscription& subscription, + bool report_infra_stats); + + // Updates subscription used by this peer to render received video. + void UpdateSubscription(const VideoSubscription& subscription); + + void OnFrame(const VideoFrame& frame) override; + + Stats stats() const; + + private: + struct SinksDescriptor { + SinksDescriptor(absl::string_view sender_peer_name, + const VideoResolution& resolution) + : sender_peer_name(sender_peer_name), resolution(resolution) {} + + // Required to be able to resolve resolutions on new subscription and + // understand if we need to recreate `video_frame_writer` and `sinks`. + std::string sender_peer_name; + // Resolution which was used to create `video_frame_writer` and `sinks`. + VideoResolution resolution; + + // Is set if dumping of output video was requested; + test::VideoFrameWriter* video_frame_writer = nullptr; + std::vector>> sinks; + }; + + // Scales video frame to `required_resolution` if necessary. Crashes if video + // frame and `required_resolution` have different aspect ratio. + VideoFrame ScaleVideoFrame(const VideoFrame& frame, + const VideoResolution& required_resolution) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // Creates full copy of the frame to free any frame owned internal buffers + // and passes created copy to analyzer. Uses `I420Buffer` to represent + // frame content. + void AnalyzeFrame(const VideoFrame& frame); + // Populates sink for specified stream and caches them in `stream_sinks_`. 
+ SinksDescriptor* PopulateSinks(absl::string_view stream_label) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + const std::string peer_name_; + const bool report_infra_stats_; + Clock* const clock_; + VideoQualityAnalyzerInterface* const analyzer_; + AnalyzingVideoSinksHelper* const sinks_helper_; + + mutable Mutex mutex_; + VideoSubscription subscription_ RTC_GUARDED_BY(mutex_); + std::map stream_sinks_ RTC_GUARDED_BY(mutex_); + Stats stats_ RTC_GUARDED_BY(mutex_); +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINK_H_ diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc b/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc new file mode 100644 index 0000000000..6cd89551ea --- /dev/null +++ b/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc @@ -0,0 +1,598 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h" + +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/test/create_frame_generator.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/pclf/media_configuration.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/pc/e2e/analyzer/video/example_video_quality_analyzer.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" +#include "test/time_controller/simulated_time_controller.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using ::testing::ElementsAreArray; +using ::testing::Eq; +using ::testing::Ge; +using ::testing::Test; + +// Remove files and directories in a directory non-recursively. 
+void CleanDir(absl::string_view dir, size_t expected_output_files_count) { + absl::optional> dir_content = + test::ReadDirectory(dir); + if (expected_output_files_count == 0) { + ASSERT_TRUE(!dir_content.has_value() || dir_content->empty()) + << "Empty directory is expected"; + } else { + ASSERT_TRUE(dir_content.has_value()) << "Test directory is empty!"; + EXPECT_EQ(dir_content->size(), expected_output_files_count); + for (const auto& entry : *dir_content) { + if (test::DirExists(entry)) { + EXPECT_TRUE(test::RemoveDir(entry)) + << "Failed to remove sub directory: " << entry; + } else if (test::FileExists(entry)) { + EXPECT_TRUE(test::RemoveFile(entry)) + << "Failed to remove file: " << entry; + } else { + FAIL() << "Can't remove unknown file type: " << entry; + } + } + } + EXPECT_TRUE(test::RemoveDir(dir)) << "Failed to remove directory: " << dir; +} + +VideoFrame CreateFrame(test::FrameGeneratorInterface& frame_generator) { + test::FrameGeneratorInterface::VideoFrameData frame_data = + frame_generator.NextFrame(); + return VideoFrame::Builder() + .set_video_frame_buffer(frame_data.buffer) + .set_update_rect(frame_data.update_rect) + .build(); +} + +std::unique_ptr CreateFrameGenerator( + size_t width, + size_t height) { + return test::CreateSquareFrameGenerator(width, height, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); +} + +void AssertFrameIdsAre(const std::string& filename, + std::vector expected_ids) { + FILE* file = fopen(filename.c_str(), "r"); + ASSERT_TRUE(file != nullptr) << "Failed to open frame ids file: " << filename; + std::vector actual_ids; + char buffer[8]; + while (fgets(buffer, sizeof buffer, file) != nullptr) { + std::string current_id(buffer); + EXPECT_GE(current_id.size(), 2lu) + << "Found invalid frame id: [" << current_id << "]"; + if (current_id.size() < 2) { + continue; + } + // Trim "\n" at the end. 
+ actual_ids.push_back(current_id.substr(0, current_id.size() - 1)); + } + fclose(file); + EXPECT_THAT(actual_ids, ElementsAreArray(expected_ids)); +} + +class AnalyzingVideoSinkTest : public Test { + protected: + ~AnalyzingVideoSinkTest() override = default; + + void SetUp() override { + // Create an empty temporary directory for this test. + test_directory_ = test::JoinFilename( + test::OutputPath(), + "TestDir_AnalyzingVideoSinkTest_" + + std::string( + testing::UnitTest::GetInstance()->current_test_info()->name())); + test::CreateDir(test_directory_); + } + + void TearDown() override { + CleanDir(test_directory_, expected_output_files_count_); + } + + void ExpectOutputFilesCount(size_t count) { + expected_output_files_count_ = count; + } + + std::string test_directory_; + size_t expected_output_files_count_ = 0; +}; + +TEST_F(AnalyzingVideoSinkTest, VideoFramesAreDumpedCorrectly) { + VideoSubscription subscription; + subscription.SubscribeToPeer( + "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30)); + VideoConfig video_config("alice_video", /*width=*/1280, /*height=*/720, + /*fps=*/30); + video_config.output_dump_options = VideoDumpOptions(test_directory_); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/1280, /*height=*/720); + VideoFrame frame = CreateFrame(*frame_generator); + frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame)); + + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. 
+ AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription, /*report_infra_stats=*/false); + sink.OnFrame(frame); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); + + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame.video_frame_buffer()->ToI420(); + double psnr = I420PSNR(*expected_frame, *actual_frame); + double ssim = I420SSIM(*expected_frame, *actual_frame); + // Actual should be downscaled version of expected. + EXPECT_GT(ssim, 0.98); + EXPECT_GT(psnr, 38); + + ExpectOutputFilesCount(1); +} + +TEST_F(AnalyzingVideoSinkTest, + FallbackOnConfigResolutionIfNoSubscriptionProvided) { + VideoSubscription subscription; + VideoConfig video_config("alice_video", /*width=*/320, /*height=*/240, + /*fps=*/30); + video_config.output_dump_options = VideoDumpOptions(test_directory_); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/320, /*height=*/240); + VideoFrame frame = CreateFrame(*frame_generator); + frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame)); + + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. 
+ AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription, /*report_infra_stats=*/false); + sink.OnFrame(frame); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); + + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame.video_frame_buffer()->ToI420(); + double psnr = I420PSNR(*expected_frame, *actual_frame); + double ssim = I420SSIM(*expected_frame, *actual_frame); + // Frames should be equal. + EXPECT_DOUBLE_EQ(ssim, 1.00); + EXPECT_DOUBLE_EQ(psnr, 48); + + ExpectOutputFilesCount(1); +} + +TEST_F(AnalyzingVideoSinkTest, + FallbackOnConfigResolutionIfNoSubscriptionIsNotResolved) { + VideoSubscription subscription; + subscription.SubscribeToAllPeers( + VideoResolution(VideoResolution::Spec::kMaxFromSender)); + VideoConfig video_config("alice_video", /*width=*/320, /*height=*/240, + /*fps=*/30); + video_config.output_dump_options = VideoDumpOptions(test_directory_); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/320, /*height=*/240); + VideoFrame frame = CreateFrame(*frame_generator); + frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame)); + + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. 
+ AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription, /*report_infra_stats=*/false); + sink.OnFrame(frame); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); + + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame.video_frame_buffer()->ToI420(); + double psnr = I420PSNR(*expected_frame, *actual_frame); + double ssim = I420SSIM(*expected_frame, *actual_frame); + // Frames should be equal. + EXPECT_DOUBLE_EQ(ssim, 1.00); + EXPECT_DOUBLE_EQ(psnr, 48); + + ExpectOutputFilesCount(1); +} + +TEST_F(AnalyzingVideoSinkTest, + VideoFramesAreDumpedCorrectlyWhenSubscriptionChanged) { + VideoSubscription subscription_before; + subscription_before.SubscribeToPeer( + "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30)); + VideoSubscription subscription_after; + subscription_after.SubscribeToPeer( + "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30)); + VideoConfig video_config("alice_video", /*width=*/1280, /*height=*/720, + /*fps=*/30); + video_config.output_dump_options = VideoDumpOptions(test_directory_); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/1280, /*height=*/720); + VideoFrame frame_before = CreateFrame(*frame_generator); + frame_before.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_before)); + VideoFrame frame_after = CreateFrame(*frame_generator); + frame_after.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_after)); + + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. 
+ AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription_before, /*report_infra_stats=*/false); + sink.OnFrame(frame_before); + + sink.UpdateSubscription(subscription_after); + sink.OnFrame(frame_after); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(2))); + + { + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_1280x720_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame_before.video_frame_buffer()->ToI420(); + double psnr = I420PSNR(*expected_frame, *actual_frame); + double ssim = I420SSIM(*expected_frame, *actual_frame); + // Frames should be equal. + EXPECT_DOUBLE_EQ(ssim, 1.00); + EXPECT_DOUBLE_EQ(psnr, 48); + } + { + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame_after.video_frame_buffer()->ToI420(); + double psnr = I420PSNR(*expected_frame, *actual_frame); + double ssim = I420SSIM(*expected_frame, *actual_frame); + // Actual should be downscaled version of expected. 
+ EXPECT_GT(ssim, 0.98); + EXPECT_GT(psnr, 38); + } + + ExpectOutputFilesCount(2); +} + +TEST_F(AnalyzingVideoSinkTest, + VideoFramesAreDumpedCorrectlyWhenSubscriptionChangedOnTheSameOne) { + VideoSubscription subscription_before; + subscription_before.SubscribeToPeer( + "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30)); + VideoSubscription subscription_after; + subscription_after.SubscribeToPeer( + "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30)); + VideoConfig video_config("alice_video", /*width=*/640, /*height=*/360, + /*fps=*/30); + video_config.output_dump_options = VideoDumpOptions(test_directory_); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/640, /*height=*/360); + VideoFrame frame_before = CreateFrame(*frame_generator); + frame_before.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_before)); + VideoFrame frame_after = CreateFrame(*frame_generator); + frame_after.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_after)); + + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription_before, /*report_infra_stats=*/false); + sink.OnFrame(frame_before); + + sink.UpdateSubscription(subscription_after); + sink.OnFrame(frame_after); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(2))); + + { + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); + // Read the first frame. + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame_before.video_frame_buffer()->ToI420(); + // Frames should be equal. 
+ EXPECT_DOUBLE_EQ(I420SSIM(*expected_frame, *actual_frame), 1.00); + EXPECT_DOUBLE_EQ(I420PSNR(*expected_frame, *actual_frame), 48); + // Read the second frame. + actual_frame = frame_reader->PullFrame(); + expected_frame = frame_after.video_frame_buffer()->ToI420(); + // Frames should be equal. + EXPECT_DOUBLE_EQ(I420SSIM(*expected_frame, *actual_frame), 1.00); + EXPECT_DOUBLE_EQ(I420PSNR(*expected_frame, *actual_frame), 48); + } + + ExpectOutputFilesCount(1); +} + +TEST_F(AnalyzingVideoSinkTest, SmallDiviationsInAspectRationAreAllowed) { + VideoSubscription subscription; + subscription.SubscribeToPeer( + "alice", VideoResolution(/*width=*/480, /*height=*/270, /*fps=*/30)); + VideoConfig video_config("alice_video", /*width=*/480, /*height=*/270, + /*fps=*/30); + video_config.output_dump_options = VideoDumpOptions(test_directory_); + + ExampleVideoQualityAnalyzer analyzer; + // Generator produces downscaled frames with a bit different aspect ration. + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/240, /*height=*/136); + VideoFrame frame = CreateFrame(*frame_generator); + frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame)); + + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription, /*report_infra_stats=*/false); + sink.OnFrame(frame); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(1))); + + { + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_480x270_30.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(1)); + // Read the first frame. + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame.video_frame_buffer()->ToI420(); + // Actual frame is upscaled version of the expected. 
But because rendered + // resolution is equal to the actual frame size we need to upscale expected + // during comparison and then they have to be the same. + EXPECT_DOUBLE_EQ(I420SSIM(*actual_frame, *expected_frame), 1); + EXPECT_DOUBLE_EQ(I420PSNR(*actual_frame, *expected_frame), 48); + } + + ExpectOutputFilesCount(1); +} + +TEST_F(AnalyzingVideoSinkTest, VideoFramesIdsAreDumpedWhenRequested) { + VideoSubscription subscription; + subscription.SubscribeToPeer( + "alice", VideoResolution(/*width=*/320, /*height=*/240, /*fps=*/30)); + VideoConfig video_config("alice_video", /*width=*/320, /*height=*/240, + /*fps=*/30); + video_config.output_dump_options = + VideoDumpOptions(test_directory_, /*export_frame_ids=*/true); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/320, /*height=*/240); + + std::vector expected_frame_ids; + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription, /*report_infra_stats=*/false); + for (int i = 0; i < 10; ++i) { + VideoFrame frame = CreateFrame(*frame_generator); + frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame)); + expected_frame_ids.push_back(std::to_string(frame.id())); + sink.OnFrame(frame); + } + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(10))); + + AssertFrameIdsAre( + test::JoinFilename(test_directory_, + "alice_video_bob_320x240_30.frame_ids.txt"), + expected_frame_ids); + + ExpectOutputFilesCount(2); +} + +TEST_F(AnalyzingVideoSinkTest, + VideoFramesAndIdsAreDumpedWithFixedFpsWhenRequested) { + GlobalSimulatedTimeController simulated_time(Timestamp::Seconds(100000)); + + VideoSubscription subscription; + subscription.SubscribeToPeer( + "alice", VideoResolution(/*width=*/320, /*height=*/240, /*fps=*/10)); + VideoConfig 
video_config("alice_video", /*width=*/320, /*height=*/240, + /*fps=*/10); + video_config.output_dump_options = + VideoDumpOptions(test_directory_, /*export_frame_ids=*/true); + video_config.output_dump_use_fixed_framerate = true; + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/320, /*height=*/240); + + VideoFrame frame1 = CreateFrame(*frame_generator); + frame1.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame1)); + VideoFrame frame2 = CreateFrame(*frame_generator); + frame2.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame2)); + + { + // `helper` and `sink` has to be destroyed so all frames will be written + // to the disk. + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", simulated_time.GetClock(), analyzer, helper, + subscription, /*report_infra_stats=*/false); + sink.OnFrame(frame1); + // Advance almost 1 second, so the first frame has to be repeated 9 time + // more. + simulated_time.AdvanceTime(TimeDelta::Millis(990)); + sink.OnFrame(frame2); + simulated_time.AdvanceTime(TimeDelta::Millis(100)); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast(2))); + + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x240_10.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(11)); + for (int i = 0; i < 10; ++i) { + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame1.video_frame_buffer()->ToI420(); + double psnr = I420PSNR(*expected_frame, *actual_frame); + double ssim = I420SSIM(*expected_frame, *actual_frame); + // Frames should be equal. 
+ EXPECT_DOUBLE_EQ(ssim, 1.00); + EXPECT_DOUBLE_EQ(psnr, 48); + } + rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); + rtc::scoped_refptr expected_frame = + frame2.video_frame_buffer()->ToI420(); + double psnr = I420PSNR(*expected_frame, *actual_frame); + double ssim = I420SSIM(*expected_frame, *actual_frame); + // Frames should be equal. + EXPECT_DOUBLE_EQ(ssim, 1.00); + EXPECT_DOUBLE_EQ(psnr, 48); + + AssertFrameIdsAre( + test::JoinFilename(test_directory_, + "alice_video_bob_320x240_10.frame_ids.txt"), + {std::to_string(frame1.id()), std::to_string(frame1.id()), + std::to_string(frame1.id()), std::to_string(frame1.id()), + std::to_string(frame1.id()), std::to_string(frame1.id()), + std::to_string(frame1.id()), std::to_string(frame1.id()), + std::to_string(frame1.id()), std::to_string(frame1.id()), + std::to_string(frame2.id())}); + + ExpectOutputFilesCount(2); +} + +TEST_F(AnalyzingVideoSinkTest, InfraMetricsCollectedWhenRequested) { + VideoSubscription subscription; + subscription.SubscribeToPeer( + "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30)); + VideoConfig video_config("alice_video", /*width=*/640, /*height=*/360, + /*fps=*/30); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/640, /*height=*/360); + VideoFrame frame = CreateFrame(*frame_generator); + frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame)); + + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription, /*report_infra_stats=*/true); + sink.OnFrame(frame); + + AnalyzingVideoSink::Stats stats = sink.stats(); + EXPECT_THAT(stats.scaling_tims_ms.NumSamples(), Eq(1)); + EXPECT_THAT(stats.scaling_tims_ms.GetAverage(), Ge(0)); + EXPECT_THAT(stats.analyzing_sink_processing_time_ms.NumSamples(), Eq(1)); + EXPECT_THAT(stats.analyzing_sink_processing_time_ms.GetAverage(), + 
Ge(stats.scaling_tims_ms.GetAverage())); + + ExpectOutputFilesCount(0); +} + +TEST_F(AnalyzingVideoSinkTest, InfraMetricsNotCollectedWhenNotRequested) { + VideoSubscription subscription; + subscription.SubscribeToPeer( + "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30)); + VideoConfig video_config("alice_video", /*width=*/640, /*height=*/360, + /*fps=*/30); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/640, /*height=*/360); + VideoFrame frame = CreateFrame(*frame_generator); + frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame)); + + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription, /*report_infra_stats=*/false); + sink.OnFrame(frame); + + AnalyzingVideoSink::Stats stats = sink.stats(); + EXPECT_THAT(stats.scaling_tims_ms.NumSamples(), Eq(0)); + EXPECT_THAT(stats.analyzing_sink_processing_time_ms.NumSamples(), Eq(0)); + + ExpectOutputFilesCount(0); +} + +} // namespace +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc new file mode 100644 index 0000000000..70dc4b00b5 --- /dev/null +++ b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/video/video_frame_writer.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +void AnalyzingVideoSinksHelper::AddConfig(absl::string_view sender_peer_name, + VideoConfig config) { + MutexLock lock(&mutex_); + auto it = video_configs_.find(*config.stream_label); + if (it == video_configs_.end()) { + std::string stream_label = *config.stream_label; + video_configs_.emplace( + std::move(stream_label), + std::pair{std::string(sender_peer_name), std::move(config)}); + } else { + it->second = std::pair{std::string(sender_peer_name), std::move(config)}; + } +} + +absl::optional> +AnalyzingVideoSinksHelper::GetPeerAndConfig(absl::string_view stream_label) { + MutexLock lock(&mutex_); + auto it = video_configs_.find(std::string(stream_label)); + if (it == video_configs_.end()) { + return absl::nullopt; + } + return it->second; +} + +void AnalyzingVideoSinksHelper::RemoveConfig(absl::string_view stream_label) { + MutexLock lock(&mutex_); + video_configs_.erase(std::string(stream_label)); +} + +test::VideoFrameWriter* AnalyzingVideoSinksHelper::AddVideoWriter( + std::unique_ptr video_writer) { + MutexLock lock(&mutex_); + test::VideoFrameWriter* out = video_writer.get(); + video_writers_.push_back(std::move(video_writer)); + return out; +} + +void AnalyzingVideoSinksHelper::CloseAndRemoveVideoWriters( + std::set writers_to_close) { + MutexLock lock(&mutex_); + for (auto it = video_writers_.cbegin(); it != video_writers_.cend();) { + if (writers_to_close.find(it->get()) != writers_to_close.end()) { + (*it)->Close(); + it = video_writers_.erase(it); + } else { + ++it; + } + } +} + +void AnalyzingVideoSinksHelper::Clear() { + MutexLock lock(&mutex_); + video_configs_.clear(); + for (const auto& video_writer : 
video_writers_) { + video_writer->Close(); + } + video_writers_.clear(); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h new file mode 100644 index 0000000000..5f38c5a40e --- /dev/null +++ b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINKS_HELPER_H_ +#define TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINKS_HELPER_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/video/video_frame_writer.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// Registry of known video configs and video writers. +// This class is thread safe. +class AnalyzingVideoSinksHelper { + public: + // Adds config in the registry. If config with such stream label was + // registered before, the new value will override the old one. + void AddConfig(absl::string_view sender_peer_name, VideoConfig config); + absl::optional> GetPeerAndConfig( + absl::string_view stream_label); + // Removes video config for specified stream label. If there are no know video + // config for such stream label - does nothing. + void RemoveConfig(absl::string_view stream_label); + + // Takes ownership of the provided video writer. 
All video writers owned by + // this class will be closed during `AnalyzingVideoSinksHelper` destruction + // and guaranteed to be alive either until explicitly removed by + // `CloseAndRemoveVideoWriters` or until `AnalyzingVideoSinksHelper` is + // destroyed. + // + // Returns pointer to the added writer. Ownership is maintained by + // `AnalyzingVideoSinksHelper`. + test::VideoFrameWriter* AddVideoWriter( + std::unique_ptr video_writer); + // For each provided `writers_to_close`, if it is known, will close and + // destroy it, otherwise does nothing with it. + void CloseAndRemoveVideoWriters( + std::set writers_to_close); + + // Removes all added configs and close and removes all added writers. + void Clear(); + + private: + Mutex mutex_; + std::map> video_configs_ + RTC_GUARDED_BY(mutex_); + std::list> video_writers_ + RTC_GUARDED_BY(mutex_); +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINKS_HELPER_H_ diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc new file mode 100644 index 0000000000..1a820a5229 --- /dev/null +++ b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h" + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/test/pclf/media_configuration.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using ::testing::Eq; + +// Asserts equality of the main fields of the video config. We don't compare +// the full config due to the lack of equality definition for a lot of subtypes. +void AssertConfigsAreEquals(const VideoConfig& actual, + const VideoConfig& expected) { + EXPECT_THAT(actual.stream_label, Eq(expected.stream_label)); + EXPECT_THAT(actual.width, Eq(expected.width)); + EXPECT_THAT(actual.height, Eq(expected.height)); + EXPECT_THAT(actual.fps, Eq(expected.fps)); +} + +TEST(AnalyzingVideoSinksHelperTest, ConfigsCanBeAdded) { + VideoConfig config("alice_video", /*width=*/1280, /*height=*/720, /*fps=*/30); + + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", config); + + absl::optional> registred_config = + helper.GetPeerAndConfig("alice_video"); + ASSERT_TRUE(registred_config.has_value()); + EXPECT_THAT(registred_config->first, Eq("alice")); + AssertConfigsAreEquals(registred_config->second, config); +} + +TEST(AnalyzingVideoSinksHelperTest, AddingForExistingLabelWillOverwriteValue) { + VideoConfig config_before("alice_video", /*width=*/1280, /*height=*/720, + /*fps=*/30); + VideoConfig config_after("alice_video", /*width=*/640, /*height=*/360, + /*fps=*/15); + + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", config_before); + + absl::optional> registred_config = + helper.GetPeerAndConfig("alice_video"); + ASSERT_TRUE(registred_config.has_value()); + EXPECT_THAT(registred_config->first, Eq("alice")); + AssertConfigsAreEquals(registred_config->second, config_before); + + helper.AddConfig("alice", config_after); + + registred_config = helper.GetPeerAndConfig("alice_video"); + ASSERT_TRUE(registred_config.has_value()); + 
EXPECT_THAT(registred_config->first, Eq("alice")); + AssertConfigsAreEquals(registred_config->second, config_after); +} + +TEST(AnalyzingVideoSinksHelperTest, ConfigsCanBeRemoved) { + VideoConfig config("alice_video", /*width=*/1280, /*height=*/720, /*fps=*/30); + + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", config); + + ASSERT_TRUE(helper.GetPeerAndConfig("alice_video").has_value()); + + helper.RemoveConfig("alice_video"); + ASSERT_FALSE(helper.GetPeerAndConfig("alice_video").has_value()); +} + +TEST(AnalyzingVideoSinksHelperTest, RemoveOfNonExistingConfigDontCrash) { + AnalyzingVideoSinksHelper helper; + helper.RemoveConfig("alice_video"); +} + +TEST(AnalyzingVideoSinksHelperTest, ClearRemovesAllConfigs) { + VideoConfig config1("alice_video", /*width=*/640, /*height=*/360, /*fps=*/30); + VideoConfig config2("bob_video", /*width=*/640, /*height=*/360, /*fps=*/30); + + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", config1); + helper.AddConfig("bob", config2); + + ASSERT_TRUE(helper.GetPeerAndConfig("alice_video").has_value()); + ASSERT_TRUE(helper.GetPeerAndConfig("bob_video").has_value()); + + helper.Clear(); + ASSERT_FALSE(helper.GetPeerAndConfig("alice_video").has_value()); + ASSERT_FALSE(helper.GetPeerAndConfig("bob_video").has_value()); +} + +struct TestVideoFrameWriterFactory { + int closed_writers_count = 0; + int deleted_writers_count = 0; + + std::unique_ptr CreateWriter() { + return std::make_unique(this); + } + + private: + class TestVideoFrameWriter : public test::VideoFrameWriter { + public: + explicit TestVideoFrameWriter(TestVideoFrameWriterFactory* factory) + : factory_(factory) {} + ~TestVideoFrameWriter() override { factory_->deleted_writers_count++; } + + bool WriteFrame(const VideoFrame& frame) override { return true; } + + void Close() override { factory_->closed_writers_count++; } + + private: + TestVideoFrameWriterFactory* factory_; + }; +}; + +TEST(AnalyzingVideoSinksHelperTest, 
RemovingWritersCloseAndDestroyAllOfThem) { + TestVideoFrameWriterFactory factory; + + AnalyzingVideoSinksHelper helper; + test::VideoFrameWriter* writer1 = + helper.AddVideoWriter(factory.CreateWriter()); + test::VideoFrameWriter* writer2 = + helper.AddVideoWriter(factory.CreateWriter()); + + helper.CloseAndRemoveVideoWriters({writer1, writer2}); + + EXPECT_THAT(factory.closed_writers_count, Eq(2)); + EXPECT_THAT(factory.deleted_writers_count, Eq(2)); +} + +TEST(AnalyzingVideoSinksHelperTest, ClearCloseAndDestroyAllWriters) { + TestVideoFrameWriterFactory factory; + + AnalyzingVideoSinksHelper helper; + helper.AddVideoWriter(factory.CreateWriter()); + helper.AddVideoWriter(factory.CreateWriter()); + + helper.Clear(); + + EXPECT_THAT(factory.closed_writers_count, Eq(2)); + EXPECT_THAT(factory.deleted_writers_count, Eq(2)); +} + +} // namespace +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc index 2cb4409e9d..15caa87ad4 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc @@ -22,15 +22,11 @@ #include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metric.h" #include "api/units/time_delta.h" -#include "api/video/i420_buffer.h" +#include "api/units/timestamp.h" #include "api/video/video_frame.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/platform_thread.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" -#include "rtc_tools/frame_analyzer/video_geometry_aligner.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h" #include 
"test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h" @@ -269,6 +265,10 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured( RTC_DCHECK(is_removed) << "Invalid stream state: alive frame is removed already"; } + if (options_.report_infra_metrics) { + analyzer_stats_.on_frame_captured_processing_time_ms.AddSample( + (Now() - captured_time).ms()); + } } return frame_id; } @@ -276,6 +276,7 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured( void DefaultVideoQualityAnalyzer::OnFramePreEncode( absl::string_view peer_name, const webrtc::VideoFrame& frame) { + Timestamp processing_started = Now(); MutexLock lock(&mutex_); RTC_CHECK_EQ(state_, State::kActive) << "DefaultVideoQualityAnalyzer has to be started before use"; @@ -293,6 +294,11 @@ void DefaultVideoQualityAnalyzer::OnFramePreEncode( } } frame_in_flight.SetPreEncodeTime(Now()); + + if (options_.report_infra_metrics) { + analyzer_stats_.on_frame_pre_encode_processing_time_ms.AddSample( + (Now() - processing_started).ms()); + } } void DefaultVideoQualityAnalyzer::OnFrameEncoded( @@ -303,6 +309,8 @@ void DefaultVideoQualityAnalyzer::OnFrameEncoded( bool discarded) { if (discarded) return; + + Timestamp processing_started = Now(); MutexLock lock(&mutex_); RTC_CHECK_EQ(state_, State::kActive) << "DefaultVideoQualityAnalyzer has to be started before use"; @@ -339,9 +347,14 @@ void DefaultVideoQualityAnalyzer::OnFrameEncoded( used_encoder.last_frame_id = frame_id; used_encoder.switched_on_at = now; used_encoder.switched_from_at = now; - frame_in_flight.OnFrameEncoded(now, encoded_image._frameType, - DataSize::Bytes(encoded_image.size()), - stats.target_encode_bitrate, used_encoder); + frame_in_flight.OnFrameEncoded( + now, encoded_image._frameType, DataSize::Bytes(encoded_image.size()), + stats.target_encode_bitrate, stats.qp, used_encoder); + + if (options_.report_infra_metrics) { + analyzer_stats_.on_frame_encoded_processing_time_ms.AddSample( + (Now() - 
processing_started).ms()); + } } void DefaultVideoQualityAnalyzer::OnFrameDropped( @@ -354,6 +367,7 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode( absl::string_view peer_name, uint16_t frame_id, const webrtc::EncodedImage& input_image) { + Timestamp processing_started = Now(); MutexLock lock(&mutex_); RTC_CHECK_EQ(state_, State::kActive) << "DefaultVideoQualityAnalyzer has to be started before use"; @@ -395,12 +409,18 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode( /*decode_start_time=*/Now(), input_image._frameType, DataSize::Bytes(input_image.size())); + + if (options_.report_infra_metrics) { + analyzer_stats_.on_frame_pre_decode_processing_time_ms.AddSample( + (Now() - processing_started).ms()); + } } void DefaultVideoQualityAnalyzer::OnFrameDecoded( absl::string_view peer_name, const webrtc::VideoFrame& frame, const DecoderStats& stats) { + Timestamp processing_started = Now(); MutexLock lock(&mutex_); RTC_CHECK_EQ(state_, State::kActive) << "DefaultVideoQualityAnalyzer has to be started before use"; @@ -434,12 +454,19 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded( used_decoder.last_frame_id = frame.id(); used_decoder.switched_on_at = now; used_decoder.switched_from_at = now; - it->second.OnFrameDecoded(peer_index, now, used_decoder); + it->second.OnFrameDecoded(peer_index, now, frame.width(), frame.height(), + used_decoder); + + if (options_.report_infra_metrics) { + analyzer_stats_.on_frame_decoded_processing_time_ms.AddSample( + (Now() - processing_started).ms()); + } } void DefaultVideoQualityAnalyzer::OnFrameRendered( absl::string_view peer_name, const webrtc::VideoFrame& frame) { + Timestamp processing_started = Now(); MutexLock lock(&mutex_); RTC_CHECK_EQ(state_, State::kActive) << "DefaultVideoQualityAnalyzer has to be started before use"; @@ -490,8 +517,7 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered( stream_frame_counters_.at(stats_key).rendered++; // Update current frame stats. 
- frame_in_flight->OnFrameRendered(peer_index, Now(), frame.width(), - frame.height()); + frame_in_flight->OnFrameRendered(peer_index, Now()); // After we received frame here we need to check if there are any dropped // frames between this one and last one, that was rendered for this video @@ -543,6 +569,11 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered( if (frame_it->second.HaveAllPeersReceived()) { captured_frames_in_flight_.erase(frame_it); } + + if (options_.report_infra_metrics) { + analyzer_stats_.on_frame_rendered_processing_time_ms.AddSample( + (Now() - processing_started).ms()); + } } void DefaultVideoQualityAnalyzer::OnEncoderError( @@ -560,6 +591,7 @@ void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name, RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id << ", code=" << error_code; + Timestamp processing_started = Now(); MutexLock lock(&mutex_); RTC_CHECK_EQ(state_, State::kActive) << "DefaultVideoQualityAnalyzer has to be started before use"; @@ -594,6 +626,11 @@ void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name, used_decoder.switched_on_at = now; used_decoder.switched_from_at = now; it->second.OnDecoderError(peer_index, used_decoder); + + if (options_.report_infra_metrics) { + analyzer_stats_.on_decoder_error_processing_time_ms.AddSample( + (Now() - processing_started).ms()); + } } void DefaultVideoQualityAnalyzer::RegisterParticipantInCall( @@ -757,7 +794,7 @@ void DefaultVideoQualityAnalyzer::Stop() { FramesComparatorStats frames_comparator_stats = frames_comparator_.frames_comparator_stats(); analyzer_stats_.comparisons_queue_size = - frames_comparator_stats.comparisons_queue_size; + std::move(frames_comparator_stats.comparisons_queue_size); analyzer_stats_.comparisons_done = frames_comparator_stats.comparisons_done; analyzer_stats_.cpu_overloaded_comparisons_done = frames_comparator_stats.cpu_overloaded_comparisons_done; @@ -899,9 +936,11 @@ void 
DefaultVideoQualityAnalyzer::ReportResults() { ReportResults(item.first, item.second, stream_frame_counters_.at(item.first)); } - metrics_logger_->LogSingleValueMetric("cpu_usage_%", test_label_, - GetCpuUsagePercent(), Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + metrics_logger_->LogSingleValueMetric( + "cpu_usage_%", test_label_, GetCpuUsagePercent(), Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, + {{MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}}); LogFrameCounters("Global", frame_counters_); if (!unknown_sender_frame_counters_.empty()) { RTC_LOG(LS_INFO) << "Received frame counters with unknown frame id:"; @@ -932,6 +971,59 @@ void DefaultVideoQualityAnalyzer::ReportResults() { << analyzer_stats_.cpu_overloaded_comparisons_done; RTC_LOG(LS_INFO) << "memory_overloaded_comparisons_done=" << analyzer_stats_.memory_overloaded_comparisons_done; + if (options_.report_infra_metrics) { + metrics_logger_->LogMetric("comparisons_queue_size", test_label_, + analyzer_stats_.comparisons_queue_size, + Unit::kCount, + ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogMetric("frames_in_flight_left_count", test_label_, + analyzer_stats_.frames_in_flight_left_count, + Unit::kCount, + ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogSingleValueMetric( + "comparisons_done", test_label_, analyzer_stats_.comparisons_done, + Unit::kCount, ImprovementDirection::kNeitherIsBetter); + metrics_logger_->LogSingleValueMetric( + "cpu_overloaded_comparisons_done", test_label_, + analyzer_stats_.cpu_overloaded_comparisons_done, Unit::kCount, + ImprovementDirection::kNeitherIsBetter); + metrics_logger_->LogSingleValueMetric( + "memory_overloaded_comparisons_done", test_label_, + analyzer_stats_.memory_overloaded_comparisons_done, Unit::kCount, + ImprovementDirection::kNeitherIsBetter); + metrics_logger_->LogSingleValueMetric( + "test_duration", 
test_label_, (Now() - start_time_).ms(), + Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter); + + metrics_logger_->LogMetric( + "on_frame_captured_processing_time_ms", test_label_, + analyzer_stats_.on_frame_captured_processing_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogMetric( + "on_frame_pre_encode_processing_time_ms", test_label_, + analyzer_stats_.on_frame_pre_encode_processing_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogMetric( + "on_frame_encoded_processing_time_ms", test_label_, + analyzer_stats_.on_frame_encoded_processing_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogMetric( + "on_frame_pre_decode_processing_time_ms", test_label_, + analyzer_stats_.on_frame_pre_decode_processing_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogMetric( + "on_frame_decoded_processing_time_ms", test_label_, + analyzer_stats_.on_frame_decoded_processing_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogMetric( + "on_frame_rendered_processing_time_ms", test_label_, + analyzer_stats_.on_frame_rendered_processing_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + metrics_logger_->LogMetric( + "on_decoder_error_processing_time_ms", test_label_, + analyzer_stats_.on_decoder_error_processing_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + } } void DefaultVideoQualityAnalyzer::ReportResults( @@ -940,10 +1032,13 @@ void DefaultVideoQualityAnalyzer::ReportResults( const FrameCounters& frame_counters) { TimeDelta test_duration = Now() - start_time_; std::string test_case_name = GetTestCaseName(ToMetricName(key)); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
std::map metric_metadata{ {MetricMetadataKey::kPeerMetadataKey, peers_->name(key.sender)}, - {MetricMetadataKey::kStreamMetadataKey, streams_.name(key.stream)}, - {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)}}; + {MetricMetadataKey::kVideoStreamMetadataKey, streams_.name(key.stream)}, + {MetricMetadataKey::kSenderMetadataKey, peers_->name(key.sender)}, + {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}}; double sum_squared_interframe_delays_secs = 0; Timestamp video_start_time = Timestamp::PlusInfinity(); @@ -1010,7 +1105,7 @@ void DefaultVideoQualityAnalyzer::ReportResults( ImprovementDirection::kSmallerIsBetter, metric_metadata); metrics_logger_->LogMetric( - "pixels_per_frame", test_case_name, stats.resolution_of_rendered_frame, + "pixels_per_frame", test_case_name, stats.resolution_of_decoded_frame, Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "min_psnr_dB", test_case_name, @@ -1041,6 +1136,9 @@ void DefaultVideoQualityAnalyzer::ReportResults( "target_encode_bitrate", test_case_name, stats.target_encode_bitrate / 1000, Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter, metric_metadata); + metrics_logger_->LogMetric("qp", test_case_name, stats.qp, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, + metric_metadata); metrics_logger_->LogSingleValueMetric( "actual_encode_bitrate", test_case_name, static_cast(stats.total_encoded_images_payload) / @@ -1049,6 +1147,12 @@ void DefaultVideoQualityAnalyzer::ReportResults( metric_metadata); if (options_.report_detailed_frame_stats) { + metrics_logger_->LogSingleValueMetric( + "capture_frame_rate", test_case_name, + stats.capture_frame_rate.IsEmpty() + ? 
0 + : stats.capture_frame_rate.GetEventsPerSecond(), + Unit::kHertz, ImprovementDirection::kBiggerIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "num_encoded_frames", test_case_name, frame_counters.encoded, Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h index bc3edf7dd2..b67e5a0147 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h @@ -21,16 +21,12 @@ #include #include "api/array_view.h" -#include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metrics_logger.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" -#include "api/video/video_frame_type.h" -#include "rtc_base/event.h" -#include "rtc_base/platform_thread.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc index 968c6c6555..24f829e089 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc @@ -88,11 +88,14 @@ void FrameInFlight::OnFrameEncoded(webrtc::Timestamp time, VideoFrameType frame_type, DataSize encoded_image_size, uint32_t target_encode_bitrate, + int qp, StreamCodecInfo used_encoder) { encoded_time_ = time; frame_type_ = frame_type; encoded_image_size_ = encoded_image_size; target_encode_bitrate_ += target_encode_bitrate; + qp_values_.AddSample(SamplesStatsCounter::StatsSample{ + .value = static_cast(qp), .time = 
time}); // Update used encoder info. If simulcast/SVC is used, this method can // be called multiple times, in such case we should preserve the value // of `used_encoder_.switched_on_at` from the first invocation as the @@ -129,9 +132,13 @@ bool FrameInFlight::HasReceivedTime(size_t peer) const { void FrameInFlight::OnFrameDecoded(size_t peer, webrtc::Timestamp time, + int width, + int height, const StreamCodecInfo& used_decoder) { receiver_stats_[peer].decode_end_time = time; receiver_stats_[peer].used_decoder = used_decoder; + receiver_stats_[peer].decoded_frame_width = width; + receiver_stats_[peer].decoded_frame_height = height; } void FrameInFlight::OnDecoderError(size_t peer, @@ -148,13 +155,8 @@ bool FrameInFlight::HasDecodeEndTime(size_t peer) const { return it->second.decode_end_time.IsFinite(); } -void FrameInFlight::OnFrameRendered(size_t peer, - webrtc::Timestamp time, - int width, - int height) { +void FrameInFlight::OnFrameRendered(size_t peer, webrtc::Timestamp time) { receiver_stats_[peer].rendered_time = time; - receiver_stats_[peer].rendered_frame_width = width; - receiver_stats_[peer].rendered_frame_height = height; } bool FrameInFlight::HasRenderedTime(size_t peer) const { @@ -183,6 +185,7 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const { stats.encoded_frame_type = frame_type_; stats.encoded_image_size = encoded_image_size_; stats.used_encoder = used_encoder_; + stats.qp_values = qp_values_; absl::optional receiver_stats = MaybeGetValue(receiver_stats_, peer); @@ -192,8 +195,8 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const { stats.decode_end_time = receiver_stats->decode_end_time; stats.rendered_time = receiver_stats->rendered_time; stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time; - stats.rendered_frame_width = receiver_stats->rendered_frame_width; - stats.rendered_frame_height = receiver_stats->rendered_frame_height; + stats.decoded_frame_width = receiver_stats->decoded_frame_width; + 
stats.decoded_frame_height = receiver_stats->decoded_frame_height; stats.used_decoder = receiver_stats->used_decoder; stats.pre_decoded_frame_type = receiver_stats->frame_type; stats.pre_decoded_image_size = receiver_stats->encoded_image_size; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h index 92c031a442..7ee910effe 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h @@ -17,6 +17,7 @@ #include #include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" #include "api/video/video_frame.h" @@ -37,8 +38,8 @@ struct ReceiverFrameStats { VideoFrameType frame_type = VideoFrameType::kEmptyFrame; DataSize encoded_image_size = DataSize::Bytes(0); - absl::optional rendered_frame_width = absl::nullopt; - absl::optional rendered_frame_height = absl::nullopt; + absl::optional decoded_frame_width = absl::nullopt; + absl::optional decoded_frame_height = absl::nullopt; // Can be not set if frame was dropped in the network. 
absl::optional used_decoder = absl::nullopt; @@ -87,6 +88,7 @@ class FrameInFlight { VideoFrameType frame_type, DataSize encoded_image_size, uint32_t target_encode_bitrate, + int qp, StreamCodecInfo used_encoder); bool HasEncodedTime() const { return encoded_time_.IsFinite(); } @@ -101,15 +103,14 @@ class FrameInFlight { void OnFrameDecoded(size_t peer, webrtc::Timestamp time, + int width, + int height, const StreamCodecInfo& used_decoder); void OnDecoderError(size_t peer, const StreamCodecInfo& used_decoder); bool HasDecodeEndTime(size_t peer) const; - void OnFrameRendered(size_t peer, - webrtc::Timestamp time, - int width, - int height); + void OnFrameRendered(size_t peer, webrtc::Timestamp time); bool HasRenderedTime(size_t peer) const; @@ -153,6 +154,7 @@ class FrameInFlight { VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame; DataSize encoded_image_size_ = DataSize::Bytes(0); uint32_t target_encode_bitrate_ = 0; + SamplesStatsCounter qp_values_; // Can be not set if frame was dropped by encoder. absl::optional used_encoder_ = absl::nullopt; // Map from the receiver peer's index to frame stats for that peer. 
diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc index 739ab3d895..77418b7e5d 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc @@ -80,10 +80,10 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { << "Regular comparison has to have finite decode_end_time"; RTC_DCHECK(comparison.frame_stats.rendered_time.IsFinite()) << "Regular comparison has to have finite rendered_time"; - RTC_DCHECK(comparison.frame_stats.rendered_frame_width.has_value()) - << "Regular comparison has to have rendered_frame_width"; - RTC_DCHECK(comparison.frame_stats.rendered_frame_height.has_value()) - << "Regular comparison has to have rendered_frame_height"; + RTC_DCHECK(comparison.frame_stats.decoded_frame_width.has_value()) + << "Regular comparison has to have decoded_frame_width"; + RTC_DCHECK(comparison.frame_stats.decoded_frame_height.has_value()) + << "Regular comparison has to have decoded_frame_height"; RTC_DCHECK(comparison.frame_stats.used_encoder.has_value()) << "Regular comparison has to have used_encoder"; RTC_DCHECK(comparison.frame_stats.used_decoder.has_value()) @@ -119,23 +119,16 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { RTC_DCHECK(comparison.frame_stats.used_decoder.has_value()) << "Dropped frame comparison has to have used_decoder when " << "decode_end_time is set or decoder_failed is true"; - } else { - RTC_DCHECK(!comparison.frame_stats.received_time.IsFinite()) - << "Dropped frame comparison can't have received_time when " - << "decode_end_time is not set and there were no decoder failures"; - RTC_DCHECK(!comparison.frame_stats.decode_start_time.IsFinite()) - << "Dropped frame comparison can't have decode_start_time when " - << "decode_end_time is not set and there were no 
decoder failures"; - RTC_DCHECK(!comparison.frame_stats.used_decoder.has_value()) - << "Dropped frame comparison can't have used_decoder when " - << "decode_end_time is not set and there were no decoder failures"; + } else if (comparison.frame_stats.decode_end_time.IsFinite()) { + RTC_DCHECK(comparison.frame_stats.decoded_frame_width.has_value()) + << "Dropped frame comparison has to have decoded_frame_width when " + << "decode_end_time is set"; + RTC_DCHECK(comparison.frame_stats.decoded_frame_height.has_value()) + << "Dropped frame comparison has to have decoded_frame_height when " + << "decode_end_time is set"; } RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite()) << "Dropped frame comparison can't have rendered_time"; - RTC_DCHECK(!comparison.frame_stats.rendered_frame_width.has_value()) - << "Dropped frame comparison can't have rendered_frame_width"; - RTC_DCHECK(!comparison.frame_stats.rendered_frame_height.has_value()) - << "Dropped frame comparison can't have rendered_frame_height"; break; case FrameComparisonType::kFrameInFlight: // Frame in flight comparison may miss almost any FrameStats, but if @@ -147,10 +140,6 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { << "Frame in flight comparison can't have rendered frame"; RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite()) << "Frame in flight comparison can't have rendered_time"; - RTC_DCHECK(!comparison.frame_stats.rendered_frame_width.has_value()) - << "Frame in flight comparison can't have rendered_frame_width"; - RTC_DCHECK(!comparison.frame_stats.rendered_frame_height.has_value()) - << "Frame in flight comparison can't have rendered_frame_height"; if (comparison.frame_stats.decode_end_time.IsFinite() || comparison.frame_stats.decoder_failed) { @@ -162,6 +151,14 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { << "decode_start_time when decode_end_time is finite or " << "decoder_failed is true."; } + if 
(comparison.frame_stats.decode_end_time.IsFinite()) { + RTC_DCHECK(comparison.frame_stats.decoded_frame_width.has_value()) + << "Frame in flight comparison has to have decoded_frame_width " + << "when decode_end_time is set."; + RTC_DCHECK(comparison.frame_stats.decoded_frame_height.has_value()) + << "Frame in flight comparison has to have decoded_frame_height " + << "when decode_end_time is set."; + } if (comparison.frame_stats.decode_start_time.IsFinite()) { RTC_DCHECK(comparison.frame_stats.received_time.IsFinite()) << "Frame in flight comparison has to have finite received_time " @@ -434,14 +431,14 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( stats->ssim.AddSample( StatsSample(ssim, frame_stats.received_time, metadata)); } + stats->capture_frame_rate.AddEvent(frame_stats.captured_time); // Compute dropped phase for dropped frame if (comparison.type == FrameComparisonType::kDroppedFrame) { FrameDropPhase dropped_phase; if (frame_stats.decode_end_time.IsFinite()) { dropped_phase = FrameDropPhase::kAfterDecoder; - } else if (frame_stats.decode_start_time.IsFinite() && - frame_stats.decoder_failed) { + } else if (frame_stats.decode_start_time.IsFinite()) { dropped_phase = FrameDropPhase::kByDecoder; } else if (frame_stats.encoded_time.IsFinite()) { dropped_phase = FrameDropPhase::kTransport; @@ -462,6 +459,11 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( frame_stats.encoded_image_size.bytes(); stats->target_encode_bitrate.AddSample(StatsSample( frame_stats.target_encode_bitrate, frame_stats.encoded_time, metadata)); + for (SamplesStatsCounter::StatsSample qp : + frame_stats.qp_values.GetTimedSamples()) { + qp.metadata = metadata; + stats->qp.AddSample(std::move(qp)); + } // Stats sliced on encoded frame type. 
if (frame_stats.encoded_frame_type == VideoFrameType::kVideoFrameKey) { @@ -472,10 +474,6 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( if (comparison.type != FrameComparisonType::kDroppedFrame || comparison.frame_stats.decoder_failed) { if (frame_stats.rendered_time.IsFinite()) { - stats->resolution_of_rendered_frame.AddSample( - StatsSample(*comparison.frame_stats.rendered_frame_width * - *comparison.frame_stats.rendered_frame_height, - frame_stats.rendered_time, metadata)); stats->total_delay_incl_transport_ms.AddSample( StatsSample(frame_stats.rendered_time - frame_stats.captured_time, frame_stats.received_time, metadata)); @@ -506,6 +504,10 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( stats->decode_time_ms.AddSample(StatsSample( frame_stats.decode_end_time - frame_stats.decode_start_time, frame_stats.decode_end_time, metadata)); + stats->resolution_of_decoded_frame.AddSample( + StatsSample(*comparison.frame_stats.decoded_frame_width * + *comparison.frame_stats.decoded_frame_height, + frame_stats.decode_end_time, metadata)); } if (frame_stats.prev_frame_rendered_time.IsFinite() && diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc index 3f9d1a4b5b..2cfb0c3d79 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc @@ -86,8 +86,8 @@ FrameStats FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame( frame_stats.used_encoder = Vp8CodecForOneFrame(1, frame_stats.encoded_time); frame_stats.used_decoder = Vp8CodecForOneFrame(1, frame_stats.decode_end_time); - frame_stats.rendered_frame_width = 10; - frame_stats.rendered_frame_height = 10; + frame_stats.decoded_frame_width = 10; + frame_stats.decoded_frame_height = 10; return frame_stats; } @@ -102,12 +102,22 @@ 
FrameStats ShiftStatsOn(const FrameStats& stats, TimeDelta delta) { frame_stats.used_encoder = stats.used_encoder; frame_stats.used_decoder = stats.used_decoder; - frame_stats.rendered_frame_width = stats.rendered_frame_width; - frame_stats.rendered_frame_height = stats.rendered_frame_height; + frame_stats.decoded_frame_width = stats.decoded_frame_width; + frame_stats.decoded_frame_height = stats.decoded_frame_height; return frame_stats; } +SamplesStatsCounter StatsCounter( + const std::vector>& samples) { + SamplesStatsCounter counter; + for (const std::pair& sample : samples) { + counter.AddSample(SamplesStatsCounter::StatsSample{.value = sample.first, + .time = sample.second}); + } + return counter; +} + double GetFirstOrDie(const SamplesStatsCounter& counter) { EXPECT_FALSE(counter.IsEmpty()) << "Counter has to be not empty"; return counter.GetSamples()[0]; @@ -174,7 +184,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.at(stats_key).receive_to_render_time_ms), 30.0); EXPECT_DOUBLE_EQ( - GetFirstOrDie(stats.at(stats_key).resolution_of_rendered_frame), 100.0); + GetFirstOrDie(stats.at(stats_key).resolution_of_decoded_frame), 100.0); } TEST( @@ -274,13 +284,13 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.captured_time + TimeDelta::Millis(50); frame_stats.used_decoder = Vp8CodecForOneFrame(1, frame_stats.decode_end_time); + frame_stats.decoded_frame_width = 10; + frame_stats.decoded_frame_height = 10; stats.push_back(frame_stats); // 6th stat frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15)); frame_stats.frame_id = 6; frame_stats.rendered_time = frame_stats.captured_time + TimeDelta::Millis(60); - frame_stats.rendered_frame_width = 10; - frame_stats.rendered_frame_height = 10; stats.push_back(frame_stats); comparator.Start(/*max_threads_count=*/1); @@ -323,9 +333,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, << ToString(result_stats.receive_to_render_time_ms); 
EXPECT_EQ(result_stats.receive_to_render_time_ms.NumSamples(), 1); - EXPECT_DOUBLE_EQ(result_stats.resolution_of_rendered_frame.GetAverage(), 100) - << ToString(result_stats.resolution_of_rendered_frame); - EXPECT_EQ(result_stats.resolution_of_rendered_frame.NumSamples(), 1); + EXPECT_DOUBLE_EQ(result_stats.resolution_of_decoded_frame.GetAverage(), 100) + << ToString(result_stats.resolution_of_decoded_frame); + EXPECT_EQ(result_stats.resolution_of_decoded_frame.NumSamples(), 2); EXPECT_DOUBLE_EQ(result_stats.encode_frame_rate.GetEventsPerSecond(), 4.0 / 45 * 1000) @@ -375,8 +385,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -434,8 +445,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -476,6 +488,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); comparator.Start(/*max_threads_count=*/1); 
comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -501,8 +515,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -544,6 +559,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -569,8 +586,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -612,6 +630,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, 
Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -642,8 +662,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -685,6 +706,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -692,6 +715,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); // Frame decoded frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50); + frame_stats.decoded_frame_width = 200; + frame_stats.decoded_frame_height = 100; + frame_stats.used_decoder = Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); @@ -719,8 +745,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + EXPECT_GE(GetFirstOrDie(stats.resolution_of_decoded_frame), 200 * 100.0); 
EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -763,6 +790,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -797,8 +826,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -859,8 +889,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -918,8 +949,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, 
expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); expectEmpty(stats.target_encode_bitrate); + expectEmpty(stats.qp); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -960,6 +992,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -985,8 +1019,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1028,6 +1063,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -1053,8 +1090,9 @@ 
TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1071,8 +1109,78 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.decoders, IsEmpty()); } -// TODO(titovartem): add test that just pre decoded frame can't be received as -// dropped one because decoder always returns either decoded frame or error. +TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, + PreDecodedDroppedKeyFrameAccountedInStats) { + DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer; + DefaultVideoQualityAnalyzerFramesComparator comparator( + Clock::GetRealTimeClock(), cpu_measurer, + DefaultVideoQualityAnalyzerOptions()); + + Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime(); + uint16_t frame_id = 1; + size_t stream = 0; + size_t sender = 0; + size_t receiver = 1; + InternalStatsKey stats_key(stream, sender, receiver); + + // Frame captured + FrameStats frame_stats(/*frame_id=*/1, captured_time); + // Frame pre encoded + frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10); + // Frame encoded + frame_stats.encoded_time = captured_time + TimeDelta::Millis(20); + frame_stats.used_encoder = + Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time); + frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.encoded_image_size = DataSize::Bytes(1000); + frame_stats.target_encode_bitrate = 2000; + // Frame pre decoded + frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; + frame_stats.pre_decoded_image_size = 
DataSize::Bytes(500); + frame_stats.received_time = captured_time + TimeDelta::Millis(30); + frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40); + + comparator.Start(/*max_threads_count=*/1); + comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, + captured_time, captured_time); + comparator.AddComparison(stats_key, + /*captured=*/absl::nullopt, + /*rendered=*/absl::nullopt, + FrameComparisonType::kDroppedFrame, frame_stats); + comparator.Stop(/*last_rendered_frame_times=*/{}); + + EXPECT_EQ(comparator.stream_stats().size(), 1lu); + StreamStats stats = comparator.stream_stats().at(stats_key); + EXPECT_EQ(stats.stream_started_time, captured_time); + expectEmpty(stats.psnr); + expectEmpty(stats.ssim); + expectEmpty(stats.transport_time_ms); + expectEmpty(stats.total_delay_incl_transport_ms); + expectEmpty(stats.time_between_rendered_frames_ms); + expectEmpty(stats.encode_frame_rate); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.encode_time_ms), 10.0); + expectEmpty(stats.decode_time_ms); + expectEmpty(stats.receive_to_render_time_ms); + expectEmpty(stats.skipped_between_rendered); + expectEmpty(stats.freeze_time_ms); + expectEmpty(stats.time_between_freezes_ms); + expectEmpty(stats.resolution_of_decoded_frame); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + expectEmpty(stats.recv_key_frame_size_bytes); + expectEmpty(stats.recv_delta_frame_size_bytes); + EXPECT_EQ(stats.total_encoded_images_payload, 1000); + EXPECT_EQ(stats.num_send_key_frames, 1); + EXPECT_EQ(stats.num_recv_key_frames, 0); + EXPECT_THAT(stats.dropped_by_phase, Eq(std::map{ + {FrameDropPhase::kBeforeEncoder, 0}, + {FrameDropPhase::kByEncoder, 0}, + {FrameDropPhase::kTransport, 0}, + {FrameDropPhase::kByDecoder, 1}, + {FrameDropPhase::kAfterDecoder, 0}})); + EXPECT_EQ(stats.encoders, + std::vector{*frame_stats.used_encoder}); + EXPECT_THAT(stats.decoders, IsEmpty()); +} TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, 
DecodedDroppedKeyFrameAccountedInStats) { @@ -1102,6 +1210,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1111,6 +1221,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50); frame_stats.used_decoder = Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + frame_stats.decoded_frame_width = 200; + frame_stats.decoded_frame_height = 100; comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -1136,8 +1248,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); expectEmpty(stats.recv_key_frame_size_bytes); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1180,6 +1293,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; 
frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1214,8 +1329,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - expectEmpty(stats.resolution_of_rendered_frame); + expectEmpty(stats.resolution_of_decoded_frame); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1262,6 +1378,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1271,10 +1389,10 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50); frame_stats.used_decoder = Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + frame_stats.decoded_frame_width = 200; + frame_stats.decoded_frame_height = 100; // Frame rendered frame_stats.rendered_time = captured_time + TimeDelta::Millis(60); - frame_stats.rendered_frame_width = 200; - frame_stats.rendered_frame_height = 100; comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -1300,8 +1418,9 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, expectEmpty(stats.skipped_between_rendered); expectEmpty(stats.freeze_time_ms); expectEmpty(stats.time_between_freezes_ms); - 
EXPECT_GE(GetFirstOrDie(stats.resolution_of_rendered_frame), 200 * 100.0); + EXPECT_GE(GetFirstOrDie(stats.resolution_of_decoded_frame), 200 * 100.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.target_encode_bitrate), 2000.0); + EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.qp), 5.0); EXPECT_DOUBLE_EQ(GetFirstOrDie(stats.recv_key_frame_size_bytes), 500.0); expectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1345,6 +1464,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) { frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.encoded_image_size = DataSize::Bytes(1000); frame_stats.target_encode_bitrate = 2000; + frame_stats.qp_values = StatsCounter( + /*samples=*/{{5, Timestamp::Seconds(1)}, {5, Timestamp::Seconds(2)}}); // Frame pre decoded frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey; frame_stats.pre_decoded_image_size = DataSize::Bytes(500); @@ -1356,8 +1477,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) { Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); // Frame rendered frame_stats.rendered_time = captured_time + TimeDelta::Millis(60); - frame_stats.rendered_frame_width = 200; - frame_stats.rendered_frame_height = 100; + frame_stats.decoded_frame_width = 200; + frame_stats.decoded_frame_height = 100; comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -1378,9 +1499,10 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) { AssertFirstMetadataHasField(stats.encode_time_ms, "frame_id", "1"); AssertFirstMetadataHasField(stats.decode_time_ms, "frame_id", "1"); AssertFirstMetadataHasField(stats.receive_to_render_time_ms, "frame_id", "1"); - AssertFirstMetadataHasField(stats.resolution_of_rendered_frame, "frame_id", + AssertFirstMetadataHasField(stats.resolution_of_decoded_frame, "frame_id", "1"); 
AssertFirstMetadataHasField(stats.target_encode_bitrate, "frame_id", "1"); + AssertFirstMetadataHasField(stats.qp, "frame_id", "1"); AssertFirstMetadataHasField(stats.recv_key_frame_size_bytes, "frame_id", "1"); expectEmpty(stats.recv_delta_frame_size_bytes); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h index 93d45e4e26..3e65e2b888 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h @@ -14,8 +14,10 @@ #include #include #include +#include #include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" #include "api/video/video_frame.h" @@ -62,9 +64,13 @@ struct FrameStats { VideoFrameType pre_decoded_frame_type = VideoFrameType::kEmptyFrame; DataSize pre_decoded_image_size = DataSize::Bytes(0); uint32_t target_encode_bitrate = 0; + // There can be multiple qp values for single video frame when simulcast + // or SVC is used. In such case multiple EncodedImage's are created by encoder + // and each of it will have its own qp value. + SamplesStatsCounter qp_values; - absl::optional rendered_frame_width = absl::nullopt; - absl::optional rendered_frame_height = absl::nullopt; + absl::optional decoded_frame_width = absl::nullopt; + absl::optional decoded_frame_height = absl::nullopt; // Can be not set if frame was dropped by encoder. 
absl::optional used_encoder = absl::nullopt; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc index e1458d4812..106daac584 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc @@ -277,11 +277,21 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, MetricNamesForP2PAreCorrect) { .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video", + .name = "capture_frame_rate", + .unit = Unit::kHertz, + .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "num_encoded_frames", @@ -441,11 +451,21 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video_alice_bob", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video_alice_bob", .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video_alice_bob", + .name = 
"capture_frame_rate", + .unit = Unit::kHertz, + .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video_alice_bob", .name = "num_encoded_frames", @@ -573,11 +593,21 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video_alice_charlie", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video_alice_charlie", .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, + MetricValidationInfo{ + .test_case = "test_case/alice_video_alice_charlie", + .name = "capture_frame_rate", + .unit = Unit::kHertz, + .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ .test_case = "test_case/alice_video_alice_charlie", .name = "num_encoded_frames", @@ -641,10 +671,10 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, std::vector metrics = ToTestCases(metrics_logger.GetCollectedMetrics()); - EXPECT_THAT(metrics, SizeIs(53)); - EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(26)); + EXPECT_THAT(metrics, SizeIs(57)); + EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(28)); EXPECT_THAT(metrics, - Contains("test_case/alice_video_alice_charlie").Times(26)); + Contains("test_case/alice_video_alice_charlie").Times(28)); EXPECT_THAT(metrics, Contains("test_case").Times(1)); } diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h index a52914e6b7..a71dad71c1 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h +++ 
b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h @@ -11,6 +11,7 @@ #ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_ #define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_ +#include #include #include #include @@ -125,6 +126,7 @@ struct StreamStats { SamplesStatsCounter total_delay_incl_transport_ms; // Time between frames out from renderer. SamplesStatsCounter time_between_rendered_frames_ms; + SamplesRateCounter capture_frame_rate; SamplesRateCounter encode_frame_rate; SamplesStatsCounter encode_time_ms; SamplesStatsCounter decode_time_ms; @@ -141,8 +143,9 @@ struct StreamStats { SamplesStatsCounter freeze_time_ms; // Mean time between one freeze end and next freeze start. SamplesStatsCounter time_between_freezes_ms; - SamplesStatsCounter resolution_of_rendered_frame; + SamplesStatsCounter resolution_of_decoded_frame; SamplesStatsCounter target_encode_bitrate; + SamplesStatsCounter qp; int64_t total_encoded_images_payload = 0; // Counters on which phase how many frames were dropped. @@ -181,6 +184,16 @@ struct AnalyzerStats { // Count of frames in flight in analyzer measured when new comparison is added // and after analyzer was stopped. SamplesStatsCounter frames_in_flight_left_count; + + // Next metrics are collected and reported iff + // `DefaultVideoQualityAnalyzerOptions::report_infra_metrics` is true. 
+ SamplesStatsCounter on_frame_captured_processing_time_ms; + SamplesStatsCounter on_frame_pre_encode_processing_time_ms; + SamplesStatsCounter on_frame_encoded_processing_time_ms; + SamplesStatsCounter on_frame_pre_decode_processing_time_ms; + SamplesStatsCounter on_frame_decoded_processing_time_ms; + SamplesStatsCounter on_frame_rendered_processing_time_ms; + SamplesStatsCounter on_decoder_error_processing_time_ms; }; struct StatsKey { @@ -244,6 +257,9 @@ struct DefaultVideoQualityAnalyzerOptions { // Tells DefaultVideoQualityAnalyzer if detailed frame stats should be // reported. bool report_detailed_frame_stats = false; + // Tells DefaultVideoQualityAnalyzer if infra metrics related to the + // performance and stability of the analyzer itself should be reported. + bool report_infra_metrics = false; // If true DefaultVideoQualityAnalyzer will try to adjust frames before // computing PSNR and SSIM for them. In some cases picture may be shifted by // a few pixels after the encode/decode step. 
Those difference is invisible diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc index adda17c65e..fc970e1ea2 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc @@ -553,10 +553,10 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) { EXPECT_GE(it->second.encode_time_ms.GetMin(), 20); ASSERT_FALSE(it->second.decode_time_ms.IsEmpty()); EXPECT_GE(it->second.decode_time_ms.GetMin(), 30); - ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty()); - EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(), + ASSERT_FALSE(it->second.resolution_of_decoded_frame.IsEmpty()); + EXPECT_GE(it->second.resolution_of_decoded_frame.GetMin(), kFrameWidth * kFrameHeight - 1); - EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(), + EXPECT_LE(it->second.resolution_of_decoded_frame.GetMax(), kFrameWidth * kFrameHeight + 1); } { @@ -566,10 +566,10 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) { EXPECT_GE(it->second.encode_time_ms.GetMin(), 20); ASSERT_FALSE(it->second.decode_time_ms.IsEmpty()); EXPECT_GE(it->second.decode_time_ms.GetMin(), 30); - ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty()); - EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(), + ASSERT_FALSE(it->second.resolution_of_decoded_frame.IsEmpty()); + EXPECT_GE(it->second.resolution_of_decoded_frame.GetMin(), kFrameWidth * kFrameHeight - 1); - EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(), + EXPECT_LE(it->second.resolution_of_decoded_frame.GetMax(), kFrameWidth * kFrameHeight + 1); } } @@ -2059,6 +2059,109 @@ TEST(DefaultVideoQualityAnalyzerTest, EXPECT_GE(stats.encode_time_ms.GetAverage(), 10); } +TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsAreReportedWhenRequested) { + std::unique_ptr frame_generator = + 
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); + options.report_infra_metrics = true; + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + test::GetGlobalMetricsLogger(), options); + analyzer.Start("test_case", std::vector{"alice", "bob"}, + kAnalyzerMaxThreadsCount); + + PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"}, + /*frames_count=*/1, *frame_generator); + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.on_frame_captured_processing_time_ms.NumSamples(), 1); + EXPECT_EQ(stats.on_frame_pre_encode_processing_time_ms.NumSamples(), 1); + EXPECT_EQ(stats.on_frame_encoded_processing_time_ms.NumSamples(), 1); + EXPECT_EQ(stats.on_frame_pre_decode_processing_time_ms.NumSamples(), 1); + EXPECT_EQ(stats.on_frame_decoded_processing_time_ms.NumSamples(), 1); + EXPECT_EQ(stats.on_frame_rendered_processing_time_ms.NumSamples(), 1); + EXPECT_EQ(stats.on_decoder_error_processing_time_ms.NumSamples(), 0); +} + +TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsNotCollectedByDefault) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); + options.report_infra_metrics = false; + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + test::GetGlobalMetricsLogger(), options); + analyzer.Start("test_case", std::vector{"alice", "bob"}, + kAnalyzerMaxThreadsCount); + + PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"}, + /*frames_count=*/1, 
*frame_generator); + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.on_frame_captured_processing_time_ms.NumSamples(), 0); + EXPECT_EQ(stats.on_frame_pre_encode_processing_time_ms.NumSamples(), 0); + EXPECT_EQ(stats.on_frame_encoded_processing_time_ms.NumSamples(), 0); + EXPECT_EQ(stats.on_frame_pre_decode_processing_time_ms.NumSamples(), 0); + EXPECT_EQ(stats.on_frame_decoded_processing_time_ms.NumSamples(), 0); + EXPECT_EQ(stats.on_frame_rendered_processing_time_ms.NumSamples(), 0); + EXPECT_EQ(stats.on_decoder_error_processing_time_ms.NumSamples(), 0); +} + +TEST(DefaultVideoQualityAnalyzerTest, + FrameDroppedByDecoderIsAccountedCorrectly) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); + options.report_infra_metrics = false; + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + test::GetGlobalMetricsLogger(), options); + analyzer.Start("test_case", std::vector{"alice", "bob"}, + kAnalyzerMaxThreadsCount); + + VideoFrame to_be_dropped_frame = + NextFrame(frame_generator.get(), /*timestamp_us=*/1); + uint16_t frame_id = + analyzer.OnFrameCaptured("alice", "alice_video", to_be_dropped_frame); + to_be_dropped_frame.set_id(frame_id); + analyzer.OnFramePreEncode("alice", to_be_dropped_frame); + analyzer.OnFrameEncoded("alice", to_be_dropped_frame.id(), + FakeEncode(to_be_dropped_frame), + VideoQualityAnalyzerInterface::EncoderStats(), false); + VideoFrame received_to_be_dropped_frame = DeepCopy(to_be_dropped_frame); + analyzer.OnFramePreDecode("bob", received_to_be_dropped_frame.id(), + 
FakeEncode(received_to_be_dropped_frame)); + PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"}, + /*frames_count=*/1, *frame_generator); + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + StreamStats stats = analyzer.GetStats().at(StatsKey("alice_video", "bob")); + ASSERT_EQ(stats.dropped_by_phase[FrameDropPhase::kByDecoder], 1); +} + class DefaultVideoQualityAnalyzerTimeBetweenFreezesTest : public TestWithParam {}; diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc index f7d4550b0b..da9c53beb9 100644 --- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc @@ -29,6 +29,9 @@ uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured( const webrtc::VideoFrame& frame) { MutexLock lock(&lock_); uint16_t frame_id = next_frame_id_++; + if (frame_id == VideoFrame::kNotSetId) { + frame_id = next_frame_id_++; + } auto it = frames_in_flight_.find(frame_id); if (it == frames_in_flight_.end()) { frames_in_flight_.insert(frame_id); diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h index b12c11ba7c..af4868a961 100644 --- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h +++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h @@ -86,7 +86,7 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { // process frame id overlap. 
std::set frames_in_flight_ RTC_GUARDED_BY(lock_); std::map frames_to_stream_label_ RTC_GUARDED_BY(lock_); - uint16_t next_frame_id_ RTC_GUARDED_BY(lock_) = 0; + uint16_t next_frame_id_ RTC_GUARDED_BY(lock_) = 1; uint64_t frames_captured_ RTC_GUARDED_BY(lock_) = 0; uint64_t frames_pre_encoded_ RTC_GUARDED_BY(lock_) = 0; uint64_t frames_encoded_ RTC_GUARDED_BY(lock_) = 0; diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc index 98c12f665c..7f742972cb 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc @@ -290,6 +290,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage( VideoQualityAnalyzerInterface::EncoderStats stats; stats.encoder_name = codec_name; stats.target_encode_bitrate = target_encode_bitrate; + stats.qp = encoded_image.qp_; analyzer_->OnFrameEncoded(peer_name_, frame_id, encoded_image, stats, discard); diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h index 4e765911b4..476136c468 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h @@ -17,7 +17,7 @@ #include #include "absl/strings/string_view.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/video/video_frame.h" #include "api/video_codecs/sdp_video_format.h" @@ -50,9 +50,8 @@ namespace webrtc_pc_e2e { class QualityAnalyzingVideoEncoder : public VideoEncoder, public EncodedImageCallback { public: - using EmulatedSFUConfigMap = std::map< - std::string, - absl::optional>; + using EmulatedSFUConfigMap = + std::map>; QualityAnalyzingVideoEncoder(absl::string_view peer_name, std::unique_ptr delegate, diff --git 
a/test/pc/e2e/analyzer/video/video_dumping.cc b/test/pc/e2e/analyzer/video/video_dumping.cc index 2e10e9008d..4fec0a8f9e 100644 --- a/test/pc/e2e/analyzer/video/video_dumping.cc +++ b/test/pc/e2e/analyzer/video/video_dumping.cc @@ -32,6 +32,7 @@ class VideoFrameIdsWriter final : public test::VideoFrameWriter { explicit VideoFrameIdsWriter(absl::string_view file_name) : file_name_(file_name) { output_file_ = fopen(file_name_.c_str(), "wb"); + RTC_LOG(LS_INFO) << "Writing VideoFrame IDs into " << file_name_; RTC_CHECK(output_file_ != nullptr) << "Failed to open file to dump frame ids for writing: " << file_name_; } @@ -50,6 +51,7 @@ class VideoFrameIdsWriter final : public test::VideoFrameWriter { void Close() override { if (output_file_ != nullptr) { + RTC_LOG(LS_INFO) << "Closing file for VideoFrame IDs: " << file_name_; fclose(output_file_); output_file_ = nullptr; } diff --git a/test/pc/e2e/analyzer/video/video_dumping_test.cc b/test/pc/e2e/analyzer/video/video_dumping_test.cc index a7c95107ab..5dd4021516 100644 --- a/test/pc/e2e/analyzer/video/video_dumping_test.cc +++ b/test/pc/e2e/analyzer/video/video_dumping_test.cc @@ -136,12 +136,10 @@ TEST_F(CreateVideoFrameWithIdsWriterTest, VideoIsWritenWithFrameIds) { ASSERT_TRUE(writer->WriteFrame(frame2)); writer->Close(); - test::Y4mFrameReaderImpl frame_reader(video_filename_, /*width=*/2, - /*height=*/2); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(2)); - AssertFramesEqual(frame_reader.ReadFrame(), frame1.video_frame_buffer()); - AssertFramesEqual(frame_reader.ReadFrame(), frame2.video_frame_buffer()); + auto frame_reader = test::CreateY4mFrameReader(video_filename_); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); + AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer()); + AssertFramesEqual(frame_reader->PullFrame(), frame2.video_frame_buffer()); AssertFrameIdsAre(ids_filename_, {"1", "2"}); } @@ -163,12 +161,10 @@ TEST_F(VideoWriterTest, 
AllFramesAreWrittenWithSamplingModulo1) { frame_writer.Close(); } - test::Y4mFrameReaderImpl frame_reader(video_filename_, /*width=*/2, - /*height=*/2); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(2)); - AssertFramesEqual(frame_reader.ReadFrame(), frame1.video_frame_buffer()); - AssertFramesEqual(frame_reader.ReadFrame(), frame2.video_frame_buffer()); + auto frame_reader = test::CreateY4mFrameReader(video_filename_); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); + AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer()); + AssertFramesEqual(frame_reader->PullFrame(), frame2.video_frame_buffer()); } TEST_F(VideoWriterTest, OnlyEvery2ndFramesIsWrittenWithSamplingModulo2) { @@ -189,12 +185,10 @@ TEST_F(VideoWriterTest, OnlyEvery2ndFramesIsWrittenWithSamplingModulo2) { frame_writer.Close(); } - test::Y4mFrameReaderImpl frame_reader(video_filename_, /*width=*/2, - /*height=*/2); - ASSERT_TRUE(frame_reader.Init()); - EXPECT_THAT(frame_reader.NumberOfFrames(), Eq(2)); - AssertFramesEqual(frame_reader.ReadFrame(), frame1.video_frame_buffer()); - AssertFramesEqual(frame_reader.ReadFrame(), frame3.video_frame_buffer()); + auto frame_reader = test::CreateY4mFrameReader(video_filename_); + EXPECT_THAT(frame_reader->num_frames(), Eq(2)); + AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer()); + AssertFramesEqual(frame_reader->PullFrame(), frame3.video_frame_buffer()); } } // namespace diff --git a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc index 5c6c9e3aa7..87c11886cc 100644 --- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc +++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc @@ -19,11 +19,13 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/test/pclf/media_configuration.h" #include 
"api/video/i420_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/clock.h" +#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h" #include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h" #include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h" #include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h" @@ -35,6 +37,7 @@ namespace webrtc { namespace webrtc_pc_e2e { namespace { +using webrtc::webrtc_pc_e2e::VideoConfig; using EmulatedSFUConfigMap = ::webrtc::webrtc_pc_e2e::QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap; @@ -128,6 +131,7 @@ VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor( test::VideoRenderer::Create((*config.stream_label + "-capture").c_str(), config.width, config.height))); } + sinks_helper_.AddConfig(peer_name, config); { MutexLock lock(&mutex_); known_video_configs_.insert({*config.stream_label, config}); @@ -140,7 +144,17 @@ VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor( std::unique_ptr> VideoQualityAnalyzerInjectionHelper::CreateVideoSink( absl::string_view peer_name) { - return std::make_unique(peer_name, this); + return std::make_unique(peer_name, this); +} + +std::unique_ptr +VideoQualityAnalyzerInjectionHelper::CreateVideoSink( + absl::string_view peer_name, + const VideoSubscription& subscription, + bool report_infra_metrics) { + return std::make_unique(peer_name, clock_, *analyzer_, + sinks_helper_, subscription, + report_infra_metrics); } void VideoQualityAnalyzerInjectionHelper::Start( @@ -181,6 +195,7 @@ void VideoQualityAnalyzerInjectionHelper::Stop() { video_writer->Close(); } video_writers_.clear(); + sinks_helper_.Clear(); } void VideoQualityAnalyzerInjectionHelper::OnFrame(absl::string_view peer_name, diff --git a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h index 
3eee5a0566..8000edadb1 100644 --- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h +++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h @@ -20,7 +20,7 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" #include "api/test/stats_observer_interface.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/video/video_frame.h" @@ -29,6 +29,8 @@ #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" +#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h" +#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h" #include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h" #include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h" #include "test/test_video_capturer.h" @@ -41,8 +43,6 @@ namespace webrtc_pc_e2e { // VideoQualityAnalyzerInterface into PeerConnection pipeline. class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { public: - using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; - VideoQualityAnalyzerInjectionHelper( Clock* clock, std::unique_ptr analyzer, @@ -69,14 +69,20 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { // `input_dump_file_name`, video will be written into that file. std::unique_ptr CreateFramePreprocessor(absl::string_view peer_name, - const VideoConfig& config); + const webrtc::webrtc_pc_e2e::VideoConfig& config); // Creates sink, that will allow video quality analyzer to get access to // the rendered frames. If corresponding video track has // `output_dump_file_name` in its VideoConfig, which was used for // CreateFramePreprocessor(...), then video also will be written // into that file. + // TODO(titovartem): Remove method with `peer_name` only parameter. 
std::unique_ptr> CreateVideoSink( absl::string_view peer_name); + // TODO(titovartem): Remove default value for `report_infra_metrics`. + std::unique_ptr CreateVideoSink( + absl::string_view peer_name, + const VideoSubscription& subscription, + bool report_infra_metrics = false); void Start(std::string test_case_name, rtc::ArrayView peer_names, @@ -101,12 +107,13 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { void Stop(); private: - class AnalyzingVideoSink final : public rtc::VideoSinkInterface { + // Deprecated, to be removed when old API isn't used anymore. + class AnalyzingVideoSink2 final : public rtc::VideoSinkInterface { public: - explicit AnalyzingVideoSink(absl::string_view peer_name, - VideoQualityAnalyzerInjectionHelper* helper) + explicit AnalyzingVideoSink2(absl::string_view peer_name, + VideoQualityAnalyzerInjectionHelper* helper) : peer_name_(peer_name), helper_(helper) {} - ~AnalyzingVideoSink() override = default; + ~AnalyzingVideoSink2() override = default; void OnFrame(const VideoFrame& frame) override { helper_->OnFrame(peer_name_, frame); @@ -147,10 +154,11 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { std::vector> video_writers_; + AnalyzingVideoSinksHelper sinks_helper_; Mutex mutex_; int peers_count_ RTC_GUARDED_BY(mutex_); // Map from stream label to the video config. 
- std::map known_video_configs_ + std::map known_video_configs_ RTC_GUARDED_BY(mutex_); std::map>>> diff --git a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc index 085f1b3cf6..fa7f5b1c12 100644 --- a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc +++ b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc @@ -136,8 +136,10 @@ void VideoQualityMetricsReporter::ReportVideoBweResults( const std::string& peer_name, const VideoBweStats& video_bwe_stats) { std::string test_case_name = GetTestCaseName(peer_name); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map metric_metadata{ - {MetricMetadataKey::kPeerMetadataKey, peer_name}}; + {MetricMetadataKey::kPeerMetadataKey, peer_name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogMetric( "available_send_bandwidth", test_case_name, diff --git a/test/pc/e2e/analyzer_helper.cc b/test/pc/e2e/analyzer_helper.cc index 852f0a3435..76cd9a7c78 100644 --- a/test/pc/e2e/analyzer_helper.cc +++ b/test/pc/e2e/analyzer_helper.cc @@ -10,6 +10,7 @@ #include "test/pc/e2e/analyzer_helper.h" +#include #include namespace webrtc { @@ -19,6 +20,21 @@ AnalyzerHelper::AnalyzerHelper() { signaling_sequence_checker_.Detach(); } +void AnalyzerHelper::AddTrackToStreamMapping( + absl::string_view track_id, + absl::string_view receiver_peer, + absl::string_view stream_label, + absl::optional sync_group) { + RTC_DCHECK_RUN_ON(&signaling_sequence_checker_); + track_to_stream_map_.insert( + {std::string(track_id), + StreamInfo{.receiver_peer = std::string(receiver_peer), + .stream_label = std::string(stream_label), + .sync_group = sync_group.has_value() + ? 
*sync_group + : std::string(stream_label)}}); +} + void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id, std::string stream_label) { RTC_DCHECK_RUN_ON(&signaling_sequence_checker_); @@ -35,7 +51,7 @@ void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id, StreamInfo{std::move(stream_label), std::move(sync_group)}}); } -const AnalyzerHelper::StreamInfo& AnalyzerHelper::GetStreamInfoFromTrackId( +AnalyzerHelper::StreamInfo AnalyzerHelper::GetStreamInfoFromTrackId( absl::string_view track_id) const { RTC_DCHECK_RUN_ON(&signaling_sequence_checker_); auto track_to_stream_pair = track_to_stream_map_.find(std::string(track_id)); @@ -43,15 +59,5 @@ const AnalyzerHelper::StreamInfo& AnalyzerHelper::GetStreamInfoFromTrackId( return track_to_stream_pair->second; } -absl::string_view AnalyzerHelper::GetStreamLabelFromTrackId( - absl::string_view track_id) const { - return GetStreamInfoFromTrackId(track_id).stream_label; -} - -absl::string_view AnalyzerHelper::GetSyncGroupLabelFromTrackId( - absl::string_view track_id) const { - return GetStreamInfoFromTrackId(track_id).sync_group; -} - } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/test/pc/e2e/analyzer_helper.h b/test/pc/e2e/analyzer_helper.h index 9cebd7015e..d0b47c4fb9 100644 --- a/test/pc/e2e/analyzer_helper.h +++ b/test/pc/e2e/analyzer_helper.h @@ -15,6 +15,7 @@ #include #include "absl/strings/string_view.h" +#include "absl/types/optional.h" #include "api/sequence_checker.h" #include "api/test/track_id_stream_info_map.h" #include "rtc_base/thread_annotations.h" @@ -31,30 +32,24 @@ namespace webrtc_pc_e2e { // AddTrackToStreamMapping, GetStreamLabelFromTrackId and // GetSyncGroupLabelFromTrackId must be invoked from the signaling thread. Get // methods should be invoked only after all data is added. Mixing Get methods -// with adding new data may lead to undefined behaviour. +// with adding new data may lead to undefined behavior. 
class AnalyzerHelper : public TrackIdStreamInfoMap { public: AnalyzerHelper(); + void AddTrackToStreamMapping(absl::string_view track_id, + absl::string_view receiver_peer, + absl::string_view stream_label, + absl::optional sync_group); void AddTrackToStreamMapping(std::string track_id, std::string stream_label); void AddTrackToStreamMapping(std::string track_id, std::string stream_label, std::string sync_group); - absl::string_view GetStreamLabelFromTrackId( - absl::string_view track_id) const override; - - absl::string_view GetSyncGroupLabelFromTrackId( + StreamInfo GetStreamInfoFromTrackId( absl::string_view track_id) const override; private: - struct StreamInfo { - std::string stream_label; - std::string sync_group; - }; - - const StreamInfo& GetStreamInfoFromTrackId(absl::string_view track_id) const; - SequenceChecker signaling_sequence_checker_; std::map track_to_stream_map_ RTC_GUARDED_BY(signaling_sequence_checker_); diff --git a/test/pc/e2e/cross_media_metrics_reporter.cc b/test/pc/e2e/cross_media_metrics_reporter.cc index f14901236c..b2c91089c8 100644 --- a/test/pc/e2e/cross_media_metrics_reporter.cc +++ b/test/pc/e2e/cross_media_metrics_reporter.cc @@ -19,6 +19,7 @@ #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "system_wrappers/include/field_trial.h" +#include "test/pc/e2e/metric_metadata_keys.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -43,15 +44,17 @@ void CrossMediaMetricsReporter::OnStatsReports( absl::string_view pc_label, const rtc::scoped_refptr& report) { auto inbound_stats = report->GetStatsOfType(); - std::map> + std::map> sync_group_stats; for (const auto& stat : inbound_stats) { auto media_source_stat = - report->GetAs(*stat->track_id); + report->GetAs(*stat->track_id); if (stat->estimated_playout_timestamp.ValueOrDefault(0.) 
> 0 && media_source_stat->track_identifier.is_defined()) { - sync_group_stats[reporter_helper_->GetSyncGroupLabelFromTrackId( - *media_source_stat->track_identifier)] + sync_group_stats[reporter_helper_ + ->GetStreamInfoFromTrackId( + *media_source_stat->track_identifier) + .sync_group] .push_back(stat); } } @@ -78,17 +81,19 @@ void CrossMediaMetricsReporter::OnStatsReports( // it only once. if (stats_info_.find(sync_group) == stats_info_.end()) { auto audio_source_stat = - report->GetAs(*audio_stat->track_id); + report->GetAs( + *audio_stat->track_id); auto video_source_stat = - report->GetAs(*video_stat->track_id); + report->GetAs( + *video_stat->track_id); // *_source_stat->track_identifier is always defined here because we // checked it while grouping stats. - stats_info_[sync_group].audio_stream_label = - std::string(reporter_helper_->GetStreamLabelFromTrackId( - *audio_source_stat->track_identifier)); - stats_info_[sync_group].video_stream_label = - std::string(reporter_helper_->GetStreamLabelFromTrackId( - *video_source_stat->track_identifier)); + stats_info_[sync_group].audio_stream_info = + reporter_helper_->GetStreamInfoFromTrackId( + *audio_source_stat->track_identifier); + stats_info_[sync_group].video_stream_info = + reporter_helper_->GetStreamInfoFromTrackId( + *video_source_stat->track_identifier); } double audio_video_playout_diff = *audio_stat->estimated_playout_timestamp - @@ -109,16 +114,39 @@ void CrossMediaMetricsReporter::StopAndReportResults() { MutexLock lock(&mutex_); for (const auto& pair : stats_info_) { const std::string& sync_group = pair.first; + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
+ std::map audio_metric_metadata{ + {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group}, + {MetricMetadataKey::kAudioStreamMetadataKey, + pair.second.audio_stream_info.stream_label}, + {MetricMetadataKey::kPeerMetadataKey, + pair.second.audio_stream_info.receiver_peer}, + {MetricMetadataKey::kReceiverMetadataKey, + pair.second.audio_stream_info.receiver_peer}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogMetric( "audio_ahead_ms", - GetTestCaseName(pair.second.audio_stream_label, sync_group), + GetTestCaseName(pair.second.audio_stream_info.stream_label, sync_group), pair.second.audio_ahead_ms, Unit::kMilliseconds, - webrtc::test::ImprovementDirection::kSmallerIsBetter); + webrtc::test::ImprovementDirection::kSmallerIsBetter, + std::move(audio_metric_metadata)); + + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + std::map video_metric_metadata{ + {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group}, + {MetricMetadataKey::kAudioStreamMetadataKey, + pair.second.video_stream_info.stream_label}, + {MetricMetadataKey::kPeerMetadataKey, + pair.second.video_stream_info.receiver_peer}, + {MetricMetadataKey::kReceiverMetadataKey, + pair.second.video_stream_info.receiver_peer}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogMetric( "video_ahead_ms", - GetTestCaseName(pair.second.video_stream_label, sync_group), + GetTestCaseName(pair.second.video_stream_info.stream_label, sync_group), pair.second.video_ahead_ms, Unit::kMilliseconds, - webrtc::test::ImprovementDirection::kSmallerIsBetter); + webrtc::test::ImprovementDirection::kSmallerIsBetter, + std::move(video_metric_metadata)); } } diff --git a/test/pc/e2e/cross_media_metrics_reporter.h b/test/pc/e2e/cross_media_metrics_reporter.h index 42baf43a71..2d51ebb20f 100644 --- a/test/pc/e2e/cross_media_metrics_reporter.h +++ b/test/pc/e2e/cross_media_metrics_reporter.h @@ -44,6 +44,8 @@ class 
CrossMediaMetricsReporter SamplesStatsCounter audio_ahead_ms; SamplesStatsCounter video_ahead_ms; + TrackIdStreamInfoMap::StreamInfo audio_stream_info; + TrackIdStreamInfoMap::StreamInfo video_stream_info; std::string audio_stream_label; std::string video_stream_label; }; diff --git a/test/pc/e2e/echo/echo_emulation.cc b/test/pc/e2e/echo/echo_emulation.cc index f2b4be9e0d..8fdabeb16f 100644 --- a/test/pc/e2e/echo/echo_emulation.cc +++ b/test/pc/e2e/echo/echo_emulation.cc @@ -12,6 +12,8 @@ #include #include +#include "api/test/pclf/media_configuration.h" + namespace webrtc { namespace webrtc_pc_e2e { namespace { @@ -22,7 +24,7 @@ constexpr int kSingleBufferDurationMs = 10; EchoEmulatingCapturer::EchoEmulatingCapturer( std::unique_ptr capturer, - PeerConnectionE2EQualityTestFixture::EchoEmulationConfig config) + EchoEmulationConfig config) : delegate_(std::move(capturer)), config_(config), renderer_queue_(2 * config_.echo_delay.ms() / kSingleBufferDurationMs), diff --git a/test/pc/e2e/echo/echo_emulation.h b/test/pc/e2e/echo/echo_emulation.h index d1d41f63a8..359a481e46 100644 --- a/test/pc/e2e/echo/echo_emulation.h +++ b/test/pc/e2e/echo/echo_emulation.h @@ -16,7 +16,7 @@ #include #include -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" #include "modules/audio_device/include/test_audio_device.h" #include "rtc_base/swap_queue.h" @@ -29,7 +29,7 @@ class EchoEmulatingCapturer : public TestAudioDeviceModule::Capturer { public: EchoEmulatingCapturer( std::unique_ptr capturer, - PeerConnectionE2EQualityTestFixture::EchoEmulationConfig config); + EchoEmulationConfig config); void OnAudioRendered(rtc::ArrayView data); @@ -41,7 +41,7 @@ class EchoEmulatingCapturer : public TestAudioDeviceModule::Capturer { private: std::unique_ptr delegate_; - const PeerConnectionE2EQualityTestFixture::EchoEmulationConfig config_; + const EchoEmulationConfig config_; SwapQueue> renderer_queue_; diff --git 
a/test/pc/e2e/media/media_helper.cc b/test/pc/e2e/media/media_helper.cc index 4885a28ad0..3f6d069429 100644 --- a/test/pc/e2e/media/media_helper.cc +++ b/test/pc/e2e/media/media_helper.cc @@ -15,22 +15,14 @@ #include "absl/types/variant.h" #include "api/media_stream_interface.h" #include "api/test/create_frame_generator.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/peer_configurer.h" #include "test/frame_generator_capturer.h" #include "test/platform_video_capturer.h" #include "test/testsupport/file_utils.h" namespace webrtc { namespace webrtc_pc_e2e { -namespace { - -using VideoConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; -using AudioConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; -using CapturingDeviceIndex = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex; - -} // namespace void MediaHelper::MaybeAddAudio(TestPeer* peer) { if (!peer->params().audio_config) { @@ -105,7 +97,7 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) { std::unique_ptr MediaHelper::CreateVideoCapturer( const VideoConfig& video_config, - PeerConfigurerImpl::VideoSource source, + PeerConfigurer::VideoSource source, std::unique_ptr frame_preprocessor) { CapturingDeviceIndex* capturing_device_index = diff --git a/test/pc/e2e/media/media_helper.h b/test/pc/e2e/media/media_helper.h index 4e977e3002..2d163d009e 100644 --- a/test/pc/e2e/media/media_helper.h +++ b/test/pc/e2e/media/media_helper.h @@ -15,10 +15,10 @@ #include #include "api/test/frame_generator_interface.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/peer_configurer.h" #include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" #include "test/pc/e2e/media/test_video_capturer_video_track_source.h" -#include "test/pc/e2e/peer_configurer.h" #include "test/pc/e2e/test_peer.h" namespace webrtc 
{ @@ -42,8 +42,8 @@ class MediaHelper { private: std::unique_ptr CreateVideoCapturer( - const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, - PeerConfigurerImpl::VideoSource source, + const VideoConfig& video_config, + PeerConfigurer::VideoSource source, std::unique_ptr frame_preprocessor); diff --git a/test/pc/e2e/metric_metadata_keys.h b/test/pc/e2e/metric_metadata_keys.h index 5fe84af88d..2fee0cbcb0 100644 --- a/test/pc/e2e/metric_metadata_keys.h +++ b/test/pc/e2e/metric_metadata_keys.h @@ -10,21 +10,42 @@ #ifndef TEST_PC_E2E_METRIC_METADATA_KEYS_H_ #define TEST_PC_E2E_METRIC_METADATA_KEYS_H_ +#include + namespace webrtc { namespace webrtc_pc_e2e { +// All metadata fields are present only if applicable for particular metric. class MetricMetadataKey { public: + // Represents on peer with whom the metric is associated. static constexpr char kPeerMetadataKey[] = "peer"; - static constexpr char kStreamMetadataKey[] = "stream"; + // Represents sender of the media stream. + static constexpr char kSenderMetadataKey[] = "sender"; + // Represents receiver of the media stream. static constexpr char kReceiverMetadataKey[] = "receiver"; + // Represents name of the audio stream. + static constexpr char kAudioStreamMetadataKey[] = "audio_stream"; + // Represents name of the video stream. + static constexpr char kVideoStreamMetadataKey[] = "video_stream"; + // Represents name of the sync group to which stream belongs. + static constexpr char kPeerSyncGroupMetadataKey[] = "peer_sync_group"; + // Represents the test name (without any peer and stream data appended to it + // as it currently happens with the webrtc.test_metrics.Metric.test_case + // field). This metadata is temporary and it will be removed once this + // information is moved to webrtc.test_metrics.Metric.test_case. + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
+ static constexpr char kExperimentalTestNameMetadataKey[] = + "experimental_test_name"; private: MetricMetadataKey() = default; }; +// All metadata fields are presented only if applicable for particular metric. class SampleMetadataKey { public: + // Represents a frame ID with which data point is associated. static constexpr char kFrameIdMetadataKey[] = "frame_id"; private: diff --git a/test/pc/e2e/network_quality_metrics_reporter.cc b/test/pc/e2e/network_quality_metrics_reporter.cc index fbcc5b30fa..0bb28f0847 100644 --- a/test/pc/e2e/network_quality_metrics_reporter.cc +++ b/test/pc/e2e/network_quality_metrics_reporter.cc @@ -49,13 +49,12 @@ void NetworkQualityMetricsReporter::Start( const TrackIdStreamInfoMap* /*reporter_helper*/) { test_case_name_ = std::string(test_case_name); // Check that network stats are clean before test execution. - std::unique_ptr alice_stats = - PopulateStats(alice_network_); - RTC_CHECK_EQ(alice_stats->PacketsSent(), 0); - RTC_CHECK_EQ(alice_stats->PacketsReceived(), 0); - std::unique_ptr bob_stats = PopulateStats(bob_network_); - RTC_CHECK_EQ(bob_stats->PacketsSent(), 0); - RTC_CHECK_EQ(bob_stats->PacketsReceived(), 0); + EmulatedNetworkStats alice_stats = PopulateStats(alice_network_); + RTC_CHECK_EQ(alice_stats.overall_outgoing_stats.packets_sent, 0); + RTC_CHECK_EQ(alice_stats.overall_incoming_stats.packets_received, 0); + EmulatedNetworkStats bob_stats = PopulateStats(bob_network_); + RTC_CHECK_EQ(bob_stats.overall_outgoing_stats.packets_sent, 0); + RTC_CHECK_EQ(bob_stats.overall_incoming_stats.packets_received, 0); } void NetworkQualityMetricsReporter::OnStatsReports( @@ -85,15 +84,16 @@ void NetworkQualityMetricsReporter::OnStatsReports( } void NetworkQualityMetricsReporter::StopAndReportResults() { - std::unique_ptr alice_stats = - PopulateStats(alice_network_); - std::unique_ptr bob_stats = PopulateStats(bob_network_); + EmulatedNetworkStats alice_stats = PopulateStats(alice_network_); + EmulatedNetworkStats bob_stats = 
PopulateStats(bob_network_); int64_t alice_packets_loss = - alice_stats->PacketsSent() - bob_stats->PacketsReceived(); + alice_stats.overall_outgoing_stats.packets_sent - + bob_stats.overall_incoming_stats.packets_received; int64_t bob_packets_loss = - bob_stats->PacketsSent() - alice_stats->PacketsReceived(); - ReportStats("alice", std::move(alice_stats), alice_packets_loss); - ReportStats("bob", std::move(bob_stats), bob_packets_loss); + bob_stats.overall_outgoing_stats.packets_sent - + alice_stats.overall_incoming_stats.packets_received; + ReportStats("alice", alice_stats, alice_packets_loss); + ReportStats("bob", bob_stats, bob_packets_loss); if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) { RTC_LOG(LS_ERROR) @@ -106,12 +106,11 @@ void NetworkQualityMetricsReporter::StopAndReportResults() { } } -std::unique_ptr -NetworkQualityMetricsReporter::PopulateStats( +EmulatedNetworkStats NetworkQualityMetricsReporter::PopulateStats( EmulatedNetworkManagerInterface* network) { rtc::Event wait; - std::unique_ptr stats; - network->GetStats([&](std::unique_ptr s) { + EmulatedNetworkStats stats; + network->GetStats([&](EmulatedNetworkStats s) { stats = std::move(s); wait.Set(); }); @@ -122,38 +121,43 @@ NetworkQualityMetricsReporter::PopulateStats( void NetworkQualityMetricsReporter::ReportStats( const std::string& network_label, - std::unique_ptr stats, + const EmulatedNetworkStats& stats, int64_t packet_loss) { metrics_logger_->LogSingleValueMetric( - "bytes_sent", GetTestCaseName(network_label), stats->BytesSent().bytes(), - Unit::kBytes, ImprovementDirection::kNeitherIsBetter); + "bytes_sent", GetTestCaseName(network_label), + stats.overall_outgoing_stats.bytes_sent.bytes(), Unit::kBytes, + ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( - "packets_sent", GetTestCaseName(network_label), stats->PacketsSent(), - Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); + "packets_sent", GetTestCaseName(network_label), + 
stats.overall_outgoing_stats.packets_sent, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( "average_send_rate", GetTestCaseName(network_label), - stats->PacketsSent() >= 2 ? stats->AverageSendRate().kbps() : 0, + stats.overall_outgoing_stats.packets_sent >= 2 + ? stats.overall_outgoing_stats.AverageSendRate().kbps() + : 0, Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( "bytes_discarded_no_receiver", GetTestCaseName(network_label), - stats->BytesDropped().bytes(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + stats.overall_incoming_stats.bytes_discarded_no_receiver.bytes(), + Unit::kBytes, ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( "packets_discarded_no_receiver", GetTestCaseName(network_label), - stats->PacketsDropped(), Unit::kUnitless, - ImprovementDirection::kNeitherIsBetter); + stats.overall_incoming_stats.packets_discarded_no_receiver, + Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( "bytes_received", GetTestCaseName(network_label), - stats->BytesReceived().bytes(), Unit::kBytes, + stats.overall_incoming_stats.bytes_received.bytes(), Unit::kBytes, ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( "packets_received", GetTestCaseName(network_label), - stats->PacketsReceived(), Unit::kUnitless, + stats.overall_incoming_stats.packets_received, Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( "average_receive_rate", GetTestCaseName(network_label), - stats->PacketsReceived() >= 2 ? stats->AverageReceiveRate().kbps() - : 0, + stats.overall_incoming_stats.packets_received >= 2 + ? 
stats.overall_incoming_stats.AverageReceiveRate().kbps() + : 0, Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter); metrics_logger_->LogSingleValueMetric( "sent_packets_loss", GetTestCaseName(network_label), packet_loss, diff --git a/test/pc/e2e/network_quality_metrics_reporter.h b/test/pc/e2e/network_quality_metrics_reporter.h index 9de65b56cb..ed894bcf54 100644 --- a/test/pc/e2e/network_quality_metrics_reporter.h +++ b/test/pc/e2e/network_quality_metrics_reporter.h @@ -49,10 +49,10 @@ class NetworkQualityMetricsReporter DataSize payload_sent = DataSize::Zero(); }; - static std::unique_ptr PopulateStats( + static EmulatedNetworkStats PopulateStats( EmulatedNetworkManagerInterface* network); void ReportStats(const std::string& network_label, - std::unique_ptr stats, + const EmulatedNetworkStats& stats, int64_t packet_loss); void ReportPCStats(const std::string& pc_label, const PCStats& stats); std::string GetTestCaseName(const std::string& network_label) const; diff --git a/test/pc/e2e/peer_configurer.h b/test/pc/e2e/peer_configurer.h deleted file mode 100644 index 9bdc2a165d..0000000000 --- a/test/pc/e2e/peer_configurer.h +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef TEST_PC_E2E_PEER_CONFIGURER_H_ -#define TEST_PC_E2E_PEER_CONFIGURER_H_ - -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/async_resolver_factory.h" -#include "api/audio/audio_mixer.h" -#include "api/call/call_factory_interface.h" -#include "api/fec_controller.h" -#include "api/rtc_event_log/rtc_event_log_factory_interface.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/test/create_peer_connection_quality_test_frame_generator.h" -#include "api/test/peerconnection_quality_test_fixture.h" -#include "api/transport/network_control.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/network.h" -#include "rtc_base/rtc_certificate_generator.h" -#include "rtc_base/ssl_certificate.h" -#include "rtc_base/thread.h" -#include "test/pc/e2e/peer_connection_quality_test_params.h" - -namespace webrtc { -namespace webrtc_pc_e2e { - -class PeerConfigurerImpl final - : public PeerConnectionE2EQualityTestFixture::PeerConfigurer { - public: - using VideoSource = - absl::variant, - PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex>; - - PeerConfigurerImpl(rtc::Thread* network_thread, - rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* packet_socket_factory) - : components_( - std::make_unique(network_thread, - network_manager, - packet_socket_factory)), - params_(std::make_unique()), - configurable_params_(std::make_unique()) {} - - PeerConfigurer* SetName(absl::string_view name) override { - params_->name = std::string(name); - return this; - } - - // Implementation of PeerConnectionE2EQualityTestFixture::PeerConfigurer. 
- PeerConfigurer* SetTaskQueueFactory( - std::unique_ptr task_queue_factory) override { - components_->pcf_dependencies->task_queue_factory = - std::move(task_queue_factory); - return this; - } - PeerConfigurer* SetCallFactory( - std::unique_ptr call_factory) override { - components_->pcf_dependencies->call_factory = std::move(call_factory); - return this; - } - PeerConfigurer* SetEventLogFactory( - std::unique_ptr event_log_factory) override { - components_->pcf_dependencies->event_log_factory = - std::move(event_log_factory); - return this; - } - PeerConfigurer* SetFecControllerFactory( - std::unique_ptr fec_controller_factory) - override { - components_->pcf_dependencies->fec_controller_factory = - std::move(fec_controller_factory); - return this; - } - PeerConfigurer* SetNetworkControllerFactory( - std::unique_ptr - network_controller_factory) override { - components_->pcf_dependencies->network_controller_factory = - std::move(network_controller_factory); - return this; - } - PeerConfigurer* SetVideoEncoderFactory( - std::unique_ptr video_encoder_factory) override { - components_->pcf_dependencies->video_encoder_factory = - std::move(video_encoder_factory); - return this; - } - PeerConfigurer* SetVideoDecoderFactory( - std::unique_ptr video_decoder_factory) override { - components_->pcf_dependencies->video_decoder_factory = - std::move(video_decoder_factory); - return this; - } - - PeerConfigurer* SetAsyncResolverFactory( - std::unique_ptr async_resolver_factory) - override { - components_->pc_dependencies->async_resolver_factory = - std::move(async_resolver_factory); - return this; - } - PeerConfigurer* SetRTCCertificateGenerator( - std::unique_ptr cert_generator) - override { - components_->pc_dependencies->cert_generator = std::move(cert_generator); - return this; - } - PeerConfigurer* SetSSLCertificateVerifier( - std::unique_ptr tls_cert_verifier) override { - components_->pc_dependencies->tls_cert_verifier = - std::move(tls_cert_verifier); - return this; - 
} - - PeerConfigurer* AddVideoConfig( - PeerConnectionE2EQualityTestFixture::VideoConfig config) override { - video_sources_.push_back( - CreateSquareFrameGenerator(config, /*type=*/absl::nullopt)); - configurable_params_->video_configs.push_back(std::move(config)); - return this; - } - PeerConfigurer* AddVideoConfig( - PeerConnectionE2EQualityTestFixture::VideoConfig config, - std::unique_ptr generator) override { - configurable_params_->video_configs.push_back(std::move(config)); - video_sources_.push_back(std::move(generator)); - return this; - } - PeerConfigurer* AddVideoConfig( - PeerConnectionE2EQualityTestFixture::VideoConfig config, - PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex index) - override { - configurable_params_->video_configs.push_back(std::move(config)); - video_sources_.push_back(index); - return this; - } - PeerConfigurer* SetVideoSubscription( - PeerConnectionE2EQualityTestFixture::VideoSubscription subscription) - override { - configurable_params_->video_subscription = std::move(subscription); - return this; - } - PeerConfigurer* SetAudioConfig( - PeerConnectionE2EQualityTestFixture::AudioConfig config) override { - params_->audio_config = std::move(config); - return this; - } - PeerConfigurer* SetUseUlpFEC(bool value) override { - params_->use_ulp_fec = value; - return this; - } - PeerConfigurer* SetUseFlexFEC(bool value) override { - params_->use_flex_fec = value; - return this; - } - PeerConfigurer* SetVideoEncoderBitrateMultiplier(double multiplier) override { - params_->video_encoder_bitrate_multiplier = multiplier; - return this; - } - PeerConfigurer* SetNetEqFactory( - std::unique_ptr neteq_factory) override { - components_->pcf_dependencies->neteq_factory = std::move(neteq_factory); - return this; - } - PeerConfigurer* SetAudioProcessing( - rtc::scoped_refptr audio_processing) override { - components_->pcf_dependencies->audio_processing = audio_processing; - return this; - } - PeerConfigurer* SetAudioMixer( - 
rtc::scoped_refptr audio_mixer) override { - components_->pcf_dependencies->audio_mixer = audio_mixer; - return this; - } - - virtual PeerConfigurer* SetUseNetworkThreadAsWorkerThread() override { - components_->worker_thread = components_->network_thread; - return this; - } - - PeerConfigurer* SetRtcEventLogPath(std::string path) override { - params_->rtc_event_log_path = std::move(path); - return this; - } - PeerConfigurer* SetAecDumpPath(std::string path) override { - params_->aec_dump_path = std::move(path); - return this; - } - PeerConfigurer* SetRTCConfiguration( - PeerConnectionInterface::RTCConfiguration configuration) override { - params_->rtc_configuration = std::move(configuration); - return this; - } - PeerConfigurer* SetRTCOfferAnswerOptions( - PeerConnectionInterface::RTCOfferAnswerOptions options) override { - params_->rtc_offer_answer_options = std::move(options); - return this; - } - PeerConfigurer* SetBitrateSettings( - BitrateSettings bitrate_settings) override { - params_->bitrate_settings = bitrate_settings; - return this; - } - PeerConfigurer* SetVideoCodecs( - std::vector - video_codecs) override { - params_->video_codecs = std::move(video_codecs); - return this; - } - - PeerConfigurer* SetIceTransportFactory( - std::unique_ptr factory) override { - components_->pc_dependencies->ice_transport_factory = std::move(factory); - return this; - } - - PeerConfigurer* SetPortAllocatorExtraFlags(uint32_t extra_flags) override { - params_->port_allocator_extra_flags = extra_flags; - return this; - } - // Implementation of PeerConnectionE2EQualityTestFixture::PeerConfigurer end. 
- - InjectableComponents* components() { return components_.get(); } - Params* params() { return params_.get(); } - ConfigurableParams* configurable_params() { - return configurable_params_.get(); - } - const Params& params() const { return *params_; } - const ConfigurableParams& configurable_params() const { - return *configurable_params_; - } - std::vector* video_sources() { return &video_sources_; } - - // Returns InjectableComponents and transfer ownership to the caller. - // Can be called once. - std::unique_ptr ReleaseComponents() { - RTC_CHECK(components_); - auto components = std::move(components_); - components_ = nullptr; - return components; - } - - // Returns Params and transfer ownership to the caller. - // Can be called once. - std::unique_ptr ReleaseParams() { - RTC_CHECK(params_); - auto params = std::move(params_); - params_ = nullptr; - return params; - } - - // Returns ConfigurableParams and transfer ownership to the caller. - // Can be called once. - std::unique_ptr ReleaseConfigurableParams() { - RTC_CHECK(configurable_params_); - auto configurable_params = std::move(configurable_params_); - configurable_params_ = nullptr; - return configurable_params; - } - - // Returns video sources and transfer frame generators ownership to the - // caller. Can be called once. - std::vector ReleaseVideoSources() { - auto video_sources = std::move(video_sources_); - video_sources_.clear(); - return video_sources; - } - - private: - std::unique_ptr components_; - std::unique_ptr params_; - std::unique_ptr configurable_params_; - std::vector video_sources_; -}; - -class DefaultNamesProvider { - public: - // Caller have to ensure that default names array will outlive names provider - // instance. 
- explicit DefaultNamesProvider( - absl::string_view prefix, - rtc::ArrayView default_names = {}); - - void MaybeSetName(absl::optional& name); - - private: - std::string GenerateName(); - - std::string GenerateNameInternal(); - - const std::string prefix_; - const rtc::ArrayView default_names_; - - std::set known_names_; - size_t counter_ = 0; -}; - -class PeerParamsPreprocessor { - public: - PeerParamsPreprocessor(); - - // Set missing params to default values if it is required: - // * Generate video stream labels if some of them are missing - // * Generate audio stream labels if some of them are missing - // * Set video source generation mode if it is not specified - // * Video codecs under test - void SetDefaultValuesForMissingParams(PeerConfigurerImpl& peer); - - // Validate peer's parameters, also ensure uniqueness of all video stream - // labels. - void ValidateParams(const PeerConfigurerImpl& peer); - - private: - DefaultNamesProvider peer_names_provider_; - - std::set peer_names_; - std::set video_labels_; - std::set audio_labels_; - std::set video_sync_groups_; - std::set audio_sync_groups_; -}; - -} // namespace webrtc_pc_e2e -} // namespace webrtc - -#endif // TEST_PC_E2E_PEER_CONFIGURER_H_ diff --git a/test/pc/e2e/peer_connection_e2e_smoke_test.cc b/test/pc/e2e/peer_connection_e2e_smoke_test.cc index cb96546a0f..0e7993e5be 100644 --- a/test/pc/e2e/peer_connection_e2e_smoke_test.cc +++ b/test/pc/e2e/peer_connection_e2e_smoke_test.cc @@ -18,6 +18,9 @@ #include "api/test/create_peerconnection_quality_test_fixture.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "call/simulated_network.h" #include "system_wrappers/include/field_trial.h" @@ -39,22 +42,6 @@ namespace { class 
PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test { public: - using EmulatedSFUConfig = - PeerConnectionE2EQualityTestFixture::EmulatedSFUConfig; - using PeerConfigurer = PeerConnectionE2EQualityTestFixture::PeerConfigurer; - using RunParams = PeerConnectionE2EQualityTestFixture::RunParams; - using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; - using VideoCodecConfig = - PeerConnectionE2EQualityTestFixture::VideoCodecConfig; - using AudioConfig = PeerConnectionE2EQualityTestFixture::AudioConfig; - using ScreenShareConfig = - PeerConnectionE2EQualityTestFixture::ScreenShareConfig; - using ScrollingParams = PeerConnectionE2EQualityTestFixture::ScrollingParams; - using VideoSimulcastConfig = - PeerConnectionE2EQualityTestFixture::VideoSimulcastConfig; - using EchoEmulationConfig = - PeerConnectionE2EQualityTestFixture::EchoEmulationConfig; - void SetUp() override { network_emulation_ = CreateNetworkEmulationManager(); auto video_quality_analyzer = std::make_unique( @@ -93,8 +80,11 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test { } void AddPeer(EmulatedNetworkManagerInterface* network, - rtc::FunctionView configurer) { - fixture_->AddPeer(network->network_dependencies(), configurer); + rtc::FunctionView update_configurer) { + auto configurer = + std::make_unique(network->network_dependencies()); + update_configurer(configurer.get()); + fixture_->AddPeer(std::move(configurer)); } void RunAndCheckEachVideoStreamReceivedFrames(const RunParams& run_params) { @@ -400,8 +390,8 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Screenshare) { screenshare.content_hint = VideoTrackInterface::ContentHint::kText; ScreenShareConfig screen_share_config = ScreenShareConfig(TimeDelta::Seconds(2)); - screen_share_config.scrolling_params = ScrollingParams( - TimeDelta::Millis(1800), kDefaultSlidesWidth, kDefaultSlidesHeight); + screen_share_config.scrolling_params = + ScrollingParams{.duration = TimeDelta::Millis(1800)}; auto 
screen_share_frame_generator = CreateScreenShareFrameGenerator(screenshare, screen_share_config); alice->AddVideoConfig(std::move(screenshare), diff --git a/test/pc/e2e/peer_connection_quality_test.cc b/test/pc/e2e/peer_connection_quality_test.cc index 6cf7449a14..83613118f9 100644 --- a/test/pc/e2e/peer_connection_quality_test.cc +++ b/test/pc/e2e/peer_connection_quality_test.cc @@ -22,6 +22,8 @@ #include "api/rtc_event_log_output_file.h" #include "api/scoped_refptr.h" #include "api/test/metrics/metric.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/time_controller.h" #include "api/test/video_quality_analyzer_interface.h" #include "pc/sdp_utils.h" @@ -38,6 +40,8 @@ #include "test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h" #include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h" #include "test/pc/e2e/cross_media_metrics_reporter.h" +#include "test/pc/e2e/metric_metadata_keys.h" +#include "test/pc/e2e/peer_params_preprocessor.h" #include "test/pc/e2e/stats_poller.h" #include "test/pc/e2e/test_peer_factory.h" #include "test/testsupport/file_utils.h" @@ -48,8 +52,6 @@ namespace { using ::webrtc::test::ImprovementDirection; using ::webrtc::test::Unit; -using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; -using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig; constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(10); constexpr char kSignalThreadName[] = "signaling_thread"; @@ -108,7 +110,7 @@ class FixturePeerConnectionObserver : public MockPeerConnectionObserver { }; void ValidateP2PSimulcastParams( - const std::vector>& peers) { + const std::vector>& peers) { for (size_t i = 0; i < peers.size(); ++i) { Params* params = peers[i]->params(); ConfigurableParams* configurable_params = peers[i]->configurable_params(); @@ -194,12 +196,8 @@ void PeerConnectionE2EQualityTest::AddQualityMetricsReporter( } 
PeerConnectionE2EQualityTest::PeerHandle* PeerConnectionE2EQualityTest::AddPeer( - const PeerNetworkDependencies& network_dependencies, - rtc::FunctionView configurer) { - peer_configurations_.push_back(std::make_unique( - network_dependencies.network_thread, network_dependencies.network_manager, - network_dependencies.packet_socket_factory)); - configurer(peer_configurations_.back().get()); + std::unique_ptr configurer) { + peer_configurations_.push_back(std::move(configurer)); peer_handles_.push_back(PeerHandleImpl()); return &peer_handles_.back(); } @@ -214,9 +212,9 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { RTC_CHECK_EQ(peer_configurations_.size(), 2) << "Only peer to peer calls are allowed, please add 2 peers"; - std::unique_ptr alice_configurer = + std::unique_ptr alice_configurer = std::move(peer_configurations_[0]); - std::unique_ptr bob_configurer = + std::unique_ptr bob_configurer = std::move(peer_configurations_[1]); peer_configurations_.clear(); @@ -262,11 +260,15 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { RemotePeerAudioConfig::Create(bob_configurer->params()->audio_config); absl::optional bob_remote_audio_config = RemotePeerAudioConfig::Create(alice_configurer->params()->audio_config); - // Copy Alice and Bob video configs and names to correctly pass them into - // lambdas. + // Copy Alice and Bob video configs, subscriptions and names to correctly pass + // them into lambdas. 
+ VideoSubscription alice_subscription = + alice_configurer->configurable_params()->video_subscription; std::vector alice_video_configs = alice_configurer->configurable_params()->video_configs; std::string alice_name = alice_configurer->params()->name.value(); + VideoSubscription bob_subscription = + alice_configurer->configurable_params()->video_subscription; std::vector bob_video_configs = bob_configurer->configurable_params()->video_configs; std::string bob_name = bob_configurer->params()->name.value(); @@ -277,18 +279,20 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { alice_ = test_peer_factory.CreateTestPeer( std::move(alice_configurer), std::make_unique( - [this, bob_video_configs, alice_name]( + [this, alice_name, alice_subscription, bob_video_configs]( rtc::scoped_refptr transceiver) { - OnTrackCallback(alice_name, transceiver, bob_video_configs); + OnTrackCallback(alice_name, alice_subscription, transceiver, + bob_video_configs); }, [this]() { StartVideo(alice_video_sources_); }), alice_remote_audio_config, run_params.echo_emulation_config); bob_ = test_peer_factory.CreateTestPeer( std::move(bob_configurer), std::make_unique( - [this, alice_video_configs, - bob_name](rtc::scoped_refptr transceiver) { - OnTrackCallback(bob_name, transceiver, alice_video_configs); + [this, bob_name, bob_subscription, alice_video_configs]( + rtc::scoped_refptr transceiver) { + OnTrackCallback(bob_name, bob_subscription, transceiver, + alice_video_configs); }, [this]() { StartVideo(bob_video_sources_); }), bob_remote_audio_config, run_params.echo_emulation_config); @@ -448,6 +452,7 @@ std::string PeerConnectionE2EQualityTest::GetFieldTrials( void PeerConnectionE2EQualityTest::OnTrackCallback( absl::string_view peer_name, + VideoSubscription peer_subscription, rtc::scoped_refptr transceiver, std::vector remote_video_configs) { const rtc::scoped_refptr& track = @@ -456,7 +461,7 @@ void PeerConnectionE2EQualityTest::OnTrackCallback( << "Expected 2 stream ids: 
1st - sync group, 2nd - unique stream label"; std::string sync_group = transceiver->receiver()->stream_ids()[0]; std::string stream_label = transceiver->receiver()->stream_ids()[1]; - analyzer_helper_.AddTrackToStreamMapping(track->id(), stream_label, + analyzer_helper_.AddTrackToStreamMapping(track->id(), peer_name, stream_label, sync_group); if (track->kind() != MediaStreamTrackInterface::kVideoKind) { return; @@ -466,7 +471,8 @@ void PeerConnectionE2EQualityTest::OnTrackCallback( // track->kind() is kVideoKind. auto* video_track = static_cast(track.get()); std::unique_ptr> video_sink = - video_quality_analyzer_injection_helper_->CreateVideoSink(peer_name); + video_quality_analyzer_injection_helper_->CreateVideoSink( + peer_name, peer_subscription, /*report_infra_stats=*/false); video_track->AddOrUpdateSink(video_sink.get(), rtc::VideoSinkWants()); output_video_sinks_.push_back(std::move(video_sink)); } @@ -735,12 +741,18 @@ void PeerConnectionE2EQualityTest::TearDownCall() { } void PeerConnectionE2EQualityTest::ReportGeneralTestResults() { + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. metrics_logger_->LogSingleValueMetric( *alice_->params().name + "_connected", test_case_name_, alice_connected_, - Unit::kUnitless, ImprovementDirection::kBiggerIsBetter); + Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, + {{MetricMetadataKey::kPeerMetadataKey, *alice_->params().name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
metrics_logger_->LogSingleValueMetric( *bob_->params().name + "_connected", test_case_name_, bob_connected_, - Unit::kUnitless, ImprovementDirection::kBiggerIsBetter); + Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, + {{MetricMetadataKey::kPeerMetadataKey, *bob_->params().name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}); } Timestamp PeerConnectionE2EQualityTest::Now() const { diff --git a/test/pc/e2e/peer_connection_quality_test.h b/test/pc/e2e/peer_connection_quality_test.h index 532cec357f..6cbf232874 100644 --- a/test/pc/e2e/peer_connection_quality_test.h +++ b/test/pc/e2e/peer_connection_quality_test.h @@ -19,6 +19,9 @@ #include "api/task_queue/task_queue_factory.h" #include "api/test/audio_quality_analyzer_interface.h" #include "api/test/metrics/metrics_logger.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/time_controller.h" #include "api/units/time_delta.h" @@ -32,8 +35,6 @@ #include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" #include "test/pc/e2e/analyzer_helper.h" #include "test/pc/e2e/media/media_helper.h" -#include "test/pc/e2e/peer_configurer.h" -#include "test/pc/e2e/peer_connection_quality_test_params.h" #include "test/pc/e2e/sdp/sdp_changer.h" #include "test/pc/e2e/test_activities_executor.h" #include "test/pc/e2e/test_peer.h" @@ -44,11 +45,6 @@ namespace webrtc_pc_e2e { class PeerConnectionE2EQualityTest : public PeerConnectionE2EQualityTestFixture { public: - using RunParams = PeerConnectionE2EQualityTestFixture::RunParams; - using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; - using VideoSimulcastConfig = - PeerConnectionE2EQualityTestFixture::VideoSimulcastConfig; - using PeerConfigurer = PeerConnectionE2EQualityTestFixture::PeerConfigurer; using QualityMetricsReporter = 
PeerConnectionE2EQualityTestFixture::QualityMetricsReporter; @@ -75,9 +71,7 @@ class PeerConnectionE2EQualityTest void AddQualityMetricsReporter(std::unique_ptr quality_metrics_reporter) override; - PeerHandle* AddPeer( - const PeerNetworkDependencies& network_dependencies, - rtc::FunctionView configurer) override; + PeerHandle* AddPeer(std::unique_ptr configurer) override; void Run(RunParams run_params) override; TimeDelta GetRealTestDuration() const override { @@ -96,6 +90,7 @@ class PeerConnectionE2EQualityTest // enabled in Run(). std::string GetFieldTrials(const RunParams& run_params); void OnTrackCallback(absl::string_view peer_name, + VideoSubscription peer_subscription, rtc::scoped_refptr transceiver, std::vector remote_video_configs); // Have to be run on the signaling thread. @@ -126,7 +121,7 @@ class PeerConnectionE2EQualityTest std::unique_ptr executor_; test::MetricsLogger* const metrics_logger_; - std::vector> peer_configurations_; + std::vector> peer_configurations_; std::vector peer_handles_; std::unique_ptr alice_; diff --git a/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc b/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc index fedba275fe..d5f46f3ccc 100644 --- a/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc +++ b/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc @@ -17,6 +17,9 @@ #include "api/test/metrics/metrics_logger.h" #include "api/test/metrics/stdout_metrics_exporter.h" #include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/units/time_delta.h" #include "test/gmock.h" @@ -37,16 +40,7 @@ using ::webrtc::test::Metric; using ::webrtc::test::MetricsExporter; using ::webrtc::test::StdoutMetricsExporter; using ::webrtc::test::Unit; -using RunParams = - 
::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::RunParams; -using VideoConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; -using AudioConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; -using PeerConfigurer = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::PeerConfigurer; -using VideoCodecConfig = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::VideoCodecConfig; +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; // Adds a peer with some audio and video (the client should not care about // details about audio and video configs). @@ -56,16 +50,16 @@ void AddDefaultAudioVideoPeer( absl::string_view video_stream_label, const PeerNetworkDependencies& network_dependencies, PeerConnectionE2EQualityTestFixture& fixture) { - fixture.AddPeer(network_dependencies, [&](PeerConfigurer* peer) { - peer->SetName(peer_name); - AudioConfig audio{std::string(audio_stream_label)}; - audio.sync_group = std::string(peer_name); - peer->SetAudioConfig(std::move(audio)); - VideoConfig video(std::string(video_stream_label), 320, 180, 15); - video.sync_group = std::string(peer_name); - peer->AddVideoConfig(std::move(video)); - peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)}); - }); + AudioConfig audio{std::string(audio_stream_label)}; + audio.sync_group = std::string(peer_name); + VideoConfig video(std::string(video_stream_label), 320, 180, 15); + video.sync_group = std::string(peer_name); + auto peer = std::make_unique(network_dependencies); + peer->SetName(peer_name); + peer->SetAudioConfig(std::move(audio)); + peer->AddVideoConfig(std::move(video)); + peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)}); + fixture.AddPeer(std::move(peer)); } // Metric fields to assert on @@ -163,13 +157,17 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "alice_connected", .unit = Unit::kUnitless, .improvement_direction = 
ImprovementDirection::kBiggerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case", .name = "bob_connected", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from DefaultAudioQualityAnalyzer MetricValidationInfo{ @@ -177,73 +175,133 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "accelerate_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "preemptive_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = 
"speech_expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "average_jitter_buffer_delay_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_audio", .name = "preferred_buffer_size_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "bob_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "accelerate_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = 
{{MetricMetadataKey::kAudioStreamMetadataKey, + "bob_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "preemptive_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "bob_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "speech_expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "bob_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "average_jitter_buffer_delay_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "bob_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_audio", .name = "preferred_buffer_size_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "bob_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + 
{MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from DefaultVideoQualityAnalyzer MetricValidationInfo{ @@ -252,346 +310,511 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "ssim", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "transport_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "total_delay_incl_transport", .unit = Unit::kMilliseconds, .improvement_direction = 
ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "time_between_rendered_frames", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "harmonic_framerate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "encode_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, 
+ {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "encode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "time_between_freezes", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "freeze_time_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = 
"pixels_per_frame", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "min_psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "decode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "receive_to_render_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, 
"alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "dropped_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "frames_in_flight", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "rendered_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = 
"test_case/alice_video", .name = "max_skipped", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, + MetricValidationInfo{ + .test_case = "test_case/alice_video", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "alice_video"}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_video", .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kStreamMetadataKey, + {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, - 
{MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "ssim", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "transport_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ 
.test_case = "test_case/bob_video", .name = "total_delay_incl_transport", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "time_between_rendered_frames", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "harmonic_framerate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "encode_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, 
"bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "encode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "time_between_freezes", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "freeze_time_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, 
"bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "pixels_per_frame", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "min_psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "decode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "receive_to_render_time", .unit = 
Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "dropped_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "frames_in_flight", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "rendered_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, 
"alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "max_skipped", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_video", .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kStreamMetadataKey, "bob_video"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + 
{MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, + MetricValidationInfo{ + .test_case = "test_case/bob_video", + .name = "qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case", .name = "cpu_usage_%", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from StatsBasedNetworkQualityMetricsReporter MetricValidationInfo{ @@ -599,133 +822,177 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "bytes_discarded_no_receiver", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "packets_discarded_no_receiver", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "payload_bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = 
"payload_bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "packets_sent", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "average_send_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "packets_received", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "average_receive_rate", .unit = 
Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "sent_packets_loss", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "bytes_discarded_no_receiver", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "packets_discarded_no_receiver", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "payload_bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "payload_bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "bytes_sent", .unit = Unit::kBytes, 
.improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "packets_sent", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "average_send_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "packets_received", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "average_receive_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "sent_packets_loss", .unit = Unit::kUnitless, .improvement_direction = 
ImprovementDirection::kNeitherIsBetter, - .metadata = {}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from VideoQualityMetricsReporter MetricValidationInfo{ @@ -733,37 +1000,49 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "available_send_bandwidth", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "transmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice", .name = "retransmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "available_send_bandwidth", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "transmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = 
ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob", .name = "retransmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, // Metrics from CrossMediaMetricsReporter MetricValidationInfo{ @@ -771,25 +1050,49 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "audio_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = + {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/alice_alice_video", .name = "video_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = + {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_video"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_bob_audio", .name = "audio_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}}, + .metadata = + {{MetricMetadataKey::kAudioStreamMetadataKey, 
"bob_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, MetricValidationInfo{ .test_case = "test_case/bob_bob_video", .name = "video_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {}})); + .metadata = { + {MetricMetadataKey::kAudioStreamMetadataKey, "bob_video"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}})); } } // namespace diff --git a/test/pc/e2e/peer_connection_quality_test_test.cc b/test/pc/e2e/peer_connection_quality_test_test.cc new file mode 100644 index 0000000000..066fe7d8ee --- /dev/null +++ b/test/pc/e2e/peer_connection_quality_test_test.cc @@ -0,0 +1,139 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "test/pc/e2e/peer_connection_quality_test.h" + +#include +#include +#include +#include + +#include "api/test/create_network_emulation_manager.h" +#include "api/test/metrics/global_metrics_logger_and_exporter.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/units/time_delta.h" +#include "rtc_base/time_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using ::testing::Eq; +using ::testing::Test; + +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; + +// Remove files and directories in a directory non-recursively. +void CleanDir(absl::string_view dir, size_t expected_output_files_count) { + absl::optional> dir_content = + test::ReadDirectory(dir); + if (expected_output_files_count == 0) { + ASSERT_FALSE(dir_content.has_value()) << "Empty directory is expected"; + } else { + ASSERT_TRUE(dir_content.has_value()) << "Test directory is empty!"; + EXPECT_EQ(dir_content->size(), expected_output_files_count); + for (const auto& entry : *dir_content) { + if (test::DirExists(entry)) { + EXPECT_TRUE(test::RemoveDir(entry)) + << "Failed to remove sub directory: " << entry; + } else if (test::FileExists(entry)) { + EXPECT_TRUE(test::RemoveFile(entry)) + << "Failed to remove file: " << entry; + } else { + FAIL() << "Can't remove unknown file type: " << entry; + } + } + } + EXPECT_TRUE(test::RemoveDir(dir)) << "Failed to remove directory: " << dir; +} + +class PeerConnectionE2EQualityTestTest : public Test { + protected: + ~PeerConnectionE2EQualityTestTest() override = default; + + void SetUp() override { + // Create an empty temporary directory for this test. 
+ test_directory_ = test::JoinFilename( + test::OutputPath(), + "TestDir_PeerConnectionE2EQualityTestTest_" + + std::string( + testing::UnitTest::GetInstance()->current_test_info()->name())); + test::CreateDir(test_directory_); + } + + void TearDown() override { + CleanDir(test_directory_, expected_output_files_count_); + } + + void ExpectOutputFilesCount(size_t count) { + expected_output_files_count_ = count; + } + + std::string test_directory_; + size_t expected_output_files_count_ = 0; +}; + +TEST_F(PeerConnectionE2EQualityTestTest, OutputVideoIsDumpedWhenRequested) { + std::unique_ptr network_emulation = + CreateNetworkEmulationManager(TimeMode::kSimulated); + PeerConnectionE2EQualityTest fixture( + "test_case", *network_emulation->time_controller(), + /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr, + test::GetGlobalMetricsLogger()); + + EmulatedEndpoint* alice_endpoint = + network_emulation->CreateEndpoint(EmulatedEndpointConfig()); + EmulatedEndpoint* bob_endpoint = + network_emulation->CreateEndpoint(EmulatedEndpointConfig()); + + network_emulation->CreateRoute( + alice_endpoint, {network_emulation->CreateUnconstrainedEmulatedNode()}, + bob_endpoint); + network_emulation->CreateRoute( + bob_endpoint, {network_emulation->CreateUnconstrainedEmulatedNode()}, + alice_endpoint); + + EmulatedNetworkManagerInterface* alice_network = + network_emulation->CreateEmulatedNetworkManagerInterface( + {alice_endpoint}); + EmulatedNetworkManagerInterface* bob_network = + network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint}); + + VideoConfig alice_video("alice_video", 320, 180, 15); + alice_video.output_dump_options = VideoDumpOptions(test_directory_); + PeerConfigurer alice(alice_network->network_dependencies()); + alice.SetName("alice"); + alice.AddVideoConfig(std::move(alice_video)); + fixture.AddPeer(std::make_unique(std::move(alice))); + + PeerConfigurer bob(bob_network->network_dependencies()); + bob.SetName("bob"); + 
fixture.AddPeer(std::make_unique(std::move(bob))); + + fixture.Run(RunParams(TimeDelta::Seconds(2))); + + auto frame_reader = test::CreateY4mFrameReader( + test::JoinFilename(test_directory_, "alice_video_bob_320x180_15.y4m")); + EXPECT_THAT(frame_reader->num_frames(), Eq(31)); // 2 seconds 15 fps + 1 + + ExpectOutputFilesCount(1); +} + +} // namespace +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/peer_configurer.cc b/test/pc/e2e/peer_params_preprocessor.cc similarity index 80% rename from test/pc/e2e/peer_configurer.cc rename to test/pc/e2e/peer_params_preprocessor.cc index 9a51bbff37..05372125d2 100644 --- a/test/pc/e2e/peer_configurer.cc +++ b/test/pc/e2e/peer_params_preprocessor.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,11 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "test/pc/e2e/peer_configurer.h" +#include "test/pc/e2e/peer_params_preprocessor.h" #include +#include #include "absl/strings/string_view.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" +#include "api/test/peer_network_dependencies.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/arraysize.h" @@ -22,11 +27,6 @@ namespace webrtc { namespace webrtc_pc_e2e { namespace { -using AudioConfig = PeerConnectionE2EQualityTestFixture::AudioConfig; -using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; -using RunParams = PeerConnectionE2EQualityTestFixture::RunParams; -using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig; - // List of default names of generic participants according to // https://en.wikipedia.org/wiki/Alice_and_Bob constexpr absl::string_view kDefaultNames[] = {"alice", "bob", "charlie", @@ -34,42 +34,56 @@ constexpr absl::string_view kDefaultNames[] = {"alice", "bob", "charlie", } // namespace -DefaultNamesProvider::DefaultNamesProvider( - absl::string_view prefix, - rtc::ArrayView default_names) - : prefix_(prefix), default_names_(default_names) {} +class PeerParamsPreprocessor::DefaultNamesProvider { + public: + // Caller have to ensure that default names array will outlive names provider + // instance. 
+ explicit DefaultNamesProvider( + absl::string_view prefix, + rtc::ArrayView default_names = {}) + : prefix_(prefix), default_names_(default_names) {} -void DefaultNamesProvider::MaybeSetName(absl::optional& name) { - if (name.has_value()) { - known_names_.insert(name.value()); - } else { - name = GenerateName(); + void MaybeSetName(absl::optional& name) { + if (name.has_value()) { + known_names_.insert(name.value()); + } else { + name = GenerateName(); + } } -} -std::string DefaultNamesProvider::GenerateName() { - std::string name; - do { - name = GenerateNameInternal(); - } while (!known_names_.insert(name).second); - return name; -} - -std::string DefaultNamesProvider::GenerateNameInternal() { - if (counter_ < default_names_.size()) { - return std::string(default_names_[counter_++]); + private: + std::string GenerateName() { + std::string name; + do { + name = GenerateNameInternal(); + } while (!known_names_.insert(name).second); + return name; } - return prefix_ + std::to_string(counter_++); -} + + std::string GenerateNameInternal() { + if (counter_ < default_names_.size()) { + return std::string(default_names_[counter_++]); + } + return prefix_ + std::to_string(counter_++); + } + + const std::string prefix_; + const rtc::ArrayView default_names_; + + std::set known_names_; + size_t counter_ = 0; +}; PeerParamsPreprocessor::PeerParamsPreprocessor() - : peer_names_provider_("peer_", kDefaultNames) {} + : peer_names_provider_( + std::make_unique("peer_", kDefaultNames)) {} +PeerParamsPreprocessor::~PeerParamsPreprocessor() = default; void PeerParamsPreprocessor::SetDefaultValuesForMissingParams( - PeerConfigurerImpl& peer) { + PeerConfigurer& peer) { Params* params = peer.params(); ConfigurableParams* configurable_params = peer.configurable_params(); - peer_names_provider_.MaybeSetName(params->name); + peer_names_provider_->MaybeSetName(params->name); DefaultNamesProvider video_stream_names_provider(*params->name + "_auto_video_stream_label_"); for (VideoConfig& 
config : configurable_params->video_configs) { @@ -83,13 +97,11 @@ void PeerParamsPreprocessor::SetDefaultValuesForMissingParams( } if (params->video_codecs.empty()) { - params->video_codecs.push_back( - PeerConnectionE2EQualityTestFixture::VideoCodecConfig( - cricket::kVp8CodecName)); + params->video_codecs.push_back(VideoCodecConfig(cricket::kVp8CodecName)); } } -void PeerParamsPreprocessor::ValidateParams(const PeerConfigurerImpl& peer) { +void PeerParamsPreprocessor::ValidateParams(const PeerConfigurer& peer) { const Params& p = peer.params(); RTC_CHECK_GT(p.video_encoder_bitrate_multiplier, 0.0); // Each peer should at least support 1 video codec. diff --git a/test/pc/e2e/peer_params_preprocessor.h b/test/pc/e2e/peer_params_preprocessor.h new file mode 100644 index 0000000000..c222811546 --- /dev/null +++ b/test/pc/e2e/peer_params_preprocessor.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef TEST_PC_E2E_PEER_PARAMS_PREPROCESSOR_H_ +#define TEST_PC_E2E_PEER_PARAMS_PREPROCESSOR_H_ + +#include +#include +#include + +#include "api/test/pclf/peer_configurer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +class PeerParamsPreprocessor { + public: + PeerParamsPreprocessor(); + ~PeerParamsPreprocessor(); + + // Set missing params to default values if it is required: + // * Generate video stream labels if some of them are missing + // * Generate audio stream labels if some of them are missing + // * Set video source generation mode if it is not specified + // * Video codecs under test + void SetDefaultValuesForMissingParams(PeerConfigurer& peer); + + // Validate peer's parameters, also ensure uniqueness of all video stream + // labels. + void ValidateParams(const PeerConfigurer& peer); + + private: + class DefaultNamesProvider; + std::unique_ptr peer_names_provider_; + + std::set peer_names_; + std::set video_labels_; + std::set audio_labels_; + std::set video_sync_groups_; + std::set audio_sync_groups_; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_PEER_PARAMS_PREPROCESSOR_H_ diff --git a/test/pc/e2e/sdp/sdp_changer.cc b/test/pc/e2e/sdp/sdp_changer.cc index b3ee3b78bb..af55f29175 100644 --- a/test/pc/e2e/sdp/sdp_changer.cc +++ b/test/pc/e2e/sdp/sdp_changer.cc @@ -14,6 +14,7 @@ #include "absl/memory/memory.h" #include "api/jsep_session_description.h" +#include "api/test/pclf/media_configuration.h" #include "media/base/media_constants.h" #include "p2p/base/p2p_constants.h" #include "pc/sdp_utils.h" @@ -23,8 +24,6 @@ namespace webrtc { namespace webrtc_pc_e2e { namespace { -using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig; - std::string CodecRequiredParamsToString( const std::map& codec_required_params) { rtc::StringBuilder out; @@ -167,7 +166,7 @@ void SignalingInterceptor::FillSimulcastContext( LocalAndRemoteSdp SignalingInterceptor::PatchOffer( std::unique_ptr offer, - 
const PeerConnectionE2EQualityTestFixture::VideoCodecConfig& first_codec) { + const VideoCodecConfig& first_codec) { for (auto& content : offer->description()->contents()) { context_.mids_order.push_back(content.mid()); cricket::MediaContentDescription* media_desc = content.media_description(); @@ -364,7 +363,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp9Offer( LocalAndRemoteSdp SignalingInterceptor::PatchAnswer( std::unique_ptr answer, - const PeerConnectionE2EQualityTestFixture::VideoCodecConfig& first_codec) { + const VideoCodecConfig& first_codec) { for (auto& content : answer->description()->contents()) { cricket::MediaContentDescription* media_desc = content.media_description(); if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) { diff --git a/test/pc/e2e/sdp/sdp_changer.h b/test/pc/e2e/sdp/sdp_changer.h index 115ed5ba2c..6f68d03f52 100644 --- a/test/pc/e2e/sdp/sdp_changer.h +++ b/test/pc/e2e/sdp/sdp_changer.h @@ -20,7 +20,7 @@ #include "api/array_view.h" #include "api/jsep.h" #include "api/rtp_parameters.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" #include "media/base/rid_description.h" #include "pc/session_description.h" #include "pc/simulcast_description.h" @@ -40,8 +40,7 @@ namespace webrtc_pc_e2e { // vector and they will be added in the same order, as they were in // `supported_codecs`. 
std::vector FilterVideoCodecCapabilities( - rtc::ArrayView - video_codecs, + rtc::ArrayView video_codecs, bool use_rtx, bool use_ulpfec, bool use_flexfec, @@ -77,10 +76,10 @@ class SignalingInterceptor { LocalAndRemoteSdp PatchOffer( std::unique_ptr offer, - const PeerConnectionE2EQualityTestFixture::VideoCodecConfig& first_codec); + const VideoCodecConfig& first_codec); LocalAndRemoteSdp PatchAnswer( std::unique_ptr answer, - const PeerConnectionE2EQualityTestFixture::VideoCodecConfig& first_codec); + const VideoCodecConfig& first_codec); std::vector> PatchOffererIceCandidates( rtc::ArrayView candidates); diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc index 3017a87160..65dca5b518 100644 --- a/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc @@ -22,6 +22,7 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/stats/rtc_stats.h" #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/metric.h" @@ -34,7 +35,9 @@ #include "rtc_base/ip_address.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" #include "system_wrappers/include/field_trial.h" +#include "test/pc/e2e/metric_metadata_keys.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -43,6 +46,9 @@ namespace { using ::webrtc::test::ImprovementDirection; using ::webrtc::test::Unit; +using NetworkLayerStats = + StatsBasedNetworkQualityMetricsReporter::NetworkLayerStats; + constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1); // Field trial which controls whether to report standard-compliant bytes @@ -50,16 +56,14 @@ constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1); // in bytes sent or received. 
constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats"; -std::unique_ptr PopulateStats( - std::vector endpoints, - NetworkEmulationManager* network_emulation) { +EmulatedNetworkStats PopulateStats(std::vector endpoints, + NetworkEmulationManager* network_emulation) { rtc::Event stats_loaded; - std::unique_ptr stats; - network_emulation->GetStats(endpoints, - [&](std::unique_ptr s) { - stats = std::move(s); - stats_loaded.Set(); - }); + EmulatedNetworkStats stats; + network_emulation->GetStats(endpoints, [&](EmulatedNetworkStats s) { + stats = std::move(s); + stats_loaded.Set(); + }); bool stats_received = stats_loaded.Wait(kStatsWaitTimeout); RTC_CHECK(stats_received); return stats; @@ -79,6 +83,83 @@ std::map PopulateIpToPeer( return out; } +// Accumulates emulated network stats being executed on the network thread. +// When all stats are collected stores it in thread safe variable. +class EmulatedNetworkStatsAccumulator { + public: + // `expected_stats_count` - the number of calls to + // AddEndpointStats/AddUplinkStats/AddDownlinkStats the accumulator is going + // to wait. If called more than expected, the program will crash. + explicit EmulatedNetworkStatsAccumulator(size_t expected_stats_count) + : not_collected_stats_count_(expected_stats_count) { + RTC_DCHECK_GE(not_collected_stats_count_, 0); + if (not_collected_stats_count_ == 0) { + all_stats_collected_.Set(); + } + sequence_checker_.Detach(); + } + + // Has to be executed on network thread. + void AddEndpointStats(std::string peer_name, EmulatedNetworkStats stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + n_stats_[peer_name].endpoints_stats = std::move(stats); + DecrementNotCollectedStatsCount(); + } + + // Has to be executed on network thread. 
+ void AddUplinkStats(std::string peer_name, EmulatedNetworkNodeStats stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + n_stats_[peer_name].uplink_stats = std::move(stats); + DecrementNotCollectedStatsCount(); + } + + // Has to be executed on network thread. + void AddDownlinkStats(std::string peer_name, EmulatedNetworkNodeStats stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + n_stats_[peer_name].downlink_stats = std::move(stats); + DecrementNotCollectedStatsCount(); + } + + // Can be executed on any thread. + // Returns true if count down was completed and false if timeout elapsed + // before. + bool Wait(TimeDelta timeout) { return all_stats_collected_.Wait(timeout); } + + // Can be called once. Returns all collected stats by moving underlying + // object. + std::map ReleaseStats() { + RTC_DCHECK(!stats_released_); + stats_released_ = true; + MutexLock lock(&mutex_); + return std::move(stats_); + } + + private: + void DecrementNotCollectedStatsCount() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_CHECK_GT(not_collected_stats_count_, 0) + << "All stats are already collected"; + not_collected_stats_count_--; + if (not_collected_stats_count_ == 0) { + MutexLock lock(&mutex_); + stats_ = std::move(n_stats_); + all_stats_collected_.Set(); + } + } + + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + size_t not_collected_stats_count_ RTC_GUARDED_BY(sequence_checker_); + // Collected on the network thread. Moved into `stats_` after all stats are + // collected. + std::map n_stats_ + RTC_GUARDED_BY(sequence_checker_); + + rtc::Event all_stats_collected_; + Mutex mutex_; + std::map stats_ RTC_GUARDED_BY(mutex_); + bool stats_released_ = false; +}; + } // namespace StatsBasedNetworkQualityMetricsReporter:: @@ -105,20 +186,24 @@ void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: MutexLock lock(&mutex_); // Check that network stats are clean before test execution. 
for (const auto& entry : peer_endpoints_) { - std::unique_ptr stats = + EmulatedNetworkStats stats = PopulateStats(entry.second, network_emulation_); - RTC_CHECK_EQ(stats->PacketsSent(), 0); - RTC_CHECK_EQ(stats->PacketsReceived(), 0); + RTC_CHECK_EQ(stats.overall_outgoing_stats.packets_sent, 0); + RTC_CHECK_EQ(stats.overall_incoming_stats.packets_received, 0); } } void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: AddPeer(absl::string_view peer_name, - std::vector endpoints) { + std::vector endpoints, + std::vector uplink, + std::vector downlink) { MutexLock lock(&mutex_); // When new peer is added not in the constructor, don't check if it has empty // stats, because their endpoint could be used for traffic before. peer_endpoints_.emplace(peer_name, std::move(endpoints)); + peer_uplinks_.emplace(peer_name, std::move(uplink)); + peer_downlinks_.emplace(peer_name, std::move(downlink)); for (const EmulatedEndpoint* const endpoint : endpoints) { RTC_CHECK(ip_to_peer_.find(endpoint->GetPeerLocalAddress()) == ip_to_peer_.end()) @@ -127,19 +212,43 @@ void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: } } -std::map +std::map StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: GetStats() { MutexLock lock(&mutex_); - std::map peer_to_stats; + EmulatedNetworkStatsAccumulator stats_accumulator( + peer_endpoints_.size() + peer_uplinks_.size() + peer_downlinks_.size()); + for (const auto& entry : peer_endpoints_) { + network_emulation_->GetStats( + entry.second, [&stats_accumulator, + peer = entry.first](EmulatedNetworkStats s) mutable { + stats_accumulator.AddEndpointStats(std::move(peer), std::move(s)); + }); + } + for (const auto& entry : peer_uplinks_) { + network_emulation_->GetStats( + entry.second, [&stats_accumulator, + peer = entry.first](EmulatedNetworkNodeStats s) mutable { + stats_accumulator.AddUplinkStats(std::move(peer), std::move(s)); + }); + } + for (const auto& entry : peer_downlinks_) { + 
network_emulation_->GetStats( + entry.second, [&stats_accumulator, + peer = entry.first](EmulatedNetworkNodeStats s) mutable { + stats_accumulator.AddDownlinkStats(std::move(peer), std::move(s)); + }); + } + bool stats_collected = stats_accumulator.Wait(kStatsWaitTimeout); + RTC_CHECK(stats_collected); + std::map peer_to_stats = + stats_accumulator.ReleaseStats(); std::map> sender_to_receivers; for (const auto& entry : peer_endpoints_) { - NetworkLayerStats stats; - stats.stats = PopulateStats(entry.second, network_emulation_); const std::string& peer_name = entry.first; + const NetworkLayerStats& stats = peer_to_stats[peer_name]; for (const auto& income_stats_entry : - stats.stats->IncomingStatsPerSource()) { + stats.endpoints_stats.incoming_stats_per_source) { const rtc::IPAddress& source_ip = income_stats_entry.first; auto it = ip_to_peer_.find(source_ip); if (it == ip_to_peer_.end()) { @@ -148,7 +257,6 @@ StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: } sender_to_receivers[it->second].push_back(peer_name); } - peer_to_stats.emplace(peer_name, std::move(stats)); } for (auto& entry : peer_to_stats) { const std::vector& receivers = @@ -162,7 +270,17 @@ StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: void StatsBasedNetworkQualityMetricsReporter::AddPeer( absl::string_view peer_name, std::vector endpoints) { - collector_.AddPeer(peer_name, std::move(endpoints)); + collector_.AddPeer(peer_name, std::move(endpoints), /*uplink=*/{}, + /*downlink=*/{}); +} + +void StatsBasedNetworkQualityMetricsReporter::AddPeer( + absl::string_view peer_name, + std::vector endpoints, + std::vector uplink, + std::vector downlink) { + collector_.AddPeer(peer_name, std::move(endpoints), std::move(uplink), + std::move(downlink)); } void StatsBasedNetworkQualityMetricsReporter::Start( @@ -252,48 +370,56 @@ void StatsBasedNetworkQualityMetricsReporter::ReportStats( const NetworkLayerStats& network_layer_stats, int64_t packet_loss, const Timestamp& 
end_time) { + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + std::map metric_metadata{ + {MetricMetadataKey::kPeerMetadataKey, pc_label}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; metrics_logger_->LogSingleValueMetric( "bytes_discarded_no_receiver", GetTestCaseName(pc_label), - network_layer_stats.stats->BytesDropped().bytes(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + network_layer_stats.endpoints_stats.overall_incoming_stats + .bytes_discarded_no_receiver.bytes(), + Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "packets_discarded_no_receiver", GetTestCaseName(pc_label), - network_layer_stats.stats->PacketsDropped(), Unit::kUnitless, - ImprovementDirection::kNeitherIsBetter); + network_layer_stats.endpoints_stats.overall_incoming_stats + .packets_discarded_no_receiver, + Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "payload_bytes_received", GetTestCaseName(pc_label), pc_stats.payload_received.bytes(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "payload_bytes_sent", GetTestCaseName(pc_label), pc_stats.payload_sent.bytes(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "bytes_sent", GetTestCaseName(pc_label), pc_stats.total_sent.bytes(), - Unit::kBytes, ImprovementDirection::kNeitherIsBetter); + Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "packets_sent", GetTestCaseName(pc_label), pc_stats.packets_sent, - Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); + Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata); 
metrics_logger_->LogSingleValueMetric( "average_send_rate", GetTestCaseName(pc_label), (pc_stats.total_sent / (end_time - start_time_)).kbps(), - Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter); + Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter, + metric_metadata); metrics_logger_->LogSingleValueMetric( "bytes_received", GetTestCaseName(pc_label), pc_stats.total_received.bytes(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "packets_received", GetTestCaseName(pc_label), pc_stats.packets_received, - Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); + Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "average_receive_rate", GetTestCaseName(pc_label), (pc_stats.total_received / (end_time - start_time_)).kbps(), - Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter); + Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter, + metric_metadata); metrics_logger_->LogSingleValueMetric( "sent_packets_loss", GetTestCaseName(pc_label), packet_loss, - Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); + Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata); } std::string StatsBasedNetworkQualityMetricsReporter::GetTestCaseName( @@ -306,102 +432,158 @@ std::string StatsBasedNetworkQualityMetricsReporter::GetTestCaseName( void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats( const std::string& peer_name, const NetworkLayerStats& stats) const { - DataRate average_send_rate = stats.stats->PacketsSent() >= 2 - ? stats.stats->AverageSendRate() - : DataRate::Zero(); - DataRate average_receive_rate = stats.stats->PacketsReceived() >= 2 - ? stats.stats->AverageReceiveRate() - : DataRate::Zero(); + DataRate average_send_rate = + stats.endpoints_stats.overall_outgoing_stats.packets_sent >= 2 + ? 
stats.endpoints_stats.overall_outgoing_stats.AverageSendRate() + : DataRate::Zero(); + DataRate average_receive_rate = + stats.endpoints_stats.overall_incoming_stats.packets_received >= 2 + ? stats.endpoints_stats.overall_incoming_stats.AverageReceiveRate() + : DataRate::Zero(); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + std::map metric_metadata{ + {MetricMetadataKey::kPeerMetadataKey, peer_name}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; rtc::StringBuilder log; log << "Raw network layer statistic for [" << peer_name << "]:\n" << "Local IPs:\n"; - std::vector local_ips = stats.stats->LocalAddresses(); - for (size_t i = 0; i < local_ips.size(); ++i) { - log << " " << local_ips[i].ToString() << "\n"; + for (size_t i = 0; i < stats.endpoints_stats.local_addresses.size(); ++i) { + log << " " << stats.endpoints_stats.local_addresses[i].ToString() << "\n"; } - if (!stats.stats->SentPacketsSizeCounter().IsEmpty()) { - metrics_logger_->LogMetric("sent_packets_size", GetTestCaseName(peer_name), - stats.stats->SentPacketsSizeCounter(), - Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + if (!stats.endpoints_stats.overall_outgoing_stats.sent_packets_size + .IsEmpty()) { + metrics_logger_->LogMetric( + "sent_packets_size", GetTestCaseName(peer_name), + stats.endpoints_stats.overall_outgoing_stats.sent_packets_size, + Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); } - if (!stats.stats->ReceivedPacketsSizeCounter().IsEmpty()) { + if (!stats.endpoints_stats.overall_incoming_stats.received_packets_size + .IsEmpty()) { metrics_logger_->LogMetric( "received_packets_size", GetTestCaseName(peer_name), - stats.stats->ReceivedPacketsSizeCounter(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + stats.endpoints_stats.overall_incoming_stats.received_packets_size, + Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); } - if 
(!stats.stats->DroppedPacketsSizeCounter().IsEmpty()) { + if (!stats.endpoints_stats.overall_incoming_stats + .packets_discarded_no_receiver_size.IsEmpty()) { metrics_logger_->LogMetric( - "dropped_packets_size", GetTestCaseName(peer_name), - stats.stats->DroppedPacketsSizeCounter(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + "packets_discarded_no_receiver_size", GetTestCaseName(peer_name), + stats.endpoints_stats.overall_incoming_stats + .packets_discarded_no_receiver_size, + Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata); } - if (!stats.stats->SentPacketsQueueWaitTimeUs().IsEmpty()) { + if (!stats.endpoints_stats.sent_packets_queue_wait_time_us.IsEmpty()) { metrics_logger_->LogMetric( "sent_packets_queue_wait_time_us", GetTestCaseName(peer_name), - stats.stats->SentPacketsQueueWaitTimeUs(), Unit::kUnitless, - ImprovementDirection::kNeitherIsBetter); + stats.endpoints_stats.sent_packets_queue_wait_time_us, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter, metric_metadata); } log << "Send statistic:\n" - << " packets: " << stats.stats->PacketsSent() - << " bytes: " << stats.stats->BytesSent().bytes() + << " packets: " + << stats.endpoints_stats.overall_outgoing_stats.packets_sent << " bytes: " + << stats.endpoints_stats.overall_outgoing_stats.bytes_sent.bytes() << " avg_rate (bytes/sec): " << average_send_rate.bytes_per_sec() << " avg_rate (bps): " << average_send_rate.bps() << "\n" << "Send statistic per destination:\n"; - for (const auto& entry : stats.stats->OutgoingStatsPerDestination()) { - DataRate source_average_send_rate = entry.second->PacketsSent() >= 2 - ? entry.second->AverageSendRate() + for (const auto& entry : + stats.endpoints_stats.outgoing_stats_per_destination) { + DataRate source_average_send_rate = entry.second.packets_sent >= 2 + ? 
entry.second.AverageSendRate() : DataRate::Zero(); log << "(" << entry.first.ToString() << "):\n" - << " packets: " << entry.second->PacketsSent() - << " bytes: " << entry.second->BytesSent().bytes() + << " packets: " << entry.second.packets_sent + << " bytes: " << entry.second.bytes_sent.bytes() << " avg_rate (bytes/sec): " << source_average_send_rate.bytes_per_sec() << " avg_rate (bps): " << source_average_send_rate.bps() << "\n"; - if (!entry.second->SentPacketsSizeCounter().IsEmpty()) { + if (!entry.second.sent_packets_size.IsEmpty()) { metrics_logger_->LogMetric( "sent_packets_size", GetTestCaseName(peer_name + "/" + entry.first.ToString()), - stats.stats->SentPacketsSizeCounter(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + entry.second.sent_packets_size, Unit::kBytes, + ImprovementDirection::kNeitherIsBetter, metric_metadata); } } + if (!stats.uplink_stats.packet_transport_time.IsEmpty()) { + log << "[Debug stats] packet_transport_time=(" + << stats.uplink_stats.packet_transport_time.GetAverage() << ", " + << stats.uplink_stats.packet_transport_time.GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "uplink_packet_transport_time", GetTestCaseName(peer_name), + stats.uplink_stats.packet_transport_time, Unit::kMilliseconds, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + if (!stats.uplink_stats.size_to_packet_transport_time.IsEmpty()) { + log << "[Debug stats] size_to_packet_transport_time=(" + << stats.uplink_stats.size_to_packet_transport_time.GetAverage() << ", " + << stats.uplink_stats.size_to_packet_transport_time + .GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "uplink_size_to_packet_transport_time", GetTestCaseName(peer_name), + stats.uplink_stats.size_to_packet_transport_time, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + log << "Receive statistic:\n" - << " packets: " << stats.stats->PacketsReceived() - << " bytes: " << 
stats.stats->BytesReceived().bytes() + << " packets: " + << stats.endpoints_stats.overall_incoming_stats.packets_received + << " bytes: " + << stats.endpoints_stats.overall_incoming_stats.bytes_received.bytes() << " avg_rate (bytes/sec): " << average_receive_rate.bytes_per_sec() << " avg_rate (bps): " << average_receive_rate.bps() << "\n" << "Receive statistic per source:\n"; - for (const auto& entry : stats.stats->IncomingStatsPerSource()) { + for (const auto& entry : stats.endpoints_stats.incoming_stats_per_source) { DataRate source_average_receive_rate = - entry.second->PacketsReceived() >= 2 - ? entry.second->AverageReceiveRate() - : DataRate::Zero(); + entry.second.packets_received >= 2 ? entry.second.AverageReceiveRate() + : DataRate::Zero(); log << "(" << entry.first.ToString() << "):\n" - << " packets: " << entry.second->PacketsReceived() - << " bytes: " << entry.second->BytesReceived().bytes() + << " packets: " << entry.second.packets_received + << " bytes: " << entry.second.bytes_received.bytes() << " avg_rate (bytes/sec): " << source_average_receive_rate.bytes_per_sec() << " avg_rate (bps): " << source_average_receive_rate.bps() << "\n"; - if (!entry.second->ReceivedPacketsSizeCounter().IsEmpty()) { + if (!entry.second.received_packets_size.IsEmpty()) { metrics_logger_->LogMetric( "received_packets_size", GetTestCaseName(peer_name + "/" + entry.first.ToString()), - stats.stats->ReceivedPacketsSizeCounter(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + entry.second.received_packets_size, Unit::kBytes, + ImprovementDirection::kNeitherIsBetter, metric_metadata); } - if (!entry.second->DroppedPacketsSizeCounter().IsEmpty()) { + if (!entry.second.packets_discarded_no_receiver_size.IsEmpty()) { metrics_logger_->LogMetric( - "dropped_packets_size", + "packets_discarded_no_receiver_size", GetTestCaseName(peer_name + "/" + entry.first.ToString()), - stats.stats->DroppedPacketsSizeCounter(), Unit::kBytes, - ImprovementDirection::kNeitherIsBetter); + 
entry.second.packets_discarded_no_receiver_size, Unit::kBytes, + ImprovementDirection::kNeitherIsBetter, metric_metadata); } } + if (!stats.downlink_stats.packet_transport_time.IsEmpty()) { + log << "[Debug stats] packet_transport_time=(" + << stats.downlink_stats.packet_transport_time.GetAverage() << ", " + << stats.downlink_stats.packet_transport_time.GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "downlink_packet_transport_time", GetTestCaseName(peer_name), + stats.downlink_stats.packet_transport_time, Unit::kMilliseconds, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } + if (!stats.downlink_stats.size_to_packet_transport_time.IsEmpty()) { + log << "[Debug stats] size_to_packet_transport_time=(" + << stats.downlink_stats.size_to_packet_transport_time.GetAverage() + << ", " + << stats.downlink_stats.size_to_packet_transport_time + .GetStandardDeviation() + << ")\n"; + metrics_logger_->LogMetric( + "downlink_size_to_packet_transport_time", GetTestCaseName(peer_name), + stats.downlink_stats.size_to_packet_transport_time, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter, metric_metadata); + } RTC_LOG(LS_INFO) << log.str(); } diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter.h b/test/pc/e2e/stats_based_network_quality_metrics_reporter.h index c89a3b27a7..60daf40c8c 100644 --- a/test/pc/e2e/stats_based_network_quality_metrics_reporter.h +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter.h @@ -37,6 +37,14 @@ namespace webrtc_pc_e2e { class StatsBasedNetworkQualityMetricsReporter : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter { public: + // Emulated network layer stats for single peer. 
+ struct NetworkLayerStats { + EmulatedNetworkStats endpoints_stats; + EmulatedNetworkNodeStats uplink_stats; + EmulatedNetworkNodeStats downlink_stats; + std::set receivers; + }; + // `networks` map peer name to network to report network layer stability stats // and to log network layer metrics. StatsBasedNetworkQualityMetricsReporter( @@ -47,6 +55,10 @@ class StatsBasedNetworkQualityMetricsReporter void AddPeer(absl::string_view peer_name, std::vector endpoints); + void AddPeer(absl::string_view peer_name, + std::vector endpoints, + std::vector uplink, + std::vector downlink); // Network stats must be empty when this method will be invoked. void Start(absl::string_view test_case_name, @@ -71,11 +83,6 @@ class StatsBasedNetworkQualityMetricsReporter int64_t packets_sent = 0; }; - struct NetworkLayerStats { - std::unique_ptr stats; - std::set receivers; - }; - class NetworkLayerStatsCollector { public: NetworkLayerStatsCollector( @@ -85,7 +92,9 @@ class StatsBasedNetworkQualityMetricsReporter void Start(); void AddPeer(absl::string_view peer_name, - std::vector endpoints); + std::vector endpoints, + std::vector uplink, + std::vector downlink); std::map GetStats(); @@ -93,6 +102,10 @@ class StatsBasedNetworkQualityMetricsReporter Mutex mutex_; std::map> peer_endpoints_ RTC_GUARDED_BY(mutex_); + std::map> peer_uplinks_ + RTC_GUARDED_BY(mutex_); + std::map> peer_downlinks_ + RTC_GUARDED_BY(mutex_); std::map ip_to_peer_ RTC_GUARDED_BY(mutex_); NetworkEmulationManager* const network_emulation_; }; diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc b/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc new file mode 100644 index 0000000000..be55149482 --- /dev/null +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/test/create_network_emulation_manager.h" +#include "api/test/create_peer_connection_quality_test_frame_generator.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/test/metrics/stdout_metrics_exporter.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/units/time_delta.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/pc/e2e/metric_metadata_keys.h" +#include "test/pc/e2e/peer_connection_quality_test.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using ::testing::UnorderedElementsAre; + +using ::webrtc::test::DefaultMetricsLogger; +using ::webrtc::test::ImprovementDirection; +using ::webrtc::test::Metric; +using ::webrtc::test::Unit; +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; + +// Adds a peer with some audio and video (the client should not care about +// details about audio and video configs). 
+void AddDefaultAudioVideoPeer( + absl::string_view peer_name, + absl::string_view audio_stream_label, + absl::string_view video_stream_label, + const PeerNetworkDependencies& network_dependencies, + PeerConnectionE2EQualityTestFixture& fixture) { + AudioConfig audio{std::string(audio_stream_label)}; + audio.sync_group = std::string(peer_name); + VideoConfig video(std::string(video_stream_label), 320, 180, 15); + video.sync_group = std::string(peer_name); + auto peer = std::make_unique(network_dependencies); + peer->SetName(peer_name); + peer->SetAudioConfig(std::move(audio)); + peer->AddVideoConfig(std::move(video)); + peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)}); + fixture.AddPeer(std::move(peer)); +} + +absl::optional FindMeetricByName(absl::string_view name, + rtc::ArrayView metrics) { + for (const Metric& metric : metrics) { + if (metric.name == name) { + return metric; + } + } + return absl::nullopt; +} + +TEST(StatsBasedNetworkQualityMetricsReporterTest, DebugStatsAreCollected) { + std::unique_ptr network_emulation = + CreateNetworkEmulationManager(TimeMode::kSimulated, + EmulatedNetworkStatsGatheringMode::kDebug); + DefaultMetricsLogger metrics_logger( + network_emulation->time_controller()->GetClock()); + PeerConnectionE2EQualityTest fixture( + "test_case", *network_emulation->time_controller(), + /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr, + &metrics_logger); + + EmulatedEndpoint* alice_endpoint = + network_emulation->CreateEndpoint(EmulatedEndpointConfig()); + EmulatedEndpoint* bob_endpoint = + network_emulation->CreateEndpoint(EmulatedEndpointConfig()); + + EmulatedNetworkNode* alice_link = network_emulation->CreateEmulatedNode( + BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500}); + network_emulation->CreateRoute(alice_endpoint, {alice_link}, bob_endpoint); + EmulatedNetworkNode* bob_link = network_emulation->CreateEmulatedNode( + BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500}); + 
network_emulation->CreateRoute(bob_endpoint, {bob_link}, alice_endpoint); + + EmulatedNetworkManagerInterface* alice_network = + network_emulation->CreateEmulatedNetworkManagerInterface( + {alice_endpoint}); + EmulatedNetworkManagerInterface* bob_network = + network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint}); + + AddDefaultAudioVideoPeer("alice", "alice_audio", "alice_video", + alice_network->network_dependencies(), fixture); + AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", + bob_network->network_dependencies(), fixture); + + auto network_stats_reporter = + std::make_unique( + /*peer_endpoints=*/std::map>{}, + network_emulation.get(), &metrics_logger); + network_stats_reporter->AddPeer("alice", alice_network->endpoints(), + /*uplink=*/{alice_link}, + /*downlink=*/{bob_link}); + network_stats_reporter->AddPeer("bob", bob_network->endpoints(), + /*uplink=*/{bob_link}, + /*downlink=*/{alice_link}); + fixture.AddQualityMetricsReporter(std::move(network_stats_reporter)); + + fixture.Run(RunParams(TimeDelta::Seconds(4))); + + std::vector metrics = metrics_logger.GetCollectedMetrics(); + absl::optional uplink_packet_transport_time = + FindMeetricByName("uplink_packet_transport_time", metrics); + ASSERT_TRUE(uplink_packet_transport_time.has_value()); + ASSERT_FALSE(uplink_packet_transport_time->time_series.samples.empty()); + absl::optional uplink_size_to_packet_transport_time = + FindMeetricByName("uplink_size_to_packet_transport_time", metrics); + ASSERT_TRUE(uplink_size_to_packet_transport_time.has_value()); + ASSERT_FALSE( + uplink_size_to_packet_transport_time->time_series.samples.empty()); + absl::optional downlink_packet_transport_time = + FindMeetricByName("downlink_packet_transport_time", metrics); + ASSERT_TRUE(downlink_packet_transport_time.has_value()); + ASSERT_FALSE(downlink_packet_transport_time->time_series.samples.empty()); + absl::optional downlink_size_to_packet_transport_time = + 
FindMeetricByName("downlink_size_to_packet_transport_time", metrics); + ASSERT_TRUE(downlink_size_to_packet_transport_time.has_value()); + ASSERT_FALSE( + downlink_size_to_packet_transport_time->time_series.samples.empty()); +} + +} // namespace +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/test_peer.cc b/test/pc/e2e/test_peer.cc index d978f10665..b3a9e1c164 100644 --- a/test/pc/e2e/test_peer.cc +++ b/test/pc/e2e/test_peer.cc @@ -15,17 +15,14 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "api/scoped_refptr.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/peer_configurer.h" #include "modules/audio_processing/include/audio_processing.h" namespace webrtc { namespace webrtc_pc_e2e { namespace { -using VideoSubscription = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::VideoSubscription; -using VideoConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; - class SetRemoteDescriptionCallback : public webrtc::SetRemoteDescriptionObserverInterface { public: @@ -136,7 +133,7 @@ TestPeer::TestPeer( std::unique_ptr observer, Params params, ConfigurableParams configurable_params, - std::vector video_sources, + std::vector video_sources, rtc::scoped_refptr audio_processing, std::unique_ptr worker_thread) : params_(std::move(params)), diff --git a/test/pc/e2e/test_peer.h b/test/pc/e2e/test_peer.h index 02e5528a6c..1088871817 100644 --- a/test/pc/e2e/test_peer.h +++ b/test/pc/e2e/test_peer.h @@ -22,12 +22,12 @@ #include "api/set_remote_description_observer_interface.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/test/frame_generator_interface.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "pc/peer_connection_wrapper.h" #include "rtc_base/logging.h" 
#include "rtc_base/synchronization/mutex.h" -#include "test/pc/e2e/peer_configurer.h" -#include "test/pc/e2e/peer_connection_quality_test_params.h" #include "test/pc/e2e/stats_provider.h" namespace webrtc { @@ -41,16 +41,15 @@ class TestPeer final : public StatsProvider { const Params& params() const { return params_; } ConfigurableParams configurable_params() const; - void AddVideoConfig(PeerConnectionE2EQualityTestFixture::VideoConfig config); + void AddVideoConfig(VideoConfig config); // Removes video config with specified name. Crashes if the config with // specified name isn't found. void RemoveVideoConfig(absl::string_view stream_label); - void SetVideoSubscription( - PeerConnectionE2EQualityTestFixture::VideoSubscription subscription); + void SetVideoSubscription(VideoSubscription subscription); void GetStats(RTCStatsCollectorCallback* callback) override; - PeerConfigurerImpl::VideoSource ReleaseVideoSource(size_t i) { + PeerConfigurer::VideoSource ReleaseVideoSource(size_t i) { RTC_CHECK(wrapper_) << "TestPeer is already closed"; return std::move(video_sources_[i]); } @@ -157,7 +156,7 @@ class TestPeer final : public StatsProvider { std::unique_ptr observer, Params params, ConfigurableParams configurable_params, - std::vector video_sources, + std::vector video_sources, rtc::scoped_refptr audio_processing, std::unique_ptr worker_thread); @@ -177,7 +176,7 @@ class TestPeer final : public StatsProvider { // worker thread and network thread. 
std::unique_ptr worker_thread_; std::unique_ptr wrapper_; - std::vector video_sources_; + std::vector video_sources_; rtc::scoped_refptr audio_processing_; std::vector> remote_ice_candidates_; diff --git a/test/pc/e2e/test_peer_factory.cc b/test/pc/e2e/test_peer_factory.cc index aa9a731cba..7fc12f2c11 100644 --- a/test/pc/e2e/test_peer_factory.cc +++ b/test/pc/e2e/test_peer_factory.cc @@ -15,6 +15,8 @@ #include "absl/strings/string_view.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/test/create_time_controller.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/time_controller.h" #include "api/transport/field_trial_based_config.h" #include "api/video_codecs/builtin_video_decoder_factory.h" @@ -26,19 +28,12 @@ #include "rtc_base/thread.h" #include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h" #include "test/pc/e2e/echo/echo_emulation.h" -#include "test/pc/e2e/peer_configurer.h" #include "test/testsupport/copy_to_file_audio_capturer.h" namespace webrtc { namespace webrtc_pc_e2e { namespace { -using AudioConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; -using VideoConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; -using EchoEmulationConfig = ::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::EchoEmulationConfig; using EmulatedSFUConfigMap = ::webrtc::webrtc_pc_e2e::QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap; @@ -295,17 +290,16 @@ absl::optional RemotePeerAudioConfig::Create( } std::unique_ptr TestPeerFactory::CreateTestPeer( - std::unique_ptr configurer, + std::unique_ptr configurer, std::unique_ptr observer, absl::optional remote_audio_config, - absl::optional - echo_emulation_config) { + absl::optional echo_emulation_config) { std::unique_ptr components = configurer->ReleaseComponents(); std::unique_ptr params = configurer->ReleaseParams(); std::unique_ptr 
configurable_params = configurer->ReleaseConfigurableParams(); - std::vector video_sources = + std::vector video_sources = configurer->ReleaseVideoSources(); RTC_DCHECK(components); RTC_DCHECK(params); diff --git a/test/pc/e2e/test_peer_factory.h b/test/pc/e2e/test_peer_factory.h index 8d78e2f8d9..f2698e2a15 100644 --- a/test/pc/e2e/test_peer_factory.h +++ b/test/pc/e2e/test_peer_factory.h @@ -18,26 +18,25 @@ #include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/time_controller.h" #include "modules/audio_device/include/test_audio_device.h" #include "rtc_base/task_queue.h" #include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" -#include "test/pc/e2e/peer_configurer.h" -#include "test/pc/e2e/peer_connection_quality_test_params.h" #include "test/pc/e2e/test_peer.h" namespace webrtc { namespace webrtc_pc_e2e { struct RemotePeerAudioConfig { - explicit RemotePeerAudioConfig( - PeerConnectionE2EQualityTestFixture::AudioConfig config) + explicit RemotePeerAudioConfig(AudioConfig config) : sampling_frequency_in_hz(config.sampling_frequency_in_hz), output_file_name(config.output_dump_file_name) {} static absl::optional Create( - absl::optional config); + absl::optional config); int sampling_frequency_in_hz; absl::optional output_file_name; @@ -67,11 +66,10 @@ class TestPeerFactory { // also will setup dependencies, that are required for media analyzers // injection. 
std::unique_ptr CreateTestPeer( - std::unique_ptr configurer, + std::unique_ptr configurer, std::unique_ptr observer, absl::optional remote_audio_config, - absl::optional - echo_emulation_config); + absl::optional echo_emulation_config); private: rtc::Thread* signaling_thread_; diff --git a/test/peer_scenario/peer_scenario.cc b/test/peer_scenario/peer_scenario.cc index ea959c943a..485e33f67f 100644 --- a/test/peer_scenario/peer_scenario.cc +++ b/test/peer_scenario/peer_scenario.cc @@ -55,7 +55,7 @@ PeerScenario::PeerScenario( std::unique_ptr log_writer_manager, TimeMode mode) : log_writer_manager_(std::move(log_writer_manager)), - net_(mode), + net_(mode, EmulatedNetworkStatsGatheringMode::kDefault), signaling_thread_(net_.time_controller()->GetMainThread()) {} PeerScenarioClient* PeerScenario::CreateClient( diff --git a/test/peer_scenario/tests/remote_estimate_test.cc b/test/peer_scenario/tests/remote_estimate_test.cc index 9190f5c92e..2dfbfdd3c9 100644 --- a/test/peer_scenario/tests/remote_estimate_test.cc +++ b/test/peer_scenario/tests/remote_estimate_test.cc @@ -96,7 +96,10 @@ TEST(RemoteEstimateEndToEnd, AudioUsesAbsSendTimeExtension) { // want to ignore those and we can do that on the basis that the first // byte of RTP packets are guaranteed to not be 0. RtpPacket rtp_packet(&extension_map); - if (rtp_packet.Parse(packet.data)) { + // TODO(bugs.webrtc.org/14525): Look why there are RTP packets with + // payload 72 or 73 (these don't have the RTP AbsoluteSendTime + // Extension). 
+ if (rtp_packet.Parse(packet.data) && rtp_packet.PayloadType() == 111) { EXPECT_TRUE(rtp_packet.HasExtension()); received_abs_send_time = true; } diff --git a/test/scenario/audio_stream.cc b/test/scenario/audio_stream.cc index ea170bc17c..3c94d7911f 100644 --- a/test/scenario/audio_stream.cc +++ b/test/scenario/audio_stream.cc @@ -93,9 +93,10 @@ SendAudioStream::SendAudioStream( RTC_DCHECK_LE(config.source.channels, 2); send_config.encoder_factory = encoder_factory; - if (config.encoder.fixed_rate) + bool use_fixed_rate = !config.encoder.min_rate && !config.encoder.max_rate; + if (use_fixed_rate) send_config.send_codec_spec->target_bitrate_bps = - config.encoder.fixed_rate->bps(); + config.encoder.fixed_rate.bps(); if (!config.adapt.binary_proto.empty()) { send_config.audio_network_adaptor_config = config.adapt.binary_proto; } else if (config.network_adaptation) { @@ -106,9 +107,9 @@ SendAudioStream::SendAudioStream( config.stream.in_bandwidth_estimation) { DataRate min_rate = DataRate::Infinity(); DataRate max_rate = DataRate::Infinity(); - if (config.encoder.fixed_rate) { - min_rate = *config.encoder.fixed_rate; - max_rate = *config.encoder.fixed_rate; + if (use_fixed_rate) { + min_rate = config.encoder.fixed_rate; + max_rate = config.encoder.fixed_rate; } else { min_rate = *config.encoder.min_rate; max_rate = *config.encoder.max_rate; @@ -129,10 +130,8 @@ SendAudioStream::SendAudioStream( sender_->SendTask([&] { send_stream_ = sender_->call_->CreateAudioSendStream(send_config); - if (field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")) { - sender->call_->OnAudioTransportOverheadChanged( - sender_->transport_->packet_overhead().bytes()); - } + sender->call_->OnAudioTransportOverheadChanged( + sender_->transport_->packet_overhead().bytes()); }); } diff --git a/test/scenario/call_client.cc b/test/scenario/call_client.cc index c9babc7b79..46f593898d 100644 --- a/test/scenario/call_client.cc +++ b/test/scenario/call_client.cc @@ -70,6 +70,7 @@ Call* 
CreateCall(TimeController* time_controller, call_config.task_queue_factory = time_controller->GetTaskQueueFactory(); call_config.network_controller_factory = network_controller_factory; call_config.audio_state = audio_state; + call_config.pacer_burst_interval = config.pacer_burst_interval; call_config.trials = config.field_trials; Clock* clock = time_controller->GetClock(); return Call::Create(call_config, clock, diff --git a/test/scenario/probing_test.cc b/test/scenario/probing_test.cc index 74b68fc044..86653ced9b 100644 --- a/test/scenario/probing_test.cc +++ b/test/scenario/probing_test.cc @@ -39,7 +39,7 @@ TEST(ProbingTest, MidCallProbingRampupTriggeredByUpdatedBitrateConstraints) { const DataRate kStartRate = DataRate::KilobitsPerSec(300); const DataRate kConstrainedRate = DataRate::KilobitsPerSec(100); - const DataRate kHighRate = DataRate::KilobitsPerSec(2500); + const DataRate kHighRate = DataRate::KilobitsPerSec(1500); VideoStreamConfig video_config; video_config.encoder.codec = diff --git a/test/scenario/scenario.cc b/test/scenario/scenario.cc index 4f0fb3159b..795276ee06 100644 --- a/test/scenario/scenario.cc +++ b/test/scenario/scenario.cc @@ -65,7 +65,8 @@ Scenario::Scenario( std::unique_ptr log_writer_factory, bool real_time) : log_writer_factory_(std::move(log_writer_factory)), - network_manager_(real_time ? TimeMode::kRealTime : TimeMode::kSimulated), + network_manager_(real_time ? 
TimeMode::kRealTime : TimeMode::kSimulated, + EmulatedNetworkStatsGatheringMode::kDefault), clock_(network_manager_.time_controller()->GetClock()), audio_decoder_factory_(CreateBuiltinAudioDecoderFactory()), audio_encoder_factory_(CreateBuiltinAudioEncoderFactory()), diff --git a/test/scenario/scenario_config.h b/test/scenario/scenario_config.h index be0d0b3589..9ce99401d7 100644 --- a/test/scenario/scenario_config.h +++ b/test/scenario/scenario_config.h @@ -53,6 +53,10 @@ struct TransportControllerConfig { struct CallClientConfig { TransportControllerConfig transport; + // Allows the pacer to send out multiple packets in a burst. + // The number of bits that can be sent in one burst is pacer_burst_interval * + // current bwe. 40ms is the default Chrome setting. + TimeDelta pacer_burst_interval = TimeDelta::Millis(40); const FieldTrialsView* field_trials = nullptr; }; @@ -194,7 +198,8 @@ struct AudioStreamConfig { ~Encoder(); bool allocate_bitrate = false; bool enable_dtx = false; - absl::optional fixed_rate; + DataRate fixed_rate = DataRate::KilobitsPerSec(32); + // Overrides fixed rate. absl::optional min_rate; absl::optional max_rate; TimeDelta initial_frame_length = TimeDelta::Millis(20); @@ -203,8 +208,8 @@ struct AudioStreamConfig { Stream(); Stream(const Stream&); ~Stream(); - bool abs_send_time = false; - bool in_bandwidth_estimation = false; + bool abs_send_time = true; + bool in_bandwidth_estimation = true; } stream; struct Rendering { std::string sync_group; diff --git a/test/scenario/stats_collection_unittest.cc b/test/scenario/stats_collection_unittest.cc index 3db1100a2a..9f46f10073 100644 --- a/test/scenario/stats_collection_unittest.cc +++ b/test/scenario/stats_collection_unittest.cc @@ -91,7 +91,7 @@ TEST(ScenarioAnalyzerTest, PsnrIsLowWhenNetworkIsBad) { // might change due to changes in configuration and encoder etc.
EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 20, 10); EXPECT_NEAR(stats.call.stats().target_rate.Mean().kbps(), 75, 50); - EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 100, 50); + EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 70, 30); EXPECT_NEAR(stats.video_receive.stats().resolution.Mean(), 180, 10); EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 250, 200); } diff --git a/test/scenario/video_stream.cc b/test/scenario/video_stream.cc index ad352f9ab9..96ced83b04 100644 --- a/test/scenario/video_stream.cc +++ b/test/scenario/video_stream.cc @@ -480,7 +480,7 @@ void SendVideoStream::UpdateActiveLayers(std::vector active_layers) { MutexLock lock(&mutex_); if (config_.encoder.codec == VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) { - send_stream_->UpdateActiveSimulcastLayers(active_layers); + send_stream_->StartPerRtpStream(active_layers); } VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_); RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), active_layers.size()); diff --git a/test/scoped_key_value_config.cc b/test/scoped_key_value_config.cc index 449d5f0722..df84462637 100644 --- a/test/scoped_key_value_config.cc +++ b/test/scoped_key_value_config.cc @@ -10,7 +10,6 @@ #include "test/scoped_key_value_config.h" -#include "api/field_trials_view.h" #include "rtc_base/checks.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" @@ -97,7 +96,7 @@ ScopedKeyValueConfig* ScopedKeyValueConfig::GetRoot(ScopedKeyValueConfig* n) { return n; } -std::string ScopedKeyValueConfig::Lookup(absl::string_view key) const { +std::string ScopedKeyValueConfig::GetValue(absl::string_view key) const { if (parent_ == nullptr) { return leaf_->LookupRecurse(key); } else { diff --git a/test/scoped_key_value_config.h b/test/scoped_key_value_config.h index db90ca3533..c0023f8228 100644 --- a/test/scoped_key_value_config.h +++ b/test/scoped_key_value_config.h @@ -17,24 +17,23 
@@ #include #include "absl/strings/string_view.h" -#include "api/field_trials_view.h" +#include "api/field_trials_registry.h" #include "test/field_trial.h" namespace webrtc { namespace test { -class ScopedKeyValueConfig : public FieldTrialsView { +class ScopedKeyValueConfig : public FieldTrialsRegistry { public: virtual ~ScopedKeyValueConfig(); ScopedKeyValueConfig(); explicit ScopedKeyValueConfig(absl::string_view s); ScopedKeyValueConfig(ScopedKeyValueConfig& parent, absl::string_view s); - std::string Lookup(absl::string_view key) const override; - private: ScopedKeyValueConfig(ScopedKeyValueConfig* parent, absl::string_view s); ScopedKeyValueConfig* GetRoot(ScopedKeyValueConfig* n); + std::string GetValue(absl::string_view key) const override; std::string LookupRecurse(absl::string_view key) const; ScopedKeyValueConfig* const parent_; diff --git a/test/test_flags.cc b/test/test_flags.cc index a0becc2ab7..a0fff747fe 100644 --- a/test/test_flags.cc +++ b/test/test_flags.cc @@ -42,4 +42,10 @@ ABSL_FLAG(std::string, "", "Path where the test perf metrics should be stored using " "api/test/metrics/metric.proto proto format. File will contain " - "MetricsSet as a root proto"); + "MetricsSet as a root proto. 
On iOS, this MUST be a file name " + "and the file will be stored under NSDocumentDirectory."); + +ABSL_FLAG(bool, + export_perf_results_new_api, + false, + "Tells to initialize new API for exporting performance metrics"); diff --git a/test/test_flags.h b/test/test_flags.h index 6ca30b22f0..30f918fc7d 100644 --- a/test/test_flags.h +++ b/test/test_flags.h @@ -19,5 +19,6 @@ ABSL_DECLARE_FLAG(std::string, force_fieldtrials); ABSL_DECLARE_FLAG(std::vector, plot); ABSL_DECLARE_FLAG(std::string, isolated_script_test_perf_output); ABSL_DECLARE_FLAG(std::string, webrtc_test_metrics_output_path); +ABSL_DECLARE_FLAG(bool, export_perf_results_new_api); #endif // TEST_TEST_FLAGS_H_ diff --git a/test/test_main.cc b/test/test_main.cc index f919c4bba7..d811fd0e6d 100644 --- a/test/test_main.cc +++ b/test/test_main.cc @@ -9,6 +9,9 @@ */ #include +#include +#include +#include #include "absl/debugging/failure_signal_handler.h" #include "absl/debugging/symbolize.h" @@ -16,14 +19,50 @@ #include "test/gmock.h" #include "test/test_main_lib.h" +namespace { + +std::vector ReplaceDashesWithUnderscores(int argc, char* argv[]) { + std::vector args(argv, argv + argc); + for (std::string& arg : args) { + // Only replace arguments that starts with a dash. + if (!arg.empty() && arg[0] == '-') { + // Don't replace the 2 first characters. + auto begin = arg.begin() + 2; + // Replace dashes on the left of '=' or on all the arg if no '=' is found. 
+ auto end = std::find(arg.begin(), arg.end(), '='); + std::replace(begin, end, '-', '_'); + } + } + return args; +} + +std::vector VectorOfStringsToVectorOfPointers( + std::vector& input) { + std::vector output(input.size()); + for (size_t i = 0; i < input.size(); ++i) { + output[i] = &(input[i][0]); + } + return output; +} + +} // namespace + int main(int argc, char* argv[]) { // Initialize the symbolizer to get a human-readable stack trace absl::InitializeSymbolizer(argv[0]); testing::InitGoogleMock(&argc, argv); - absl::ParseCommandLine(argc, argv); + // Before parsing the arguments with the absl flag library, any internal '-' + // characters will be converted to '_' characters to make sure the string is a + // valid attribute name. + std::vector new_argv = ReplaceDashesWithUnderscores(argc, argv); + std::vector raw_new_argv = VectorOfStringsToVectorOfPointers(new_argv); + absl::ParseCommandLine(argc, &raw_new_argv[0]); +// This absl handler use unsupported features/instructions on Fuchsia +#if !defined(WEBRTC_FUCHSIA) absl::FailureSignalHandlerOptions options; absl::InstallFailureSignalHandler(options); +#endif std::unique_ptr main = webrtc::TestMain::Create(); int err_code = main->Init(); diff --git a/test/test_main_lib.cc b/test/test_main_lib.cc index 9968adf32e..4c80315ac5 100644 --- a/test/test_main_lib.cc +++ b/test/test_main_lib.cc @@ -22,6 +22,7 @@ #include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_set_proto_file_exporter.h" #include "api/test/metrics/print_result_proxy_metrics_exporter.h" #include "api/test/metrics/stdout_metrics_exporter.h" #include "rtc_base/checks.h" @@ -48,11 +49,19 @@ ABSL_FLAG(std::string, NSTreatUnknownArgumentsAsOpen, "", - "Intentionally ignored flag intended for iOS simulator."); + "Intentionally ignored flag intended for iOS test runner."); 
ABSL_FLAG(std::string, ApplePersistenceIgnoreState, "", - "Intentionally ignored flag intended for iOS simulator."); + "Intentionally ignored flag intended for iOS test runner."); +ABSL_FLAG(bool, + enable_run_ios_unittests_with_xctest, + false, + "Intentionally ignored flag intended for iOS test runner."); +ABSL_FLAG(bool, + write_compiled_tests_json_to_writable_path, + false, + "Intentionally ignored flag intended for iOS test runner."); // This is the cousin of isolated_script_test_perf_output, but we can't dictate // where to write on iOS so the semantics of this flag are a bit different. @@ -64,6 +73,12 @@ ABSL_FLAG( "described by histogram.proto in " "https://chromium.googlesource.com/catapult/."); +#elif defined(WEBRTC_FUCHSIA) +ABSL_FLAG(std::string, use_vulkan, "", "Intentionally ignored flag."); +#else +// TODO(bugs.webrtc.org/8115): Remove workaround when fixed. +ABSL_FLAG(bool, no_sandbox, false, "Intentionally ignored flag."); +ABSL_FLAG(bool, test_launcher_bot_mode, false, "Intentionally ignored flag."); #endif ABSL_FLAG(std::string, @@ -71,11 +86,6 @@ ABSL_FLAG(std::string, "", "Path to output an empty JSON file which Chromium infra requires."); -ABSL_FLAG(bool, - export_perf_results_new_api, - false, - "Tells to initialize new API for exporting performance metrics"); - ABSL_FLAG(bool, logs, true, "print logs to stderr"); ABSL_FLAG(bool, verbose, false, "verbose logs to stderr"); @@ -85,6 +95,17 @@ ABSL_FLAG(std::string, "Path to collect trace events (json file) for chrome://tracing. 
" "If not set, events aren't captured."); +ABSL_FLAG(std::string, + test_launcher_shard_index, + "", + "Index of the test shard to run, from 0 to " + "the value specified with --test_launcher_total_shards."); + +ABSL_FLAG(std::string, + test_launcher_total_shards, + "", + "Total number of shards."); + namespace webrtc { namespace { @@ -112,6 +133,19 @@ class TestMainImpl : public TestMain { rtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs) || absl::GetFlag(FLAGS_verbose)); + // The sharding arguments take precedence over the sharding environment + // variables. + if (!absl::GetFlag(FLAGS_test_launcher_shard_index).empty() && + !absl::GetFlag(FLAGS_test_launcher_total_shards).empty()) { + std::string shard_index = + "GTEST_SHARD_INDEX=" + absl::GetFlag(FLAGS_test_launcher_shard_index); + std::string total_shards = + "GTEST_TOTAL_SHARDS=" + + absl::GetFlag(FLAGS_test_launcher_total_shards); + putenv(shard_index.data()); + putenv(total_shards.data()); + } + // InitFieldTrialsFromString stores the char*, so the char array must // outlive the application. 
field_trials_ = absl::GetFlag(FLAGS_force_fieldtrials); @@ -150,10 +184,11 @@ class TestMainImpl : public TestMain { } #if defined(WEBRTC_IOS) - rtc::test::InitTestSuite(RUN_ALL_TESTS, argc, argv, - absl::GetFlag(FLAGS_write_perf_output_on_ios), - absl::GetFlag(FLAGS_export_perf_results_new_api), - metrics_to_plot); + rtc::test::InitTestSuite( + RUN_ALL_TESTS, argc, argv, + absl::GetFlag(FLAGS_write_perf_output_on_ios), + absl::GetFlag(FLAGS_export_perf_results_new_api), + absl::GetFlag(FLAGS_webrtc_test_metrics_output_path), metrics_to_plot); rtc::test::RunTestsFromIOSApp(); int exit_code = 0; #else @@ -162,6 +197,12 @@ class TestMainImpl : public TestMain { std::vector> exporters; if (absl::GetFlag(FLAGS_export_perf_results_new_api)) { exporters.push_back(std::make_unique()); + if (!absl::GetFlag(FLAGS_webrtc_test_metrics_output_path).empty()) { + exporters.push_back( + std::make_unique( + webrtc::test::MetricsSetProtoFileExporter::Options( + absl::GetFlag(FLAGS_webrtc_test_metrics_output_path)))); + } if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { exporters.push_back( std::make_unique( diff --git a/test/testsupport/file_utils_override.cc b/test/testsupport/file_utils_override.cc index ac44e91b8a..7d0a3e3312 100644 --- a/test/testsupport/file_utils_override.cc +++ b/test/testsupport/file_utils_override.cc @@ -69,7 +69,10 @@ const absl::string_view kPathDelimiter = "/"; const absl::string_view kAndroidChromiumTestsRoot = "/sdcard/chromium_tests_root/"; #endif - +#if defined(WEBRTC_FUCHSIA) +const absl::string_view kFuchsiaTestRoot = "/pkg/"; +const absl::string_view kFuchsiaTempWritableDir = "/tmp/"; +#endif #if !defined(WEBRTC_IOS) const absl::string_view kResourcesDirName = "resources"; #endif @@ -91,6 +94,11 @@ absl::optional ProjectRootPath() { // the test is bundled (which our tests are not), in which case it's 5 levels. 
return DirName(DirName(exe_dir)) + std::string(kPathDelimiter); #elif defined(WEBRTC_POSIX) +// Fuchsia uses POSIX defines as well but does not have full POSIX +// functionality. +#if defined(WEBRTC_FUCHSIA) + return std::string(kFuchsiaTestRoot); +#else char buf[PATH_MAX]; ssize_t count = ::readlink("/proc/self/exe", buf, arraysize(buf)); if (count <= 0) { @@ -100,6 +108,7 @@ absl::optional ProjectRootPath() { // On POSIX, tests execute in out/Whatever, so src is two levels up. std::string exe_dir = DirName(absl::string_view(buf, count)); return DirName(DirName(exe_dir)) + std::string(kPathDelimiter); +#endif #elif defined(WEBRTC_WIN) wchar_t buf[MAX_PATH]; buf[0] = 0; @@ -117,6 +126,8 @@ std::string OutputPath() { return IOSOutputPath(); #elif defined(WEBRTC_ANDROID) return std::string(kAndroidChromiumTestsRoot); +#elif defined(WEBRTC_FUCHSIA) + return std::string(kFuchsiaTempWritableDir); #else absl::optional path_opt = ProjectRootPath(); RTC_DCHECK(path_opt); diff --git a/test/testsupport/frame_reader.h b/test/testsupport/frame_reader.h index d2a3b4b064..7856476ca0 100644 --- a/test/testsupport/frame_reader.h +++ b/test/testsupport/frame_reader.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/scoped_refptr.h" +#include "api/video/resolution.h" namespace webrtc { class I420Buffer; @@ -25,94 +26,123 @@ namespace test { // Handles reading of I420 frames from video files. class FrameReader { public: + struct Ratio { + int num = 1; + int den = 1; + }; + + static constexpr Ratio kNoScale = Ratio({.num = 1, .den = 1}); + virtual ~FrameReader() {} - // Initializes the frame reader, i.e. opens the input file. - // This must be called before reading of frames has started. - // Returns false if an error has occurred, in addition to printing to stderr. - virtual bool Init() = 0; + // Reads and returns next frame. Returns `nullptr` if reading failed or end of + // stream is reached. 
+ virtual rtc::scoped_refptr PullFrame() = 0; - // Reads a frame from the input file. On success, returns the frame. - // Returns nullptr if encountering end of file or a read error. - virtual rtc::scoped_refptr ReadFrame() = 0; + // Reads and returns next frame. `frame_num` stores unwrapped frame number + // which can be passed to `ReadFrame` to re-read this frame later. Returns + // `nullptr` if reading failed or end of stream is reached. + virtual rtc::scoped_refptr PullFrame(int* frame_num) = 0; - // Closes the input file if open. Essentially makes this class impossible - // to use anymore. Will also be invoked by the destructor. - virtual void Close() = 0; + // Reads and returns frame specified by `frame_num`. Returns `nullptr` if + // reading failed. + virtual rtc::scoped_refptr ReadFrame(int frame_num) = 0; - // Frame length in bytes of a single frame image. - virtual size_t FrameLength() = 0; - // Total number of frames in the input video source. - virtual int NumberOfFrames() = 0; + // Reads next frame, resizes and returns it. `frame_num` stores unwrapped + // frame number which can be passed to `ReadFrame` to re-read this frame + // later. `resolution` specifies resolution of the returned frame. + // `framerate_scale` specifies frame rate scale factor. Frame rate scaling is + // done by skipping or repeating frames. + virtual rtc::scoped_refptr PullFrame(int* frame_num, + Resolution resolution, + Ratio framerate_scale) = 0; + + // Reads frame specified by `frame_num`, resizes and returns it. Returns + // `nullptr` if reading failed. + virtual rtc::scoped_refptr ReadFrame(int frame_num, + Resolution resolution) = 0; + + // Total number of retrievable frames. 
+ virtual int num_frames() const = 0; }; class YuvFrameReaderImpl : public FrameReader { public: enum class RepeatMode { kSingle, kRepeat, kPingPong }; - class DropperUtil { - public: - DropperUtil(int source_fps, int target_fps); - enum class DropDecision { kDropframe, kKeepFrame }; - DropDecision UpdateLevel(); + // Creates the frame reader for a YUV file specified by `filepath`. + // `resolution` specifies width and height of frames in pixels. `repeat_mode` + // specifies behaviour of the reader at reaching the end of file (stop, read + // it over from the beginning or read in reverse order). The file is assumed + // to exist, be readable and to contain at least 1 frame. + YuvFrameReaderImpl(std::string filepath, + Resolution resolution, + RepeatMode repeat_mode); - private: - const double frame_size_buckets_; - double bucket_level_; - }; - - // Creates a file handler. The input file is assumed to exist and be readable. - // Parameters: - // input_filename The file to read from. - // width, height Size of each frame to read. 
- YuvFrameReaderImpl(std::string input_filename, int width, int height); - YuvFrameReaderImpl(std::string input_filename, - int input_width, - int input_height, - int desired_width, - int desired_height, - RepeatMode repeat_mode, - absl::optional clip_fps, - int target_fps); ~YuvFrameReaderImpl() override; - bool Init() override; - rtc::scoped_refptr ReadFrame() override; - void Close() override; - size_t FrameLength() override; - int NumberOfFrames() override; + + virtual void Init(); + + rtc::scoped_refptr PullFrame() override; + + rtc::scoped_refptr PullFrame(int* frame_num) override; + + rtc::scoped_refptr PullFrame(int* frame_num, + Resolution resolution, + Ratio framerate_scale) override; + + rtc::scoped_refptr ReadFrame(int frame_num) override; + + rtc::scoped_refptr ReadFrame(int frame_num, + Resolution resolution) override; + + int num_frames() const override { return num_frames_; } protected: - const std::string input_filename_; - // It is not const, so subclasses will be able to add frame header size. - size_t frame_length_in_bytes_; - const int input_width_; - const int input_height_; - const int desired_width_; - const int desired_height_; - const size_t frame_size_bytes_; + class RateScaler { + public: + int Skip(Ratio framerate_scale); + + private: + absl::optional ticks_; + }; + + const std::string filepath_; + Resolution resolution_; const RepeatMode repeat_mode_; - int number_of_frames_; - int current_frame_index_; - std::unique_ptr dropper_; - FILE* input_file_; + int num_frames_; + int frame_num_; + int frame_size_bytes_; + int header_size_bytes_; + FILE* file_; + RateScaler framerate_scaler_; }; class Y4mFrameReaderImpl : public YuvFrameReaderImpl { public: - // Creates a file handler. The input file is assumed to exist and be readable. - // Parameters: - // input_filename The file to read from. - // width, height Size of each frame to read. 
- Y4mFrameReaderImpl(std::string input_filename, int width, int height); - ~Y4mFrameReaderImpl() override; - bool Init() override; - rtc::scoped_refptr ReadFrame() override; + // Creates the frame reader for a Y4M file specified by `filepath`. + // `repeat_mode` specifies behaviour of the reader at reaching the end of file + // (stop, read it over from the beginning or read in reverse order). The file + // is assumed to exist, be readable and to contain at least 1 frame. + Y4mFrameReaderImpl(std::string filepath, RepeatMode repeat_mode); - private: - // Buffer that is used to read file and frame headers. - char* buffer_; + void Init() override; }; +std::unique_ptr CreateYuvFrameReader(std::string filepath, + Resolution resolution); + +std::unique_ptr CreateYuvFrameReader( + std::string filepath, + Resolution resolution, + YuvFrameReaderImpl::RepeatMode repeat_mode); + +std::unique_ptr CreateY4mFrameReader(std::string filepath); + +std::unique_ptr CreateY4mFrameReader( + std::string filepath, + YuvFrameReaderImpl::RepeatMode repeat_mode); + } // namespace test } // namespace webrtc diff --git a/test/testsupport/mock/mock_frame_reader.h b/test/testsupport/mock/mock_frame_reader.h index dbb246cfc8..f68bbf8368 100644 --- a/test/testsupport/mock/mock_frame_reader.h +++ b/test/testsupport/mock/mock_frame_reader.h @@ -20,11 +20,18 @@ namespace test { class MockFrameReader : public FrameReader { public: - MOCK_METHOD(bool, Init, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, ReadFrame, (), (override)); - MOCK_METHOD(void, Close, (), (override)); - MOCK_METHOD(size_t, FrameLength, (), (override)); - MOCK_METHOD(int, NumberOfFrames, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, PullFrame, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, PullFrame, (int*), (override)); + MOCK_METHOD(rtc::scoped_refptr, + PullFrame, + (int*, Resolution, Ratio), + (override)); + MOCK_METHOD(rtc::scoped_refptr, ReadFrame, (int), (override)); + MOCK_METHOD(rtc::scoped_refptr, + 
ReadFrame, + (int, Resolution), + (override)); + MOCK_METHOD(int, num_frames, (), (const override)); }; } // namespace test diff --git a/test/testsupport/video_frame_writer_unittest.cc b/test/testsupport/video_frame_writer_unittest.cc index 57e2fbf048..9d59627c0f 100644 --- a/test/testsupport/video_frame_writer_unittest.cc +++ b/test/testsupport/video_frame_writer_unittest.cc @@ -140,13 +140,10 @@ TEST_F(Y4mVideoFrameWriterTest, WriteFrame) { GetFileSize(temp_filename_)); std::unique_ptr frame_reader = - std::make_unique(temp_filename_, kFrameWidth, - kFrameHeight); - ASSERT_TRUE(frame_reader->Init()); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - EXPECT_FALSE(frame_reader->ReadFrame()); // End of file. - frame_reader->Close(); + CreateY4mFrameReader(temp_filename_); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + EXPECT_FALSE(frame_reader->PullFrame()); // End of file. } TEST_F(YuvVideoFrameWriterTest, InitSuccess) {} @@ -164,14 +161,12 @@ TEST_F(YuvVideoFrameWriterTest, WriteFrame) { frame_writer_->Close(); EXPECT_EQ(2 * kFrameLength, GetFileSize(temp_filename_)); - std::unique_ptr frame_reader = - std::make_unique(temp_filename_, kFrameWidth, - kFrameHeight); - ASSERT_TRUE(frame_reader->Init()); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - AssertI420BuffersEq(frame_reader->ReadFrame(), expected_buffer); - EXPECT_FALSE(frame_reader->ReadFrame()); // End of file. - frame_reader->Close(); + std::unique_ptr frame_reader = CreateYuvFrameReader( + temp_filename_, + Resolution({.width = kFrameWidth, .height = kFrameHeight})); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer); + EXPECT_FALSE(frame_reader->PullFrame()); // End of file. 
} } // namespace test diff --git a/test/testsupport/y4m_frame_reader.cc b/test/testsupport/y4m_frame_reader.cc index 0faa024141..72fb9b5188 100644 --- a/test/testsupport/y4m_frame_reader.cc +++ b/test/testsupport/y4m_frame_reader.cc @@ -14,6 +14,7 @@ #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "test/testsupport/file_utils.h" @@ -22,87 +23,69 @@ namespace webrtc { namespace test { namespace { - -// Size of header: "YUV4MPEG2 WXXXXXX HXXXXXX FXXX:1 C420\n" -// We allocate up to 6 digits for width and height and up to 3 digits for FPS. -const size_t kFileHeaderMaxSize = 38; -// Size of header: "YUV4MPEG2 WX HX FX:1 C420\n" -const size_t kFileHeaderMinSize = 26; -// Size of header: "FRAME\n" -const size_t kFrameHeaderSize = 6; - -std::string GetExpectedHeaderPrefix(int width, int height) { - rtc::StringBuilder out; - out << "YUV4MPEG2 W" << width << " H" << height << " F"; - return out.str(); -} - +constexpr int kFrameHeaderSize = 6; // "FRAME\n" } // namespace -Y4mFrameReaderImpl::Y4mFrameReaderImpl(std::string input_filename, - int width, - int height) - : YuvFrameReaderImpl(input_filename, width, height) { - frame_length_in_bytes_ += kFrameHeaderSize; - buffer_ = new char[kFileHeaderMaxSize]; -} -Y4mFrameReaderImpl::~Y4mFrameReaderImpl() { - delete[] buffer_; +void ParseY4mHeader(std::string filepath, + Resolution* resolution, + int* header_size) { + FILE* file = fopen(filepath.c_str(), "r"); + RTC_CHECK(file != NULL) << "Cannot open " << filepath; + + // Length of Y4M header is technically unlimited due to the comment tag 'X'. 
+ char h[1024]; + RTC_CHECK(fgets(h, sizeof(h), file) != NULL) + << "File " << filepath << " is too small"; + fclose(file); + + RTC_CHECK(sscanf(h, "YUV4MPEG2 W%d H%d", &resolution->width, + &resolution->height) == 2) + << filepath << " is not a valid Y4M file"; + + RTC_CHECK_GT(resolution->width, 0) << "Width must be positive"; + RTC_CHECK_GT(resolution->height, 0) << "Height must be positive"; + + *header_size = strcspn(h, "\n") + 1; + RTC_CHECK(static_cast(*header_size) < sizeof(h)) + << filepath << " has unexpectedly large header"; } -bool Y4mFrameReaderImpl::Init() { - if (input_width_ <= 0 || input_height_ <= 0) { - RTC_LOG(LS_ERROR) << "Frame width and height must be positive. Was: " - << input_width_ << "x" << input_height_; - return false; - } - input_file_ = fopen(input_filename_.c_str(), "rb"); - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "Couldn't open input file: " << input_filename_; - return false; - } - size_t source_file_size = GetFileSize(input_filename_); - if (source_file_size <= 0u) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_ << " is empty."; - return false; - } - char* c_file_header = fgets(buffer_, kFileHeaderMaxSize, input_file_); - std::string file_header(c_file_header); - if (file_header.size() < kFileHeaderMinSize) { - RTC_LOG(LS_ERROR) << "Couldn't read Y4M header from input file: " - << input_filename_; - return false; - } - if (file_header.find(GetExpectedHeaderPrefix(input_width_, input_height_)) != - 0) { - RTC_LOG(LS_ERROR) << "Couldn't read Y4M file: " << input_filename_ - << ". 
Input file has different resolution, expected: " - << GetExpectedHeaderPrefix(input_width_, input_height_) - << "[0-9]?:1 C420; got: " << file_header; - return false; - } +Y4mFrameReaderImpl::Y4mFrameReaderImpl(std::string filepath, + RepeatMode repeat_mode) + : YuvFrameReaderImpl(filepath, Resolution(), repeat_mode) {} - number_of_frames_ = static_cast((source_file_size - file_header.size()) / - frame_length_in_bytes_); +void Y4mFrameReaderImpl::Init() { + file_ = fopen(filepath_.c_str(), "rb"); + RTC_CHECK(file_ != nullptr) << "Cannot open " << filepath_; - if (number_of_frames_ == 0) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_ << " is too small."; - } - return true; + ParseY4mHeader(filepath_, &resolution_, &header_size_bytes_); + frame_size_bytes_ = + CalcBufferSize(VideoType::kI420, resolution_.width, resolution_.height); + frame_size_bytes_ += kFrameHeaderSize; + + size_t file_size_bytes = GetFileSize(filepath_); + RTC_CHECK_GT(file_size_bytes, 0u) << "File " << filepath_ << " is empty"; + RTC_CHECK_GT(file_size_bytes, header_size_bytes_) + << "File " << filepath_ << " is too small"; + + num_frames_ = static_cast((file_size_bytes - header_size_bytes_) / + frame_size_bytes_); + RTC_CHECK_GT(num_frames_, 0u) << "File " << filepath_ << " is too small"; + header_size_bytes_ += kFrameHeaderSize; } -rtc::scoped_refptr Y4mFrameReaderImpl::ReadFrame() { - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "Y4mFrameReaderImpl is not initialized."; - return nullptr; - } - if (fread(buffer_, 1, kFrameHeaderSize, input_file_) < kFrameHeaderSize && - ferror(input_file_)) { - RTC_LOG(LS_ERROR) << "Couldn't read frame header from input file: " - << input_filename_; - return nullptr; - } - return YuvFrameReaderImpl::ReadFrame(); +std::unique_ptr CreateY4mFrameReader(std::string filepath) { + return CreateY4mFrameReader(filepath, + YuvFrameReaderImpl::RepeatMode::kSingle); +} + +std::unique_ptr CreateY4mFrameReader( + std::string filepath, + 
YuvFrameReaderImpl::RepeatMode repeat_mode) { + Y4mFrameReaderImpl* frame_reader = + new Y4mFrameReaderImpl(filepath, repeat_mode); + frame_reader->Init(); + return std::unique_ptr(frame_reader); } } // namespace test diff --git a/test/testsupport/y4m_frame_reader_unittest.cc b/test/testsupport/y4m_frame_reader_unittest.cc index 219ec1bbe3..df81a8135b 100644 --- a/test/testsupport/y4m_frame_reader_unittest.cc +++ b/test/testsupport/y4m_frame_reader_unittest.cc @@ -25,15 +25,14 @@ namespace webrtc { namespace test { namespace { +using Ratio = FrameReader::Ratio; +using RepeatMode = YuvFrameReaderImpl::RepeatMode; -const absl::string_view kFileHeader = "YUV4MPEG2 W2 H2 F30:1 C420\n"; -const absl::string_view kFrameHeader = "FRAME\n"; -const absl::string_view kInputVideoContents = "abcdef"; - -const size_t kFrameWidth = 2; -const size_t kFrameHeight = 2; -const size_t kFrameLength = 3 * kFrameWidth * kFrameHeight / 2; // I420. - +constexpr Resolution kResolution({.width = 1, .height = 1}); +constexpr char kFileHeader[] = "YUV4MPEG2 W1 H1 F30:1 C420\n"; +constexpr char kFrameHeader[] = "FRAME\n"; +constexpr char kFrameContent[3][3] = {{0, 1, 2}, {1, 2, 3}, {2, 3, 4}}; +constexpr int kNumFrames = sizeof(kFrameContent) / sizeof(kFrameContent[0]); } // namespace class Y4mFrameReaderTest : public ::testing::Test { @@ -42,63 +41,118 @@ class Y4mFrameReaderTest : public ::testing::Test { ~Y4mFrameReaderTest() override = default; void SetUp() override { - temp_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), - "y4m_frame_reader_unittest"); - FILE* dummy = fopen(temp_filename_.c_str(), "wb"); - fprintf(dummy, "%s", - (std::string(kFileHeader) + std::string(kFrameHeader) + - std::string(kInputVideoContents)) - .c_str()); - fclose(dummy); + filepath_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), + "y4m_frame_reader_unittest"); + FILE* file = fopen(filepath_.c_str(), "wb"); + fwrite(kFileHeader, 1, sizeof(kFileHeader) - 1, file); + for (int n = 0; n < 
kNumFrames; ++n) { + fwrite(kFrameHeader, 1, sizeof(kFrameHeader) - 1, file); + fwrite(kFrameContent[n], 1, sizeof(kFrameContent[n]), file); + } + fclose(file); - frame_reader_.reset( - new Y4mFrameReaderImpl(temp_filename_, kFrameWidth, kFrameHeight)); - ASSERT_TRUE(frame_reader_->Init()); + reader_ = CreateY4mFrameReader(filepath_); } - void TearDown() override { remove(temp_filename_.c_str()); } + void TearDown() override { remove(filepath_.c_str()); } - std::unique_ptr frame_reader_; - std::string temp_filename_; + std::string filepath_; + std::unique_ptr reader_; }; -TEST_F(Y4mFrameReaderTest, InitSuccess) {} - -TEST_F(Y4mFrameReaderTest, FrameLength) { - EXPECT_EQ(kFrameHeader.size() + kFrameLength, frame_reader_->FrameLength()); +TEST_F(Y4mFrameReaderTest, num_frames) { + EXPECT_EQ(kNumFrames, reader_->num_frames()); } -TEST_F(Y4mFrameReaderTest, NumberOfFrames) { - EXPECT_EQ(1, frame_reader_->NumberOfFrames()); +TEST_F(Y4mFrameReaderTest, PullFrame_frameResolution) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(kResolution.width, buffer->width()); + EXPECT_EQ(kResolution.height, buffer->height()); } -TEST_F(Y4mFrameReaderTest, ReadFrame) { - rtc::scoped_refptr buffer = frame_reader_->ReadFrame(); - ASSERT_TRUE(buffer); - // Expect I420 packed as YUV. - EXPECT_EQ(kInputVideoContents[0], buffer->DataY()[0]); - EXPECT_EQ(kInputVideoContents[1], buffer->DataY()[1]); - EXPECT_EQ(kInputVideoContents[2], buffer->DataY()[2]); - EXPECT_EQ(kInputVideoContents[3], buffer->DataY()[3]); - EXPECT_EQ(kInputVideoContents[4], buffer->DataU()[0]); - EXPECT_EQ(kInputVideoContents[5], buffer->DataV()[0]); - EXPECT_FALSE(frame_reader_->ReadFrame()); // End of file. 
+TEST_F(Y4mFrameReaderTest, PullFrame_frameContent) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(kFrameContent[0][0], *buffer->DataY()); + EXPECT_EQ(kFrameContent[0][1], *buffer->DataU()); + EXPECT_EQ(kFrameContent[0][2], *buffer->DataV()); } -TEST_F(Y4mFrameReaderTest, ReadFrameUninitialized) { - Y4mFrameReaderImpl file_reader(temp_filename_, kFrameWidth, kFrameHeight); - EXPECT_FALSE(file_reader.ReadFrame()); +TEST_F(Y4mFrameReaderTest, ReadFrame_randomOrder) { + std::vector expected_frames = {2, 0, 1}; + std::vector actual_frames; + for (int frame_num : expected_frames) { + rtc::scoped_refptr buffer = + reader_->ReadFrame(frame_num); + actual_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, actual_frames); } -TEST_F(Y4mFrameReaderTest, ReadFrameDifferentWidth) { - Y4mFrameReaderImpl file_reader(temp_filename_, kFrameWidth + 1, kFrameHeight); - EXPECT_FALSE(file_reader.Init()); +TEST_F(Y4mFrameReaderTest, PullFrame_scale) { + rtc::scoped_refptr buffer = reader_->PullFrame( + /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}), + FrameReader::kNoScale); + EXPECT_EQ(2, buffer->width()); + EXPECT_EQ(2, buffer->height()); } -TEST_F(Y4mFrameReaderTest, ReadFrameDifferentHeight) { - Y4mFrameReaderImpl file_reader(temp_filename_, kFrameWidth, kFrameHeight + 1); - EXPECT_FALSE(file_reader.Init()); +class Y4mFrameReaderRepeatModeTest + : public Y4mFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(Y4mFrameReaderRepeatModeTest, PullFrame) { + RepeatMode mode = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + reader_ = CreateY4mFrameReader(filepath_, mode); + std::vector read_frames; + for (size_t i = 0; i < expected_frames.size(); ++i) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + read_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, read_frames); } +INSTANTIATE_TEST_SUITE_P( + Y4mFrameReaderTest, + 
Y4mFrameReaderRepeatModeTest, + ::testing::ValuesIn( + {std::make_tuple(RepeatMode::kSingle, std::vector{0, 1, 2}), + std::make_tuple(RepeatMode::kRepeat, + std::vector{0, 1, 2, 0, 1, 2}), + std::make_tuple(RepeatMode::kPingPong, + std::vector{0, 1, 2, 1, 0, 1, 2})})); + +class Y4mFrameReaderFramerateScaleTest + : public Y4mFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(Y4mFrameReaderFramerateScaleTest, PullFrame) { + Ratio framerate_scale = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + std::vector actual_frames; + for (size_t i = 0; i < expected_frames.size(); ++i) { + int pulled_frame; + rtc::scoped_refptr buffer = + reader_->PullFrame(&pulled_frame, kResolution, framerate_scale); + actual_frames.push_back(pulled_frame); + } + EXPECT_EQ(expected_frames, actual_frames); +} + +INSTANTIATE_TEST_SUITE_P(Y4mFrameReaderTest, + Y4mFrameReaderFramerateScaleTest, + ::testing::ValuesIn({ + std::make_tuple(Ratio({.num = 1, .den = 2}), + std::vector{0, 2, 4}), + std::make_tuple(Ratio({.num = 2, .den = 3}), + std::vector{0, 1, 3, 4, 6}), + std::make_tuple(Ratio({.num = 2, .den = 1}), + std::vector{0, 0, 1, 1}), + })); + } // namespace test } // namespace webrtc diff --git a/test/testsupport/yuv_frame_reader.cc b/test/testsupport/yuv_frame_reader.cc index 330541496b..02c1a68008 100644 --- a/test/testsupport/yuv_frame_reader.cc +++ b/test/testsupport/yuv_frame_reader.cc @@ -14,6 +14,7 @@ #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" #include "rtc_base/logging.h" #include "test/frame_utils.h" #include "test/testsupport/file_utils.h" @@ -21,164 +22,140 @@ namespace webrtc { namespace test { -size_t FrameSizeBytes(int width, int height) { - int half_width = (width + 1) / 2; - size_t size_y = static_cast(width) * height; - size_t size_uv = static_cast(half_width) * ((height + 1) / 2); - return size_y + 2 * size_uv; +namespace { 
+using RepeatMode = YuvFrameReaderImpl::RepeatMode; + +int WrapFrameNum(int frame_num, int num_frames, RepeatMode mode) { + RTC_CHECK_GE(frame_num, 0) << "frame_num cannot be negative"; + RTC_CHECK_GT(num_frames, 0) << "num_frames must be greater than 0"; + if (mode == RepeatMode::kSingle) { + return frame_num; + } + if (mode == RepeatMode::kRepeat) { + return frame_num % num_frames; + } + + RTC_CHECK_EQ(RepeatMode::kPingPong, mode); + int cycle_len = 2 * (num_frames - 1); + int wrapped_num = frame_num % cycle_len; + if (wrapped_num >= num_frames) { + return cycle_len - wrapped_num; + } + return wrapped_num; } -YuvFrameReaderImpl::DropperUtil::DropperUtil(int source_fps, int target_fps) - : frame_size_buckets_( - std::max(1.0, static_cast(source_fps) / target_fps)), - bucket_level_(0.0) {} - -YuvFrameReaderImpl::DropperUtil::DropDecision -YuvFrameReaderImpl::DropperUtil::UpdateLevel() { - DropDecision decision; - if (bucket_level_ <= 0.0) { - decision = DropDecision::kKeepFrame; - bucket_level_ += frame_size_buckets_; - } else { - decision = DropDecision::kDropframe; - } - bucket_level_ -= 1.0; - return decision; -} - -YuvFrameReaderImpl::YuvFrameReaderImpl(std::string input_filename, - int width, - int height) - : YuvFrameReaderImpl(input_filename, - width, - height, - width, - height, - RepeatMode::kSingle, - 30, - 30) {} -YuvFrameReaderImpl::YuvFrameReaderImpl(std::string input_filename, - int input_width, - int input_height, - int desired_width, - int desired_height, - RepeatMode repeat_mode, - absl::optional clip_fps, - int target_fps) - : input_filename_(input_filename), - frame_length_in_bytes_(input_width * input_height + - 2 * ((input_width + 1) / 2) * - ((input_height + 1) / 2)), - input_width_(input_width), - input_height_(input_height), - desired_width_(desired_width), - desired_height_(desired_height), - frame_size_bytes_(FrameSizeBytes(input_width, input_height)), - repeat_mode_(repeat_mode), - number_of_frames_(-1), - current_frame_index_(-1), - 
dropper_(clip_fps.has_value() ? new DropperUtil(*clip_fps, target_fps) - : nullptr), - input_file_(nullptr) {} - -YuvFrameReaderImpl::~YuvFrameReaderImpl() { - Close(); -} - -bool YuvFrameReaderImpl::Init() { - if (input_width_ <= 0 || input_height_ <= 0) { - RTC_LOG(LS_ERROR) << "Frame width and height must be positive. Was: " - << input_width_ << "x" << input_height_; - return false; - } - input_file_ = fopen(input_filename_.c_str(), "rb"); - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "Couldn't open input file: " - << input_filename_.c_str(); - return false; - } - // Calculate total number of frames. - size_t source_file_size = GetFileSize(input_filename_); - if (source_file_size <= 0u) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_.c_str() - << " is empty."; - return false; - } - number_of_frames_ = - static_cast(source_file_size / frame_length_in_bytes_); - - if (number_of_frames_ == 0) { - RTC_LOG(LS_ERROR) << "Input file " << input_filename_.c_str() - << " is too small."; - } - - current_frame_index_ = 0; - return true; -} - -rtc::scoped_refptr YuvFrameReaderImpl::ReadFrame() { - if (input_file_ == nullptr) { - RTC_LOG(LS_ERROR) << "YuvFrameReaderImpl is not initialized."; - return nullptr; - } - - rtc::scoped_refptr buffer; - - do { - if (current_frame_index_ >= number_of_frames_) { - switch (repeat_mode_) { - case RepeatMode::kSingle: - return nullptr; - case RepeatMode::kRepeat: - fseek(input_file_, 0, SEEK_SET); - current_frame_index_ = 0; - break; - case RepeatMode::kPingPong: - if (current_frame_index_ == number_of_frames_ * 2) { - fseek(input_file_, 0, SEEK_SET); - current_frame_index_ = 0; - } else { - int reverse_frame_index = current_frame_index_ - number_of_frames_; - int seek_frame_pos = (number_of_frames_ - reverse_frame_index - 1); - fseek(input_file_, seek_frame_pos * frame_size_bytes_, SEEK_SET); - } - break; - } - } - ++current_frame_index_; - - buffer = ReadI420Buffer(input_width_, input_height_, input_file_); - if 
(!buffer && ferror(input_file_)) { - RTC_LOG(LS_ERROR) << "Couldn't read frame from file: " - << input_filename_.c_str(); - } - } while (dropper_ && - dropper_->UpdateLevel() == DropperUtil::DropDecision::kDropframe); - - if (input_width_ == desired_width_ && input_height_ == desired_height_) { +rtc::scoped_refptr Scale(rtc::scoped_refptr buffer, + Resolution resolution) { + if (buffer->width() == resolution.width && + buffer->height() == resolution.height) { return buffer; } + rtc::scoped_refptr scaled( + I420Buffer::Create(resolution.width, resolution.height)); + scaled->ScaleFrom(*buffer.get()); + return scaled; +} +} // namespace - rtc::scoped_refptr rescaled_buffer( - I420Buffer::Create(desired_width_, desired_height_)); - rescaled_buffer->ScaleFrom(*buffer.get()); - - return rescaled_buffer; +int YuvFrameReaderImpl::RateScaler::Skip(Ratio framerate_scale) { + ticks_ = ticks_.value_or(framerate_scale.num); + int skip = 0; + while (ticks_ <= 0) { + *ticks_ += framerate_scale.num; + ++skip; + } + *ticks_ -= framerate_scale.den; + return skip; } -void YuvFrameReaderImpl::Close() { - if (input_file_ != nullptr) { - fclose(input_file_); - input_file_ = nullptr; +YuvFrameReaderImpl::YuvFrameReaderImpl(std::string filepath, + Resolution resolution, + RepeatMode repeat_mode) + : filepath_(filepath), + resolution_(resolution), + repeat_mode_(repeat_mode), + num_frames_(0), + frame_num_(0), + frame_size_bytes_(0), + header_size_bytes_(0), + file_(nullptr) {} + +YuvFrameReaderImpl::~YuvFrameReaderImpl() { + if (file_ != nullptr) { + fclose(file_); + file_ = nullptr; } } -size_t YuvFrameReaderImpl::FrameLength() { - return frame_length_in_bytes_; +void YuvFrameReaderImpl::Init() { + RTC_CHECK_GT(resolution_.width, 0) << "Width must be positive"; + RTC_CHECK_GT(resolution_.height, 0) << "Height must be positive"; + frame_size_bytes_ = + CalcBufferSize(VideoType::kI420, resolution_.width, resolution_.height); + + file_ = fopen(filepath_.c_str(), "rb"); + RTC_CHECK(file_ != 
NULL) << "Cannot open " << filepath_; + + size_t file_size_bytes = GetFileSize(filepath_); + RTC_CHECK_GT(file_size_bytes, 0u) << "File " << filepath_ << " is empty"; + + num_frames_ = static_cast(file_size_bytes / frame_size_bytes_); + RTC_CHECK_GT(num_frames_, 0u) << "File " << filepath_ << " is too small"; } -int YuvFrameReaderImpl::NumberOfFrames() { - return number_of_frames_; +rtc::scoped_refptr YuvFrameReaderImpl::PullFrame() { + return PullFrame(/*frame_num=*/nullptr); +} + +rtc::scoped_refptr YuvFrameReaderImpl::PullFrame(int* frame_num) { + return PullFrame(frame_num, resolution_, /*framerate_scale=*/kNoScale); +} + +rtc::scoped_refptr YuvFrameReaderImpl::PullFrame( + int* frame_num, + Resolution resolution, + Ratio framerate_scale) { + frame_num_ += framerate_scaler_.Skip(framerate_scale); + auto buffer = ReadFrame(frame_num_, resolution); + if (frame_num != nullptr) { + *frame_num = frame_num_; + } + return buffer; +} + +rtc::scoped_refptr YuvFrameReaderImpl::ReadFrame(int frame_num) { + return ReadFrame(frame_num, resolution_); +} + +rtc::scoped_refptr YuvFrameReaderImpl::ReadFrame( + int frame_num, + Resolution resolution) { + int wrapped_num = WrapFrameNum(frame_num, num_frames_, repeat_mode_); + if (wrapped_num >= num_frames_) { + RTC_CHECK_EQ(RepeatMode::kSingle, repeat_mode_); + return nullptr; + } + fseek(file_, header_size_bytes_ + wrapped_num * frame_size_bytes_, SEEK_SET); + auto buffer = ReadI420Buffer(resolution_.width, resolution_.height, file_); + RTC_CHECK(buffer != nullptr); + + return Scale(buffer, resolution); +} + +std::unique_ptr CreateYuvFrameReader(std::string filepath, + Resolution resolution) { + return CreateYuvFrameReader(filepath, resolution, + YuvFrameReaderImpl::RepeatMode::kSingle); +} + +std::unique_ptr CreateYuvFrameReader( + std::string filepath, + Resolution resolution, + YuvFrameReaderImpl::RepeatMode repeat_mode) { + YuvFrameReaderImpl* frame_reader = + new YuvFrameReaderImpl(filepath, resolution, repeat_mode); + 
frame_reader->Init(); + return std::unique_ptr(frame_reader); } } // namespace test diff --git a/test/testsupport/yuv_frame_reader_unittest.cc b/test/testsupport/yuv_frame_reader_unittest.cc index 525f0e9971..b9ea2d0c46 100644 --- a/test/testsupport/yuv_frame_reader_unittest.cc +++ b/test/testsupport/yuv_frame_reader_unittest.cc @@ -25,11 +25,12 @@ namespace webrtc { namespace test { namespace { -const std::string kInputFileContents = "bazouk"; +using Ratio = FrameReader::Ratio; +using RepeatMode = YuvFrameReaderImpl::RepeatMode; -const size_t kFrameWidth = 2; -const size_t kFrameHeight = 2; -const size_t kFrameLength = 3 * kFrameWidth * kFrameHeight / 2; // I420. +constexpr Resolution kResolution({.width = 1, .height = 1}); +constexpr char kFrameContent[3][3] = {{0, 1, 2}, {1, 2, 3}, {2, 3, 4}}; +constexpr int kNumFrames = sizeof(kFrameContent) / sizeof(kFrameContent[0]); } // namespace class YuvFrameReaderTest : public ::testing::Test { @@ -38,50 +39,108 @@ class YuvFrameReaderTest : public ::testing::Test { ~YuvFrameReaderTest() override = default; void SetUp() override { - temp_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), - "yuv_frame_reader_unittest"); - FILE* dummy = fopen(temp_filename_.c_str(), "wb"); - fprintf(dummy, "%s", kInputFileContents.c_str()); - fclose(dummy); + filepath_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), + "yuv_frame_reader_unittest"); + FILE* file = fopen(filepath_.c_str(), "wb"); + fwrite(kFrameContent, 1, sizeof(kFrameContent), file); + fclose(file); - frame_reader_.reset( - new YuvFrameReaderImpl(temp_filename_, kFrameWidth, kFrameHeight)); - ASSERT_TRUE(frame_reader_->Init()); + reader_ = CreateYuvFrameReader(filepath_, kResolution); } - void TearDown() override { remove(temp_filename_.c_str()); } + void TearDown() override { remove(filepath_.c_str()); } - std::unique_ptr frame_reader_; - std::string temp_filename_; + std::string filepath_; + std::unique_ptr reader_; }; -TEST_F(YuvFrameReaderTest, 
InitSuccess) {} - -TEST_F(YuvFrameReaderTest, FrameLength) { - EXPECT_EQ(kFrameLength, frame_reader_->FrameLength()); +TEST_F(YuvFrameReaderTest, num_frames) { + EXPECT_EQ(kNumFrames, reader_->num_frames()); } -TEST_F(YuvFrameReaderTest, NumberOfFrames) { - EXPECT_EQ(1, frame_reader_->NumberOfFrames()); +TEST_F(YuvFrameReaderTest, PullFrame_frameContent) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(kFrameContent[0][0], *buffer->DataY()); + EXPECT_EQ(kFrameContent[0][1], *buffer->DataU()); + EXPECT_EQ(kFrameContent[0][2], *buffer->DataV()); } -TEST_F(YuvFrameReaderTest, ReadFrame) { - rtc::scoped_refptr buffer = frame_reader_->ReadFrame(); - ASSERT_TRUE(buffer); - // Expect I420 packed as YUV. - EXPECT_EQ(kInputFileContents[0], buffer->DataY()[0]); - EXPECT_EQ(kInputFileContents[1], buffer->DataY()[1]); - EXPECT_EQ(kInputFileContents[2], buffer->DataY()[2]); - EXPECT_EQ(kInputFileContents[3], buffer->DataY()[3]); - EXPECT_EQ(kInputFileContents[4], buffer->DataU()[0]); - EXPECT_EQ(kInputFileContents[5], buffer->DataV()[0]); - EXPECT_FALSE(frame_reader_->ReadFrame()); // End of file. 
+TEST_F(YuvFrameReaderTest, ReadFrame_randomOrder) { + std::vector expected_frames = {2, 0, 1}; + std::vector actual_frames; + for (int frame_num : expected_frames) { + rtc::scoped_refptr buffer = + reader_->ReadFrame(frame_num); + actual_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, actual_frames); } -TEST_F(YuvFrameReaderTest, ReadFrameUninitialized) { - YuvFrameReaderImpl file_reader(temp_filename_, kFrameWidth, kFrameHeight); - EXPECT_FALSE(file_reader.ReadFrame()); +TEST_F(YuvFrameReaderTest, PullFrame_scale) { + rtc::scoped_refptr buffer = reader_->PullFrame( + /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}), + FrameReader::kNoScale); + EXPECT_EQ(2, buffer->width()); + EXPECT_EQ(2, buffer->height()); } +class YuvFrameReaderRepeatModeTest + : public YuvFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(YuvFrameReaderRepeatModeTest, PullFrame) { + RepeatMode mode = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + reader_ = CreateYuvFrameReader(filepath_, kResolution, mode); + std::vector read_frames; + for (size_t i = 0; i < expected_frames.size(); ++i) { + rtc::scoped_refptr buffer = reader_->PullFrame(); + read_frames.push_back(*buffer->DataY()); + } + EXPECT_EQ(expected_frames, read_frames); +} + +INSTANTIATE_TEST_SUITE_P( + YuvFrameReaderTest, + YuvFrameReaderRepeatModeTest, + ::testing::ValuesIn( + {std::make_tuple(RepeatMode::kSingle, std::vector{0, 1, 2}), + std::make_tuple(RepeatMode::kRepeat, + std::vector{0, 1, 2, 0, 1, 2}), + std::make_tuple(RepeatMode::kPingPong, + std::vector{0, 1, 2, 1, 0, 1, 2})})); + +class YuvFrameReaderFramerateScaleTest + : public YuvFrameReaderTest, + public ::testing::WithParamInterface< + std::tuple>> {}; + +TEST_P(YuvFrameReaderFramerateScaleTest, PullFrame) { + Ratio framerate_scale = std::get<0>(GetParam()); + std::vector expected_frames = std::get<1>(GetParam()); + + std::vector actual_frames; + for 
(size_t i = 0; i < expected_frames.size(); ++i) { + int pulled_frame; + rtc::scoped_refptr buffer = + reader_->PullFrame(&pulled_frame, kResolution, framerate_scale); + actual_frames.push_back(pulled_frame); + } + EXPECT_EQ(expected_frames, actual_frames); +} + +INSTANTIATE_TEST_SUITE_P(YuvFrameReaderTest, + YuvFrameReaderFramerateScaleTest, + ::testing::ValuesIn({ + std::make_tuple(Ratio({.num = 1, .den = 2}), + std::vector{0, 2, 4}), + std::make_tuple(Ratio({.num = 2, .den = 3}), + std::vector{0, 1, 3, 4, 6}), + std::make_tuple(Ratio({.num = 2, .den = 1}), + std::vector{0, 0, 1, 1}), + })); + } // namespace test } // namespace webrtc diff --git a/tools_webrtc/android/build_aar.py b/tools_webrtc/android/build_aar.py index 42a902cafd..d910b39a7c 100755 --- a/tools_webrtc/android/build_aar.py +++ b/tools_webrtc/android/build_aar.py @@ -121,7 +121,7 @@ def _RunGN(args): def _RunNinja(output_directory, args): cmd = [ - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), '-C', + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', output_directory ] cmd.extend(args) diff --git a/tools_webrtc/autoroller/roll_deps.py b/tools_webrtc/autoroller/roll_deps.py index 20dc77f0b6..b8af88a50e 100755 --- a/tools_webrtc/autoroller/roll_deps.py +++ b/tools_webrtc/autoroller/roll_deps.py @@ -46,6 +46,7 @@ WEBRTC_ONLY_DEPS = [ 'src/testing', 'src/third_party', 'src/third_party/gtest-parallel', + 'src/third_party/pipewire/linux-amd64', 'src/tools', ] diff --git a/tools_webrtc/configure_pipewire.py b/tools_webrtc/configure_pipewire.py new file mode 100644 index 0000000000..79f8accbb4 --- /dev/null +++ b/tools_webrtc/configure_pipewire.py @@ -0,0 +1,72 @@ +#!/usr/bin/env vpython3 +# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. 
All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. +""" +This script is a wrapper that loads "pipewire" library. +""" + +import os +import subprocess +import sys + +_SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) +_SRC_DIR = os.path.dirname(_SCRIPT_DIR) + + +def _GetPipeWireDir(): + pipewire_dir = os.path.join(_SRC_DIR, 'third_party', 'pipewire', + 'linux-amd64') + + if not os.path.isdir(pipewire_dir): + pipewire_dir = None + + return pipewire_dir + + +def _ConfigurePipeWirePaths(path): + library_dir = os.path.join(path, 'lib64') + pipewire_binary_dir = os.path.join(path, 'bin') + pipewire_config_prefix = os.path.join(path, 'share', 'pipewire') + pipewire_module_dir = os.path.join(library_dir, 'pipewire-0.3') + spa_plugin_dir = os.path.join(library_dir, 'spa-0.2') + media_session_config_dir = os.path.join(pipewire_config_prefix, + 'media-session.d') + + env_vars = os.environ + env_vars['LD_LIBRARY_PATH'] = library_dir + env_vars['PIPEWIRE_CONFIG_PREFIX'] = pipewire_config_prefix + env_vars['PIPEWIRE_MODULE_DIR'] = pipewire_module_dir + env_vars['SPA_PLUGIN_DIR'] = spa_plugin_dir + env_vars['MEDIA_SESSION_CONFIG_DIR'] = media_session_config_dir + env_vars['PIPEWIRE_RUNTIME_DIR'] = '/tmp' + env_vars['PATH'] = env_vars['PATH'] + ':' + pipewire_binary_dir + + +def main(): + pipewire_dir = _GetPipeWireDir() + + if pipewire_dir is None: + print('configure-pipewire: Couldn\'t find directory %s' % pipewire_dir) + return 1 + + _ConfigurePipeWirePaths(pipewire_dir) + + pipewire_process = subprocess.Popen(["pipewire"], stdout=None) + pipewire_media_session_process = subprocess.Popen(["pipewire-media-session"], + stdout=None) + + return_value = subprocess.call(sys.argv[1:]) + + pipewire_media_session_process.terminate() + pipewire_process.terminate() + + return return_value + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tools_webrtc/flags_compatibility.py b/tools_webrtc/flags_compatibility.py deleted 
file mode 100755 index 72c66a594f..0000000000 --- a/tools_webrtc/flags_compatibility.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env vpython3 - -# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -import argparse -import logging -import subprocess -import sys - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--isolated-script-test-output') - parser.add_argument('--isolated-script-test-perf-output') - args, unrecognized_args = parser.parse_known_args() - - test_command = _ForcePythonInterpreter(unrecognized_args) - if args.isolated_script_test_output: - test_command += [ - '--isolated_script_test_output', args.isolated_script_test_output - ] - if args.isolated_script_test_perf_output: - test_command += [ - '--isolated_script_test_perf_output=' + - args.isolated_script_test_perf_output - ] - logging.info('Running %r', test_command) - - return subprocess.call(test_command) - - -def _ForcePythonInterpreter(cmd): - """Returns the fixed command line to call the right python executable.""" - out = cmd[:] - if len(out) > 0: - if out[0] == 'python': - out[0] = sys.executable - elif out[0].endswith('.py'): - out.insert(0, sys.executable) - return out - - -if __name__ == '__main__': - logging.basicConfig(level=logging.INFO) - sys.exit(main()) diff --git a/tools_webrtc/gtest-parallel-wrapper.py b/tools_webrtc/gtest-parallel-wrapper.py index 2972e6c9bf..a64c773638 100755 --- a/tools_webrtc/gtest-parallel-wrapper.py +++ b/tools_webrtc/gtest-parallel-wrapper.py @@ -63,7 +63,7 @@ Will be converted into: --test_artifacts_dir=SOME_OUTPUT_DIR/test_artifacts \ --some_flag=some_value \ --another_flag \ - 
--isolated_script_test_perf_output=SOME_OTHER_DIR \ + --isolated-script-test-perf-output=SOME_OTHER_DIR \ --foo=bar \ --baz @@ -155,32 +155,12 @@ def ParseArgs(argv=None): # know what will be the swarming output dir. parser.add_argument('--store-test-artifacts', action='store_true') - # No-sandbox is a Chromium-specific flag, ignore it. - # TODO(bugs.webrtc.org/8115): Remove workaround when fixed. - parser.add_argument('--no-sandbox', - action='store_true', - help=argparse.SUPPRESS) - parser.add_argument('executable') parser.add_argument('executable_args', nargs='*') options, unrecognized_args = parser.parse_known_args(argv) - webrtc_flags_to_change = { - '--isolated-script-test-perf-output': - '--isolated_script_test_perf_output', - '--isolated-script-test-output': '--isolated_script_test_output', - } - args_to_pass = [] - for arg in unrecognized_args: - if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())): - arg_split = arg.split('=') - args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' + - arg_split[1]) - else: - args_to_pass.append(arg) - - executable_args = options.executable_args + args_to_pass + executable_args = options.executable_args + unrecognized_args if options.store_test_artifacts: assert options.output_dir, ( diff --git a/tools_webrtc/gtest_parallel_wrapper_test.py b/tools_webrtc/gtest_parallel_wrapper_test.py index 609052d248..5fbd52e8db 100755 --- a/tools_webrtc/gtest_parallel_wrapper_test.py +++ b/tools_webrtc/gtest_parallel_wrapper_test.py @@ -146,7 +146,7 @@ class GtestParallelWrapperTest(unittest.TestCase): '--output_dir=' + output_dir, '--dump_json_test_results=SOME_DIR', 'some_test', '--', '--test_artifacts_dir=' + expected_artifacts_dir, '--some_flag=some_value', '--another_flag', - '--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar', + '--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar', '--baz' ]) self.assertEqual(result.gtest_parallel_args, expected) diff --git 
a/tools_webrtc/ios/build_ios_libs.py b/tools_webrtc/ios/build_ios_libs.py index e480781e7b..d2520ad563 100755 --- a/tools_webrtc/ios/build_ios_libs.py +++ b/tools_webrtc/ios/build_ios_libs.py @@ -198,7 +198,7 @@ def BuildWebRTC(output_dir, target_environment, target_arch, flavor, logging.info('Building target: %s', gn_target_name) cmd = [ - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', output_dir, gn_target_name, diff --git a/tools_webrtc/iwyu/apply-iwyu b/tools_webrtc/iwyu/apply-iwyu index 794756ba47..afa611e4fe 100755 --- a/tools_webrtc/iwyu/apply-iwyu +++ b/tools_webrtc/iwyu/apply-iwyu @@ -1,4 +1,4 @@ -#!/bin/sh +#!/usr/bin/env bash # # Run the include-what-you-use tool (iwyu) on a file in the webrtc source # directory. @@ -24,17 +24,30 @@ if [ $DEBUG -gt 0 ]; then set -x fi -IWYU_TOOL="${IWYU_TOOL:-/usr/bin/iwyu_tool}" -FIX_INCLUDE="${FIX_INCLUDE:-/usr/bin/fix_include}" -FIX_INCLUDE_ARGS='' -IWYU_TOOL_DIR="${IWYU_TOOL_DIR:-tools_webrtc/iwyu}" -COMPILE_COMMANDS='' - error() { echo "$*" >&2 exit 1 } +find_alternates() { + for name in "$@" + do + name_path=$(which "${name}") + if [ ! -z "${name_path}" ]; then + echo ${name_path} + return 0 + fi + done + error "Could not find any of the tools '$@' in PATH." + return 1 +} + +IWYU_TOOL=$(find_alternates iwyu_tool iwyu_tool.py) +FIX_INCLUDE=$(find_alternates fix_include fix_includes.py) +FIX_INCLUDE_ARGS='' +IWYU_TOOL_DIR="${IWYU_TOOL_DIR:-tools_webrtc/iwyu}" +COMPILE_COMMANDS='' + usage() { echo "Usage: $0 [ -c compile-commands-file.json ] [-r] file.cc" echo "Runs the IWYU and fix-include on a CC file and its associated .h file" @@ -81,26 +94,27 @@ else FILE_H="" fi +tmpfile=$(realpath $(mktemp iwyu.XXXXXXX)) +trap 'rm -f -- "${tmpfile}"' EXIT + # IWYU has a confusing set of exit codes. Discard it. 
"$IWYU_TOOL" -p "$COMPILE_COMMANDS" "$FILE_CC" -- -Xiwyu --no_fwd_decls \ -Xiwyu --mapping_file=../../$IWYU_TOOL_DIR/mappings.imp \ - >& /tmp/includefixes$$ || echo "IWYU done, code $?" + >& ${tmpfile} || echo "IWYU done, code $?" -if grep 'fatal error' /tmp/includefixes$$; then +if grep 'fatal error' ${tmpfile}; then echo "iwyu run failed" - cat /tmp/includefixes$$ - rm /tmp/includefixes$$ + cat ${tmpfile} exit 1 else if [ $DEBUG -gt 1 ]; then - cat /tmp/includefixes$$ + cat ${tmpfile} fi # In compile_commands.json, the file name is recorded # as a relative path to the build directory. pushd "$(dirname "$COMPILE_COMMANDS")" || error "pushd failed" - "$FIX_INCLUDE" $FIX_INCLUDE_ARGS < /tmp/includefixes$$ || echo "Some files modified" + "$FIX_INCLUDE" $FIX_INCLUDE_ARGS < ${tmpfile} || echo "Some files modified" popd - rm /tmp/includefixes$$ fi grep -v -f tools_webrtc/iwyu/iwyu-filter-list $FILE_CC > $FILE_CC.new diff --git a/tools_webrtc/iwyu/mappings.imp b/tools_webrtc/iwyu/mappings.imp index 9e1855adbb..a79997bdce 100644 --- a/tools_webrtc/iwyu/mappings.imp +++ b/tools_webrtc/iwyu/mappings.imp @@ -32,4 +32,7 @@ { symbol: ["std::unique_ptr", "public", "", "public"] }, # Needed to avoid { symbol: ["std::ostringstream", "public", "", "public"] }, + +{ ref: "../../buildtools/third_party/libc++/trunk/include/libcxx.imp" }, ] + diff --git a/tools_webrtc/mb/mb.py b/tools_webrtc/mb/mb.py index 620a0c1d26..762df9a05c 100755 --- a/tools_webrtc/mb/mb.py +++ b/tools_webrtc/mb/mb.py @@ -53,8 +53,9 @@ class WebRTCMetaBuildWrapper(mb.MetaBuildWrapper): is_android = 'target_os="android"' in vals['gn_args'] is_fuchsia = 'target_os="fuchsia"' in vals['gn_args'] - is_linux = self.platform.startswith('linux') and not is_android is_ios = 'target_os="ios"' in vals['gn_args'] + is_linux = self.platform.startswith('linux') and not is_android + is_win = self.platform.startswith('win') if test_type == 'nontest': self.WriteFailureAndRaise('We should not be isolating %s.' 
% target, @@ -81,27 +82,26 @@ class WebRTCMetaBuildWrapper(mb.MetaBuildWrapper): ] elif is_android: cmdline += [ - vpython_exe, '../../build/android/test_wrapper/logdog_wrapper.py', - '--target', target, '--logdog-bin-cmd', '../../bin/logdog_butler', - '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', - '--store-tombstones' + 'luci-auth', 'context', '--', vpython_exe, + '../../build/android/test_wrapper/logdog_wrapper.py', '--target', + target, '--logdog-bin-cmd', + '../../.task_template_packages/logdog_butler', '--logcat-output-file', + '${ISOLATED_OUTDIR}/logcats', '--store-tombstones' ] - elif is_ios or is_fuchsia: - cmdline += [ - vpython_exe, '../../tools_webrtc/flags_compatibility.py', - 'bin/run_%s' % target - ] - extra_files.append('../../tools_webrtc/flags_compatibility.py') - elif test_type == 'raw': - cmdline += [vpython_exe, '../../tools_webrtc/flags_compatibility.py'] - extra_files.append('../../tools_webrtc/flags_compatibility.py') - cmdline.append(_GetExecutable(target, self.platform)) + elif is_ios or is_fuchsia or test_type == 'raw': + if is_win: + cmdline += ['bin\\run_{}.bat'.format(target)] + else: + cmdline += ['bin/run_{}'.format(target)] else: if isolate_map[target].get('use_webcam', False): cmdline += [ vpython_exe, '../../tools_webrtc/ensure_webcam_is_running.py' ] extra_files.append('../../tools_webrtc/ensure_webcam_is_running.py') + if isolate_map[target].get('use_pipewire', False): + cmdline += [vpython_exe, '../../tools_webrtc/configure_pipewire.py'] + extra_files.append('../../tools_webrtc/configure_pipewire.py') # is_linux uses use_ozone and x11 by default. 
use_x11 = is_linux diff --git a/tools_webrtc/mb/mb_config.pyl b/tools_webrtc/mb/mb_config.pyl index a939a3b439..138ecf275c 100644 --- a/tools_webrtc/mb/mb_config.pyl +++ b/tools_webrtc/mb/mb_config.pyl @@ -31,9 +31,8 @@ }, 'client.webrtc': { # Android - 'Android32 (M Nexus5X)': 'android_release_bot_arm_reclient', - 'Android32 (M Nexus5X)(dbg)': 'android_debug_static_bot_arm', - 'Android32 (M Nexus5X)(reclient)': 'android_release_bot_arm_reclient', + 'Android32': 'android_release_bot_arm_reclient', + 'Android32 (dbg)': 'android_debug_static_bot_arm', 'Android32 (more configs)': { 'bwe_test_logging': 'bwe_test_logging_android_arm', 'dummy_audio_file_devices_no_protobuf': @@ -43,12 +42,13 @@ 'Android32 Builder arm': 'android_pure_release_bot_arm', 'Android32 Builder x86': 'android_release_bot_x86', 'Android32 Builder x86 (dbg)': 'android_debug_static_bot_x86', - 'Android64 (M Nexus5X)': 'android_release_bot_arm64', - 'Android64 (M Nexus5X)(dbg)': 'android_debug_static_bot_arm64', + 'Android64': 'android_release_bot_arm64', + 'Android64 (dbg)': 'android_debug_static_bot_arm64', 'Android64 Builder arm64': 'android_pure_release_bot_arm64', 'Android64 Builder x64 (dbg)': 'android_debug_static_bot_x64', # Fuchsia + 'Fuchsia Builder': 'release_bot_x64_fuchsia', 'Fuchsia Release': 'release_bot_x64_fuchsia', # Linux @@ -75,14 +75,12 @@ 'Linux64 Release': 'release_bot_x64_reclient', 'Linux64 Release (ARM)': 'release_bot_arm64', 'Linux64 Release (Libfuzzer)': 'libfuzzer_asan_release_bot_x64_reclient', - 'Linux64 Release (reclient)': 'release_bot_x64_reclient', # Mac 'Mac Asan': 'mac_asan_clang_release_bot_x64', 'Mac64 Builder': 'pure_release_bot_x64', 'Mac64 Debug': 'debug_bot_x64', 'Mac64 Release': 'release_bot_x64', - 'Mac64 Release (reclient)': 'release_bot_x64_reclient', 'MacARM64 M1 Release': 'release_bot_arm64', 'MacArm64 Builder': 'release_bot_arm64', @@ -93,18 +91,16 @@ 'dummy_audio_file_devices_no_protobuf_x86', 'rtti_no_sctp': 'rtti_no_sctp_no_unicode_win_x86', }, - 
'Win32 Builder (Clang)': 'win_clang_pure_release_bot_x86', 'Win32 Debug (Clang)': 'win_clang_debug_bot_x86', 'Win32 Release (Clang)': 'win_clang_release_bot_x86', 'Win64 ASan': 'win_asan_clang_release_bot_x64', + 'Win64 Builder (Clang)': 'win_clang_pure_release_bot_x64', 'Win64 Debug (Clang)': 'win_clang_debug_bot_x64', 'Win64 Release (Clang)': 'win_clang_release_bot_x64', - 'Win64 Release (Clang)(reclient)': 'win_clang_release_bot_x64_reclient', # iOS 'iOS64 Debug': 'ios_debug_bot_arm64', 'iOS64 Release': 'ios_release_bot_arm64', - 'iOS64 Release (reclient)': 'ios_release_bot_arm64_reclient', 'iOS64 Sim Debug (iOS 12)': 'ios_debug_bot_x64', 'iOS64 Sim Debug (iOS 13)': 'ios_debug_bot_x64', 'iOS64 Sim Debug (iOS 14)': 'ios_debug_bot_x64', @@ -130,13 +126,17 @@ # instead? 'Perf Android32 (M AOSP Nexus6)': 'release_bot_x64', 'Perf Android32 (M Nexus5)': 'release_bot_x64', + 'Perf Android32 (O Pixel2)': 'release_bot_x64', + 'Perf Android32 (R Pixel5)': 'release_bot_x64', 'Perf Android64 (M Nexus5X)': 'release_bot_x64', 'Perf Android64 (O Pixel2)': 'release_bot_x64', + 'Perf Android64 (R Pixel5)': 'release_bot_x64', + 'Perf Fuchsia': 'release_bot_x64_fuchsia', 'Perf Linux Bionic': 'release_bot_x64', 'Perf Linux Trusty': 'release_bot_x64', 'Perf Mac 11': 'release_bot_x64', 'Perf Mac M1 Arm64 12': 'release_bot_x64', - 'Perf Win7': 'release_bot_x64', + 'Perf Win 10': 'release_bot_x64', }, 'internal.client.webrtc': { 'iOS64 Debug': 'ios_internal_debug_bot_arm64', @@ -155,6 +155,7 @@ 'rtti_no_sctp': 'rtti_no_sctp_android_arm', }, 'android_arm_rel': 'android_release_bot_arm', + 'android_arm_rel_reclient': 'android_release_bot_arm_reclient', 'android_compile_arm64_dbg': 'android_debug_static_bot_arm64', 'android_compile_arm64_rel': 'android_pure_release_bot_arm64', 'android_compile_arm_dbg': 'android_debug_static_bot_arm', @@ -170,6 +171,7 @@ # iOS 'ios_compile_arm64_dbg': 'ios_debug_bot_arm64', 'ios_compile_arm64_rel': 'ios_release_bot_arm64', + 
'ios_compile_arm64_rel_reclient': 'ios_release_bot_arm64_reclient', 'ios_sim_x64_dbg_ios12': 'ios_debug_bot_x64', 'ios_sim_x64_dbg_ios13': 'ios_debug_bot_x64', 'ios_sim_x64_dbg_ios14': 'ios_debug_bot_x64', @@ -184,6 +186,7 @@ 'linux_compile_rel': 'pure_release_bot_x64', 'linux_compile_x86_dbg': 'debug_bot_x86', 'linux_compile_x86_rel': 'pure_release_bot_x86', + 'linux_coverage': 'code_coverage_bot_x64', 'linux_dbg': 'debug_bot_x64', 'linux_libfuzzer_rel': 'libfuzzer_asan_release_bot_x64', 'linux_more_configs': { @@ -194,6 +197,7 @@ }, 'linux_msan': 'msan_clang_release_bot_x64', 'linux_rel': 'release_bot_x64', + 'linux_rel_reclient': 'release_bot_x64_reclient', 'linux_tsan2': 'tsan_clang_release_bot_x64', 'linux_ubsan': 'ubsan_clang_release_bot_x64', 'linux_ubsan_vptr': 'ubsan_vptr_clang_release_bot_x64', @@ -208,15 +212,17 @@ 'mac_dbg_m1': 'debug_bot_arm64', 'mac_rel': 'release_bot_x64', 'mac_rel_m1': 'release_bot_arm64', + 'mac_rel_reclient': 'release_bot_x64_reclient', # Windows 'win_asan': 'win_asan_clang_release_bot_x64', 'win_compile_x64_clang_dbg': 'win_clang_debug_bot_x64', 'win_compile_x64_clang_rel': 'win_clang_release_bot_x64', + 'win_compile_x64_clang_rel_reclient': + 'win_clang_release_bot_x64_reclient', 'win_compile_x86_clang_dbg': 'win_clang_debug_bot_x86', 'win_compile_x86_clang_rel': 'win_clang_release_bot_x86', 'win_x64_clang_dbg': 'win_clang_debug_bot_x64', - 'win_x64_clang_dbg_win10': 'win_clang_debug_bot_x64', 'win_x64_clang_rel': 'win_clang_release_bot_x64', 'win_x86_clang_dbg': 'win_clang_debug_bot_x86', 'win_x86_clang_rel': 'win_clang_release_bot_x86', @@ -244,7 +250,8 @@ 'android_pure_release_bot_arm64': ['android', 'pure_release_bot', 'arm64'], 'android_release_bot_arm': ['android', 'release_bot', 'arm'], 'android_release_bot_arm64': ['android', 'release_bot', 'arm64'], - 'android_release_bot_arm_reclient': ['android', 'release_bot_reclient', 'arm'], + 'android_release_bot_arm_reclient': + ['android', 'release_bot_reclient', 'arm'], 
'android_release_bot_x64': ['android', 'release_bot', 'x64'], 'android_release_bot_x86': ['android', 'release_bot', 'x86'], 'asan_lsan_clang_release_bot_x64': @@ -253,9 +260,12 @@ ['android', 'debug_static_bot', 'arm', 'bwe_test_logging'], 'bwe_test_logging_x64': ['debug_bot', 'x64', 'bwe_test_logging'], 'bwe_test_logging_x86': ['debug_bot', 'x86', 'bwe_test_logging'], + 'code_coverage_bot_x64': [ + 'openh264', 'release_bot', 'x64', 'code_coverage', + 'partial_code_coverage_instrumentation' + ], 'codesearch_gen_linux_bot': ['openh264', 'debug_bot', 'minimal_symbols'], - # TODO(kjellander): Restore Goma for this when crbug.com/726706 is fixed. - 'debug_bot_arm': ['openh264', 'debug', 'arm'], + 'debug_bot_arm': ['openh264', 'debug_bot', 'arm'], 'debug_bot_arm64': ['openh264', 'debug_bot', 'arm64'], 'debug_bot_x64': ['openh264', 'debug_bot', 'x64'], 'debug_bot_x86': ['openh264', 'debug_bot', 'x86'], @@ -286,7 +296,8 @@ 'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'xctest', ], 'ios_release_bot_arm64_reclient': [ - 'ios', 'release_bot_reclient', 'arm64', 'no_ios_code_signing', 'xctest', + 'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'xctest', + 'no_goma', 'reclient', ], 'libfuzzer_asan_release_bot_x64': [ 'libfuzzer', 'asan', 'optimize_for_fuzzing', 'openh264', @@ -312,22 +323,21 @@ 'release_bot_arm64': ['openh264', 'release_bot', 'arm64'], 'release_bot_x64': ['openh264', 'release_bot', 'x64'], 'release_bot_x64_fuchsia': ['openh264', 'release_bot', 'x64', 'fuchsia'], - 'release_bot_x64_reclient': [ - 'openh264', 'release_bot_reclient', 'x64', - ], + 'release_bot_x64_reclient': ['openh264', 'release_bot_reclient', 'x64'], 'release_bot_x86': ['openh264', 'release_bot', 'x86'], - 'rtti_no_sctp_android_arm': [ - 'android', 'debug_static_bot', 'arm', 'rtti', 'no_sctp' - ], + 'rtti_no_sctp_android_arm': + ['android', 'debug_static_bot', 'arm', 'rtti', 'no_sctp'], 'rtti_no_sctp_no_unicode_win_x86': ['debug_bot', 'x86', 'rtti', 'no_sctp', 'win_undef_unicode'], 
'rtti_no_sctp_x64': ['debug_bot', 'x64', 'rtti', 'no_sctp'], 'tsan_clang_release_bot_x64': ['tsan', 'clang', 'openh264', 'pure_release_bot', 'x64'], - 'ubsan_clang_release_bot_x64': - ['ubsan', 'clang', 'openh264', 'pure_release_bot', 'x64'], - 'ubsan_vptr_clang_release_bot_x64': - ['ubsan_vptr', 'clang', 'openh264', 'pure_release_bot', 'x64'], + 'ubsan_clang_release_bot_x64': [ + 'ubsan', 'clang', 'openh264', 'pure_release_bot', 'x64' + ], + 'ubsan_vptr_clang_release_bot_x64': [ + 'ubsan_vptr', 'clang', 'openh264', 'pure_release_bot', 'x64' + ], 'win_asan_clang_release_bot_x64': [ 'asan', 'clang', @@ -349,11 +359,11 @@ 'debug_bot', 'x86', ], - 'win_clang_pure_release_bot_x86': [ + 'win_clang_pure_release_bot_x64': [ 'clang', 'openh264', 'pure_release_bot', - 'x86', + 'x64', ], 'win_clang_release_bot_x64': [ 'clang', @@ -364,8 +374,10 @@ 'win_clang_release_bot_x64_reclient': [ 'clang', 'openh264', - 'release_bot_reclient', + 'release_bot', 'x64', + 'no_goma', + 'reclient', ], 'win_clang_release_bot_x86': [ 'clang', @@ -397,6 +409,9 @@ 'clang': { 'gn_args': 'is_clang=true', }, + 'code_coverage': { + 'gn_args': 'use_clang_coverage=true', + }, 'dcheck_always_on': { 'gn_args': 'dcheck_always_on=true', }, @@ -440,7 +455,11 @@ 'gn_args': 'symbol_level=1', }, 'msan': { - 'gn_args': 'is_msan=true msan_track_origins=2', + 'gn_args': 'is_msan=true msan_track_origins=2' + ' instrumented_libraries_release = "xenial"', + }, + 'no_goma': { + 'gn_args': 'use_goma=false', }, 'no_ios_code_signing': { 'gn_args': 'ios_enable_code_signing=false', @@ -457,6 +476,10 @@ 'optimize_for_fuzzing': { 'gn_args': 'optimize_for_fuzzing=true', }, + 'partial_code_coverage_instrumentation': { + 'gn_args': + 'coverage_instrumentation_input_file="//.code-coverage/files_to_instrument.txt"' + }, # The 'pure_release_bot' configuration is for release bots that are doing a # 100% release build without DCHECKs while 'release_bot' is a partial # release configs since `dcheck_always_on` is set to true. 
diff --git a/tools_webrtc/mb/mb_unittest.py b/tools_webrtc/mb/mb_unittest.py index 109d2bcb2d..40c12eda17 100755 --- a/tools_webrtc/mb/mb_unittest.py +++ b/tools_webrtc/mb/mb_unittest.py @@ -91,10 +91,11 @@ class FakeMBW(mb.WebRTCMetaBuildWrapper): abpath = self._AbsPath(path) self.files[abpath] = contents - def Call(self, cmd, env=None, capture_output=True, stdin=None): + def Call(self, cmd, env=None, capture_output=True, input=None): + # pylint: disable=redefined-builtin del env del capture_output - del stdin + del input self.calls.append(cmd) if self.cmds: return self.cmds.pop(0) @@ -324,12 +325,15 @@ class UnitTest(unittest.TestCase): files, ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) self.assertEqual(command, [ + 'luci-auth', + 'context', + '--', 'vpython3', '../../build/android/test_wrapper/logdog_wrapper.py', '--target', 'foo_unittests', '--logdog-bin-cmd', - '../../bin/logdog_butler', + '../../.task_template_packages/logdog_butler', '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', '--store-tombstones', @@ -363,12 +367,15 @@ class UnitTest(unittest.TestCase): files, ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) self.assertEqual(command, [ + 'luci-auth', + 'context', + '--', 'vpython3', '../../build/android/test_wrapper/logdog_wrapper.py', '--target', 'foo_unittests', '--logdog-bin-cmd', - '../../bin/logdog_butler', + '../../.task_template_packages/logdog_butler', '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', '--store-tombstones', @@ -489,14 +496,9 @@ class UnitTest(unittest.TestCase): self.assertEqual(files, [ '../../.vpython3', '../../testing/test_env.py', - '../../tools_webrtc/flags_compatibility.py', 'foo_unittests', ]) - self.assertEqual(command, [ - 'vpython3', - '../../tools_webrtc/flags_compatibility.py', - './foo_unittests', - ]) + self.assertEqual(command, ['bin/run_foo_unittests']) def test_gen_non_parallel_console_test_launcher(self): test_files = { diff --git 
a/tools_webrtc/presubmit_checks_lib/build_helpers.py b/tools_webrtc/presubmit_checks_lib/build_helpers.py index 86fc1a0bbd..3386d6d40c 100644 --- a/tools_webrtc/presubmit_checks_lib/build_helpers.py +++ b/tools_webrtc/presubmit_checks_lib/build_helpers.py @@ -69,7 +69,7 @@ def RunGnCheck(root_dir=None): def RunNinjaCommand(args, root_dir=None): """Runs ninja quietly. Any failure (e.g. clang not found) is silently discarded, since this is unlikely an error in submitted CL.""" - command = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja')] + args + command = [os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja')] + args p = subprocess.Popen(command, cwd=root_dir, stdout=subprocess.PIPE, diff --git a/tools_webrtc/ubsan/suppressions.txt b/tools_webrtc/ubsan/suppressions.txt index dc76f38c20..2ece795570 100644 --- a/tools_webrtc/ubsan/suppressions.txt +++ b/tools_webrtc/ubsan/suppressions.txt @@ -6,10 +6,6 @@ # the RTC_NO_SANITIZE macro. Please think twice before adding new exceptions. ############################################################################# -# YASM does some funny things that UBsan doesn't like. -# https://crbug.com/489901 -src:*/third_party/yasm/* - # OpenH264 triggers some errors that are out of our control. src:*/third_party/ffmpeg/libavcodec/* src:*/third_party/openh264/* @@ -22,3 +18,9 @@ src:*/third_party/libvpx/source/libvpx/vp8/* ############################################################################# # Ignore system libraries. src:*/usr/* + +############################################################################# +[alignment] +# Libaom and libsrtp are doing unaligned memory access. 
+src:*/third_party/libaom/source/libaom/* +src:*/third_party/libsrtp/srtp/srtp.c diff --git a/video/BUILD.gn b/video/BUILD.gn index 24ca9a18b5..27804f1dd6 100644 --- a/video/BUILD.gn +++ b/video/BUILD.gn @@ -15,7 +15,9 @@ rtc_library("video_stream_encoder_interface") { ] deps = [ "../api:fec_controller_api", + "../api:rtc_error", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api/adaptation:resource_adaptation_api", "../api/units:data_rate", @@ -407,6 +409,7 @@ rtc_library("video_stream_encoder_impl") { ":video_stream_encoder_interface", "../api:field_trials_view", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:sequence_checker", "../api/adaptation:resource_adaptation_api", "../api/task_queue:pending_task_safety_flag", @@ -426,6 +429,7 @@ rtc_library("video_stream_encoder_impl") { "../api/video_codecs:video_codecs_api", "../call/adaptation:resource_adaptation", "../common_video", + "../media:rtc_media_base", "../modules:module_api_public", "../modules/video_coding", "../modules/video_coding:video_codec_interface", @@ -469,6 +473,7 @@ rtc_library("video_stream_encoder_impl") { "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/cleanup", + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -625,6 +630,9 @@ if (rtc_include_tests) { "../api:simulated_network_api", "../api:time_controller", "../api/test/metrics:global_metrics_logger_and_exporter", + "../api/test/pclf:media_configuration", + "../api/test/pclf:media_quality_test_params", + "../api/test/pclf:peer_configurer", "../api/video_codecs:video_codecs_api", "../call:simulated_network", "../modules/video_coding:webrtc_vp9", @@ -809,6 +817,7 @@ if (rtc_include_tests) { ":video_stream_buffer_controller", ":video_stream_decoder_impl", ":video_stream_encoder_impl", + ":video_stream_encoder_interface", 
"../api:create_frame_generator", "../api:fake_frame_decryptor", "../api:fake_frame_encryptor", @@ -850,6 +859,7 @@ if (rtc_include_tests) { "../api/video:video_frame_type", "../api/video:video_rtp_headers", "../api/video/test:video_frame_matchers", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../api/video_codecs:vp8_temporal_layers_factory", "../call:call_interfaces", diff --git a/video/decode_synchronizer.cc b/video/decode_synchronizer.cc index a86066800f..7d4da3d47a 100644 --- a/video/decode_synchronizer.cc +++ b/video/decode_synchronizer.cc @@ -106,6 +106,7 @@ DecodeSynchronizer::DecodeSynchronizer(Clock* clock, } DecodeSynchronizer::~DecodeSynchronizer() { + RTC_DCHECK_RUN_ON(worker_queue_); RTC_DCHECK(schedulers_.empty()); } @@ -117,7 +118,7 @@ DecodeSynchronizer::CreateSynchronizedFrameScheduler() { // If this is the first `scheduler` added, start listening to the metronome. if (inserted && schedulers_.size() == 1) { RTC_DLOG(LS_VERBOSE) << "Listening to metronome"; - metronome_->AddListener(this); + ScheduleNextTick(); } return std::move(scheduler); @@ -160,12 +161,16 @@ void DecodeSynchronizer::RemoveFrameScheduler( schedulers_.erase(it); // If there are no more schedulers active, stop listening for metronome ticks. 
if (schedulers_.empty()) { - RTC_DLOG(LS_VERBOSE) << "Not listening to metronome"; - metronome_->RemoveListener(this); expected_next_tick_ = Timestamp::PlusInfinity(); } } +void DecodeSynchronizer::ScheduleNextTick() { + RTC_DCHECK_RUN_ON(worker_queue_); + metronome_->RequestCallOnNextTick( + SafeTask(safety_.flag(), [this] { OnTick(); })); +} + void DecodeSynchronizer::OnTick() { RTC_DCHECK_RUN_ON(worker_queue_); expected_next_tick_ = clock_->CurrentTime() + metronome_->TickPeriod(); @@ -177,10 +182,9 @@ void DecodeSynchronizer::OnTick() { std::move(scheduled_frame).RunFrameReleaseCallback(); } } -} -TaskQueueBase* DecodeSynchronizer::OnTickTaskQueue() { - return worker_queue_; + if (!schedulers_.empty()) + ScheduleNextTick(); } } // namespace webrtc diff --git a/video/decode_synchronizer.h b/video/decode_synchronizer.h index 26e6fdf31d..c6f8efdb29 100644 --- a/video/decode_synchronizer.h +++ b/video/decode_synchronizer.h @@ -53,12 +53,12 @@ namespace webrtc { // // DecodeSynchronizer is single threaded - all method calls must run on the // `worker_queue_`. -class DecodeSynchronizer : private Metronome::TickListener { +class DecodeSynchronizer { public: DecodeSynchronizer(Clock* clock, Metronome* metronome, TaskQueueBase* worker_queue); - ~DecodeSynchronizer() override; + ~DecodeSynchronizer(); DecodeSynchronizer(const DecodeSynchronizer&) = delete; DecodeSynchronizer& operator=(const DecodeSynchronizer&) = delete; @@ -119,9 +119,8 @@ class DecodeSynchronizer : private Metronome::TickListener { void OnFrameScheduled(SynchronizedFrameDecodeScheduler* scheduler); void RemoveFrameScheduler(SynchronizedFrameDecodeScheduler* scheduler); - // Metronome::TickListener implementation. 
- void OnTick() override; - TaskQueueBase* OnTickTaskQueue() override; + void ScheduleNextTick(); + void OnTick(); Clock* const clock_; TaskQueueBase* const worker_queue_; @@ -130,6 +129,7 @@ class DecodeSynchronizer : private Metronome::TickListener { Timestamp expected_next_tick_ = Timestamp::PlusInfinity(); std::set schedulers_ RTC_GUARDED_BY(worker_queue_); + ScopedTaskSafetyDetached safety_; }; } // namespace webrtc diff --git a/video/decode_synchronizer_unittest.cc b/video/decode_synchronizer_unittest.cc index 81d63029a9..7a0d833812 100644 --- a/video/decode_synchronizer_unittest.cc +++ b/video/decode_synchronizer_unittest.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/functional/any_invocable.h" #include "api/metronome/test/fake_metronome.h" #include "api/units/time_delta.h" #include "test/gmock.h" @@ -25,9 +26,20 @@ using ::testing::_; using ::testing::Eq; +using ::testing::Invoke; +using ::testing::Return; namespace webrtc { +class MockMetronome : public Metronome { + public: + MOCK_METHOD(void, + RequestCallOnNextTick, + (absl::AnyInvocable callback), + (override)); + MOCK_METHOD(TimeDelta, TickPeriod, (), (const override)); +}; + class DecodeSynchronizerTest : public ::testing::Test { public: static constexpr TimeDelta kTickPeriod = TimeDelta::Millis(33); @@ -215,18 +227,26 @@ TEST_F(DecodeSynchronizerTest, FramesNotReleasedAfterStop) { time_controller_.AdvanceTime(TimeDelta::Zero()); } -TEST_F(DecodeSynchronizerTest, MetronomeNotListenedWhenNoStreamsAreActive) { - EXPECT_EQ(0u, metronome_.NumListeners()); - +TEST(DecodeSynchronizerStandaloneTest, + MetronomeNotListenedWhenNoStreamsAreActive) { + GlobalSimulatedTimeController time_controller(Timestamp::Millis(4711)); + Clock* clock(time_controller.GetClock()); + MockMetronome metronome; + ON_CALL(metronome, TickPeriod).WillByDefault(Return(TimeDelta::Seconds(1))); + DecodeSynchronizer decode_synchronizer_(clock, &metronome, + time_controller.GetMainThread()); + absl::AnyInvocable callback; + 
EXPECT_CALL(metronome, RequestCallOnNextTick) + .WillOnce(Invoke([&callback](absl::AnyInvocable cb) { + callback = std::move(cb); + })); auto scheduler = decode_synchronizer_.CreateSynchronizedFrameScheduler(); - EXPECT_EQ(1u, metronome_.NumListeners()); auto scheduler2 = decode_synchronizer_.CreateSynchronizedFrameScheduler(); - EXPECT_EQ(1u, metronome_.NumListeners()); - scheduler->Stop(); - EXPECT_EQ(1u, metronome_.NumListeners()); scheduler2->Stop(); - EXPECT_EQ(0u, metronome_.NumListeners()); + time_controller.AdvanceTime(TimeDelta::Seconds(1)); + ASSERT_TRUE(callback); + (std::move)(callback)(); } } // namespace webrtc diff --git a/video/encoder_rtcp_feedback_unittest.cc b/video/encoder_rtcp_feedback_unittest.cc index 4cbb747e51..f1ac65d48f 100644 --- a/video/encoder_rtcp_feedback_unittest.cc +++ b/video/encoder_rtcp_feedback_unittest.cc @@ -16,6 +16,8 @@ #include "test/gtest.h" #include "video/test/mock_video_stream_encoder.h" +using ::testing::_; + namespace webrtc { class VieKeyRequestTest : public ::testing::Test { @@ -38,18 +40,18 @@ class VieKeyRequestTest : public ::testing::Test { }; TEST_F(VieKeyRequestTest, CreateAndTriggerRequests) { - EXPECT_CALL(encoder_, SendKeyFrame()).Times(1); + EXPECT_CALL(encoder_, SendKeyFrame(_)).Times(1); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); } TEST_F(VieKeyRequestTest, TooManyOnReceivedIntraFrameRequest) { - EXPECT_CALL(encoder_, SendKeyFrame()).Times(1); + EXPECT_CALL(encoder_, SendKeyFrame(_)).Times(1); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); simulated_clock_.AdvanceTimeMilliseconds(10); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); - EXPECT_CALL(encoder_, SendKeyFrame()).Times(1); + EXPECT_CALL(encoder_, SendKeyFrame(_)).Times(1); simulated_clock_.AdvanceTimeMilliseconds(300); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); diff --git 
a/video/end_to_end_tests/fec_tests.cc b/video/end_to_end_tests/fec_tests.cc index 03c966c23e..ff85dde53b 100644 --- a/video/end_to_end_tests/fec_tests.cc +++ b/video/end_to_end_tests/fec_tests.cc @@ -159,7 +159,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, static constexpr uint32_t kFlexfecLocalSsrc = 456; explicit FlexfecRenderObserver(bool enable_nack, bool expect_flexfec_rtcp) - : test::EndToEndTest(test::CallTest::kDefaultTimeout), + : test::EndToEndTest(test::CallTest::kLongTimeout), enable_nack_(enable_nack), expect_flexfec_rtcp_(expect_flexfec_rtcp), received_flexfec_rtcp_(false), diff --git a/video/end_to_end_tests/stats_tests.cc b/video/end_to_end_tests/stats_tests.cc index 4c8f21a585..2166916cad 100644 --- a/video/end_to_end_tests/stats_tests.cc +++ b/video/end_to_end_tests/stats_tests.cc @@ -101,6 +101,8 @@ TEST_F(StatsEndToEndTest, GetStats) { send_stats_filled_["DecoderImplementationName"] |= stats.decoder_implementation_name == test::FakeDecoder::kImplementationName; + receive_stats_filled_["PowerEfficientDecoder"] = + stats.power_efficient_decoder.has_value(); receive_stats_filled_["RenderDelayAsHighAsExpected"] |= stats.render_delay_ms >= kExpectedRenderDelayMs; @@ -160,6 +162,9 @@ TEST_F(StatsEndToEndTest, GetStats) { stats.encoder_implementation_name == test::FakeEncoder::kImplementationName; + send_stats_filled_["PowerEfficientEncoder"] |= + stats.power_efficient_encoder == true; + for (const auto& kv : stats.substreams) { if (expected_send_ssrcs_.find(kv.first) == expected_send_ssrcs_.end()) continue; // Probably RTX. 
diff --git a/video/pc_full_stack_tests.cc b/video/pc_full_stack_tests.cc index 715f3993ba..83b06830e0 100644 --- a/video/pc_full_stack_tests.cc +++ b/video/pc_full_stack_tests.cc @@ -19,6 +19,9 @@ #include "api/test/frame_generator_interface.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/network_emulation_manager.h" +#include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" +#include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" @@ -33,21 +36,14 @@ namespace webrtc { -using EmulatedSFUConfig = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::EmulatedSFUConfig; -using PeerConfigurer = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::PeerConfigurer; -using RunParams = webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::RunParams; -using VideoConfig = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; -using AudioConfig = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; -using ScreenShareConfig = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::ScreenShareConfig; -using VideoSimulcastConfig = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoSimulcastConfig; -using VideoCodecConfig = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoCodecConfig; +using ::webrtc::webrtc_pc_e2e::AudioConfig; +using ::webrtc::webrtc_pc_e2e::EmulatedSFUConfig; +using ::webrtc::webrtc_pc_e2e::PeerConfigurer; +using ::webrtc::webrtc_pc_e2e::RunParams; +using ::webrtc::webrtc_pc_e2e::ScreenShareConfig; +using ::webrtc::webrtc_pc_e2e::VideoCodecConfig; +using ::webrtc::webrtc_pc_e2e::VideoConfig; +using ::webrtc::webrtc_pc_e2e::VideoSimulcastConfig; namespace { @@ -63,10 +59,14 @@ CreateTestFixture(const std::string& test_case_name, auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture( test_case_name, 
time_controller, /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr); - fixture->AddPeer(network_links.first->network_dependencies(), - alice_configurer); - fixture->AddPeer(network_links.second->network_dependencies(), - bob_configurer); + auto alice = std::make_unique( + network_links.first->network_dependencies()); + auto bob = std::make_unique( + network_links.second->network_dependencies()); + alice_configurer(alice.get()); + bob_configurer(bob.get()); + fixture->AddPeer(std::move(alice)); + fixture->AddPeer(std::move(bob)); fixture->AddQualityMetricsReporter( std::make_unique( network_links.first, network_links.second, @@ -97,10 +97,8 @@ std::vector ParameterizedTestParams() { // Use the network thread as worker thread. // Use the worker thread for sending packets. // https://bugs.chromium.org/p/webrtc/issues/detail?id=14502 - {// TODO(webrtc:14502): Enable field trial soon but let it first run a - // couple of times to get a baseline.. - // .use_network_thread_as_worker_thread = true, - // .field_trials = "WebRTC-SendPacketsOnWorkerThread/Enabled/", + {.use_network_thread_as_worker_thread = true, + .field_trials = "WebRTC-SendPacketsOnWorkerThread/Enabled/", .test_case_name_postfix = "_ReducedThreads"}}; } diff --git a/video/receive_statistics_proxy2.cc b/video/receive_statistics_proxy2.cc index a3a7bca8ef..297f5d3de9 100644 --- a/video/receive_statistics_proxy2.cc +++ b/video/receive_statistics_proxy2.cc @@ -647,13 +647,16 @@ void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) { })); } -void ReceiveStatisticsProxy::OnDecoderImplementationName( - const char* implementation_name) { +void ReceiveStatisticsProxy::OnDecoderInfo( + const VideoDecoder::DecoderInfo& decoder_info) { RTC_DCHECK_RUN_ON(&decode_queue_); worker_thread_->PostTask(SafeTask( - task_safety_.flag(), [name = std::string(implementation_name), this]() { + task_safety_.flag(), + [this, name = decoder_info.implementation_name, + is_hardware_accelerated = 
decoder_info.is_hardware_accelerated]() { RTC_DCHECK_RUN_ON(&main_thread_); stats_.decoder_implementation_name = name; + stats_.power_efficient_decoder = is_hardware_accelerated; })); } diff --git a/video/receive_statistics_proxy2.h b/video/receive_statistics_proxy2.h index 19364fe845..1a2bb77fa6 100644 --- a/video/receive_statistics_proxy2.h +++ b/video/receive_statistics_proxy2.h @@ -21,6 +21,7 @@ #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/units/timestamp.h" +#include "api/video_codecs/video_decoder.h" #include "call/video_receive_stream.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_coding_defines.h" @@ -75,7 +76,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, double estimated_freq_khz); void OnRenderedFrame(const VideoFrameMetaData& frame_meta); void OnIncomingPayloadType(int payload_type); - void OnDecoderImplementationName(const char* implementation_name); + void OnDecoderInfo(const VideoDecoder::DecoderInfo& decoder_info); void OnPreDecode(VideoCodecType codec_type, int qp); diff --git a/video/receive_statistics_proxy2_unittest.cc b/video/receive_statistics_proxy2_unittest.cc index 5061552eb9..f0869c4341 100644 --- a/video/receive_statistics_proxy2_unittest.cc +++ b/video/receive_statistics_proxy2_unittest.cc @@ -46,9 +46,9 @@ class ReceiveStatisticsProxy2Test : public ::testing::Test { public: ReceiveStatisticsProxy2Test() : time_controller_(Timestamp::Millis(1234)) { metrics::Reset(); - statistics_proxy_.reset( - new ReceiveStatisticsProxy(kRemoteSsrc, time_controller_.GetClock(), - time_controller_.GetMainThread())); + statistics_proxy_ = std::make_unique( + kRemoteSsrc, time_controller_.GetClock(), + time_controller_.GetMainThread()); } ~ReceiveStatisticsProxy2Test() override { statistics_proxy_.reset(); } @@ -578,12 +578,19 @@ TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsIncomingPayloadType) { 
EXPECT_EQ(kPayloadType, statistics_proxy_->GetStats().current_payload_type); } -TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsDecoderImplementationName) { - const char* kName = "decoderName"; - statistics_proxy_->OnDecoderImplementationName(kName); +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsDecoderInfo) { + auto init_stats = statistics_proxy_->GetStats(); + EXPECT_EQ(init_stats.decoder_implementation_name, "unknown"); + EXPECT_EQ(init_stats.power_efficient_decoder, absl::nullopt); + + const VideoDecoder::DecoderInfo decoder_info{ + .implementation_name = "decoderName", .is_hardware_accelerated = true}; + statistics_proxy_->OnDecoderInfo(decoder_info); time_controller_.AdvanceTime(TimeDelta::Zero()); - EXPECT_STREQ( - kName, statistics_proxy_->GetStats().decoder_implementation_name.c_str()); + auto stats = statistics_proxy_->GetStats(); + EXPECT_EQ(decoder_info.implementation_name, + stats.decoder_implementation_name); + EXPECT_TRUE(stats.power_efficient_decoder); } TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsOnCompleteFrame) { @@ -1746,12 +1753,7 @@ TEST_P(ReceiveStatisticsProxy2TestWithContent, DownscalesReported) { statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); const int kExpectedDownscales = 30; // 2 per 4 seconds = 30 per minute. - if (videocontenttypehelpers::IsScreenshare(content_type_)) { - EXPECT_METRIC_EQ( - kExpectedDownscales, - metrics::MinSample("WebRTC.Video.Screenshare." 
- "NumberResolutionDownswitchesPerMinute")); - } else { + if (!videocontenttypehelpers::IsScreenshare(content_type_)) { EXPECT_METRIC_EQ(kExpectedDownscales, metrics::MinSample( "WebRTC.Video.NumberResolutionDownswitchesPerMinute")); diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc index 2d82bcf78c..169aeb4700 100644 --- a/video/rtp_video_stream_receiver2.cc +++ b/video/rtp_video_stream_receiver2.cc @@ -1047,6 +1047,7 @@ void RtpVideoStreamReceiver2::SetProtectionPayloadTypes( RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK(red_payload_type >= -1 && red_payload_type < 0x80); RTC_DCHECK(ulpfec_payload_type >= -1 && ulpfec_payload_type < 0x80); + red_payload_type_ = red_payload_type; ulpfec_receiver_ = MaybeConstructUlpfecReceiver( config_.rtp.remote_ssrc, red_payload_type, ulpfec_payload_type, config_.rtp.extensions, this, clock_); diff --git a/video/rtp_video_stream_receiver2_unittest.cc b/video/rtp_video_stream_receiver2_unittest.cc index 325188e93c..2ffe4788a1 100644 --- a/video/rtp_video_stream_receiver2_unittest.cc +++ b/video/rtp_video_stream_receiver2_unittest.cc @@ -371,6 +371,16 @@ TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) { video_header); } +TEST_F(RtpVideoStreamReceiver2Test, SetProtectionPayloadTypes) { + EXPECT_NE(rtp_video_stream_receiver_->red_payload_type(), 104); + EXPECT_NE(rtp_video_stream_receiver_->ulpfec_payload_type(), 107); + + rtp_video_stream_receiver_->SetProtectionPayloadTypes(104, 107); + + EXPECT_EQ(rtp_video_stream_receiver_->red_payload_type(), 104); + EXPECT_EQ(rtp_video_stream_receiver_->ulpfec_payload_type(), 107); +} + TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) { constexpr uint64_t kAbsoluteCaptureTimestamp = 12; constexpr int kId0 = 1; diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate.cc index e6f33262b2..16015beee5 100644 --- 
a/video/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/video/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -26,7 +26,7 @@ class TransformableVideoReceiverFrame TransformableVideoReceiverFrame(std::unique_ptr frame, uint32_t ssrc) : frame_(std::move(frame)), - metadata_(frame_->GetRtpVideoHeader()), + metadata_(frame_->GetRtpVideoHeader().GetAsMetadata()), ssrc_(ssrc) {} ~TransformableVideoReceiverFrame() override = default; diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc index 1588aad221..b6c2d60a73 100644 --- a/video/send_statistics_proxy.cc +++ b/video/send_statistics_proxy.cc @@ -985,6 +985,8 @@ void SendStatisticsProxy::OnSendEncodedImage( stats->frames_encoded++; stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms - encoded_image.timing_.encode_start_ms; + if (codec_info) + stats->scalability_mode = codec_info->scalability_mode; // Report resolution of the top spatial layer. bool is_top_spatial_layer = codec_info == nullptr || codec_info->end_of_picture; @@ -1053,11 +1055,12 @@ void SendStatisticsProxy::OnSendEncodedImage( } void SendStatisticsProxy::OnEncoderImplementationChanged( - const std::string& implementation_name) { + EncoderImplementation implementation) { MutexLock lock(&mutex_); encoder_changed_ = EncoderChangeEvent{stats_.encoder_implementation_name, - implementation_name}; - stats_.encoder_implementation_name = implementation_name; + implementation.name}; + stats_.encoder_implementation_name = implementation.name; + stats_.power_efficient_encoder = implementation.is_hardware_accelerated; } int SendStatisticsProxy::GetInputFrameRate() const { diff --git a/video/send_statistics_proxy.h b/video/send_statistics_proxy.h index 7f862bb88b..4203b1c873 100644 --- a/video/send_statistics_proxy.h +++ b/video/send_statistics_proxy.h @@ -62,7 +62,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, const CodecSpecificInfo* codec_info) override; void 
OnEncoderImplementationChanged( - const std::string& implementation_name) override; + EncoderImplementation implementation) override; // Used to update incoming frame rate. void OnIncomingFrame(int width, int height) override; diff --git a/video/send_statistics_proxy_unittest.cc b/video/send_statistics_proxy_unittest.cc index 72e8eb13e3..af3b0208e2 100644 --- a/video/send_statistics_proxy_unittest.cc +++ b/video/send_statistics_proxy_unittest.cc @@ -21,15 +21,21 @@ #include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" #include "rtc_base/fake_clock.h" #include "system_wrappers/include/metrics.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" #include "video/config/video_encoder_config.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { namespace { + +using ::testing::Optional; + const uint32_t kFirstSsrc = 17; const uint32_t kSecondSsrc = 42; const uint32_t kFirstRtxSsrc = 18; @@ -395,6 +401,34 @@ TEST_F(SendStatisticsProxyTest, OnSendEncodedImageWithoutQpQpSumWontExist) { statistics_proxy_->GetStats().substreams[ssrc].qp_sum); } +TEST_F(SendStatisticsProxyTest, + OnSendEncodedImageSetsScalabilityModeOfCurrentLayer) { + EncodedImage encoded_image; + CodecSpecificInfo codec_info; + ScalabilityMode layer0_mode = ScalabilityMode::kL1T1; + ScalabilityMode layer1_mode = ScalabilityMode::kL1T3; + auto ssrc0 = config_.rtp.ssrcs[0]; + auto ssrc1 = config_.rtp.ssrcs[1]; + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode); + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode); + encoded_image.SetSpatialIndex(0); + codec_info.scalability_mode = layer0_mode; + statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); + 
EXPECT_THAT(statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode, + layer0_mode); + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode); + encoded_image.SetSpatialIndex(1); + codec_info.scalability_mode = layer1_mode; + statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); + EXPECT_THAT(statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode, + layer0_mode); + EXPECT_THAT(statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode, + layer1_mode); +} + TEST_F(SendStatisticsProxyTest, TotalEncodedBytesTargetFirstFrame) { const uint32_t kTargetBytesPerSecond = 100000; statistics_proxy_->OnSetEncoderTargetRate(kTargetBytesPerSecond * 8); @@ -2811,8 +2845,13 @@ TEST_F(SendStatisticsProxyTest, FecBitrateNotReportedWhenNotEnabled) { TEST_F(SendStatisticsProxyTest, GetStatsReportsEncoderImplementationName) { const std::string kName = "encoderName"; - statistics_proxy_->OnEncoderImplementationChanged(kName); + statistics_proxy_->OnEncoderImplementationChanged(EncoderImplementation{ + .name = kName, + .is_hardware_accelerated = true, + }); EXPECT_EQ(kName, statistics_proxy_->GetStats().encoder_implementation_name); + EXPECT_THAT(statistics_proxy_->GetStats().power_efficient_encoder, + ::testing::IsTrue()); } TEST_F(SendStatisticsProxyTest, Vp9SvcLowSpatialLayerDoesNotUpdateResolution) { @@ -2867,7 +2906,8 @@ class ForcedFallbackTest : public SendStatisticsProxyTest { protected: void InsertEncodedFrames(int num_frames, int interval_ms) { - statistics_proxy_->OnEncoderImplementationChanged(codec_name_); + statistics_proxy_->OnEncoderImplementationChanged( + {.name = codec_name_, .is_hardware_accelerated = false}); // First frame is not updating stats, insert initial frame. 
if (statistics_proxy_->GetStats().frames_encoded == 0) { diff --git a/video/test/mock_video_stream_encoder.h b/video/test/mock_video_stream_encoder.h index ff246df253..946f45cc76 100644 --- a/video/test/mock_video_stream_encoder.h +++ b/video/test/mock_video_stream_encoder.h @@ -34,7 +34,10 @@ class MockVideoStreamEncoder : public VideoStreamEncoderInterface { (override)); MOCK_METHOD(void, SetSink, (EncoderSink*, bool), (override)); MOCK_METHOD(void, SetStartBitrate, (int), (override)); - MOCK_METHOD(void, SendKeyFrame, (), (override)); + MOCK_METHOD(void, + SendKeyFrame, + (const std::vector&), + (override)); MOCK_METHOD(void, OnLossNotification, (const VideoEncoder::LossNotification&), @@ -52,12 +55,20 @@ class MockVideoStreamEncoder : public VideoStreamEncoderInterface { MOCK_METHOD(void, MockedConfigureEncoder, (const VideoEncoderConfig&, size_t)); + MOCK_METHOD(void, + MockedConfigureEncoder, + (const VideoEncoderConfig&, size_t, SetParametersCallback)); // gtest generates implicit copy which is not allowed on VideoEncoderConfig, // so we can't mock ConfigureEncoder directly. 
void ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) { MockedConfigureEncoder(config, max_data_payload_length); } + void ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback) { + MockedConfigureEncoder(config, max_data_payload_length); + } }; } // namespace webrtc diff --git a/video/video_quality_observer2.cc b/video/video_quality_observer2.cc index 0751d3f4ed..0afc2f5235 100644 --- a/video/video_quality_observer2.cc +++ b/video/video_quality_observer2.cc @@ -109,11 +109,13 @@ void VideoQualityObserver::UpdateHistograms(bool screenshare) { int num_resolution_downgrades_per_minute = num_resolution_downgrades_ * 60000 / video_duration_ms; - RTC_HISTOGRAM_COUNTS_SPARSE_100( - uma_prefix + ".NumberResolutionDownswitchesPerMinute", - num_resolution_downgrades_per_minute); - log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute " - << num_resolution_downgrades_per_minute << "\n"; + if (!screenshare) { + RTC_HISTOGRAM_COUNTS_SPARSE_100( + uma_prefix + ".NumberResolutionDownswitchesPerMinute", + num_resolution_downgrades_per_minute); + log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute " + << num_resolution_downgrades_per_minute << "\n"; + } int num_freezes_per_minute = freezes_durations_.NumSamples() * 60000 / video_duration_ms; diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc index dc0eef4363..3bca6f63ff 100644 --- a/video/video_receive_stream2.cc +++ b/video/video_receive_stream2.cc @@ -66,10 +66,6 @@ namespace { constexpr TimeDelta kMinBaseMinimumDelay = TimeDelta::Zero(); constexpr TimeDelta kMaxBaseMinimumDelay = TimeDelta::Seconds(10); -// Create no decoders before the stream starts. All decoders are created on -// demand when we receive payload data of the corresponding type. -constexpr int kDefaultMaximumPreStreamDecoders = 0; - // Concrete instance of RecordableEncodedFrame wrapping needed content // from EncodedFrame. 
class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { @@ -227,7 +223,6 @@ VideoReceiveStream2::VideoReceiveStream2( max_wait_for_frame_(DetermineMaxWaitForFrame( TimeDelta::Millis(config_.rtp.nack.rtp_history_ms), false)), - maximum_pre_stream_decoders_("max", kDefaultMaximumPreStreamDecoders), decode_queue_(task_queue_factory_->CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { @@ -268,12 +263,6 @@ VideoReceiveStream2::VideoReceiveStream2( } else { rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc(), true); } - - ParseFieldTrial( - { - &maximum_pre_stream_decoders_, - }, - call_->trials().Lookup("WebRTC-PreStreamDecoders")); } VideoReceiveStream2::~VideoReceiveStream2() { @@ -393,18 +382,6 @@ void VideoReceiveStream2::Start() { stats_proxy_.DecoderThreadStarting(); decode_queue_.PostTask([this] { RTC_DCHECK_RUN_ON(&decode_queue_); - // Create up to maximum_pre_stream_decoders_ up front, wait the the other - // decoders until they are requested (i.e., we receive the corresponding - // payload). - int decoders_count = 0; - for (const Decoder& decoder : config_.decoders) { - if (decoders_count >= maximum_pre_stream_decoders_) { - break; - } - CreateAndRegisterExternalDecoder(decoder); - ++decoders_count; - } - decoder_stopped_ = false; }); buffer_->StartNextDecode(true); @@ -678,11 +655,20 @@ int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const { } void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { - VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); + source_tracker_.OnFrameDelivered(video_frame.packet_infos()); + config_.renderer->OnFrame(video_frame); // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for // `video_frame.packet_infos`. But VideoFrame is const qualified here. + // For frame delay metrics, calculated in `OnRenderedFrame`, to better reflect + // user experience measurements must be done as close as possible to frame + // rendering moment. 
Capture current time, which is used for calculation of + // delay metrics in `OnRenderedFrame`, right after frame is passed to + // renderer. Frame may or may be not rendered by this time. This results in + // inaccuracy but is still the best we can do in the absence of "frame + // rendered" callback from the renderer. + VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); call_->worker_thread()->PostTask( SafeTask(task_safety_.flag(), [frame_meta, this]() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); @@ -698,8 +684,6 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { stats_proxy_.OnRenderedFrame(frame_meta); })); - source_tracker_.OnFrameDelivered(video_frame.packet_infos()); - config_.renderer->OnFrame(video_frame); webrtc::MutexLock lock(&pending_resolution_mutex_); if (pending_resolution_.has_value()) { if (!pending_resolution_->empty() && diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h index 6b3d1ce439..34937a244e 100644 --- a/video/video_receive_stream2.h +++ b/video/video_receive_stream2.h @@ -334,11 +334,6 @@ class VideoReceiveStream2 std::vector> buffered_encoded_frames_ RTC_GUARDED_BY(decode_queue_); - // Set by the field trial WebRTC-PreStreamDecoders. The parameter `max` - // determines the maximum number of decoders that are created up front before - // any video frame has been received. - FieldTrialParameter maximum_pre_stream_decoders_; - // Defined last so they are destroyed before all other members. 
rtc::TaskQueue decode_queue_; diff --git a/video/video_receive_stream2_unittest.cc b/video/video_receive_stream2_unittest.cc index 4a0f777af2..458944aefa 100644 --- a/video/video_receive_stream2_unittest.cc +++ b/video/video_receive_stream2_unittest.cc @@ -42,7 +42,6 @@ #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/encoded_frame.h" -#include "rtc_base/event.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" #include "test/fake_decoder.h" @@ -197,8 +196,7 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam { config_(&mock_transport_, &mock_h264_decoder_factory_), call_stats_(clock_, time_controller_.GetMainThread()), fake_renderer_(&time_controller_), - fake_metronome_(time_controller_.GetTaskQueueFactory(), - TimeDelta::Millis(16)), + fake_metronome_(TimeDelta::Millis(16)), decode_sync_(clock_, &fake_metronome_, time_controller_.GetMainThread()), @@ -229,7 +227,6 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam { video_receive_stream_->Stop(); video_receive_stream_->UnregisterFromTransport(); } - fake_metronome_.Stop(); time_controller_.AdvanceTime(TimeDelta::Zero()); } diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc index b2599987b8..e5545e761c 100644 --- a/video/video_send_stream.cc +++ b/video/video_send_stream.cc @@ -209,8 +209,12 @@ VideoSendStream::~VideoSendStream() { transport_->DestroyRtpVideoSender(rtp_video_sender_); } -void VideoSendStream::UpdateActiveSimulcastLayers( - const std::vector active_layers) { +void VideoSendStream::Start() { + const std::vector active_layers(config_.rtp.ssrcs.size(), true); + StartPerRtpStream(active_layers); +} + +void VideoSendStream::StartPerRtpStream(const std::vector active_layers) { RTC_DCHECK_RUN_ON(&thread_checker_); // Keep our `running_` flag expected state in sync with active layers since @@ -232,35 +236,16 @@ void 
VideoSendStream::UpdateActiveSimulcastLayers( } } active_layers_string << "}"; - RTC_LOG(LS_INFO) << "UpdateActiveSimulcastLayers: " - << active_layers_string.str(); + RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str(); rtp_transport_queue_->RunOrPost( SafeTask(transport_queue_safety_, [this, active_layers] { - send_stream_.UpdateActiveSimulcastLayers(active_layers); + send_stream_.StartPerRtpStream(active_layers); })); running_ = running; } -void VideoSendStream::Start() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DLOG(LS_INFO) << "VideoSendStream::Start"; - if (running_) - return; - - running_ = true; - - // It is expected that after VideoSendStream::Start has been called, incoming - // frames are not dropped in VideoStreamEncoder. To ensure this, Start has to - // be synchronized. - // TODO(tommi): ^^^ Validate if this still holds. - rtp_transport_queue_->RunSynchronous([this] { - transport_queue_safety_->SetAlive(); - send_stream_.Start(); - }); -} - void VideoSendStream::Stop() { RTC_DCHECK_RUN_ON(&thread_checker_); if (!running_) @@ -301,11 +286,17 @@ void VideoSendStream::SetSource( } void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { + ReconfigureVideoEncoder(std::move(config), nullptr); +} + +void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_DCHECK_EQ(content_type_, config.content_type); video_stream_encoder_->ConfigureEncoder( std::move(config), - config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp)); + config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp), + std::move(callback)); } VideoSendStream::Stats VideoSendStream::GetStats() { @@ -343,5 +334,26 @@ void VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { send_stream_.DeliverRtcp(packet, length); } +void VideoSendStream::GenerateKeyFrame(const std::vector& rids) { + // Map rids to layers. 
If rids is empty, generate a keyframe for all layers. + std::vector next_frames(config_.rtp.ssrcs.size(), + VideoFrameType::kVideoFrameKey); + if (!config_.rtp.rids.empty() && !rids.empty()) { + std::fill(next_frames.begin(), next_frames.end(), + VideoFrameType::kVideoFrameDelta); + for (const auto& rid : rids) { + for (size_t i = 0; i < config_.rtp.rids.size(); i++) { + if (config_.rtp.rids[i] == rid) { + next_frames[i] = VideoFrameType::kVideoFrameKey; + break; + } + } + } + } + if (video_stream_encoder_) { + video_stream_encoder_->SendKeyFrame(next_frames); + } +} + } // namespace internal } // namespace webrtc diff --git a/video/video_send_stream.h b/video/video_send_stream.h index 5b4323d329..a7ce112b21 100644 --- a/video/video_send_stream.h +++ b/video/video_send_stream.h @@ -13,6 +13,7 @@ #include #include +#include #include #include "api/fec_controller.h" @@ -77,8 +78,8 @@ class VideoSendStream : public webrtc::VideoSendStream { void DeliverRtcp(const uint8_t* packet, size_t length); // webrtc::VideoSendStream implementation. 
- void UpdateActiveSimulcastLayers(std::vector active_layers) override; void Start() override; + void StartPerRtpStream(std::vector active_layers) override; void Stop() override; bool started() override; @@ -88,11 +89,14 @@ class VideoSendStream : public webrtc::VideoSendStream { void SetSource(rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) override; - void ReconfigureVideoEncoder(VideoEncoderConfig) override; + void ReconfigureVideoEncoder(VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) override; Stats GetStats() override; void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, RtpPayloadStateMap* payload_state_map); + void GenerateKeyFrame(const std::vector& rids) override; private: friend class test::VideoSendStreamPeer; diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc index 3fdbcb8ad1..f34388e56a 100644 --- a/video/video_send_stream_impl.cc +++ b/video/video_send_stream_impl.cc @@ -300,6 +300,11 @@ VideoSendStreamImpl::VideoSendStreamImpl( VideoSendStreamImpl::~VideoSendStreamImpl() { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_->ToString(); + // TODO(webrtc:14502): Change `transport_queue_safety_` to be of type + // ScopedTaskSafety if experiment WebRTC-SendPacketsOnWorkerThread succeed. 
+ if (rtp_transport_queue_->IsCurrent()) { + transport_queue_safety_->SetNotAlive(); + } } void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { @@ -307,34 +312,20 @@ void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { rtp_video_sender_->DeliverRtcp(packet, length); } -void VideoSendStreamImpl::UpdateActiveSimulcastLayers( +void VideoSendStreamImpl::StartPerRtpStream( const std::vector active_layers) { RTC_DCHECK_RUN_ON(rtp_transport_queue_); bool previously_active = rtp_video_sender_->IsActive(); rtp_video_sender_->SetActiveModules(active_layers); if (!rtp_video_sender_->IsActive() && previously_active) { - // Payload router switched from active to inactive. StopVideoSendStream(); } else if (rtp_video_sender_->IsActive() && !previously_active) { - // Payload router switched from inactive to active. StartupVideoSendStream(); } } -void VideoSendStreamImpl::Start() { - RTC_DCHECK_RUN_ON(rtp_transport_queue_); - RTC_LOG(LS_INFO) << "VideoSendStream::Start"; - if (rtp_video_sender_->IsActive()) - return; - - TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start"); - rtp_video_sender_->SetActive(true); - StartupVideoSendStream(); -} - void VideoSendStreamImpl::StartupVideoSendStream() { RTC_DCHECK_RUN_ON(rtp_transport_queue_); - transport_queue_safety_->SetAlive(); bitrate_allocator_->AddObserver(this, GetAllocationConfig()); @@ -374,7 +365,7 @@ void VideoSendStreamImpl::Stop() { RTC_DCHECK(transport_queue_safety_->alive()); TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); - rtp_video_sender_->SetActive(false); + rtp_video_sender_->Stop(); StopVideoSendStream(); } diff --git a/video/video_send_stream_impl.h b/video/video_send_stream_impl.h index d444eabc21..f145450655 100644 --- a/video/video_send_stream_impl.h +++ b/video/video_send_stream_impl.h @@ -79,8 +79,7 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, ~VideoSendStreamImpl() override; void DeliverRtcp(const uint8_t* packet, size_t 
length); - void UpdateActiveSimulcastLayers(std::vector active_layers); - void Start(); + void StartPerRtpStream(std::vector active_layers); void Stop(); // TODO(holmer): Move these to RtpTransportControllerSend. diff --git a/video/video_send_stream_impl_unittest.cc b/video/video_send_stream_impl_unittest.cc index 8a88ba0f16..c38dcd0e1e 100644 --- a/video/video_send_stream_impl_unittest.cc +++ b/video/video_send_stream_impl_unittest.cc @@ -66,8 +66,8 @@ std::string GetAlrProbingExperimentString() { } class MockRtpVideoSender : public RtpVideoSenderInterface { public: - MOCK_METHOD(void, SetActive, (bool), (override)); - MOCK_METHOD(void, SetActiveModules, (const std::vector), (override)); + MOCK_METHOD(void, SetActiveModules, (const std::vector&), (override)); + MOCK_METHOD(void, Stop, (), (override)); MOCK_METHOD(bool, IsActive, (), (override)); MOCK_METHOD(void, OnNetworkAvailability, (bool), (override)); MOCK_METHOD((std::map), @@ -139,12 +139,19 @@ class VideoSendStreamImplTest : public ::testing::Test { .WillRepeatedly(Return(&packet_router_)); EXPECT_CALL(transport_controller_, CreateRtpVideoSender) .WillRepeatedly(Return(&rtp_video_sender_)); - EXPECT_CALL(rtp_video_sender_, SetActive(_)) - .WillRepeatedly(::testing::Invoke( - [&](bool active) { rtp_video_sender_active_ = active; })); - EXPECT_CALL(rtp_video_sender_, IsActive()) - .WillRepeatedly( - ::testing::Invoke([&]() { return rtp_video_sender_active_; })); + ON_CALL(rtp_video_sender_, Stop()).WillByDefault(::testing::Invoke([&] { + active_modules_.clear(); + })); + ON_CALL(rtp_video_sender_, IsActive()) + .WillByDefault(::testing::Invoke([&]() { + for (bool enabled : active_modules_) { + if (enabled) + return true; + } + return false; + })); + ON_CALL(rtp_video_sender_, SetActiveModules) + .WillByDefault(::testing::SaveArg<0>(&active_modules_)); ON_CALL(transport_controller_, GetWorkerQueue()) .WillByDefault(Return(&worker_queue_)); } @@ -183,8 +190,8 @@ class VideoSendStreamImplTest : public 
::testing::Test { NiceMock bitrate_allocator_; NiceMock video_stream_encoder_; NiceMock rtp_video_sender_; + std::vector active_modules_; - bool rtp_video_sender_active_ = false; RtcEventLogNull event_log_; VideoSendStream::Config config_; SendDelayStats send_delay_stats_; @@ -210,7 +217,7 @@ TEST_F(VideoSendStreamImplTest, RegistersAsBitrateObserverOnStart) { EXPECT_EQ(config.bitrate_priority, kDefaultBitratePriority); })); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); vss_impl->Stop(); }); @@ -225,7 +232,7 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // QVGA + VGA configuration matching defaults in // media/engine/simulcast.cc. @@ -291,7 +298,7 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) { auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Simulcast screenshare. VideoStream low_stream; @@ -357,7 +364,7 @@ TEST_F(VideoSendStreamImplTest, kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // 2-layer video simulcast. 
VideoStream low_stream; low_stream.width = 320; @@ -422,7 +429,7 @@ TEST_F(VideoSendStreamImplTest, SetsScreensharePacingFactorWithFeedback) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); vss_impl->Stop(); }); } @@ -434,7 +441,7 @@ TEST_F(VideoSendStreamImplTest, DoesNotSetPacingFactorWithoutFeedback) { VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0); - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); vss_impl->Stop(); }); } @@ -447,7 +454,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0); VideoStreamEncoderInterface::EncoderSink* const sink = static_cast(vss_impl.get()); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Populate a test instance of video bitrate allocation. VideoBitrateAllocation alloc; alloc.SetBitrate(0, 0, 10000); @@ -494,7 +501,7 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); // Unpause encoder, to allows allocations to be passed through. const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -556,7 +563,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) { VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); // Unpause encoder, to allows allocations to be passed through. 
const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -599,7 +606,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); const uint32_t kBitrateBps = 100000; // Unpause encoder, to allows allocations to be passed through. EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -709,7 +716,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); VideoStream qvga_stream; qvga_stream.width = 320; qvga_stream.height = 180; @@ -842,7 +849,7 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { int min_transmit_bitrate_bps = 30000; config_.rtp.ssrcs.emplace_back(1); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Starts without padding. EXPECT_EQ(0, padding_bitrate); encoder_queue_->PostTask([&] { @@ -893,7 +900,7 @@ TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) { VideoEncoderConfig::ContentType::kRealtimeVideo); EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(0); worker_queue_.RunSynchronous([&] { - vss_impl->Start(); + vss_impl->StartPerRtpStream({true}); const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .Times(1) @@ -941,7 +948,7 @@ TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvc) { ? 
VideoEncoderConfig::ContentType::kScreen : VideoEncoderConfig::ContentType::kRealtimeVideo); - worker_queue_.RunSynchronous([&] { vss_impl->Start(); }); + worker_queue_.RunSynchronous([&] { vss_impl->StartPerRtpStream({true}); }); // Svc VideoStream stream; diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc index b34450a9d8..923c318c6d 100644 --- a/video/video_send_stream_tests.cc +++ b/video/video_send_stream_tests.cc @@ -2694,9 +2694,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // bitrates than expected by this test, due to encoder pushback and subtracted // overhead. webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-VideoRateControl/bitrate_adjuster:false/" - "WebRTC-SendSideBwe-WithOverhead/Disabled/"); + field_trials_, "WebRTC-VideoRateControl/bitrate_adjuster:false/"); class EncoderBitrateThresholdObserver : public test::SendTest, public VideoBitrateAllocatorFactory, @@ -2722,8 +2720,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_LE(codec.startBitrate, codec.maxBitrate); if (num_rate_allocator_creations_ == 0) { EXPECT_EQ(static_cast(kMinBitrateKbps), codec.minBitrate); - EXPECT_EQ(static_cast(kStartBitrateKbps), - codec.startBitrate); + EXPECT_NEAR(static_cast(kStartBitrateKbps), + codec.startBitrate, 10); EXPECT_EQ(static_cast(kMaxBitrateKbps), codec.maxBitrate); } else if (num_rate_allocator_creations_ == 1) { EXPECT_EQ(static_cast(kLowerMaxBitrateKbps), @@ -2749,8 +2747,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_EQ(0, num_encoder_initializations_); EXPECT_EQ(static_cast(kMinBitrateKbps), codecSettings->minBitrate); - EXPECT_EQ(static_cast(kStartBitrateKbps), - codecSettings->startBitrate); + EXPECT_NEAR(static_cast(kStartBitrateKbps), + codecSettings->startBitrate, 10); EXPECT_EQ(static_cast(kMaxBitrateKbps), codecSettings->maxBitrate); @@ -2775,14 +2773,18 @@ 
TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { FakeEncoder::SetRates(parameters); } - void WaitForSetRates(uint32_t expected_bitrate) { + void WaitForSetRates(uint32_t expected_bitrate, int abs_error) { // Wait for the expected rate to be set. In some cases there can be // more than one update pending, in which case we keep waiting // until the correct value has been observed. + // The target_bitrate_ is reduced by the calculated packet overhead. const int64_t start_time = rtc::TimeMillis(); do { MutexLock lock(&mutex_); - if (target_bitrate_ == expected_bitrate) { + + int error = target_bitrate_ - expected_bitrate; + if ((error < 0 && error >= -abs_error) || + (error >= 0 && error <= abs_error)) { return; } } while (bitrate_changed_event_.Wait( @@ -2790,7 +2792,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { VideoSendStreamTest::kDefaultTimeout - TimeDelta::Millis(rtc::TimeMillis() - start_time)))); MutexLock lock(&mutex_); - EXPECT_EQ(target_bitrate_, expected_bitrate) + EXPECT_NEAR(target_bitrate_, expected_bitrate, abs_error) << "Timed out while waiting encoder rate to be set."; } @@ -2832,7 +2834,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { << "Timed out while waiting for rate allocator to be created."; ASSERT_TRUE(init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeout)) << "Timed out while waiting for encoder to be configured."; - WaitForSetRates(kStartBitrateKbps); + WaitForSetRates(kStartBitrateKbps, 80); BitrateConstraints bitrate_config; bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000; bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; @@ -2841,7 +2843,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { bitrate_config); }); // Encoder rate is capped by EncoderConfig max_bitrate_bps. 
- WaitForSetRates(kMaxBitrateKbps); + WaitForSetRates(kMaxBitrateKbps, 10); encoder_config_.max_bitrate_bps = kLowerMaxBitrateKbps * 1000; SendTask(task_queue_, [&]() { send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); @@ -2851,7 +2853,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_EQ(2, num_rate_allocator_creations_) << "Rate allocator should have been recreated."; - WaitForSetRates(kLowerMaxBitrateKbps); + WaitForSetRates(kLowerMaxBitrateKbps, 10); EXPECT_EQ(1, num_encoder_initializations_); encoder_config_.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; @@ -2865,7 +2867,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // Expected target bitrate is the start bitrate set in the call to // call_->GetTransportControllerSend()->SetSdpBitrateParameters. - WaitForSetRates(kIncreasedStartBitrateKbps); + WaitForSetRates(kIncreasedStartBitrateKbps, 10); EXPECT_EQ(1, num_encoder_initializations_); } @@ -3039,9 +3041,9 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_EQ(kVp9PayloadType, rtp_packet.PayloadType()); rtc::ArrayView rtp_payload = rtp_packet.payload(); - bool new_packet = packets_sent_ == 0 || + bool new_packet = !last_packet_sequence_number_.has_value() || IsNewerSequenceNumber(rtp_packet.SequenceNumber(), - last_packet_sequence_number_); + *last_packet_sequence_number_); if (!rtp_payload.empty() && new_packet) { RTPVideoHeader video_header; EXPECT_NE( @@ -3056,7 +3058,6 @@ class Vp9HeaderObserver : public test::SendTest { // Verify configuration specific settings. 
InspectHeader(vp9_header); - ++packets_sent_; if (rtp_packet.Marker()) { MutexLock lock(&mutex_); ++frames_sent_; @@ -3278,20 +3279,12 @@ class Vp9HeaderObserver : public test::SendTest { vp9.num_spatial_layers); EXPECT_TRUE(vp9.spatial_layer_resolution_present); // Y:1 - absl::optional info; - absl::optional scalability_mode = - ScalabilityModeFromString(params_.scalability_mode); - if (scalability_mode) { - info = ScalabilityStructureConfig(*scalability_mode); - } - double default_ratio = 1.0; - for (int i = static_cast(vp9.num_spatial_layers) - 1; i >= 0; --i) { - double ratio = info ? (static_cast(info->scaling_factor_num[i]) / - info->scaling_factor_den[i]) - : default_ratio; + ScalableVideoController::StreamLayersConfig config = GetScalabilityConfig(); + for (int i = config.num_spatial_layers - 1; i >= 0; --i) { + double ratio = static_cast(config.scaling_factor_num[i]) / + config.scaling_factor_den[i]; EXPECT_EQ(expected_width_ * ratio, vp9.width[i]); // WIDTH EXPECT_EQ(expected_height_ * ratio, vp9.height[i]); // HEIGHT - default_ratio /= 2.0; } } @@ -3301,15 +3294,15 @@ class Vp9HeaderObserver : public test::SendTest { absl::get(video.video_type_header); const bool new_temporal_unit = - packets_sent_ == 0 || - IsNewerTimestamp(rtp_packet.Timestamp(), last_packet_timestamp_); + !last_packet_timestamp_.has_value() || + IsNewerTimestamp(rtp_packet.Timestamp(), *last_packet_timestamp_); const bool new_frame = new_temporal_unit || last_vp9_.spatial_idx != vp9_header.spatial_idx; EXPECT_EQ(new_frame, video.is_first_packet_in_frame); if (!new_temporal_unit) { EXPECT_FALSE(last_packet_marker_); - EXPECT_EQ(last_packet_timestamp_, rtp_packet.Timestamp()); + EXPECT_EQ(*last_packet_timestamp_, rtp_packet.Timestamp()); EXPECT_EQ(last_vp9_.picture_id, vp9_header.picture_id); EXPECT_EQ(last_vp9_.tl0_pic_idx, vp9_header.tl0_pic_idx); VerifySpatialIdxWithinFrame(vp9_header); @@ -3328,16 +3321,26 @@ class Vp9HeaderObserver : public test::SendTest { 
VerifyTl0Idx(vp9_header); } + ScalableVideoController::StreamLayersConfig GetScalabilityConfig() const { + absl::optional scalability_mode = + ScalabilityModeFromString(params_.scalability_mode); + EXPECT_TRUE(scalability_mode.has_value()); + absl::optional config = + ScalabilityStructureConfig(*scalability_mode); + EXPECT_TRUE(config.has_value()); + EXPECT_EQ(config->num_spatial_layers, params_.num_spatial_layers); + return *config; + } + test::FunctionVideoEncoderFactory encoder_factory_; const Vp9TestParams params_; VideoCodecVP9 vp9_settings_; webrtc::VideoEncoderConfig encoder_config_; bool last_packet_marker_ = false; - uint16_t last_packet_sequence_number_ = 0; - uint32_t last_packet_timestamp_ = 0; + absl::optional last_packet_sequence_number_; + absl::optional last_packet_timestamp_; RTPVideoHeaderVP9 last_vp9_; std::map last_temporal_idx_by_spatial_idx_; - size_t packets_sent_ = 0; Mutex mutex_; size_t frames_sent_ = 0; int expected_width_ = 0; @@ -3348,8 +3351,8 @@ class Vp9Test : public VideoSendStreamTest, public ::testing::WithParamInterface { public: Vp9Test() - : params_(::testing::get<0>(GetParam())), - use_scalability_mode_identifier_(::testing::get<1>(GetParam())) {} + : params_(::testing::get(GetParam())), + use_scalability_mode_identifier_(::testing::get(GetParam())) {} protected: const Vp9TestParams params_; @@ -3462,17 +3465,12 @@ void VideoSendStreamTest::TestVp9NonFlexMode( } int GetRequiredDivisibility() const { - absl::optional scalability_mode = - ScalabilityModeFromString(params_.scalability_mode); - EXPECT_TRUE(scalability_mode); - absl::optional config = - ScalabilityStructureConfig(*scalability_mode); - EXPECT_TRUE(config); - + ScalableVideoController::StreamLayersConfig config = + GetScalabilityConfig(); int required_divisibility = 1; - for (size_t sl_idx = 0; sl_idx < params_.num_spatial_layers; ++sl_idx) { + for (int sl_idx = 0; sl_idx < config.num_spatial_layers; ++sl_idx) { required_divisibility = cricket::LeastCommonMultiple( - 
required_divisibility, config->scaling_factor_den[sl_idx]); + required_divisibility, config.scaling_factor_den[sl_idx]); } return required_divisibility; } @@ -3705,8 +3703,6 @@ TEST_F(VideoSendStreamTest, EncoderConfigMaxFramerateReportedToSource) { // testing that the maximum possible target payload rate is smaller than the // maximum bandwidth estimate by the overhead rate. TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-SendSideBwe-WithOverhead/Enabled/"); class RemoveOverheadFromBandwidthTest : public test::EndToEndTest, public test::FakeEncoder { public: diff --git a/video/video_stream_buffer_controller.cc b/video/video_stream_buffer_controller.cc index 046ff69cae..f7d3acdaf6 100644 --- a/video/video_stream_buffer_controller.cc +++ b/video/video_stream_buffer_controller.cc @@ -109,7 +109,6 @@ VideoStreamBufferController::VideoStreamBufferController( RTC_DCHECK(timing_); RTC_DCHECK(clock_); RTC_DCHECK(frame_decode_scheduler_); - RTC_LOG(LS_WARNING) << "Using FrameBuffer3"; ParseFieldTrial({&zero_playout_delay_max_decode_queue_size_}, field_trials.Lookup("WebRTC-ZeroPlayoutDelay")); diff --git a/video/video_stream_buffer_controller_unittest.cc b/video/video_stream_buffer_controller_unittest.cc index 3e6c352fb1..e7235a2ff1 100644 --- a/video/video_stream_buffer_controller_unittest.cc +++ b/video/video_stream_buffer_controller_unittest.cc @@ -132,8 +132,7 @@ class VideoStreamBufferControllerFixture field_trials_(std::get<1>(GetParam())), time_controller_(kClockStart), clock_(time_controller_.GetClock()), - fake_metronome_(time_controller_.GetTaskQueueFactory(), - TimeDelta::Millis(16)), + fake_metronome_(TimeDelta::Millis(16)), decode_sync_(clock_, &fake_metronome_, time_controller_.GetMainThread()), @@ -163,7 +162,6 @@ class VideoStreamBufferControllerFixture if (buffer_) { buffer_->Stop(); } - fake_metronome_.Stop(); time_controller_.AdvanceTime(TimeDelta::Zero()); } diff --git 
a/video/video_stream_decoder2.cc b/video/video_stream_decoder2.cc index 0a8825db13..1ef2d0ecd0 100644 --- a/video/video_stream_decoder2.cc +++ b/video/video_stream_decoder2.cc @@ -10,6 +10,7 @@ #include "video/video_stream_decoder2.h" +#include "api/video_codecs/video_decoder.h" #include "modules/video_coding/video_receiver2.h" #include "rtc_base/checks.h" #include "video/receive_statistics_proxy2.h" @@ -60,9 +61,9 @@ void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) { receive_stats_callback_->OnIncomingPayloadType(payload_type); } -void VideoStreamDecoder::OnDecoderImplementationName( - const char* implementation_name) { - receive_stats_callback_->OnDecoderImplementationName(implementation_name); +void VideoStreamDecoder::OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo& decoder_info) { + receive_stats_callback_->OnDecoderInfo(decoder_info); } } // namespace internal diff --git a/video/video_stream_decoder2.h b/video/video_stream_decoder2.h index 995008d8c9..473d463186 100644 --- a/video/video_stream_decoder2.h +++ b/video/video_stream_decoder2.h @@ -18,6 +18,7 @@ #include "api/scoped_refptr.h" #include "api/video/video_sink_interface.h" +#include "api/video_codecs/video_decoder.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/platform_thread.h" @@ -45,7 +46,8 @@ class VideoStreamDecoder : public VCMReceiveCallback { VideoContentType content_type) override; void OnDroppedFrames(uint32_t frames_dropped) override; void OnIncomingPayloadType(int payload_type) override; - void OnDecoderImplementationName(const char* implementation_name) override; + void OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo& decoder_info) override; private: VideoReceiver2* const video_receiver_; diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index 3178d9cd57..c680fe12c8 100644 --- a/video/video_stream_encoder.cc +++ 
b/video/video_stream_encoder.cc @@ -35,6 +35,7 @@ #include "call/adaptation/resource_adaptation_processor.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" +#include "media/base/media_channel.h" #include "modules/video_coding/include/video_codec_initializer.h" #include "modules/video_coding/svc/svc_rate_allocator.h" #include "modules/video_coding/utility/vp8_constants.h" @@ -149,6 +150,10 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, if (new_send_codec.codecType == kVideoCodecVP9) { size_t num_spatial_layers = new_send_codec.VP9().numberOfSpatialLayers; for (unsigned char i = 0; i < num_spatial_layers; ++i) { + if (!new_send_codec.spatialLayers[i].active) { + // No need to reset when layer is inactive. + continue; + } if (new_send_codec.spatialLayers[i].width != prev_send_codec.spatialLayers[i].width || new_send_codec.spatialLayers[i].height != @@ -156,7 +161,8 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, new_send_codec.spatialLayers[i].numberOfTemporalLayers != prev_send_codec.spatialLayers[i].numberOfTemporalLayers || new_send_codec.spatialLayers[i].qpMax != - prev_send_codec.spatialLayers[i].qpMax) { + prev_send_codec.spatialLayers[i].qpMax || + !prev_send_codec.spatialLayers[i].active) { return true; } } @@ -879,9 +885,16 @@ void VideoStreamEncoder::SetStartBitrate(int start_bitrate_bps) { void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) { + ConfigureEncoder(std::move(config), max_data_payload_length, nullptr); +} + +void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(worker_queue_); encoder_queue_.PostTask( - [this, config = std::move(config), max_data_payload_length]() mutable { + [this, config = std::move(config), max_data_payload_length, + callback = std::move(callback)]() mutable { 
RTC_DCHECK_RUN_ON(&encoder_queue_); RTC_DCHECK(sink_); RTC_LOG(LS_INFO) << "ConfigureEncoder requested."; @@ -912,7 +925,13 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, // minimize the number of reconfigurations. The codec configuration // depends on incoming video frame size. if (last_frame_info_) { + if (callback) { + encoder_configuration_callbacks_.push_back(std::move(callback)); + } + ReconfigureEncoder(); + } else { + webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } }); } @@ -1369,6 +1388,8 @@ void VideoStreamEncoder::ReconfigureEncoder() { stream_resource_manager_.ConfigureQualityScaler(info); stream_resource_manager_.ConfigureBandwidthQualityScaler(info); + webrtc::RTCError encoder_configuration_result = webrtc::RTCError::OK(); + if (!encoder_initialized_) { RTC_LOG(LS_WARNING) << "Failed to initialize " << CodecTypeToPayloadString(codec.codecType) @@ -1378,8 +1399,19 @@ void VideoStreamEncoder::ReconfigureEncoder() { if (switch_encoder_on_init_failures_) { RequestEncoderSwitch(); + } else { + encoder_configuration_result = + webrtc::RTCError(RTCErrorType::UNSUPPORTED_OPERATION); } } + + if (!encoder_configuration_callbacks_.empty()) { + for (auto& callback : encoder_configuration_callbacks_) { + webrtc::InvokeSetParametersCallback(callback, + encoder_configuration_result); + } + encoder_configuration_callbacks_.clear(); + } } void VideoStreamEncoder::RequestEncoderSwitch() { @@ -1838,9 +1870,12 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, // Encoder metadata needs to be updated before encode complete callback. 
VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo(); - if (info.implementation_name != encoder_info_.implementation_name) { - encoder_stats_observer_->OnEncoderImplementationChanged( - info.implementation_name); + if (info.implementation_name != encoder_info_.implementation_name || + info.is_hardware_accelerated != encoder_info_.is_hardware_accelerated) { + encoder_stats_observer_->OnEncoderImplementationChanged({ + .name = info.implementation_name, + .is_hardware_accelerated = info.is_hardware_accelerated, + }); if (bitrate_adjuster_) { // Encoder implementation changed, reset overshoot detector states. bitrate_adjuster_->Reset(); @@ -1978,9 +2013,10 @@ void VideoStreamEncoder::RequestRefreshFrame() { })); } -void VideoStreamEncoder::SendKeyFrame() { +void VideoStreamEncoder::SendKeyFrame( + const std::vector& layers) { if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this] { SendKeyFrame(); }); + encoder_queue_.PostTask([this, layers] { SendKeyFrame(layers); }); return; } RTC_DCHECK_RUN_ON(&encoder_queue_); @@ -1995,9 +2031,15 @@ void VideoStreamEncoder::SendKeyFrame() { return; // Shutting down, or not configured yet. } - // TODO(webrtc:10615): Map keyframe request to spatial layer. 
- std::fill(next_frame_types_.begin(), next_frame_types_.end(), - VideoFrameType::kVideoFrameKey); + if (!layers.empty()) { + RTC_DCHECK_EQ(layers.size(), next_frame_types_.size()); + for (size_t i = 0; i < layers.size() && i < next_frame_types_.size(); i++) { + next_frame_types_[i] = layers[i]; + } + } else { + std::fill(next_frame_types_.begin(), next_frame_types_.end(), + VideoFrameType::kVideoFrameKey); + } } void VideoStreamEncoder::OnLossNotification( diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h index 9af2e0bcff..ccff3ffefd 100644 --- a/video/video_stream_encoder.h +++ b/video/video_stream_encoder.h @@ -17,8 +17,10 @@ #include #include +#include "absl/container/inlined_vector.h" #include "api/adaptation/resource.h" #include "api/field_trials_view.h" +#include "api/rtp_sender_interface.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/units/data_rate.h" @@ -106,12 +108,15 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) override; + void ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback callback) override; // Permanently stop encoding. After this method has returned, it is // guaranteed that no encoded frames will be delivered to the sink. void Stop() override; - void SendKeyFrame() override; + void SendKeyFrame(const std::vector& layers = {}) override; void OnLossNotification( const VideoEncoder::LossNotification& loss_notification) override; @@ -302,6 +307,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Set when configuration must create a new encoder object, e.g., // because of a codec change. 
bool pending_encoder_creation_ RTC_GUARDED_BY(&encoder_queue_); + absl::InlinedVector encoder_configuration_callbacks_ + RTC_GUARDED_BY(&encoder_queue_); absl::optional last_frame_info_ RTC_GUARDED_BY(&encoder_queue_); diff --git a/video/video_stream_encoder_interface.h b/video/video_stream_encoder_interface.h index 38f180d121..25190aa474 100644 --- a/video/video_stream_encoder_interface.h +++ b/video/video_stream_encoder_interface.h @@ -15,7 +15,9 @@ #include "api/adaptation/resource.h" #include "api/fec_controller_override.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" // For DegradationPreference. +#include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocator.h" @@ -97,8 +99,10 @@ class VideoStreamEncoderInterface { // resolution. Should be replaced by a construction time setting. virtual void SetStartBitrate(int start_bitrate_bps) = 0; - // Request a key frame. Used for signalling from the remote receiver. - virtual void SendKeyFrame() = 0; + // Request a key frame. Used for signalling from the remote receiver with + // no arguments and for RTCRtpSender.generateKeyFrame with a list of + // rids/layers. + virtual void SendKeyFrame(const std::vector& layers = {}) = 0; // Inform the encoder that a loss has occurred. virtual void OnLossNotification( @@ -129,6 +133,9 @@ class VideoStreamEncoderInterface { // packetization for H.264. virtual void ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length) = 0; + virtual void ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length, + SetParametersCallback callback) = 0; // Permanently stop encoding. After this method has returned, it is // guaranteed that no encoded frames will be delivered to the sink. 
diff --git a/video/video_stream_encoder_observer.h b/video/video_stream_encoder_observer.h index 32d8408a85..c10412181d 100644 --- a/video/video_stream_encoder_observer.h +++ b/video/video_stream_encoder_observer.h @@ -14,11 +14,9 @@ #include #include -#include "absl/types/optional.h" #include "api/video/video_adaptation_counters.h" #include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" -#include "api/video/video_codec_constants.h" #include "api/video_codecs/video_encoder.h" #include "video/config/video_encoder_config.h" @@ -29,6 +27,11 @@ namespace webrtc { // encoded data. So use some other type to represent that. class EncodedImage; +struct EncoderImplementation { + const std::string& name; + bool is_hardware_accelerated; +}; + // Broken out into a base class, with public inheritance below, only to ease // unit testing of the internal class OveruseFrameDetector. class CpuOveruseMetricsObserver { @@ -71,7 +74,7 @@ class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { const CodecSpecificInfo* codec_info) = 0; virtual void OnEncoderImplementationChanged( - const std::string& implementation_name) = 0; + EncoderImplementation implementation) = 0; virtual void OnFrameDropped(DropReason reason) = 0; diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc index 5271654ac9..cdd4c75ab7 100644 --- a/video/video_stream_encoder_unittest.cc +++ b/video/video_stream_encoder_unittest.cc @@ -885,7 +885,7 @@ class VideoStreamEncoderTest : public ::testing::Test { &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); video_stream_encoder_->SetStartBitrate(kTargetBitrate.bps()); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), - kMaxPayloadLength); + kMaxPayloadLength, nullptr); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); } @@ -1097,7 +1097,8 @@ class VideoStreamEncoderTest : public ::testing::Test { quality_scaling_ = b; } - void 
SetRequestedResolutionAlignment(int requested_resolution_alignment) { + void SetRequestedResolutionAlignment( + uint32_t requested_resolution_alignment) { MutexLock lock(&local_mutex_); requested_resolution_alignment_ = requested_resolution_alignment; } @@ -1331,7 +1332,7 @@ class VideoStreamEncoderTest : public ::testing::Test { int last_input_width_ RTC_GUARDED_BY(local_mutex_) = 0; int last_input_height_ RTC_GUARDED_BY(local_mutex_) = 0; bool quality_scaling_ RTC_GUARDED_BY(local_mutex_) = true; - int requested_resolution_alignment_ RTC_GUARDED_BY(local_mutex_) = 1; + uint32_t requested_resolution_alignment_ RTC_GUARDED_BY(local_mutex_) = 1; bool apply_alignment_to_all_simulcast_layers_ RTC_GUARDED_BY(local_mutex_) = false; bool is_hardware_accelerated_ RTC_GUARDED_BY(local_mutex_) = false; @@ -2472,7 +2473,7 @@ class ResolutionAlignmentTest scale_factors_(::testing::get<1>(GetParam())) {} protected: - const int requested_alignment_; + const uint32_t requested_alignment_; const std::vector scale_factors_; }; @@ -2538,8 +2539,8 @@ TEST_P(ResolutionAlignmentTest, SinkWantsAlignmentApplied) { EXPECT_EQ(codec.numberOfSimulcastStreams, num_streams); // Frame size should be a multiple of the requested alignment. for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { - EXPECT_EQ(codec.simulcastStream[i].width % requested_alignment_, 0); - EXPECT_EQ(codec.simulcastStream[i].height % requested_alignment_, 0); + EXPECT_EQ(codec.simulcastStream[i].width % requested_alignment_, 0u); + EXPECT_EQ(codec.simulcastStream[i].height % requested_alignment_, 0u); // Aspect ratio should match. EXPECT_EQ(codec.width * codec.simulcastStream[i].height, codec.height * codec.simulcastStream[i].width); @@ -8648,6 +8649,64 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->Stop(); } +TEST_F(VideoStreamEncoderTest, RecreatesEncoderWhenEnableVp9SpatialLayer) { + // Set up encoder to use VP9 SVC using two spatial layers. 
+ fake_encoder_.SetTemporalLayersSupported(/*spatial_idx=*/0, true); + fake_encoder_.SetTemporalLayersSupported(/*spatial_idx*/ 1, true); + VideoEncoderConfig video_encoder_config; + test::FillEncoderConfiguration(VideoCodecType::kVideoCodecVP9, + /* num_streams*/ 1, &video_encoder_config); + video_encoder_config.max_bitrate_bps = 2 * kTargetBitrate.bps(); + video_encoder_config.content_type = + VideoEncoderConfig::ContentType::kRealtimeVideo; + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + vp9_settings.numberOfSpatialLayers = 2; + vp9_settings.numberOfTemporalLayers = 2; + vp9_settings.interLayerPred = InterLayerPredMode::kOn; + vp9_settings.automaticResizeOn = false; + video_encoder_config.encoder_specific_settings = + rtc::make_ref_counted( + vp9_settings); + video_encoder_config.spatial_layers = GetSvcConfig(1280, 720, + /*fps=*/30.0, + /*first_active_layer=*/0, + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/3, + /*is_screenshare=*/false); + ConfigureEncoder(video_encoder_config.Copy(), + VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoLayersAllocation); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(fake_encoder_.GetNumInitializations(), 1); + + // Turn off the top spatial layer. This does not require an encoder reset. + video_encoder_config.spatial_layers[1].active = false; + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength, nullptr); + + time_controller_.AdvanceTime(TimeDelta::Millis(33)); + video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(fake_encoder_.GetNumInitializations(), 1); + + // Turn on the top spatial layer again, this does require an encoder reset. 
+ video_encoder_config.spatial_layers[1].active = true; + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength, nullptr); + + time_controller_.AdvanceTime(TimeDelta::Millis(33)); + video_source_.IncomingCapturedFrame(CreateFrame(CurrentTimeMs(), 1280, 720)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(fake_encoder_.GetNumInitializations(), 2); + + video_stream_encoder_->Stop(); +} + #endif // !defined(WEBRTC_IOS) // Test parameters: (VideoCodecType codec, bool allow_i420_conversion) diff --git a/webrtc.gni b/webrtc.gni index f3570ea7ee..c4467fefec 100644 --- a/webrtc.gni +++ b/webrtc.gni @@ -232,6 +232,11 @@ declare_args() { # RingRTC change to disable until used # Includes the dav1d decoder in the internal decoder factory when set to true. rtc_include_dav1d_in_internal_decoder_factory = false + + # When set to true, a run-time check will make sure that all field trial keys + # have been registered in accordance with the field trial policy. The check + # will only run with builds that have RTC_DCHECKs enabled. + rtc_strict_field_trials = false } if (!build_with_mozilla) {