Merge branch 'm116' into 5845

inaqui-signal 2023-07-24 17:05:03 -05:00 committed by Iñaqui
commit c570368abc
1303 changed files with 41301 additions and 30581 deletions


@ -22,3 +22,13 @@ b6760f9e4442410f2bcb6090b3b89bf709e2fce2
665174fdbb4e0540eccb27cf7412348f1b65534c
# Format almost all C++ in WebRTC. Again. Mostly #include reordering.
a4d873786f10eedd72de25ad0d94ad7c53c1f68a
# Format /modules
c018bae80743596694db91478b879815f19dfb74
# Format /sdk
6f86f6af008176e631140e6a80e0a0bca9550143
# Format /rtc_base
802e8e5fdbac5af0e264d7fbe32b16643ad718ae
# Format ^(api|call|common_audio|examples|media|net|p2p|pc)/
bceec84aeedcb9f5992d30dae0b477fb5e227fee
# Format the rest
7220ee97aafaa33a914f578e0f04184a94d17974

.gn

@ -55,7 +55,7 @@ default_args = {
ios_deployment_target = "12.0"
# The SDK API level, in contrast, is set by build/android/AndroidManifest.xml.
android32_ndk_api_level = 16
android32_ndk_api_level = 21
android64_ndk_api_level = 21
# WebRTC does not provide the gflags dependency. Because libyuv uses it only
@ -79,4 +79,6 @@ default_args = {
fuchsia_sdk_readelf_exec =
"//third_party/llvm-build/Release+Asserts/bin/llvm-readelf"
fuchsia_target_api_level = 9
use_cxx17 = true
}


@ -83,6 +83,33 @@ wheel: <
version: "version:3.20.0"
>
wheel: <
name: "infra/python/wheels/requests-py2_py3"
version: "version:2.13.0"
name: "infra/python/wheels/requests-py3"
version: "version:2.31.0"
>
wheel: <
name: "infra/python/wheels/idna-py2_py3"
version: "version:2.8"
>
wheel: <
name: "infra/python/wheels/urllib3-py2_py3"
version: "version:1.26.6"
>
wheel: <
name: "infra/python/wheels/certifi-py2_py3"
version: "version:2020.11.8"
>
wheel: <
name: "infra/python/wheels/charset_normalizer-py3"
version: "version:2.0.4"
>
wheel: <
name: "infra/python/wheels/brotli/${vpython_platform}"
version: "version:1.0.9"
>
# Used by:
# tools_webrtc/sslroots
wheel: <
name: "infra/python/wheels/asn1crypto-py2_py3"
version: "version:1.0.1"
>


@ -105,6 +105,7 @@ Robert Bares <robert@bares.me>
Robert Mader <robert.mader@posteo.de>
Robert Nagy <robert.nagy@gmail.com>
Ryan Yoakum <ryoakum@skobalt.com>
Samuel Attard <samuel.r.attard@gmail.com>
Sarah Thompson <sarah@telergy.com>
Satender Saroha <ssaroha@yahoo.com>
Saul Kravitz <Saul.Kravitz@celera.com>
@ -133,6 +134,7 @@ Yura Yaroshevich <yura.yaroshevich@gmail.com>
Yuriy Pavlyshak <yuriy@appear.in>
Yusuke Suzuki <utatane.tea@gmail.com>
Pengfei Han <hanpfei@gmail.com>
Yingying Ma <yingying.ma@intel.com>
# END individuals section.
# BEGIN organizations section.
@ -153,6 +155,7 @@ HyperConnect Inc. <*@hpcnt.com>
Intel Corporation <*@intel.com>
LG Electronics, Inc. <*@lge.com>
Life On Air Inc. <*@lifeonair.com>
LiveKit <*@livekit.io>
Meta Platforms, Inc. <*@meta.com>
Microsoft Corporation <*@microsoft.com>
MIPS Technologies <*@mips.com>


@ -23,7 +23,6 @@
import("//build/config/linux/pkg_config.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//third_party/google_benchmark/buildconfig.gni")
import("webrtc.gni")
if (rtc_enable_protobuf) {
import("//third_party/protobuf/proto_library.gni")
@ -63,7 +62,9 @@ if (!build_with_chromium) {
"modules/remote_bitrate_estimator:rtp_to_text",
"modules/rtp_rtcp:test_packet_masks_metrics",
"modules/video_capture:video_capture_internal_impl",
"modules/video_coding:video_codec_perf_tests",
"net/dcsctp:dcsctp_unittests",
"pc:peer_connection_mediachannel_split_unittests",
"pc:peerconnection_unittests",
"pc:rtc_pc_unittests",
"pc:slow_peer_connection_unittests",
@ -101,7 +102,6 @@ if (!build_with_chromium) {
}
if (rtc_enable_protobuf) {
deps += [
"audio:low_bandwidth_audio_perf_test",
"logging:rtc_event_log_rtp_dump",
"tools_webrtc/perf:webrtc_dashboard_upload",
]
@ -109,9 +109,6 @@ if (!build_with_chromium) {
if ((is_linux || is_chromeos) && rtc_use_pipewire) {
deps += [ "modules/desktop_capture:shared_screencast_stream_test" ]
}
if (is_fuchsia) {
deps += [ ":fuchsia_perf_tests" ]
}
}
if (target_os == "android") {
deps += [ "tools_webrtc:binary_version_check" ]
@ -276,10 +273,16 @@ config("common_config") {
defines += [ "WEBRTC_ENABLE_PROTOBUF=0" ]
}
if (rtc_strict_field_trials) {
defines += [ "WEBRTC_STRICT_FIELD_TRIALS=1" ]
} else {
if (rtc_strict_field_trials == "") {
defines += [ "WEBRTC_STRICT_FIELD_TRIALS=0" ]
} else if (rtc_strict_field_trials == "dcheck") {
defines += [ "WEBRTC_STRICT_FIELD_TRIALS=1" ]
} else if (rtc_strict_field_trials == "warn") {
defines += [ "WEBRTC_STRICT_FIELD_TRIALS=2" ]
} else {
assert(false,
"Unsupported value for rtc_strict_field_trials: " +
"$rtc_strict_field_trials")
}
if (rtc_include_internal_audio_device) {
@ -478,6 +481,7 @@ if (!build_with_chromium) {
suppressed_configs += [ "//build/config/compiler:thin_archive" ]
defines = []
# RingRTC change to exclude av1 and h264 factories
deps = [
"api:create_peerconnection_factory",
"api:libjingle_peerconnection_api",
@ -488,6 +492,16 @@ if (!build_with_chromium) {
"api/task_queue",
"api/task_queue:default_task_queue_factory",
"api/test/metrics",
"api/video_codecs:video_decoder_factory_template",
# "api/video_codecs:video_decoder_factory_template_dav1d_adapter",
"api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter",
"api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter",
# "api/video_codecs:video_decoder_factory_template_open_h264_adapter",
"api/video_codecs:video_encoder_factory_template",
# "api/video_codecs:video_encoder_factory_template_libaom_av1_adapter",
"api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter",
"api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter",
# "api/video_codecs:video_encoder_factory_template_open_h264_adapter",
"audio",
"call",
"common_audio",
@ -516,13 +530,6 @@ if (!build_with_chromium) {
]
}
if (rtc_include_builtin_video_codecs) {
deps += [
"api/video_codecs:builtin_video_decoder_factory",
"api/video_codecs:builtin_video_encoder_factory",
]
}
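The deps list above swaps the removed builtin encoder/decoder factory targets for the per-codec factory templates, with the AV1 and H.264 adapters commented out per the RingRTC note. A minimal C++ sketch of assembling factories from the remaining VP8/VP9 adapters, assuming the class and header names exposed by the api/video_codecs template targets:

#include <memory>

#include "api/video_codecs/video_decoder_factory_template.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
#include "api/video_codecs/video_encoder_factory_template.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"

// VP8/VP9-only factories built from the template adapters kept in the deps
// list above; the AV1 (libaom/dav1d) and H.264 (OpenH264) adapters are omitted.
std::unique_ptr<webrtc::VideoEncoderFactory> CreateVpxOnlyEncoderFactory() {
  return std::make_unique<webrtc::VideoEncoderFactoryTemplate<
      webrtc::LibvpxVp8EncoderTemplateAdapter,
      webrtc::LibvpxVp9EncoderTemplateAdapter>>();
}

std::unique_ptr<webrtc::VideoDecoderFactory> CreateVpxOnlyDecoderFactory() {
  return std::make_unique<webrtc::VideoDecoderFactoryTemplate<
      webrtc::LibvpxVp8DecoderTemplateAdapter,
      webrtc::LibvpxVp9DecoderTemplateAdapter>>();
}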
if (build_with_mozilla) {
deps += [
"api/video:video_frame",
@ -627,7 +634,7 @@ if (rtc_include_tests && !build_with_chromium) {
}
}
if (enable_google_benchmarks) {
if (rtc_enable_google_benchmarks) {
rtc_test("benchmarks") {
testonly = true
deps = [
@ -730,24 +737,6 @@ if (rtc_include_tests && !build_with_chromium) {
}
}
if (is_fuchsia) {
rtc_test("fuchsia_perf_tests") {
testonly = true
deps = [
#TODO(fxbug.dev/115601) - Enable when fixed
"call:call_perf_tests",
"modules/audio_coding:audio_coding_perf_tests",
"modules/audio_processing:audio_processing_perf_tests",
"pc:peerconnection_perf_tests",
"test:test_main",
"video:video_full_stack_tests",
"video:video_pc_full_stack_tests",
]
data = webrtc_perf_tests_resources
}
}
rtc_test("webrtc_nonparallel_tests") {
testonly = true
deps = [ "rtc_base:rtc_base_nonparallel_tests" ]

DEPS

@ -10,7 +10,7 @@ vars = {
# chromium waterfalls. More info at: crbug.com/570091.
'checkout_configuration': 'default',
'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"',
'chromium_revision': 'd76cada1245a655255d71c6166dc5417f2756bd5',
'chromium_revision': '8603a0cee25d1cc4d701ef742b3df7c775440524',
# Fetch the prebuilt binaries for llvm-cov and llvm-profdata. Needed to
# process the raw profiles produced by instrumented targets (built with
@ -24,8 +24,8 @@ vars = {
'resultdb_version': 'git_revision:ebc74d10fa0d64057daa6f128e89f3672eeeec95',
# By default, download the fuchsia sdk from the public sdk directory.
'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/gn/',
'fuchsia_version': 'version:11.20230221.2.1',
'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/core/',
'fuchsia_version': 'version:13.20230615.1.1',
# By default, download the fuchsia images from the fuchsia GCS bucket.
'fuchsia_images_bucket': 'fuchsia',
'checkout_fuchsia': False,
@ -36,7 +36,7 @@ vars = {
'checkout_fuchsia_product_bundles': '"{checkout_fuchsia_boot_images}" != ""',
# reclient CIPD package version
'reclient_version': 're_client_version:0.96.2.d36a87c-gomaip',
'reclient_version': 're_client_version:0.108.0.7cdbbe9-gomaip',
# ninja CIPD package version
# https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja
@ -46,34 +46,34 @@ vars = {
deps = {
# RingRTC change to use a fork of opus
'src/ringrtc/opus/src':
'https://github.com/signalapp/opus.git@74d8597f47aa680c9f9e21ab0b99c8c0632fe27d',
'https://github.com/signalapp/opus.git@593419e833acab4d15b4901fe156177fb7315468',
# TODO(kjellander): Move this to be Android-only.
'src/base':
'https://chromium.googlesource.com/chromium/src/base@0fc91d5d9071a1619d481fbc6a166fcd9a4ac0a9',
'https://chromium.googlesource.com/chromium/src/base@ca4474373784d15364b5d190970e5bdfa1544c2a',
'src/build':
'https://chromium.googlesource.com/chromium/src/build@07283ef796b29a02df30fbdb498b978f31d00410',
'https://chromium.googlesource.com/chromium/src/build@6c0e0e0c84aa581f9bfa042e511dc9aaffa8fd82',
'src/buildtools':
'https://chromium.googlesource.com/chromium/src/buildtools@d110f6238fee0e4c82ab64606e5e967a3c809c55',
'https://chromium.googlesource.com/chromium/src/buildtools@3739a3619309af3b788379ad0936ca00b981616e',
# Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC.
'src/examples/androidtests/third_party/gradle': {
'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@f2d1fb54a951d8b11d25748e4711bec8d128d7e3',
'condition': 'checkout_android',
},
'src/ios': {
'url': 'https://chromium.googlesource.com/chromium/src/ios@143d6295ffa1eb7446d5e3ee236bc2c67ecf1dc0',
'url': 'https://chromium.googlesource.com/chromium/src/ios@0df9bead2936138bd3853fdf826b29470cfa517e',
'condition': 'checkout_ios',
},
'src/testing':
'https://chromium.googlesource.com/chromium/src/testing@b93e19cb405e9c39c414470ce1bca7e17aaae347',
'https://chromium.googlesource.com/chromium/src/testing@f3b8f1d8c1d7ca49f9a77b8e669c357572f4447c',
'src/third_party':
'https://chromium.googlesource.com/chromium/src/third_party@82835c7ebd240b08997ee36a42eae06b9e386b95',
'https://chromium.googlesource.com/chromium/src/third_party@770155421d251b9541301084d0db46812540c251',
'src/buildtools/linux64': {
'packages': [
{
'package': 'gn/gn/linux-${{arch}}',
'version': 'git_revision:b25a2f8c2d33f02082f0f258350f5e22c0973108',
'version': 'git_revision:4bd1a77e67958fb7f6739bd4542641646f264e5d',
}
],
'dep_type': 'cipd',
@ -83,7 +83,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/mac-${{arch}}',
'version': 'git_revision:b25a2f8c2d33f02082f0f258350f5e22c0973108',
'version': 'git_revision:4bd1a77e67958fb7f6739bd4542641646f264e5d',
}
],
'dep_type': 'cipd',
@ -93,7 +93,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/windows-amd64',
'version': 'git_revision:b25a2f8c2d33f02082f0f258350f5e22c0973108',
'version': 'git_revision:4bd1a77e67958fb7f6739bd4542641646f264e5d',
}
],
'dep_type': 'cipd',
@ -112,14 +112,14 @@ deps = {
'condition': 'not (host_os == "linux" and host_cpu == "arm64")',
},
'src/buildtools/clang_format/script':
'src/third_party/clang-format/script':
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git@f97059df7f8b205064625cdb5f97b56668a125ef',
'src/buildtools/third_party/libc++/trunk':
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@abad4bb576d98d81c2d5861c3862cc87145f7dad',
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@055b2e17ae4f0e2c025ad0c7508b01787df17758',
'src/buildtools/third_party/libc++abi/trunk':
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@5559ffe8ca32471e894f4cb04b7d18d28862677f',
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@c2a35d1b2cf4b6ca85f5235c76ad9b1aff97e801',
'src/buildtools/third_party/libunwind/trunk':
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@aecf8d01b908e9dbfe6741da2fe7425879df3537',
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@b5a43ecdac82a248f8a700a68c722b4d98708377',
'src/third_party/ninja': {
'packages': [
@ -155,7 +155,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_build_tools/aapt2',
'version': '36NqCian2RIwuM6SFfizdUgKoXyZhy3q6pFfsws0szYC',
'version': 'STY0BXlZxsEhudnlXQFed-B5UpwehcoM0sYqor6qRqsC',
},
],
'condition': 'checkout_android',
@ -166,7 +166,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_build_tools/bundletool',
'version': 'TpDdbF-PPgwL0iOVsdLM07L-DUp2DV3hgzCMmPd2_GUC',
'version': 'LbB0aRQ3VKjRJZmc_PD0VTZ1O34_zD92rh71aOEOEcEC',
},
],
'condition': 'checkout_android',
@ -174,38 +174,42 @@ deps = {
},
'src/third_party/boringssl/src':
'https://boringssl.googlesource.com/boringssl.git@ca1690e221677cea3fb946f324eb89d846ec53f2',
'https://boringssl.googlesource.com/boringssl.git@ae88f198a49d77993e9c44b017d0e69c810dc668',
'src/third_party/breakpad/breakpad':
'https://chromium.googlesource.com/breakpad/breakpad.git@abb105db21e962eda5b7d9b7a0ac8dd701e0b987',
'https://chromium.googlesource.com/breakpad/breakpad.git@8988364bcddd9b194b0bf931c10bc125987330ed',
'src/third_party/catapult':
'https://chromium.googlesource.com/catapult.git@a22c2597b5bfa481324838866945c3ed7394ac47',
'https://chromium.googlesource.com/catapult.git@89fad9023d62d7031789a904b2aa6bd1d4d0a3e2',
'src/third_party/ced/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5',
},
'src/third_party/colorama/src':
'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8',
'https://chromium.googlesource.com/external/colorama.git@3de9f013df4b470069d03d250224062e8cf15c49',
'src/third_party/cpu_features/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/cpu_features.git@936b9ab5515dead115606559502e3864958f7f6e',
'condition': 'checkout_android',
},
'src/third_party/crc32c/src':
'https://chromium.googlesource.com/external/github.com/google/crc32c.git@fa5ade41ee480003d9c5af6f43567ba22e4e17e6',
'src/third_party/depot_tools':
'https://chromium.googlesource.com/chromium/tools/depot_tools.git@30136f0cb715c04b8e3fb7aec59338f1563a76ae',
'https://chromium.googlesource.com/chromium/tools/depot_tools.git@3ffad8166e1c233624dcac4e5a12a59944f1231a',
'src/third_party/ffmpeg':
'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@ee0c52d52036ecadfd38affec86c04937480bedb',
'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@881c5c3f6412020c37e97e178e0f5da9ddd2ae90',
'src/third_party/flatbuffers/src':
'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@a56f9ec50e908362e20254fcef28e62a2f148d91',
'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@13fc75cb6b7b44793f3f5b4ba025ff403d012c9f',
'src/third_party/grpc/src': {
'url': 'https://chromium.googlesource.com/external/github.com/grpc/grpc.git@a017e9b7f20743c69627b94d7d101e4e6baadb44',
'url': 'https://chromium.googlesource.com/external/github.com/grpc/grpc.git@822dab21d9995c5cf942476b35ca12a1aa9d2737',
},
# Used for embedded builds. CrOS & Linux use the system version.
'src/third_party/fontconfig/src': {
'url': 'https://chromium.googlesource.com/external/fontconfig.git@2ef790a0dbbab24235d1b8c0325ab4414de5f0a9',
'url': 'https://chromium.googlesource.com/external/fontconfig.git@06929a556fdc39c8fe12965b69070c8df520a33e',
'condition': 'checkout_linux',
},
'src/third_party/freetype/src':
'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@7f9499044e3baa901de99251a007aa66e750b26c',
'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@5c00a46805d6423fc45b4ba2c0f2e22dd0450d73',
'src/third_party/harfbuzz-ng/src':
'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@2822b589bc837fae6f66233e2cf2eef0f6ce8470',
'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@db700b5670d9475cc8ed4880cc9447b232c5e432',
'src/third_party/google_benchmark/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@f730846b0a3c0dc0699978846fb14ffb2fad0bdc',
'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@b177433f3ee2513b1075140c723d73ab8901790f',
},
# WebRTC-only dependency (not present in Chromium).
'src/third_party/gtest-parallel':
@ -223,13 +227,13 @@ deps = {
'src/third_party/googletest/src':
'https://chromium.googlesource.com/external/github.com/google/googletest.git@af29db7ec28d6df1c7f0f745186884091e602e07',
'src/third_party/icu': {
'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@c6b68522318204f795a8f04caebf6c0beb679cc4',
'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@e8c3bc9ea97d4423ad0515e5f1c064f486dae8b1',
},
'src/third_party/jdk': {
'packages': [
{
'package': 'chromium/third_party/jdk',
'version': '-FR8HTNcMfxy7J2HUaWVa0QmEE4f68iotzvFbqOj2LEC',
'version': '2Of9Pe_OdO4xoAATuiLDiMVNebKTNO3WrwJGqil4RosC',
},
],
'condition': 'host_os == "linux" and checkout_android',
@ -258,7 +262,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/kotlin_stdlib',
'version': 'Mg7371mEUwDQH4_z29HdWqYWVlXN6t2dXX0kIutg_SwC',
'version': 'z4_AYYz2Tw5GKikuiDLTuxxf0NJVGLkC3CVcyiIpc-gC',
},
],
'condition': 'checkout_android',
@ -269,7 +273,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/kotlinc',
'version': 'bCZedwoM-hb1pP1QKzA3P5aR4zjZltqLj4JQpmQsHuUC',
'version': 'QSwE30iq_KLKxImEnQEwDMQM_cU10eZSAwNobs8BEsoC',
},
],
'condition': 'checkout_android',
@ -277,25 +281,25 @@ deps = {
},
# Used for building libFuzzers (only supports Linux).
'src/third_party/libFuzzer/src':
'https://chromium.googlesource.com/chromium/llvm-project/compiler-rt/lib/fuzzer.git@debe7d2d1982e540fbd6bd78604bf001753f9e74',
'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git@26cc39e59b2bf5cbc20486296248a842c536878d',
'src/third_party/libjpeg_turbo':
'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@0b6e6a152242c2fa30ffb29633d1b7d7b229b46b',
'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@aa4075f116e4312537d0d3e9dbd5e31096539f94',
'src/third_party/libsrtp':
'https://chromium.googlesource.com/chromium/deps/libsrtp.git@5b7c744eb8310250ccc534f3f86a2015b3887a0a',
'src/third_party/dav1d/libdav1d':
'https://chromium.googlesource.com/external/github.com/videolan/dav1d.git@ed63a7459376a21e88b871006574dc2055a2ea35',
'https://chromium.googlesource.com/external/github.com/videolan/dav1d.git@f8ae94eca0f53502a2cddd29a263c1edea4822a0',
'src/third_party/libaom/source/libaom':
'https://aomedia.googlesource.com/aom.git@70b12695e1967d9589dd15b345a039e575e8f429',
'https://aomedia.googlesource.com/aom.git@233000f66e9ff0bb09226a2f222a029bb4c89de6',
'src/third_party/libunwindstack': {
'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@4dbfa0e8c844c8e243b297bc185e54a99ff94f9e',
'condition': 'checkout_android',
},
'src/third_party/perfetto':
'https://android.googlesource.com/platform/external/perfetto.git@55985e77ff4f3e023d321c7f7236e8cfe098e545',
'https://android.googlesource.com/platform/external/perfetto.git@0ba4c2cd12264c4d33787fb700b93c67ee9fbc11',
'src/third_party/libvpx/source/libvpx':
'https://chromium.googlesource.com/webm/libvpx.git@db69ce6aea278bee88668fd9cc2af2e544516fdb',
'https://chromium.googlesource.com/webm/libvpx.git@2245df50a6d360d33fccd51479c48f2210ed607a',
'src/third_party/libyuv':
'https://chromium.googlesource.com/libyuv/libyuv.git@2bdc210be9eb11ded16bf3ef1f6cadb0d4dcb0c2',
'https://chromium.googlesource.com/libyuv/libyuv.git@552571e8b24b2619c39ec176e6cb8e75d3e7fdd3',
'src/third_party/lss': {
'url': 'https://chromium.googlesource.com/linux-syscall-support.git@ce877209e11aa69dcfffbd53ef90ea1d07136521',
'condition': 'checkout_android or checkout_linux',
@ -311,12 +315,12 @@ deps = {
},
'src/third_party/openh264/src':
'https://chromium.googlesource.com/external/github.com/cisco/openh264@db956674bbdfbaab5acdd3fdb4117c2fef5527e9',
'https://chromium.googlesource.com/external/github.com/cisco/openh264@09a4f3ec842a8932341b195c5b01e141c8a16eb7',
'src/third_party/r8': {
'packages': [
{
'package': 'chromium/third_party/r8',
'version': 'PwglNZFRNPkBBXdnY9NfrZFk2ULWDTRxhV9rl2kvkpUC',
'version': 'vw5kLlW3-suSlCKSO9OQpFWpR8oDnvQ8k1RgKNUapQYC',
},
],
'condition': 'checkout_android',
@ -339,9 +343,8 @@ deps = {
'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@refs/tags/v2.23.0',
'condition': 'checkout_android',
},
# RingRTC change to update tools to fix https://bugs.chromium.org/p/chromium/issues/detail?id=1395849
'src/tools':
'https://chromium.googlesource.com/chromium/src/tools@a6b7d5e7a1f60c80a4c62576ebadff507c5c84f8',
'https://chromium.googlesource.com/chromium/src/tools@eb2e55cf816468d0b8899ce5d8429f7eb8c42f01',
'src/third_party/accessibility_test_framework': {
'packages': [
@ -354,17 +357,6 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_support_test_runner': {
'packages': [
{
'package': 'chromium/third_party/android_support_test_runner',
'version': '96d4bf848cd210fdcbca6bcc8c1b4b39cbd93141',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/byte_buddy': {
'packages': [
{
@ -391,7 +383,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/espresso',
'version': 'y8fIfH8Leo2cPm7iGCYnBxZpwOlgLv8rm2mlcmJlvGsC',
'version': '5LoBT0j383h_4dXbnap7gnNQMtMjpbMJD1JaGIYNj-IC',
},
],
'condition': 'checkout_android',
@ -409,16 +401,22 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_ndk': {
'url': 'https://chromium.googlesource.com/android_ndk.git@8388a2be5421311dc75c5f937aae13d821a27f3d',
'condition': 'checkout_android',
'src/third_party/android_toolchain': {
'packages': [
{
'package': 'chromium/third_party/android_toolchain/android_toolchain',
'version': 'version:2@r25c.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/androidx': {
'packages': [
{
'package': 'chromium/third_party/androidx',
'version': 'waVlDuvM1_o0siiUN39LBaTpj0pfqoBRglXQgdicJicC',
'version': 'MqkmMx1Ct4Fk2Vb_FY05yLzXxVnH9evr2OqP6tpU9MEC',
},
],
'condition': 'checkout_android',
@ -429,7 +427,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_build_tools/manifest_merger',
'version': 'EbRaK62t9grqlZqL-JTd_zwM4t1u9fm1x4c2rLE0cqQC',
'version': 'UNXioFXYvz7k7UmE2WYAaXuYIK3Ky0aSQ0IuDEdS9soC',
},
],
'condition': 'checkout_android',
@ -468,7 +466,7 @@ deps = {
},
{
'package': 'chromium/third_party/android_sdk/public/cmdline-tools',
'version': '3Yn5Sn7BMObm8gsoZCF0loJMKg9_PpgU07G9DObCLdQC',
'version': 'EWnL2r7oV5GtE9Ef7GyohyFam42wtMtEKYU4dCb3U1YC',
},
],
'condition': 'checkout_android',
@ -523,7 +521,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/turbine',
'version': 'YQC-btuan_DTe9V9dv_e1LxgYSWeOoDfrd-VSqzIvHkC',
'version': 'G8Cku1fztaDd9to_8gk3PNWM2iRacAxD9zcUAgUPUEAC',
},
],
'condition': 'checkout_android',
@ -534,11 +532,11 @@ deps = {
'packages': [
{
'package': 'infra/tools/luci/isolate/${{platform}}',
'version': 'git_revision:c543f47ae455dbfe7e8fed5baa61a14d9068e98c',
'version': 'git_revision:39f255d5875293d3e1d978888b819ac124a8b0cc',
},
{
'package': 'infra/tools/luci/swarming/${{platform}}',
'version': 'git_revision:c543f47ae455dbfe7e8fed5baa61a14d9068e98c',
'version': 'git_revision:39f255d5875293d3e1d978888b819ac124a8b0cc',
},
],
'dep_type': 'cipd',
@ -1167,17 +1165,6 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/com_google_android_gms_play_services_fido': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_fido',
'version': 'version:2@19.0.0-beta.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/com_google_android_gms_play_services_flags': {
'packages': [
{
@ -1369,7 +1356,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations',
'version': 'version:2@1.9.cr1',
'version': 'version:2@1.10.1.cr1',
},
],
'condition': 'checkout_android',
@ -1457,7 +1444,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations',
'version': 'version:2@2.14.0.cr1',
'version': 'version:2@2.18.0.cr1',
},
],
'condition': 'checkout_android',
@ -1666,7 +1653,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_guava_guava',
'version': 'version:2@31.0.1-jre.cr1',
'version': 'version:2@31.1-jre.cr1',
},
],
'condition': 'checkout_android',
@ -1761,6 +1748,28 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/com_squareup_okio_okio_jvm': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_squareup_okio_okio_jvm',
'version': 'version:2@3.0.0.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/com_squareup_wire_wire_runtime_jvm': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_squareup_wire_wire_runtime_jvm',
'version': 'version:2@4.5.1.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/io_github_java_diff_utils_java_diff_utils': {
'packages': [
{
@ -1886,7 +1895,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/net_bytebuddy_byte_buddy',
'version': 'version:2@1.12.22.cr1',
'version': 'version:2@1.14.4.cr1',
},
],
'condition': 'checkout_android',
@ -1897,7 +1906,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/net_bytebuddy_byte_buddy_agent',
'version': 'version:2@1.12.22.cr1',
'version': 'version:2@1.14.4.cr1',
},
],
'condition': 'checkout_android',
@ -1915,22 +1924,11 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/net_sf_kxml_kxml2': {
'src/third_party/android_deps/libs/org_bouncycastle_bcprov_jdk18on': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/net_sf_kxml_kxml2',
'version': 'version:2@2.3.0.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_bouncycastle_bcprov_jdk15on': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_bouncycastle_bcprov_jdk15on',
'version': 'version:2@1.68.cr1',
'package': 'chromium/third_party/android_deps/libs/org_bouncycastle_bcprov_jdk18on',
'version': 'version:2@1.72.cr1',
},
],
'condition': 'checkout_android',
@ -2040,7 +2038,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_jdk7',
'version': 'version:2@1.8.0.cr1',
'version': 'version:2@1.8.20.cr1',
},
],
'condition': 'checkout_android',
@ -2051,7 +2049,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_jdk8',
'version': 'version:2@1.8.0.cr1',
'version': 'version:2@1.8.20.cr1',
},
],
'condition': 'checkout_android',
@ -2062,7 +2060,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_android',
'version': 'version:2@1.6.1.cr1',
'version': 'version:2@1.6.4.cr1',
},
],
'condition': 'checkout_android',
@ -2073,7 +2071,18 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_core_jvm',
'version': 'version:2@1.6.1.cr1',
'version': 'version:2@1.6.4.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_guava': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_guava',
'version': 'version:2@1.6.4.cr1',
},
],
'condition': 'checkout_android',
@ -2102,11 +2111,33 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_mockito_mockito_android': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_android',
'version': 'version:2@5.3.1.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_mockito_mockito_core': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_core',
'version': 'version:2@5.1.1.cr1',
'version': 'version:2@5.3.1.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_mockito_mockito_subclass': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_subclass',
'version': 'version:2@5.3.1.cr1',
},
],
'condition': 'checkout_android',
@ -2128,7 +2159,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm',
'version': 'version:2@9.2.cr1',
'version': 'version:2@9.5.cr1',
},
],
'condition': 'checkout_android',
@ -2139,7 +2170,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_analysis',
'version': 'version:2@9.2.cr1',
'version': 'version:2@9.5.cr1',
},
],
'condition': 'checkout_android',
@ -2150,7 +2181,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_commons',
'version': 'version:2@9.2.cr1',
'version': 'version:2@9.5.cr1',
},
],
'condition': 'checkout_android',
@ -2161,7 +2192,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_tree',
'version': 'version:2@9.2.cr1',
'version': 'version:2@9.5.cr1',
},
],
'condition': 'checkout_android',
@ -2172,7 +2203,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_util',
'version': 'version:2@9.2.cr1',
'version': 'version:2@9.5.cr1',
},
],
'condition': 'checkout_android',
@ -2194,7 +2225,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_annotations',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2205,7 +2236,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_junit',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2216,7 +2247,18 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_nativeruntime',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_robolectric_nativeruntime_dist_compat': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_nativeruntime_dist_compat',
'version': 'version:2@1.0.1.cr1',
},
],
'condition': 'checkout_android',
@ -2227,7 +2269,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_pluginapi',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2238,7 +2280,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2249,7 +2291,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_resources',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2260,7 +2302,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_robolectric',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2271,7 +2313,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_sandbox',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2282,7 +2324,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadowapi',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2293,7 +2335,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_framework',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2304,7 +2346,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2315,7 +2357,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',
@ -2326,7 +2368,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils_reflector',
'version': 'version:2@4.8.1.cr1',
'version': 'version:2@4.10.3.cr1',
},
],
'condition': 'checkout_android',


@ -828,7 +828,6 @@ def RunPythonTests(input_api, output_api):
test_directories = [
input_api.PresubmitLocalPath(),
Join('rtc_tools', 'py_event_log_analyzer'),
Join('audio', 'test', 'unittests'),
] + [
root for root, _, files in os.walk(Join('tools_webrtc')) if any(
f.endswith('_test.py') and f not in excluded_files for f in files)


@ -24,4 +24,4 @@ native API header files.
* [Reporting bugs](docs/bug-reporting.md)
* [Documentation](g3doc/sitemap.md)
[native-dev]: https://webrtc.googlesource.com/src/+/main/docs/native-code/index.md
[native-dev]: https://webrtc.googlesource.com/src/+/main/docs/native-code/


@ -65,6 +65,17 @@ if (!build_with_chromium) {
}
}
rtc_library("location") {
visibility = [ "*" ]
deps = [ "../rtc_base/system:rtc_export" ]
if (build_with_chromium) {
sources = [ "../../webrtc_overrides/api/location.h" ]
deps += [ "//base" ]
} else {
sources = [ "location.h" ]
}
}
rtc_library("rtp_headers") {
visibility = [ "*" ]
sources = [
@ -370,7 +381,10 @@ rtc_library("rtc_error") {
"../rtc_base:macromagic",
"../rtc_base/system:rtc_export",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
rtc_source_set("packet_socket_factory") {
@ -433,27 +447,6 @@ rtc_source_set("make_ref_counted") {
deps = [ "../rtc_base:refcount" ]
}
rtc_source_set("video_quality_test_fixture_api") {
visibility = [ "*" ]
testonly = true
sources = [ "test/video_quality_test_fixture.h" ]
deps = [
":fec_controller_api",
":libjingle_peerconnection_api",
":network_state_predictor_api",
":rtp_parameters",
":simulated_network_api",
"../call:fake_network",
"../call:rtp_interfaces",
"../test:test_common",
"../test:video_test_common",
"../video/config:encoder_config",
"transport:bitrate_settings",
"transport:network_control",
"video_codecs:video_codecs_api",
]
}
rtc_source_set("video_quality_analyzer_api") {
visibility = [ "*" ]
testonly = true
@ -462,6 +455,7 @@ rtc_source_set("video_quality_analyzer_api") {
deps = [
":array_view",
":stats_observer_interface",
"../rtc_base:checks",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
@ -605,7 +599,6 @@ rtc_source_set("peer_connection_quality_test_fixture_api") {
rtc_source_set("frame_generator_api") {
visibility = [ "*" ]
testonly = true
sources = [
"test/frame_generator_interface.cc",
"test/frame_generator_interface.h",
@ -618,20 +611,6 @@ rtc_source_set("frame_generator_api") {
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("test_dependency_factory") {
visibility = [ "*" ]
testonly = true
sources = [
"test/test_dependency_factory.cc",
"test/test_dependency_factory.h",
]
deps = [
":video_quality_test_fixture_api",
"../rtc_base:checks",
"../rtc_base:platform_thread_types",
]
}
if (rtc_include_tests) {
# TODO(srte): Move to network_emulation sub directory.
rtc_library("create_network_emulation_manager") {
@ -648,6 +627,41 @@ if (rtc_include_tests) {
}
if (!build_with_chromium) {
rtc_source_set("video_quality_test_fixture_api") {
visibility = [ "*" ]
testonly = true
sources = [ "test/video_quality_test_fixture.h" ]
deps = [
":fec_controller_api",
":libjingle_peerconnection_api",
":network_state_predictor_api",
":rtp_parameters",
":simulated_network_api",
"../call:fake_network",
"../call:rtp_interfaces",
"../test:test_common",
"../test:video_test_common",
"../video/config:encoder_config",
"transport:bitrate_settings",
"transport:network_control",
"video_codecs:video_codecs_api",
]
}
rtc_library("test_dependency_factory") {
visibility = [ "*" ]
testonly = true
sources = [
"test/test_dependency_factory.cc",
"test/test_dependency_factory.h",
]
deps = [
":video_quality_test_fixture_api",
"../rtc_base:checks",
"../rtc_base:platform_thread_types",
]
}
rtc_library("create_video_quality_test_fixture_api") {
visibility = [ "*" ]
testonly = true
@ -1008,10 +1022,14 @@ if (rtc_include_tests) {
rtc_library("video_codec_stats_api") {
visibility = [ "*" ]
testonly = true
sources = [ "test/video_codec_stats.h" ]
sources = [
"test/video_codec_stats.cc",
"test/video_codec_stats.h",
]
deps = [
"../api/numerics:numerics",
"../api/units:data_rate",
"../api/units:data_size",
"../api/units:frequency",
"test/metrics:metric",
"test/metrics:metrics_logger",
@ -1071,7 +1089,7 @@ if (rtc_include_tests) {
]
deps = [
":video_codec_tester_api",
"../modules/video_coding:videocodec_test_impl",
"../modules/video_coding:video_codec_tester",
]
}
@ -1283,6 +1301,17 @@ if (rtc_include_tests) {
]
}
rtc_source_set("mock_transformable_audio_frame") {
visibility = [ "*" ]
testonly = true
sources = [ "test/mock_transformable_audio_frame.h" ]
deps = [
":frame_transformer_interface",
"../test:test_support",
]
}
rtc_source_set("mock_transformable_video_frame") {
visibility = [ "*" ]
testonly = true
@ -1439,6 +1468,7 @@ if (rtc_include_tests) {
"units:units_unittests",
"video:frame_buffer_unittest",
"video:rtp_video_frame_assembler_unittests",
"video:video_frame_metadata_unittest",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
@ -1467,6 +1497,7 @@ if (rtc_include_tests) {
":mock_peerconnectioninterface",
":mock_rtp",
":mock_session_description_interface",
":mock_transformable_audio_frame",
":mock_transformable_video_frame",
":mock_video_bitrate_allocator",
":mock_video_bitrate_allocator_factory",
@ -1490,6 +1521,7 @@ rtc_source_set("field_trials_registry") {
":field_trials_view",
"../experiments:registered_field_trials",
"../rtc_base:checks",
"../rtc_base:logging",
"../rtc_base/containers:flat_set",
"../rtc_base/system:rtc_export",
]
@ -1536,6 +1568,7 @@ rtc_library("frame_transformer_factory") {
deps = [
":frame_transformer_interface",
":scoped_refptr",
"../audio:audio",
"../modules/rtp_rtcp",
"../rtc_base:refcount",
"video:encoded_frame",


@ -149,6 +149,10 @@ template <typename T,
class ArrayView final : public array_view_internal::ArrayViewBase<T, Size> {
public:
using value_type = T;
using reference = value_type&;
using const_reference = const value_type&;
using pointer = value_type*;
using const_pointer = const value_type*;
using const_iterator = const T*;
// Construct an ArrayView from a pointer and a length.
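The added aliases make rtc::ArrayView follow the usual container member-type conventions. A small usage sketch (the helper function is illustrative only):

#include <vector>

#include "api/array_view.h"

// Sums a read-only view; the view converts implicitly from any container
// exposing data() and size(), such as std::vector.
int Sum(rtc::ArrayView<const int> view) {
  int total = 0;
  for (rtc::ArrayView<const int>::const_reference v : view)  // const int&
    total += v;
  return total;
}

// int total = Sum(std::vector<int>{1, 2, 3});  // total == 6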


@ -10,7 +10,6 @@
#include "api/audio_codecs/audio_decoder.h"
#include <memory>
#include <utility>


@ -32,8 +32,9 @@ AudioEncoderMultiChannelOpusConfig::AudioEncoderMultiChannelOpusConfig(
const AudioEncoderMultiChannelOpusConfig&) = default;
AudioEncoderMultiChannelOpusConfig::~AudioEncoderMultiChannelOpusConfig() =
default;
AudioEncoderMultiChannelOpusConfig& AudioEncoderMultiChannelOpusConfig::
operator=(const AudioEncoderMultiChannelOpusConfig&) = default;
AudioEncoderMultiChannelOpusConfig&
AudioEncoderMultiChannelOpusConfig::operator=(
const AudioEncoderMultiChannelOpusConfig&) = default;
bool AudioEncoderMultiChannelOpusConfig::IsOk() const {
if (frame_size_ms <= 0 || frame_size_ms % 10 != 0)


@ -52,7 +52,6 @@ void AudioOptions::SetAll(const AudioOptions& change) {
change.audio_jitter_buffer_fast_accelerate);
SetFrom(&audio_jitter_buffer_min_delay_ms,
change.audio_jitter_buffer_min_delay_ms);
SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe);
SetFrom(&audio_network_adaptor, change.audio_network_adaptor);
SetFrom(&audio_network_adaptor_config, change.audio_network_adaptor_config);
SetFrom(&init_recording_on_send, change.init_recording_on_send);
@ -72,7 +71,6 @@ bool AudioOptions::operator==(const AudioOptions& o) const {
o.audio_jitter_buffer_fast_accelerate &&
audio_jitter_buffer_min_delay_ms ==
o.audio_jitter_buffer_min_delay_ms &&
combined_audio_video_bwe == o.combined_audio_video_bwe &&
audio_network_adaptor == o.audio_network_adaptor &&
audio_network_adaptor_config == o.audio_network_adaptor_config &&
init_recording_on_send == o.init_recording_on_send;
@ -97,7 +95,6 @@ std::string AudioOptions::ToString() const {
audio_jitter_buffer_fast_accelerate);
ToStringIfSet(&result, "audio_jitter_buffer_min_delay_ms",
audio_jitter_buffer_min_delay_ms);
ToStringIfSet(&result, "combined_audio_video_bwe", combined_audio_video_bwe);
ToStringIfSet(&result, "audio_network_adaptor", audio_network_adaptor);
ToStringIfSet(&result, "init_recording_on_send", init_recording_on_send);
result << "}";


@ -58,11 +58,6 @@ struct RTC_EXPORT AudioOptions {
absl::optional<bool> audio_jitter_buffer_fast_accelerate;
// Audio receiver jitter buffer (NetEq) minimum target delay in milliseconds.
absl::optional<int> audio_jitter_buffer_min_delay_ms;
// Enable combined audio+bandwidth BWE.
// TODO(pthatcher): This flag is set from the
// "googCombinedAudioVideoBwe", but not used anywhere. So delete it,
// and check if any other AudioOptions members are unused.
absl::optional<bool> combined_audio_video_bwe;
// Enable audio network adaptor.
// TODO(webrtc:11717): Remove this API in favor of adaptivePtime in
// RtpEncodingParameters.


@ -36,6 +36,10 @@ struct PacketOptions {
bool is_retransmit = false;
bool included_in_feedback = false;
bool included_in_allocation = false;
// Whether this packet can be part of a packet batch at lower levels.
bool batchable = false;
// Whether this packet is the last of a batch.
bool last_packet_in_batch = false;
};
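The two new flags let the sender mark packets that lower layers may coalesce, with the last packet of a burst flagged so the batch gets flushed. A minimal sketch of filling them in (the helper is hypothetical; the options would then be passed along with the packet to Transport::SendRtp):

#include "api/call/transport.h"  // assumed header for PacketOptions/Transport

// Hypothetical helper: options for one packet in a burst, flagging the final
// packet so lower levels know when to flush the batch.
webrtc::PacketOptions MakeBurstPacketOptions(bool is_last_in_burst) {
  webrtc::PacketOptions options;
  options.batchable = true;                         // may be coalesced below
  options.last_packet_in_batch = is_last_in_burst;  // end-of-batch signal
  return options;
}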
class Transport {


@ -88,7 +88,8 @@ std::string Candidate::ToStringInternal(bool sensitive) const {
uint32_t Candidate::GetPriority(uint32_t type_preference,
int network_adapter_preference,
int relay_preference) const {
int relay_preference,
bool adjust_local_preference) const {
// RFC 5245 - 4.1.2.1.
// priority = (2^24)*(type preference) +
// (2^8)*(local preference) +
@ -106,11 +107,25 @@ uint32_t Candidate::GetPriority(uint32_t type_preference,
// local preference = (NIC Type << 8 | Addr_Pref) + relay preference.
// The relay preference is based on the number of TURN servers, the
// first TURN server gets the highest preference.
int addr_pref = IPAddressPrecedence(address_.ipaddr());
int local_preference =
((network_adapter_preference << 8) | addr_pref) + relay_preference;
// Ensure that the added relay preference will not result in a relay candidate
// whose STUN priority attribute has a higher priority than a server-reflexive
// candidate.
// The STUN priority attribute is calculated as
// (peer-reflexive type preference) << 24 | (priority & 0x00FFFFFF)
// as described in
// https://www.rfc-editor.org/rfc/rfc5245#section-7.1.2.1
// To satisfy that condition, add kMaxTurnServers to the local preference.
// This can not overflow the field width since the highest "NIC pref"
// assigned is kHighestNetworkPreference = 127
RTC_DCHECK_LT(local_preference + kMaxTurnServers, 0x10000);
if (adjust_local_preference && relay_protocol_.empty()) {
local_preference += kMaxTurnServers;
}
return (type_preference << 24) | (local_preference << 8) | (256 - component_);
}
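The arithmetic in the comment above, worked through with concrete numbers (the inputs are illustrative; only the formula comes from the code):

#include <cstdint>

// Example: host candidate (type preference 126), adapter preference 127,
// IPv4 address precedence 30, first TURN server (relay_preference 0),
// RTP component 1, no local-preference adjustment.
uint32_t ExampleCandidatePriority() {
  const uint32_t type_preference = 126;
  const int network_adapter_preference = 127;
  const int addr_pref = 30;
  const int relay_preference = 0;
  const int component = 1;
  // local preference = (NIC type << 8 | Addr_Pref) + relay preference = 32542.
  const int local_preference =
      ((network_adapter_preference << 8) | addr_pref) + relay_preference;
  // priority = (2^24)*type_pref + (2^8)*local_pref + (2^0)*(256 - component).
  return (type_preference << 24) | (local_preference << 8) | (256 - component);
}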


@ -173,7 +173,8 @@ class RTC_EXPORT Candidate {
uint32_t GetPriority(uint32_t type_preference,
int network_adapter_preference,
int relay_preference) const;
int relay_preference,
bool adjust_local_preference) const;
bool operator==(const Candidate& o) const;
bool operator!=(const Candidate& o) const;


@ -39,6 +39,7 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
AudioFrameProcessor* audio_frame_processor,
std::unique_ptr<AudioFrameProcessor> owned_audio_frame_processor,
std::unique_ptr<FieldTrialsView> field_trials) {
if (!field_trials) {
field_trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
@ -64,7 +65,12 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
media_dependencies.adm = std::move(default_adm);
media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory);
media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory);
media_dependencies.audio_frame_processor = audio_frame_processor;
if (audio_frame_processor) {
media_dependencies.audio_frame_processor = audio_frame_processor;
} else if (owned_audio_frame_processor) {
media_dependencies.owned_audio_frame_processor =
std::move(owned_audio_frame_processor);
}
if (audio_processing) {
media_dependencies.audio_processing = std::move(audio_processing);
} else {
@ -80,4 +86,44 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
return CreateModularPeerConnectionFactory(std::move(dependencies));
}
rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
rtc::scoped_refptr<AudioDeviceModule> default_adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
std::unique_ptr<VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
AudioFrameProcessor* audio_frame_processor) {
return CreatePeerConnectionFactory(
network_thread, worker_thread, signaling_thread, default_adm,
audio_encoder_factory, audio_decoder_factory,
std::move(video_encoder_factory), std::move(video_decoder_factory),
audio_mixer, audio_processing, audio_frame_processor, nullptr, nullptr);
}
rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
rtc::scoped_refptr<AudioDeviceModule> default_adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
std::unique_ptr<VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioFrameProcessor> owned_audio_frame_processor,
std::unique_ptr<FieldTrialsView> field_trials) {
return CreatePeerConnectionFactory(
network_thread, worker_thread, signaling_thread, default_adm,
audio_encoder_factory, audio_decoder_factory,
std::move(video_encoder_factory), std::move(video_decoder_factory),
audio_mixer, audio_processing, nullptr,
std::move(owned_audio_frame_processor), std::move(field_trials));
}
} // namespace webrtc


@ -37,6 +37,9 @@ class AudioProcessing;
// Create a new instance of PeerConnectionFactoryInterface with optional video
// codec factories. These video factories represents all video codecs, i.e. no
// extra internal video codecs will be added.
// TODO(bugs.webrtc.org/15111):
// Remove the method with the raw AudioFrameProcessor pointer in the
// follow-up.
RTC_EXPORT rtc::scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory(
rtc::Thread* network_thread,
@ -49,7 +52,21 @@ CreatePeerConnectionFactory(
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
AudioFrameProcessor* audio_frame_processor = nullptr,
AudioFrameProcessor* audio_frame_processor = nullptr);
RTC_EXPORT rtc::scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory(
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
rtc::scoped_refptr<AudioDeviceModule> default_adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
std::unique_ptr<VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioFrameProcessor> owned_audio_frame_processor,
std::unique_ptr<FieldTrialsView> field_trials = nullptr);
} // namespace webrtc
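A hedged call sketch for the new overload that takes ownership of the frame processor; the wrapper function and nullptr defaults are illustrative, and the AudioFrameProcessor header path is assumed:

#include <memory>
#include <utility>

#include "api/audio/audio_frame_processor.h"  // assumed path
#include "api/create_peerconnection_factory.h"

// Builds a factory via the overload declared above, handing the frame
// processor over by unique_ptr instead of a raw pointer.
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> BuildFactory(
    rtc::Thread* network_thread,
    rtc::Thread* worker_thread,
    rtc::Thread* signaling_thread,
    rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
    rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory,
    std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
    std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory,
    std::unique_ptr<webrtc::AudioFrameProcessor> audio_frame_processor) {
  return webrtc::CreatePeerConnectionFactory(
      network_thread, worker_thread, signaling_thread,
      /*default_adm=*/nullptr, audio_encoder_factory, audio_decoder_factory,
      std::move(video_encoder_factory), std::move(video_decoder_factory),
      /*audio_mixer=*/nullptr, /*audio_processing=*/nullptr,
      std::move(audio_frame_processor), /*field_trials=*/nullptr);
}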


@ -10,6 +10,8 @@
#include "api/data_channel_interface.h"
#include "rtc_base/checks.h"
namespace webrtc {
bool DataChannelInterface::ordered() const {
@ -44,4 +46,17 @@ uint64_t DataChannelInterface::MaxSendQueueSize() {
return 16 * 1024 * 1024; // 16 MiB
}
// TODO(tommi): Remove method once downstream implementations have been removed.
bool DataChannelInterface::Send(const DataBuffer& buffer) {
RTC_DCHECK_NOTREACHED();
return false;
}
// TODO(tommi): Remove implementation once method is pure virtual.
void DataChannelInterface::SendAsync(
DataBuffer buffer,
absl::AnyInvocable<void(RTCError) &&> on_complete) {
RTC_DCHECK_NOTREACHED();
}
} // namespace webrtc


@ -19,6 +19,7 @@
#include <string>
#include "absl/functional/any_invocable.h"
#include "absl/types/optional.h"
#include "api/priority.h"
#include "api/rtc_error.h"
@ -100,6 +101,17 @@ class DataChannelObserver {
// The data channel's buffered_amount has changed.
virtual void OnBufferedAmountChange(uint64_t sent_data_size) {}
// Override this to get callbacks directly on the network thread.
// An implementation that does that must not block the network thread
// but rather only use the callback to trigger asynchronous processing
// elsewhere as a result of the notification.
// The default return value, `false`, means that notifications will be
// delivered on the signaling thread associated with the peerconnection
// instance.
// TODO(webrtc:11547): Eventually all DataChannelObserver implementations
// should be called on the network thread and this method removed.
virtual bool IsOkToCallOnTheNetworkThread() { return false; }
protected:
virtual ~DataChannelObserver() = default;
};
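A minimal observer sketch that opts into network-thread delivery as the comment above describes, handing the payload off to another task queue instead of doing work inline:

#include <utility>

#include "api/data_channel_interface.h"
#include "api/task_queue/task_queue_base.h"

// Opts into network-thread callbacks and never blocks there: OnMessage only
// copies the buffer and posts the real work to a worker task queue.
class NonBlockingObserver : public webrtc::DataChannelObserver {
 public:
  explicit NonBlockingObserver(webrtc::TaskQueueBase* worker)
      : worker_(worker) {}

  bool IsOkToCallOnTheNetworkThread() override { return true; }

  void OnStateChange() override {}
  void OnBufferedAmountChange(uint64_t sent_data_size) override {}
  void OnMessage(const webrtc::DataBuffer& buffer) override {
    worker_->PostTask([data = buffer.data] {
      // Parse `data` here, off the network thread.
    });
  }

 private:
  webrtc::TaskQueueBase* const worker_;
};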
@ -187,7 +199,20 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface {
// Returns false if the data channel is not in open state or if the send
// buffer is full.
// TODO(webrtc:13289): Return an RTCError with information about the failure.
virtual bool Send(const DataBuffer& buffer) = 0;
// TODO(tommi): Remove this method once downstream implementations don't refer
// to it.
virtual bool Send(const DataBuffer& buffer);
// Queues up an asynchronous send operation to run on a network thread.
// Once the operation has completed the `on_complete` callback is invoked,
// on the thread the send operation was done on. It's important that
// `on_complete` implementations do not block the current thread but rather
// post any expensive operations to other worker threads.
// TODO(tommi): Make pure virtual after updating mock class in Chromium.
// Deprecate `Send` in favor of this variant since the return value of `Send`
// is limiting for a fully async implementation (yet in practice is ignored).
virtual void SendAsync(DataBuffer buffer,
absl::AnyInvocable<void(RTCError) &&> on_complete);
// Amount of bytes that can be queued for sending on the data channel.
// Those are bytes that have not yet been processed at the SCTP level.
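A hedged usage sketch for SendAsync showing the completion-callback pattern described above (channel setup is elided):

#include <string>
#include <utility>

#include "api/data_channel_interface.h"
#include "rtc_base/logging.h"

// Sends a text message asynchronously; the callback runs once the network
// thread has handled the send and must not block.
void SendGreeting(rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
  webrtc::DataBuffer buffer(std::string("hello"));  // text (non-binary) buffer
  channel->SendAsync(std::move(buffer), [](webrtc::RTCError error) {
    if (!error.ok()) {
      RTC_LOG(LS_WARNING) << "SendAsync failed: " << error.message();
    }
  });
}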


@ -31,6 +31,9 @@ class VCMProtectionCallback {
uint32_t* sent_nack_rate_bps,
uint32_t* sent_fec_rate_bps) = 0;
// 'retransmission_mode' is either a value of enum RetransmissionMode, or
// computed with bitwise operators on values of enum RetransmissionMode.
virtual void SetRetransmissionMode(int retransmission_mode) = 0;
protected:
virtual ~VCMProtectionCallback() {}
};


@ -16,14 +16,19 @@
#include "experiments/registered_field_trials.h"
#include "rtc_base/checks.h"
#include "rtc_base/containers/flat_set.h"
#include "rtc_base/logging.h"
namespace webrtc {
std::string FieldTrialsRegistry::Lookup(absl::string_view key) const {
#if WEBRTC_STRICT_FIELD_TRIALS
#if WEBRTC_STRICT_FIELD_TRIALS == 1
RTC_DCHECK(absl::c_linear_search(kRegisteredFieldTrials, key) ||
test_keys_.contains(key))
<< key << " is not registered.";
<< key << " is not registered, see g3doc/field-trials.md.";
#elif WEBRTC_STRICT_FIELD_TRIALS == 2
RTC_LOG_IF(LS_WARNING, !(absl::c_linear_search(kRegisteredFieldTrials, key) ||
test_keys_.contains(key)))
<< key << " is not registered, see g3doc/field-trials.md.";
#endif
return GetValue(key);
}


@ -12,6 +12,7 @@
#include <string>
#include "absl/strings/match.h"
#include "absl/strings/string_view.h"
#include "rtc_base/system/rtc_export.h"
@ -32,18 +33,14 @@ class RTC_EXPORT FieldTrialsView {
virtual std::string Lookup(absl::string_view key) const = 0;
bool IsEnabled(absl::string_view key) const {
return Lookup(key).find("Enabled") == 0;
return absl::StartsWith(Lookup(key), "Enabled");
}
bool IsDisabled(absl::string_view key) const {
return Lookup(key).find("Disabled") == 0;
return absl::StartsWith(Lookup(key), "Disabled");
}
};
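Behavior is unchanged by the absl::StartsWith rewrite; a short usage sketch with a made-up trial name:

#include "api/field_trials_view.h"

// Gates a code path on an (illustrative) field trial string: "Enabled..."
// takes the first branch, "Disabled..." the second, anything else neither.
void ConfigureFeature(const webrtc::FieldTrialsView& trials) {
  if (trials.IsEnabled("WebRTC-ExampleExperiment")) {
    // Enable the experimental path.
  } else if (trials.IsDisabled("WebRTC-ExampleExperiment")) {
    // Force the legacy path.
  }
}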
// TODO(bugs.webrtc.org/10335): Remove once all migrated to
// api/field_trials_view.h
typedef FieldTrialsView WebRtcKeyValueConfig;
} // namespace webrtc
#endif // API_FIELD_TRIALS_VIEW_H_


@ -10,6 +10,7 @@
#include "api/frame_transformer_factory.h"
#include "audio/channel_send_frame_transformer_delegate.h"
#include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h"
namespace webrtc {
@ -24,6 +25,12 @@ std::unique_ptr<TransformableVideoFrameInterface> CreateVideoReceiverFrame() {
return nullptr;
}
std::unique_ptr<TransformableAudioFrameInterface> CloneAudioFrame(
TransformableAudioFrameInterface* original) {
// At the moment, only making sender frames is supported.
return CloneSenderAudioFrame(original);
}
std::unique_ptr<TransformableVideoFrameInterface> CloneVideoFrame(
TransformableVideoFrameInterface* original) {
// At the moment, only making sender frames from receiver frames is supported.


@ -32,6 +32,8 @@ std::unique_ptr<TransformableVideoFrameInterface> CreateVideoSenderFrame();
std::unique_ptr<TransformableVideoFrameInterface> CreateVideoReceiverFrame();
// Creates a new frame with the same metadata as the original.
// The original can be a sender or receiver frame.
RTC_EXPORT std::unique_ptr<TransformableAudioFrameInterface> CloneAudioFrame(
TransformableAudioFrameInterface* original);
RTC_EXPORT std::unique_ptr<TransformableVideoFrameInterface> CloneVideoFrame(
TransformableVideoFrameInterface* original);
} // namespace webrtc


@ -58,18 +58,9 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface {
virtual ~TransformableVideoFrameInterface() = default;
virtual bool IsKeyFrame() const = 0;
// Returns data needed in the frame transformation logic; for example,
// when the transformation applied to the frame is encryption/decryption, the
// additional data holds the serialized generic frame descriptor extension
// calculated in webrtc::RtpDescriptorAuthentication.
// This has been superseded by GetMetadata() and will be removed shortly.
[[deprecated("https://crbug.com/1414370")]] virtual std::vector<uint8_t>
GetAdditionalData() const = 0;
virtual VideoFrameMetadata Metadata() const = 0;
virtual const VideoFrameMetadata& GetMetadata() const = 0;
// TODO(https://crbug.com/webrtc/14709): Make pure virtual when Chromium MOCK
// has implemented this.
virtual void SetMetadata(const VideoFrameMetadata&) {}
virtual void SetMetadata(const VideoFrameMetadata&) = 0;
};
// Extends the TransformableFrameInterface to expose audio-specific information.
@ -77,12 +68,21 @@ class TransformableAudioFrameInterface : public TransformableFrameInterface {
public:
virtual ~TransformableAudioFrameInterface() = default;
virtual void SetRTPTimestamp(uint32_t timestamp) = 0;
// Exposes the frame header, enabling the interface clients to use the
// information in the header as needed, for example to compile the list of
// csrcs.
// TODO(crbug.com/1453226): Deprecate and remove once callers have migrated to
// the getters for specific fields.
virtual const RTPHeader& GetHeader() const = 0;
virtual rtc::ArrayView<const uint32_t> GetContributingSources() const = 0;
// TODO(crbug.com/1453226): Change this to pure virtual after it
// is implemented everywhere.
virtual const absl::optional<uint16_t> SequenceNumber() const {
return absl::nullopt;
}
};
// Objects implement this interface to be notified with the transformed frame.
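
A sketch, assuming a transform callback that inspects the audio-specific accessors added above (the logging helper is illustrative only):

#include <cstdint>

#include "api/frame_transformer_interface.h"
#include "rtc_base/logging.h"

void LogAudioFrameInfo(const webrtc::TransformableAudioFrameInterface& frame) {
  // Per-field getters are preferred over GetHeader(), which is slated for
  // removal once callers migrate (crbug.com/1453226).
  for (uint32_t csrc : frame.GetContributingSources()) {
    RTC_LOG(LS_VERBOSE) << "contributing source: " << csrc;
  }
  if (frame.SequenceNumber().has_value()) {
    RTC_LOG(LS_VERBOSE) << "sequence number: " << *frame.SequenceNumber();
  }
}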

View file

@ -90,18 +90,11 @@ struct IceTransportInit final {
// best connection to use or ping, and lets the transport decide when and
// whether to switch.
//
// Which ICE controller is used is determined based on the field trial
// "WebRTC-UseActiveIceController" as follows:
// Which ICE controller is used is determined as follows:
//
// 1. If the field trial is not enabled
// a. The legacy ICE controller factory is used if one is supplied.
// b. If not, a default ICE controller (BasicIceController) is
// constructed and used.
//
// 2. If the field trial is enabled
// a. If an active ICE controller factory is supplied, it is used and
// 1. If an active ICE controller factory is supplied, it is used and
// the legacy ICE controller factory is not used.
// b. If not, a default active ICE controller is used, wrapping over the
// 2. If not, a default active ICE controller is used, wrapping over the
// supplied or the default legacy ICE controller.
void set_active_ice_controller_factory(
cricket::ActiveIceControllerFactoryInterface*
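
A configuration sketch of the selection order described above, assuming the caller supplies its own ActiveIceControllerFactoryInterface implementation:

#include "api/ice_transport_interface.h"

void ConfigureIceController(
    webrtc::IceTransportInit& init,
    cricket::ActiveIceControllerFactoryInterface* active_factory) {
  // 1. An explicitly supplied active factory is used and any legacy factory
  //    is ignored.
  // 2. Left unset, a default active controller wraps the supplied or default
  //    legacy controller.
  init.set_active_ice_controller_factory(active_factory);
}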

View file

@ -71,9 +71,6 @@ class JsepSessionDescription : public SessionDescriptionInterface {
size_t mediasection_index) const;
virtual bool ToString(std::string* out) const;
static const int kDefaultVideoCodecId;
static const char kDefaultVideoCodecName[];
private:
std::unique_ptr<cricket::SessionDescription> description_;
std::string session_id_;

View file

@ -12,6 +12,8 @@
#include <string.h>
#include <utility>
#include "absl/algorithm/container.h"
#include "api/make_ref_counted.h"
#include "rtc_base/checks.h"
@ -360,6 +362,11 @@ bool StatsReport::Value::bool_val() const {
return value_.bool_;
}
const StatsReport::Id& StatsReport::Value::id_val() const {
RTC_DCHECK_EQ(type_, kId);
return *value_.id_;
}
const char* StatsReport::Value::display_name() const {
switch (name) {
case kStatsValueNameAecDivergentFilterFraction:
@ -783,28 +790,28 @@ const StatsReport::Value* StatsReport::FindValue(StatsValueName name) const {
StatsCollection::StatsCollection() {}
StatsCollection::~StatsCollection() {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
for (auto* r : list_)
delete r;
}
StatsCollection::const_iterator StatsCollection::begin() const {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
return list_.begin();
}
StatsCollection::const_iterator StatsCollection::end() const {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
return list_.end();
}
size_t StatsCollection::size() const {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
return list_.size();
}
StatsReport* StatsCollection::InsertNew(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(Find(id) == nullptr);
StatsReport* report = new StatsReport(id);
list_.push_back(report);
@ -812,13 +819,13 @@ StatsReport* StatsCollection::InsertNew(const StatsReport::Id& id) {
}
StatsReport* StatsCollection::FindOrAddNew(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
StatsReport* ret = Find(id);
return ret ? ret : InsertNew(id);
}
StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(id.get());
Container::iterator it = absl::c_find_if(
list_,
@ -832,10 +839,37 @@ StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
return InsertNew(id);
}
StatsCollection::Container StatsCollection::DetachCollection() {
RTC_DCHECK_RUN_ON(&thread_checker_);
#if RTC_DCHECK_IS_ON
for (auto* report : list_)
report->DetachSequenceCheckers();
#endif
return std::move(list_);
}
void StatsCollection::MergeCollection(Container collection) {
RTC_DCHECK_RUN_ON(&thread_checker_);
for (auto* report : collection) {
#if RTC_DCHECK_IS_ON
report->AttachSequenceCheckers();
#endif
Container::iterator it = absl::c_find_if(list_, [&](const StatsReport* r) {
return r->id()->Equals(report->id());
});
if (it == list_.end()) {
list_.push_back(report);
} else {
delete *it;
*it = report;
}
}
}
// Looks for a report with the given `id`. If one is not found, null
// will be returned.
StatsReport* StatsCollection::Find(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.IsCurrent());
RTC_DCHECK_RUN_ON(&thread_checker_);
Container::iterator it = absl::c_find_if(
list_,
[&id](const StatsReport* r) -> bool { return r->id()->Equals(id); });

View file

@ -344,8 +344,15 @@ class RTC_EXPORT StatsReport {
// TODO(tommi): Move `name` and `display_name` out of the Value struct.
const StatsValueName name;
protected:
#if RTC_DCHECK_IS_ON
friend class StatsReport;
void DetachSequenceChecker() { thread_checker_.Detach(); }
void AttachSequenceChecker() { RTC_DCHECK_RUN_ON(&thread_checker_); }
#endif
private:
webrtc::SequenceChecker thread_checker_;
webrtc::SequenceChecker thread_checker_{webrtc::SequenceChecker::kDetached};
mutable int ref_count_ RTC_GUARDED_BY(thread_checker_) = 0;
const Type type_;
@ -403,6 +410,19 @@ class RTC_EXPORT StatsReport {
const Value* FindValue(StatsValueName name) const;
#if RTC_DCHECK_IS_ON
void DetachSequenceCheckers() {
for (auto& v : values_) {
v.second->DetachSequenceChecker();
}
}
void AttachSequenceCheckers() {
for (auto& v : values_) {
v.second->AttachSequenceChecker();
}
}
#endif
private:
// The unique identifier for this object.
// This is used as a key for this report in ordered containers,
@ -441,13 +461,16 @@ class StatsCollection {
StatsReport* FindOrAddNew(const StatsReport::Id& id);
StatsReport* ReplaceOrAddNew(const StatsReport::Id& id);
Container DetachCollection();
void MergeCollection(Container collection);
// Looks for a report with the given `id`. If one is not found, null
// will be returned.
StatsReport* Find(const StatsReport::Id& id);
private:
Container list_;
webrtc::SequenceChecker thread_checker_;
webrtc::SequenceChecker thread_checker_{SequenceChecker::kDetached};
};
} // namespace webrtc
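
A minimal sketch of the new detach/merge flow for handing a stats collection from one sequence to another; the two-thread setup and header path are assumed:

#include <utility>

#include "api/stats_types.h"  // assumed location of StatsCollection

void HandOff(webrtc::StatsCollection& on_network_thread,
             webrtc::StatsCollection& on_signaling_thread) {
  // On the network thread: detach the reports so their sequence checkers can
  // re-bind on the destination thread.
  webrtc::StatsCollection::Container reports =
      on_network_thread.DetachCollection();
  // ... post `reports` to the signaling thread, where:
  on_signaling_thread.MergeCollection(std::move(reports));
}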

31
api/location.h Normal file
View file

@ -0,0 +1,31 @@
/*
* Copyright 2023 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_LOCATION_H_
#define API_LOCATION_H_
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
// Location provides basic info about where an object was constructed, or was
// significantly brought to life. This is a stripped down version of
// https://source.chromium.org/chromium/chromium/src/+/main:base/location.h
// that only specifies an interface compatible to how base::Location is
// supposed to be used.
// The declaration is overridden inside the Chromium build.
class RTC_EXPORT Location {
public:
static Location Current() { return Location(); }
};
} // namespace webrtc
#endif // API_LOCATION_H_
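
A sketch of how the class is meant to be threaded through call sites; the TraceCall helper below is hypothetical:

#include "api/location.h"

// In the standalone build Location carries no data; in the Chromium build the
// overriding declaration records the caller's file and line.
void TraceCall(webrtc::Location location = webrtc::Location::Current()) {
  (void)location;
}

// Call sites simply rely on the default argument:
//   TraceCall();  // captures the caller's location under Chromium.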

View file

@ -10,6 +10,7 @@
#ifndef API_MAKE_REF_COUNTED_H_
#define API_MAKE_REF_COUNTED_H_
#include <type_traits>
#include <utility>
#include "rtc_base/ref_counted_object.h"

View file

@ -9,6 +9,7 @@
*/
#include "api/media_stream_interface.h"
#include "api/media_types.h"
namespace webrtc {

View file

@ -9,6 +9,7 @@
*/
#include "api/neteq/default_neteq_controller_factory.h"
#include "modules/audio_coding/neteq/decision_logic.h"
namespace webrtc {

View file

@ -76,6 +76,7 @@ struct NetEqLifetimeStatistics {
uint64_t packets_discarded = 0;
// Below stats are not part of the spec.
uint64_t delayed_packet_outage_samples = 0;
uint64_t delayed_packet_outage_events = 0;
// This is sum of relative packet arrival delays of received packets so far.
// Since end-to-end delay of a packet is difficult to measure and is not
// necessarily useful for measuring jitter buffer performance, we report a

View file

@ -13,7 +13,6 @@
#include <cstddef>
#include <cstdint>
#include <functional>
#include <memory>
@ -81,7 +80,7 @@ class NetEqController {
bool dtx_or_cng;
size_t num_samples;
size_t span_samples;
size_t span_samples_no_dtx;
size_t span_samples_wait_time;
size_t num_packets;
};
@ -144,13 +143,6 @@ class NetEqController {
virtual bool SetBaseMinimumDelay(int delay_ms) = 0;
virtual int GetBaseMinimumDelay() const = 0;
// These methods test the `cng_state_` for different conditions.
virtual bool CngRfc3389On() const = 0;
virtual bool CngOff() const = 0;
// Resets the `cng_state_` to kCngOff.
virtual void SetCngOff() = 0;
// Reports back to DecisionLogic whether the decision to do expand remains or
// not. Note that this is necessary, since an expand decision can be changed
// to kNormal in NetEqImpl::GetDecision if there is still enough data in the

View file

@ -25,7 +25,7 @@ namespace webrtc {
template <class T>
class Notifier : public T {
public:
Notifier() { sequence_checker_.Detach(); }
Notifier() = default;
virtual void RegisterObserver(ObserverInterface* observer) {
RTC_DCHECK_RUN_ON(&sequence_checker_);
@ -60,7 +60,8 @@ class Notifier : public T {
std::list<ObserverInterface*> observers_ RTC_GUARDED_BY(sequence_checker_);
private:
RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_{
SequenceChecker::kDetached};
};
} // namespace webrtc

View file

@ -65,6 +65,12 @@ class SamplesStatsCounter {
RTC_DCHECK(!IsEmpty());
return *stats_.GetMax();
}
// Returns sum in O(1) time. This function may not be called if there are
// no samples.
double GetSum() const {
RTC_DCHECK(!IsEmpty());
return *stats_.GetSum();
}
// Returns average in O(1) time. This function may not be called if there are
// no samples.
double GetAverage() const {

View file

@ -61,6 +61,7 @@ TEST(SamplesStatsCounterTest, FullSimpleTest) {
EXPECT_TRUE(!stats.IsEmpty());
EXPECT_DOUBLE_EQ(stats.GetMin(), 1.0);
EXPECT_DOUBLE_EQ(stats.GetMax(), 100.0);
EXPECT_DOUBLE_EQ(stats.GetSum(), 5050.0);
EXPECT_NEAR(stats.GetAverage(), 50.5, 1e-6);
for (int i = 1; i <= 100; i++) {
double p = i / 100.0;

View file

@ -456,9 +456,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// when switching from a static scene to one with motion.
absl::optional<int> screencast_min_bitrate;
// Use new combined audio/video bandwidth estimation?
absl::optional<bool> combined_audio_video_bwe;
#if defined(WEBRTC_FUCHSIA)
// TODO(bugs.webrtc.org/11066): Remove entirely once Fuchsia does not use.
// TODO(bugs.webrtc.org/9891) - Move to crypto_options
@ -1605,9 +1602,16 @@ class RTC_EXPORT PeerConnectionFactoryInterface
// Creates a new local VideoTrack. The same `source` can be used in several
// tracks.
virtual rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
rtc::scoped_refptr<VideoTrackSourceInterface> source,
absl::string_view label) = 0;
ABSL_DEPRECATED("Use version with scoped_refptr")
virtual rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
const std::string& label,
VideoTrackSourceInterface* source) = 0;
VideoTrackSourceInterface* source) {
return CreateVideoTrack(
rtc::scoped_refptr<VideoTrackSourceInterface>(source), label);
}
// Creates a new AudioTrack. At the moment `source` can be null.
virtual rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
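
A usage sketch of the preferred CreateVideoTrack() overload, assuming an existing factory and track source; the label is illustrative:

#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"

rtc::scoped_refptr<webrtc::VideoTrackInterface> MakeTrack(
    webrtc::PeerConnectionFactoryInterface& factory,
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source) {
  // New signature: the source is passed as a scoped_refptr and the label as a
  // string view; the raw-pointer overload now just forwards here and is
  // deprecated.
  return factory.CreateVideoTrack(source, "camera_track");
}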

View file

@ -10,11 +10,13 @@
#include "api/rtc_error.h"
#include "rtc_base/arraysize.h"
#include <iterator>
#include "absl/strings/string_view.h"
namespace {
const char* kRTCErrorTypeNames[] = {
absl::string_view kRTCErrorTypeNames[] = {
"NONE",
"UNSUPPORTED_OPERATION",
"UNSUPPORTED_PARAMETER",
@ -30,11 +32,11 @@ const char* kRTCErrorTypeNames[] = {
};
static_assert(
static_cast<int>(webrtc::RTCErrorType::OPERATION_ERROR_WITH_DATA) ==
(arraysize(kRTCErrorTypeNames) - 1),
(std::size(kRTCErrorTypeNames) - 1),
"kRTCErrorTypeNames must have as many strings as RTCErrorType "
"has values.");
const char* kRTCErrorDetailTypeNames[] = {
absl::string_view kRTCErrorDetailTypeNames[] = {
"NONE",
"DATA_CHANNEL_FAILURE",
"DTLS_FAILURE",
@ -46,7 +48,7 @@ const char* kRTCErrorDetailTypeNames[] = {
};
static_assert(
static_cast<int>(webrtc::RTCErrorDetailType::HARDWARE_ENCODER_ERROR) ==
(arraysize(kRTCErrorDetailTypeNames) - 1),
(std::size(kRTCErrorDetailTypeNames) - 1),
"kRTCErrorDetailTypeNames must have as many strings as "
"RTCErrorDetailType has values.");
@ -63,16 +65,16 @@ const char* RTCError::message() const {
return message_.c_str();
}
void RTCError::set_message(std::string message) {
message_ = std::move(message);
void RTCError::set_message(absl::string_view message) {
message_ = std::string(message);
}
const char* ToString(RTCErrorType error) {
absl::string_view ToString(RTCErrorType error) {
int index = static_cast<int>(error);
return kRTCErrorTypeNames[index];
}
const char* ToString(RTCErrorDetailType error) {
absl::string_view ToString(RTCErrorDetailType error) {
int index = static_cast<int>(error);
return kRTCErrorDetailTypeNames[index];
}

View file

@ -17,6 +17,7 @@
#include <string>
#include <utility> // For std::move.
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
@ -108,8 +109,8 @@ class RTC_EXPORT RTCError {
RTCError() {}
explicit RTCError(RTCErrorType type) : type_(type) {}
RTCError(RTCErrorType type, std::string message)
: type_(type), message_(std::move(message)) {}
RTCError(RTCErrorType type, absl::string_view message)
: type_(type), message_(message) {}
// In many use cases, it is better to use move than copy,
// but copy and assignment are provided for those cases that need it.
@ -133,7 +134,7 @@ class RTC_EXPORT RTCError {
// stable.
const char* message() const;
void set_message(std::string message);
void set_message(absl::string_view message);
RTCErrorDetailType error_detail() const { return error_detail_; }
void set_error_detail(RTCErrorDetailType detail) { error_detail_ = detail; }
@ -158,8 +159,8 @@ class RTC_EXPORT RTCError {
//
// Only intended to be used for logging/diagnostics. The returned char* points
// to literal string that lives for the whole duration of the program.
RTC_EXPORT const char* ToString(RTCErrorType error);
RTC_EXPORT const char* ToString(RTCErrorDetailType error);
RTC_EXPORT absl::string_view ToString(RTCErrorType error);
RTC_EXPORT absl::string_view ToString(RTCErrorDetailType error);
#ifdef WEBRTC_UNIT_TEST
inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982)
@ -307,23 +308,23 @@ class RTCErrorOr {
// the stack.
const T& value() const {
RTC_DCHECK(ok());
return value_;
return *value_;
}
T& value() {
RTC_DCHECK(ok());
return value_;
return *value_;
}
// Moves our current value out of this object and returns it, or DCHECK-fails
// if !this->ok().
T MoveValue() {
RTC_DCHECK(ok());
return std::move(value_);
return std::move(*value_);
}
private:
RTCError error_;
T value_;
absl::optional<T> value_;
};
} // namespace webrtc
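
A small sketch of RTCErrorOr in use (the ParsePort helper is hypothetical); the stored value now lives in an absl::optional:

#include <string>

#include "api/rtc_error.h"

webrtc::RTCErrorOr<int> ParsePort(const std::string& text) {
  if (text.empty()) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
                            "empty port string");
  }
  return std::stoi(text);  // Implicit construction from a value.
}

// auto result = ParsePort("8080");
// if (result.ok()) { int port = result.MoveValue(); }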

View file

@ -14,9 +14,10 @@
#include "test/gtest.h"
namespace webrtc {
namespace {
const int kDefaultMoveOnlyIntValue = 0xbadf00d;
constexpr int kDefaultMoveOnlyIntValue = 0xbadf00d;
// Class that has no copy constructor, ensuring that RTCErrorOr can
struct MoveOnlyInt {
@ -55,46 +56,47 @@ struct MoveOnlyInt2 {
int value = kDefaultMoveOnlyIntValue;
};
} // namespace
namespace webrtc {
// Test that the default constructor creates a "no error" error.
TEST(RTCErrorTest, DefaultConstructor) {
RTCError e;
EXPECT_EQ(RTCErrorType::NONE, e.type());
EXPECT_EQ(std::string(), e.message());
EXPECT_EQ(e.type(), RTCErrorType::NONE);
EXPECT_STREQ(e.message(), "");
EXPECT_TRUE(e.ok());
}
TEST(RTCErrorTest, NormalConstructors) {
RTCError a(RTCErrorType::INVALID_PARAMETER);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, a.type());
EXPECT_EQ(std::string(), a.message());
EXPECT_EQ(a.type(), RTCErrorType::INVALID_PARAMETER);
EXPECT_STREQ(a.message(), "");
// Constructor that takes const char* message.
RTCError b(RTCErrorType::UNSUPPORTED_PARAMETER, "foobar");
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, b.type());
EXPECT_EQ(std::string("foobar"), b.message());
EXPECT_EQ(b.type(), RTCErrorType::UNSUPPORTED_PARAMETER);
EXPECT_STREQ(b.message(), "foobar");
// Constructor that takes absl::string_view message.
RTCError c(RTCErrorType::SYNTAX_ERROR, absl::string_view("baz"));
EXPECT_EQ(c.type(), RTCErrorType::SYNTAX_ERROR);
EXPECT_STREQ(c.message(), "baz");
// Constructor that takes std::string message.
RTCError c(RTCErrorType::INVALID_RANGE, std::string("new"));
EXPECT_EQ(RTCErrorType::INVALID_RANGE, c.type());
EXPECT_EQ(std::string("new"), c.message());
RTCError d(RTCErrorType::INVALID_RANGE, std::string("new"));
EXPECT_EQ(d.type(), RTCErrorType::INVALID_RANGE);
EXPECT_STREQ(d.message(), "new");
}
TEST(RTCErrorTest, MoveConstructor) {
// Static string.
RTCError a(RTCErrorType::INVALID_PARAMETER, "foo");
RTCError b(std::move(a));
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, b.type());
EXPECT_EQ(std::string("foo"), b.message());
EXPECT_EQ(b.type(), RTCErrorType::INVALID_PARAMETER);
EXPECT_STREQ(b.message(), "foo");
// Non-static string.
RTCError c(RTCErrorType::UNSUPPORTED_PARAMETER, std::string("bar"));
RTCError d(std::move(c));
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, d.type());
EXPECT_EQ(std::string("bar"), d.message());
EXPECT_EQ(d.type(), RTCErrorType::UNSUPPORTED_PARAMETER);
EXPECT_STREQ(d.message(), "bar");
}
TEST(RTCErrorTest, MoveAssignment) {
@ -102,24 +104,21 @@ TEST(RTCErrorTest, MoveAssignment) {
RTCError e(RTCErrorType::INVALID_PARAMETER, "foo");
e = RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, "bar");
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, e.type());
EXPECT_EQ(std::string("bar"), e.message());
EXPECT_EQ(e.type(), RTCErrorType::UNSUPPORTED_PARAMETER);
EXPECT_STREQ(e.message(), "bar");
e = RTCError(RTCErrorType::SYNTAX_ERROR, std::string("baz"));
EXPECT_EQ(std::string("baz"), e.message());
e = RTCError(RTCErrorType::SYNTAX_ERROR, absl::string_view("baz"));
EXPECT_STREQ(e.message(), "baz");
e = RTCError(RTCErrorType::SYNTAX_ERROR, std::string("another"));
EXPECT_EQ(std::string("another"), e.message());
e = RTCError(RTCErrorType::SYNTAX_ERROR, "last");
EXPECT_EQ(std::string("last"), e.message());
EXPECT_STREQ(e.message(), "another");
}
// Test that the error returned by RTCError::OK() is a "no error" error.
TEST(RTCErrorTest, OKConstant) {
RTCError ok = RTCError::OK();
EXPECT_EQ(RTCErrorType::NONE, ok.type());
EXPECT_EQ(std::string(), ok.message());
EXPECT_EQ(ok.type(), RTCErrorType::NONE);
EXPECT_STREQ(ok.message(), "");
EXPECT_TRUE(ok.ok());
}
@ -135,33 +134,26 @@ TEST(RTCErrorTest, OkMethod) {
// std::strings.
TEST(RTCErrorTest, SetMessage) {
RTCError e;
// Try all combinations of "is static string"/"is non-static string" calls.
e.set_message("foo");
EXPECT_EQ(std::string("foo"), e.message());
EXPECT_STREQ(e.message(), "foo");
e.set_message("bar");
EXPECT_EQ(std::string("bar"), e.message());
e.set_message(absl::string_view("bar"));
EXPECT_STREQ(e.message(), "bar");
e.set_message(std::string("string"));
EXPECT_EQ(std::string("string"), e.message());
e.set_message(std::string("more"));
EXPECT_EQ(std::string("more"), e.message());
e.set_message("love to test");
EXPECT_EQ(std::string("love to test"), e.message());
EXPECT_STREQ(e.message(), "string");
}
// Test that the default constructor creates an "INTERNAL_ERROR".
TEST(RTCErrorOrTest, DefaultConstructor) {
RTCErrorOr<MoveOnlyInt> e;
EXPECT_EQ(RTCErrorType::INTERNAL_ERROR, e.error().type());
EXPECT_EQ(e.error().type(), RTCErrorType::INTERNAL_ERROR);
}
// Test that an RTCErrorOr can be implicitly constructed from a value.
TEST(RTCErrorOrTest, ImplicitValueConstructor) {
RTCErrorOr<MoveOnlyInt> e = [] { return MoveOnlyInt(100); }();
EXPECT_EQ(100, e.value().value);
EXPECT_EQ(e.value().value, 100);
}
// Test that an RTCErrorOr can be implicitly constructed from an RTCError.
@ -169,20 +161,20 @@ TEST(RTCErrorOrTest, ImplicitErrorConstructor) {
RTCErrorOr<MoveOnlyInt> e = [] {
return RTCError(RTCErrorType::SYNTAX_ERROR);
}();
EXPECT_EQ(RTCErrorType::SYNTAX_ERROR, e.error().type());
EXPECT_EQ(e.error().type(), RTCErrorType::SYNTAX_ERROR);
}
TEST(RTCErrorOrTest, MoveConstructor) {
RTCErrorOr<MoveOnlyInt> a(MoveOnlyInt(5));
RTCErrorOr<MoveOnlyInt> b(std::move(a));
EXPECT_EQ(5, b.value().value);
EXPECT_EQ(b.value().value, 5);
}
TEST(RTCErrorOrTest, MoveAssignment) {
RTCErrorOr<MoveOnlyInt> a(MoveOnlyInt(5));
RTCErrorOr<MoveOnlyInt> b(MoveOnlyInt(10));
a = std::move(b);
EXPECT_EQ(10, a.value().value);
EXPECT_EQ(a.value().value, 10);
}
TEST(RTCErrorOrTest, ConversionConstructor) {
@ -194,7 +186,7 @@ TEST(RTCErrorOrTest, ConversionAssignment) {
RTCErrorOr<MoveOnlyInt> a(MoveOnlyInt(5));
RTCErrorOr<MoveOnlyInt2> b(MoveOnlyInt2(10));
b = std::move(a);
EXPECT_EQ(5, b.value().value);
EXPECT_EQ(b.value().value, 5);
}
TEST(RTCErrorOrTest, OkMethod) {
@ -207,14 +199,14 @@ TEST(RTCErrorOrTest, OkMethod) {
TEST(RTCErrorOrTest, MoveError) {
RTCErrorOr<int> e({RTCErrorType::SYNTAX_ERROR, "message"});
RTCError err = e.MoveError();
EXPECT_EQ(RTCErrorType::SYNTAX_ERROR, err.type());
EXPECT_EQ(std::string("message"), err.message());
EXPECT_EQ(err.type(), RTCErrorType::SYNTAX_ERROR);
EXPECT_STREQ(err.message(), "message");
}
TEST(RTCErrorOrTest, MoveValue) {
RTCErrorOr<MoveOnlyInt> e(MoveOnlyInt(88));
MoveOnlyInt value = e.MoveValue();
EXPECT_EQ(88, value.value);
EXPECT_EQ(value.value, 88);
}
// Death tests.
@ -239,4 +231,5 @@ TEST(RTCErrorOrDeathTest, MoveErrorValue) {
#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
} // namespace
} // namespace webrtc

View file

@ -44,7 +44,6 @@ RTPHeader::RTPHeader()
arrOfCSRCs(),
paddingLength(0),
headerLength(0),
payload_type_frequency(0),
extension() {}
RTPHeader::RTPHeader(const RTPHeader& other) = default;

View file

@ -162,7 +162,6 @@ struct RTC_EXPORT RTPHeader {
uint32_t arrOfCSRCs[kRtpCsrcSize];
size_t paddingLength;
size_t headerLength;
int payload_type_frequency;
RTPHeaderExtension extension;
};

View file

@ -44,6 +44,9 @@ RtcpFeedback::RtcpFeedback(RtcpFeedbackType type,
RtcpFeedback::RtcpFeedback(const RtcpFeedback& rhs) = default;
RtcpFeedback::~RtcpFeedback() = default;
RtpCodec::RtpCodec() = default;
RtpCodec::RtpCodec(const RtpCodec&) = default;
RtpCodec::~RtpCodec() = default;
RtpCodecCapability::RtpCodecCapability() = default;
RtpCodecCapability::~RtpCodecCapability() = default;

View file

@ -122,12 +122,10 @@ struct RTC_EXPORT RtcpFeedback {
bool operator!=(const RtcpFeedback& o) const { return !(*this == o); }
};
// RtpCodecCapability is to RtpCodecParameters as RtpCapabilities is to
// RtpParameters. This represents the static capabilities of an endpoint's
// implementation of a codec.
struct RTC_EXPORT RtpCodecCapability {
RtpCodecCapability();
~RtpCodecCapability();
struct RTC_EXPORT RtpCodec {
RtpCodec();
RtpCodec(const RtpCodec&);
virtual ~RtpCodec();
// Build MIME "type/subtype" string from `name` and `kind`.
std::string mime_type() const { return MediaTypeToString(kind) + "/" + name; }
@ -138,25 +136,18 @@ struct RTC_EXPORT RtpCodecCapability {
// The media type of this codec. Equivalent to MIME top-level type.
cricket::MediaType kind = cricket::MEDIA_TYPE_AUDIO;
// Clock rate in Hertz. If unset, the codec is applicable to any clock rate.
// If unset, the implementation default is used.
absl::optional<int> clock_rate;
// Default payload type for this codec. Mainly needed for codecs that use
// that have statically assigned payload types.
absl::optional<int> preferred_payload_type;
// Maximum packetization time supported by an RtpReceiver for this codec.
// TODO(deadbeef): Not implemented.
absl::optional<int> max_ptime;
// Preferred packetization time for an RtpReceiver or RtpSender of this codec.
// TODO(deadbeef): Not implemented.
absl::optional<int> ptime;
// The number of audio channels supported. Unused for video codecs.
// The number of audio channels used. Unset for video codecs. If unset for
// audio, the implementation default is used.
// TODO(deadbeef): The "implementation default" part isn't fully implemented.
// Only defaults to 1, even though some codecs (such as opus) should really
// default to 2.
absl::optional<int> num_channels;
// Feedback mechanisms supported for this codec.
// Feedback mechanisms to be used for this codec.
// TODO(deadbeef): Not implemented with PeerConnection senders/receivers.
std::vector<RtcpFeedback> rtcp_feedback;
// Codec-specific parameters that must be signaled to the remote party.
@ -168,39 +159,31 @@ struct RTC_EXPORT RtpCodecCapability {
// Boolean values are represented by the string "1".
std::map<std::string, std::string> parameters;
// Codec-specific parameters that may optionally be signaled to the remote
// party.
// TODO(deadbeef): Not implemented.
std::map<std::string, std::string> options;
bool operator==(const RtpCodec& o) const {
return name == o.name && kind == o.kind && clock_rate == o.clock_rate &&
num_channels == o.num_channels && rtcp_feedback == o.rtcp_feedback &&
parameters == o.parameters;
}
bool operator!=(const RtpCodec& o) const { return !(*this == o); }
};
// Maximum number of temporal layer extensions supported by this codec.
// For example, a value of 1 indicates that 2 total layers are supported.
// TODO(deadbeef): Not implemented.
int max_temporal_layer_extensions = 0;
// RtpCodecCapability is to RtpCodecParameters as RtpCapabilities is to
// RtpParameters. This represents the static capabilities of an endpoint's
// implementation of a codec.
struct RTC_EXPORT RtpCodecCapability : public RtpCodec {
RtpCodecCapability();
virtual ~RtpCodecCapability();
// Maximum number of spatial layer extensions supported by this codec.
// For example, a value of 1 indicates that 2 total layers are supported.
// TODO(deadbeef): Not implemented.
int max_spatial_layer_extensions = 0;
// Default payload type for this codec. Mainly needed for codecs that have
// statically assigned payload types.
absl::optional<int> preferred_payload_type;
// Whether the implementation can send/receive SVC layers with distinct SSRCs.
// Always false for audio codecs. True for video codecs that support scalable
// video coding with MRST.
// TODO(deadbeef): Not implemented.
bool svc_multi_stream_support = false;
// https://w3c.github.io/webrtc-svc/#dom-rtcrtpcodeccapability-scalabilitymodes
// List of scalability modes supported by the video codec.
absl::InlinedVector<ScalabilityMode, kScalabilityModeCount> scalability_modes;
bool operator==(const RtpCodecCapability& o) const {
return name == o.name && kind == o.kind && clock_rate == o.clock_rate &&
return RtpCodec::operator==(o) &&
preferred_payload_type == o.preferred_payload_type &&
max_ptime == o.max_ptime && ptime == o.ptime &&
num_channels == o.num_channels && rtcp_feedback == o.rtcp_feedback &&
parameters == o.parameters && options == o.options &&
max_temporal_layer_extensions == o.max_temporal_layer_extensions &&
max_spatial_layer_extensions == o.max_spatial_layer_extensions &&
svc_multi_stream_support == o.svc_multi_stream_support &&
scalability_modes == o.scalability_modes;
}
bool operator!=(const RtpCodecCapability& o) const { return !(*this == o); }
@ -229,8 +212,8 @@ struct RTC_EXPORT RtpHeaderExtensionCapability {
bool preferred_encrypt = false;
// The direction of the extension. The kStopped value is only used with
// RtpTransceiverInterface::HeaderExtensionsToOffer() and
// SetOfferedRtpHeaderExtensions().
// RtpTransceiverInterface::SetHeaderExtensionsToNegotiate() and
// SetHeaderExtensionsToNegotiate().
RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv;
// Constructors for convenience.
@ -554,63 +537,18 @@ struct RTC_EXPORT RtpEncodingParameters {
}
};
struct RTC_EXPORT RtpCodecParameters {
struct RTC_EXPORT RtpCodecParameters : public RtpCodec {
RtpCodecParameters();
RtpCodecParameters(const RtpCodecParameters&);
~RtpCodecParameters();
// Build MIME "type/subtype" string from `name` and `kind`.
std::string mime_type() const { return MediaTypeToString(kind) + "/" + name; }
// Used to identify the codec. Equivalent to MIME subtype.
std::string name;
// The media type of this codec. Equivalent to MIME top-level type.
cricket::MediaType kind = cricket::MEDIA_TYPE_AUDIO;
virtual ~RtpCodecParameters();
// Payload type used to identify this codec in RTP packets.
// This must always be present, and must be unique across all codecs using
// the same transport.
int payload_type = 0;
// If unset, the implementation default is used.
absl::optional<int> clock_rate;
// The number of audio channels used. Unset for video codecs. If unset for
// audio, the implementation default is used.
// TODO(deadbeef): The "implementation default" part isn't fully implemented.
// Only defaults to 1, even though some codecs (such as opus) should really
// default to 2.
absl::optional<int> num_channels;
// The maximum packetization time to be used by an RtpSender.
// If `ptime` is also set, this will be ignored.
// TODO(deadbeef): Not implemented.
absl::optional<int> max_ptime;
// The packetization time to be used by an RtpSender.
// If unset, will use any time up to max_ptime.
// TODO(deadbeef): Not implemented.
absl::optional<int> ptime;
// Feedback mechanisms to be used for this codec.
// TODO(deadbeef): Not implemented with PeerConnection senders/receivers.
std::vector<RtcpFeedback> rtcp_feedback;
// Codec-specific parameters that must be signaled to the remote party.
//
// Corresponds to "a=fmtp" parameters in SDP.
//
// Contrary to ORTC, these parameters are named using all lowercase strings.
// This helps make the mapping to SDP simpler, if an application is using SDP.
// Boolean values are represented by the string "1".
std::map<std::string, std::string> parameters;
bool operator==(const RtpCodecParameters& o) const {
return name == o.name && kind == o.kind && payload_type == o.payload_type &&
clock_rate == o.clock_rate && num_channels == o.num_channels &&
max_ptime == o.max_ptime && ptime == o.ptime &&
rtcp_feedback == o.rtcp_feedback && parameters == o.parameters;
return RtpCodec::operator==(o) && payload_type == o.payload_type;
}
bool operator!=(const RtpCodecParameters& o) const { return !(*this == o); }
};
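
A sketch showing that the shared fields now come from the RtpCodec base while payload_type stays on RtpCodecParameters; the values below are illustrative:

#include "api/rtp_parameters.h"

webrtc::RtpCodecParameters MakeOpusParameters() {
  webrtc::RtpCodecParameters codec;
  codec.name = "opus";                     // RtpCodec field
  codec.kind = cricket::MEDIA_TYPE_AUDIO;  // RtpCodec field
  codec.clock_rate = 48000;                // RtpCodec field
  codec.num_channels = 2;                  // RtpCodec field
  codec.payload_type = 111;                // RtpCodecParameters-only field
  return codec;                            // codec.mime_type() == "audio/opus"
}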

View file

@ -41,7 +41,6 @@ struct RTC_EXPORT RtpTransceiverInit final {
// The added RtpTransceiver will be added to these streams.
std::vector<std::string> stream_ids;
// TODO(bugs.webrtc.org/7600): Not implemented.
std::vector<RtpEncodingParameters> send_encodings;
};
@ -149,25 +148,24 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface {
rtc::ArrayView<RtpCodecCapability> codecs) = 0;
virtual std::vector<RtpCodecCapability> codec_preferences() const = 0;
// Readonly attribute which contains the set of header extensions that was set
// with SetOfferedRtpHeaderExtensions, or a default set if it has not been
// Returns the set of header extensions that was set
// with SetHeaderExtensionsToNegotiate, or a default set if it has not been
// called.
// https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
virtual std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer()
const = 0;
virtual std::vector<RtpHeaderExtensionCapability>
GetHeaderExtensionsToNegotiate() const = 0;
// Readonly attribute which is either empty if negotiation has not yet
// Returns either the empty set if negotiation has not yet
// happened, or a vector of the negotiated header extensions.
// https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
virtual std::vector<RtpHeaderExtensionCapability> HeaderExtensionsNegotiated()
const = 0;
virtual std::vector<RtpHeaderExtensionCapability>
GetNegotiatedHeaderExtensions() const = 0;
// The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation
// The SetHeaderExtensionsToNegotiate method modifies the next SDP negotiation
// so that it negotiates use of header extensions which are not kStopped.
// https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
virtual webrtc::RTCError SetOfferedRtpHeaderExtensions(
rtc::ArrayView<const RtpHeaderExtensionCapability>
header_extensions_to_offer) = 0;
virtual webrtc::RTCError SetHeaderExtensionsToNegotiate(
rtc::ArrayView<const RtpHeaderExtensionCapability> header_extensions) = 0;
protected:
~RtpTransceiverInterface() override = default;

View file

@ -44,6 +44,11 @@ class RTC_LOCKABLE SequenceChecker
using Impl = webrtc_sequence_checker_internal::SequenceCheckerDoNothing;
#endif
public:
enum InitialState : bool { kDetached = false, kAttached = true };
explicit SequenceChecker(InitialState initial_state = kAttached)
: Impl(initial_state) {}
// Returns true if sequence checker is attached to the current sequence.
bool IsCurrent() const { return Impl::IsCurrent(); }
// Detaches checker from sequence to which it is attached. Next attempt

View file

@ -98,6 +98,19 @@ TEST(SequenceCheckerTest, MethodNotAllowedOnDifferentThreadInDebug) {
[&] { EXPECT_EQ(sequence_checker.IsCurrent(), !RTC_DCHECK_IS_ON); });
}
#if RTC_DCHECK_IS_ON
TEST(SequenceCheckerTest, OnlyCurrentOnOneThread) {
SequenceChecker sequence_checker(SequenceChecker::kDetached);
RunOnDifferentThread([&] {
EXPECT_TRUE(sequence_checker.IsCurrent());
// Spawn a new thread from within the first one to guarantee that we have
// two concurrently active threads (and that there's no chance of the
// thread ref being reused).
RunOnDifferentThread([&] { EXPECT_FALSE(sequence_checker.IsCurrent()); });
});
}
#endif
TEST(SequenceCheckerTest, MethodNotAllowedOnDifferentTaskQueueInDebug) {
SequenceChecker sequence_checker;
TaskQueueForTest queue;
@ -122,8 +135,7 @@ TEST(SequenceCheckerTest, DetachFromTaskQueueInDebug) {
TEST(SequenceCheckerTest, ExpectationToString) {
TaskQueueForTest queue1;
SequenceChecker sequence_checker;
sequence_checker.Detach();
SequenceChecker sequence_checker(SequenceChecker::kDetached);
rtc::Event blocker;
queue1.PostTask([&blocker, &sequence_checker]() {
@ -149,6 +161,24 @@ TEST(SequenceCheckerTest, ExpectationToString) {
#endif
}
TEST(SequenceCheckerTest, InitiallyDetached) {
TaskQueueForTest queue1;
SequenceChecker sequence_checker(SequenceChecker::kDetached);
rtc::Event blocker;
queue1.PostTask([&blocker, &sequence_checker]() {
EXPECT_TRUE(sequence_checker.IsCurrent());
blocker.Set();
});
blocker.Wait(rtc::Event::kForever);
#if RTC_DCHECK_IS_ON
EXPECT_FALSE(sequence_checker.IsCurrent());
#endif
}
class TestAnnotations {
public:
TestAnnotations() : test_var_(false) {}

View file

@ -163,7 +163,9 @@ class RTC_EXPORT RTCStats {
return std::make_unique<this_class>(*this); \
} \
\
const char* this_class::type() const { return this_class::kType; } \
const char* this_class::type() const { \
return this_class::kType; \
} \
\
std::vector<const webrtc::RTCStatsMemberInterface*> \
this_class::MembersOfThisObjectAndAncestors( \
@ -194,7 +196,9 @@ class RTC_EXPORT RTCStats {
return std::make_unique<this_class>(*this); \
} \
\
const char* this_class::type() const { return this_class::kType; } \
const char* this_class::type() const { \
return this_class::kType; \
} \
\
std::vector<const webrtc::RTCStatsMemberInterface*> \
this_class::MembersOfThisObjectAndAncestors( \
@ -202,34 +206,6 @@ class RTC_EXPORT RTCStats {
return parent_class::MembersOfThisObjectAndAncestors(0); \
}
// Non-standard stats members can be exposed to the JavaScript API in Chrome
// e.g. through origin trials. The group ID can be used by the blink layer to
// determine if a stats member should be exposed or not. Multiple non-standard
// stats members can share the same group ID so that they are exposed together.
enum class NonStandardGroupId {
// Group ID used for testing purposes only.
kGroupIdForTesting,
// I2E:
// https://groups.google.com/a/chromium.org/forum/#!topic/blink-dev/hE2B1iItPDk
kRtcAudioJitterBufferMaxPackets,
// I2E:
// https://groups.google.com/a/chromium.org/forum/#!topic/blink-dev/YbhMyqLXXXo
kRtcStatsRelativePacketArrivalDelay,
};
// Certain stat members should only be exposed to the JavaScript API in
// certain circumstances as to avoid passive fingerprinting.
enum class StatExposureCriteria : uint8_t {
// The stat should always be exposed. This is the default.
kAlways,
// The stat exposes hardware capabilities and thus should have limited exposure
// to JavaScript. The requirements for exposure are written in the spec at
// https://w3c.github.io/webrtc-stats/#limiting-exposure-of-hardware-capabilities.
kHardwareCapability,
// The stat is non-standard so user agents should filter these.
kNonStandard,
};
// Interface for `RTCStats` members, which have a name and a value of a type
// defined in a subclass. Only the types listed in `Type` are supported, these
// are implemented by `RTCStatsMember<T>`. The value of a member may be
@ -265,20 +241,6 @@ class RTCStatsMemberInterface {
virtual bool is_sequence() const = 0;
virtual bool is_string() const = 0;
virtual bool is_defined() const = 0;
// Is this part of the stats spec? Used so that chromium can easily filter
// out anything unstandardized.
bool is_standardized() const {
return exposure_criteria() != StatExposureCriteria::kNonStandard;
}
// Non-standard stats members can have group IDs in order to be exposed in
// JavaScript through experiments. Standardized stats have no group IDs.
virtual std::vector<NonStandardGroupId> group_ids() const { return {}; }
// The conditions for exposing the statistic to JavaScript. Stats with
// criteria that is not kAlways has some restriction and should be filtered
// in accordance to the spec.
virtual StatExposureCriteria exposure_criteria() const {
return StatExposureCriteria::kAlways;
}
// Type and value comparator. The names are not compared. These operators are
// exposed for testing.
bool operator==(const RTCStatsMemberInterface& other) const {
@ -349,7 +311,12 @@ class RTCStatsMember : public RTCStatsMemberInterface {
return value_.value();
}
// Value getters.
// Getter methods that look the same as absl::optional<T>. Please prefer these
// in order to unblock replacing RTCStatsMember<T> with absl::optional<T> in
// the future (https://crbug.com/webrtc/15164).
bool has_value() const { return value_.has_value(); }
const T& value() const { return value_.value(); }
T& value() { return value_.value(); }
T& operator*() {
RTC_DCHECK(value_);
return *value_;
@ -358,8 +325,6 @@ class RTCStatsMember : public RTCStatsMemberInterface {
RTC_DCHECK(value_);
return *value_;
}
// Value getters, arrow operator.
T* operator->() {
RTC_DCHECK(value_);
return &(*value_);
@ -371,9 +336,7 @@ class RTCStatsMember : public RTCStatsMemberInterface {
protected:
bool IsEqual(const RTCStatsMemberInterface& other) const override {
if (type() != other.type() ||
is_standardized() != other.is_standardized() ||
exposure_criteria() != other.exposure_criteria())
if (type() != other.type())
return false;
const RTCStatsMember<T>& other_t =
static_cast<const RTCStatsMember<T>&>(other);
@ -422,154 +385,6 @@ WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector<std::string>);
WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringUint64);
WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringDouble);
// For stats with restricted exposure.
template <typename T, StatExposureCriteria E>
class RTCRestrictedStatsMember : public RTCStatsMember<T> {
public:
explicit RTCRestrictedStatsMember(const char* name)
: RTCStatsMember<T>(name) {}
RTCRestrictedStatsMember(const char* name, const T& value)
: RTCStatsMember<T>(name, value) {}
RTCRestrictedStatsMember(const char* name, T&& value)
: RTCStatsMember<T>(name, std::move(value)) {}
RTCRestrictedStatsMember(const RTCRestrictedStatsMember<T, E>& other)
: RTCStatsMember<T>(other) {}
RTCRestrictedStatsMember(RTCRestrictedStatsMember<T, E>&& other)
: RTCStatsMember<T>(std::move(other)) {}
StatExposureCriteria exposure_criteria() const override { return E; }
T& operator=(const T& value) { return RTCStatsMember<T>::operator=(value); }
T& operator=(const T&& value) {
return RTCStatsMember<T>::operator=(std::move(value));
}
private:
static_assert(E != StatExposureCriteria::kAlways,
"kAlways is the default exposure criteria. Use "
"RTCStatMember<T> instead.");
};
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<bool, StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<int32_t,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<uint32_t,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<int64_t,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<uint64_t,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<double, StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::string,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::vector<bool>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::vector<int32_t>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::vector<uint32_t>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::vector<int64_t>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::vector<uint64_t>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::vector<double>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::vector<std::string>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::map<std::string, uint64_t>,
StatExposureCriteria::kHardwareCapability>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCRestrictedStatsMember<std::map<std::string, double>,
StatExposureCriteria::kHardwareCapability>;
// Using inheritance just so that it's obvious from the member's declaration
// whether it's standardized or not.
template <typename T>
class RTCNonStandardStatsMember
: public RTCRestrictedStatsMember<T, StatExposureCriteria::kNonStandard> {
public:
explicit RTCNonStandardStatsMember(const char* name)
: RTCRestrictedStatsBase(name) {}
RTCNonStandardStatsMember(const char* name,
std::initializer_list<NonStandardGroupId> group_ids)
: RTCRestrictedStatsBase(name), group_ids_(group_ids) {}
RTCNonStandardStatsMember(const char* name, const T& value)
: RTCRestrictedStatsBase(name, value) {}
RTCNonStandardStatsMember(const char* name, T&& value)
: RTCRestrictedStatsBase(name, std::move(value)) {}
RTCNonStandardStatsMember(const RTCNonStandardStatsMember<T>& other)
: RTCRestrictedStatsBase(other), group_ids_(other.group_ids_) {}
RTCNonStandardStatsMember(RTCNonStandardStatsMember<T>&& other)
: RTCRestrictedStatsBase(std::move(other)),
group_ids_(std::move(other.group_ids_)) {}
std::vector<NonStandardGroupId> group_ids() const override {
return group_ids_;
}
T& operator=(const T& value) {
return RTCRestrictedStatsMember<
T, StatExposureCriteria::kNonStandard>::operator=(value);
}
T& operator=(const T&& value) {
return RTCRestrictedStatsMember<
T, StatExposureCriteria::kNonStandard>::operator=(std::move(value));
}
private:
using RTCRestrictedStatsBase =
RTCRestrictedStatsMember<T, StatExposureCriteria::kNonStandard>;
std::vector<NonStandardGroupId> group_ids_;
};
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<bool>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<int32_t>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<uint32_t>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<int64_t>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<uint64_t>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<double>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::string>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<bool>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<int32_t>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<uint32_t>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<int64_t>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<uint64_t>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<double>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::vector<std::string>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::map<std::string, uint64_t>>;
extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT)
RTCNonStandardStatsMember<std::map<std::string, double>>;
} // namespace webrtc
#endif // API_STATS_RTC_STATS_H_
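
A sketch of the optional-style accessors, assuming a stats object obtained from a stats report; the field used is the inbound-RTP bytes_received member defined further below:

#include <cstdint>

#include "api/stats/rtcstats_objects.h"

uint64_t BytesReceivedOrZero(const webrtc::RTCInboundRtpStreamStats& inbound) {
  // Prefer has_value()/value() over is_defined()/operator*; these mirror
  // absl::optional ahead of the planned migration (crbug.com/webrtc/15164).
  return inbound.bytes_received.has_value() ? inbound.bytes_received.value()
                                            : 0;
}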

View file

@ -60,24 +60,13 @@ class RTC_EXPORT RTCStatsReport final
StatsMap::const_iterator it_;
};
// TODO(bugs.webrtc.org/13756): deprecate this in favor of Timestamp.
// TODO(hbos): Remove "= 0" once downstream has been updated to call with a
// parameter.
ABSL_DEPRECATED("Call Create with Timestamp instead")
static rtc::scoped_refptr<RTCStatsReport> Create(int64_t timestamp_us = 0);
static rtc::scoped_refptr<RTCStatsReport> Create(Timestamp timestamp);
// TODO(bugs.webrtc.org/13756): deprecate this in favor of Timestamp.
ABSL_DEPRECATED("Use constructor with Timestamp instead")
explicit RTCStatsReport(int64_t timestamp_us);
explicit RTCStatsReport(Timestamp timestamp);
RTCStatsReport(const RTCStatsReport& other) = delete;
rtc::scoped_refptr<RTCStatsReport> Copy() const;
// TODO(bugs.webrtc.org/13756): deprecate this in favor of Timestamp.
ABSL_DEPRECATED("Call timestamp() instead")
int64_t timestamp_us() const { return timestamp_.us_or(-1); }
Timestamp timestamp() const { return timestamp_; }
void AddStats(std::unique_ptr<const RTCStats> stats);
// On success, returns a non-owning pointer to `stats`. If the stats ID is not

View file

@ -23,98 +23,6 @@
namespace webrtc {
// https://w3c.github.io/webrtc-pc/#idl-def-rtcdatachannelstate
struct RTCDataChannelState {
static const char* const kConnecting;
static const char* const kOpen;
static const char* const kClosing;
static const char* const kClosed;
};
// https://w3c.github.io/webrtc-stats/#dom-rtcstatsicecandidatepairstate
struct RTCStatsIceCandidatePairState {
static const char* const kFrozen;
static const char* const kWaiting;
static const char* const kInProgress;
static const char* const kFailed;
static const char* const kSucceeded;
};
// https://w3c.github.io/webrtc-pc/#rtcicecandidatetype-enum
struct RTCIceCandidateType {
static const char* const kHost;
static const char* const kSrflx;
static const char* const kPrflx;
static const char* const kRelay;
};
// https://w3c.github.io/webrtc-pc/#idl-def-rtcdtlstransportstate
struct RTCDtlsTransportState {
static const char* const kNew;
static const char* const kConnecting;
static const char* const kConnected;
static const char* const kClosed;
static const char* const kFailed;
};
// `RTCMediaStreamTrackStats::kind` is not an enum in the spec but the only
// valid values are "audio" and "video".
// https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-kind
struct RTCMediaStreamTrackKind {
static const char* const kAudio;
static const char* const kVideo;
};
// https://w3c.github.io/webrtc-stats/#dom-rtcnetworktype
struct RTCNetworkType {
static const char* const kBluetooth;
static const char* const kCellular;
static const char* const kEthernet;
static const char* const kWifi;
static const char* const kWimax;
static const char* const kVpn;
static const char* const kUnknown;
};
// https://w3c.github.io/webrtc-stats/#dom-rtcqualitylimitationreason
struct RTCQualityLimitationReason {
static const char* const kNone;
static const char* const kCpu;
static const char* const kBandwidth;
static const char* const kOther;
};
// https://webrtc.org/experiments/rtp-hdrext/video-content-type/
struct RTCContentType {
static const char* const kUnspecified;
static const char* const kScreenshare;
};
// https://w3c.github.io/webrtc-stats/#dom-rtcdtlsrole
struct RTCDtlsRole {
static const char* const kUnknown;
static const char* const kClient;
static const char* const kServer;
};
// https://www.w3.org/TR/webrtc/#rtcicerole
struct RTCIceRole {
static const char* const kUnknown;
static const char* const kControlled;
static const char* const kControlling;
};
// https://www.w3.org/TR/webrtc/#dom-rtcicetransportstate
struct RTCIceTransportState {
static const char* const kNew;
static const char* const kChecking;
static const char* const kConnected;
static const char* const kCompleted;
static const char* const kDisconnected;
static const char* const kFailed;
static const char* const kClosed;
};
// https://w3c.github.io/webrtc-stats/#certificatestats-dict*
class RTC_EXPORT RTCCertificateStats final : public RTCStats {
public:
@ -130,20 +38,6 @@ class RTC_EXPORT RTCCertificateStats final : public RTCStats {
RTCStatsMember<std::string> issuer_certificate_id;
};
// Non standard extension mapping to rtc::AdapterType
struct RTCNetworkAdapterType {
static constexpr char kUnknown[] = "unknown";
static constexpr char kEthernet[] = "ethernet";
static constexpr char kWifi[] = "wifi";
static constexpr char kCellular[] = "cellular";
static constexpr char kLoopback[] = "loopback";
static constexpr char kAny[] = "any";
static constexpr char kCellular2g[] = "cellular2g";
static constexpr char kCellular3g[] = "cellular3g";
static constexpr char kCellular4g[] = "cellular4g";
static constexpr char kCellular5g[] = "cellular5g";
};
// https://w3c.github.io/webrtc-stats/#codec-dict*
class RTC_EXPORT RTCCodecStats final : public RTCStats {
public:
@ -173,7 +67,6 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats {
RTCStatsMember<std::string> label;
RTCStatsMember<std::string> protocol;
RTCStatsMember<int32_t> data_channel_identifier;
// Enum type RTCDataChannelState.
RTCStatsMember<std::string> state;
RTCStatsMember<uint32_t> messages_sent;
RTCStatsMember<uint64_t> bytes_sent;
@ -193,7 +86,6 @@ class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats {
RTCStatsMember<std::string> transport_id;
RTCStatsMember<std::string> local_candidate_id;
RTCStatsMember<std::string> remote_candidate_id;
// Enum type RTCStatsIceCandidatePairState.
RTCStatsMember<std::string> state;
// Obsolete: priority
RTCStatsMember<uint64_t> priority;
@ -239,7 +131,6 @@ class RTC_EXPORT RTCIceCandidateStats : public RTCStats {
RTCStatsMember<int32_t> port;
RTCStatsMember<std::string> protocol;
RTCStatsMember<std::string> relay_protocol;
// Enum type RTCIceCandidateType.
RTCStatsMember<std::string> candidate_type;
RTCStatsMember<int32_t> priority;
RTCStatsMember<std::string> url;
@ -247,11 +138,12 @@ class RTC_EXPORT RTCIceCandidateStats : public RTCStats {
RTCStatsMember<std::string> related_address;
RTCStatsMember<int32_t> related_port;
RTCStatsMember<std::string> username_fragment;
// Enum type RTCIceTcpCandidateType.
RTCStatsMember<std::string> tcp_type;
RTCNonStandardStatsMember<bool> vpn;
RTCNonStandardStatsMember<std::string> network_adapter_type;
// The following metrics are NOT exposed to JavaScript. We should consider
// standardizing or removing them.
RTCStatsMember<bool> vpn;
RTCStatsMember<std::string> network_adapter_type;
protected:
RTCIceCandidateStats(std::string id, Timestamp timestamp, bool is_remote);
@ -279,68 +171,6 @@ class RTC_EXPORT RTCRemoteIceCandidateStats final
const char* type() const override;
};
// TODO(https://crbug.com/webrtc/14419): Delete this class, it's deprecated.
class RTC_EXPORT DEPRECATED_RTCMediaStreamStats final : public RTCStats {
public:
WEBRTC_RTCSTATS_DECL();
DEPRECATED_RTCMediaStreamStats(std::string id, Timestamp timestamp);
DEPRECATED_RTCMediaStreamStats(const DEPRECATED_RTCMediaStreamStats& other);
~DEPRECATED_RTCMediaStreamStats() override;
RTCStatsMember<std::string> stream_identifier;
RTCStatsMember<std::vector<std::string>> track_ids;
};
using RTCMediaStreamStats [[deprecated("bugs.webrtc.org/14419")]] =
DEPRECATED_RTCMediaStreamStats;
// TODO(https://crbug.com/webrtc/14175): Delete this class, it's deprecated.
class RTC_EXPORT DEPRECATED_RTCMediaStreamTrackStats final : public RTCStats {
public:
WEBRTC_RTCSTATS_DECL();
DEPRECATED_RTCMediaStreamTrackStats(std::string id,
Timestamp timestamp,
const char* kind);
DEPRECATED_RTCMediaStreamTrackStats(
const DEPRECATED_RTCMediaStreamTrackStats& other);
~DEPRECATED_RTCMediaStreamTrackStats() override;
RTCStatsMember<std::string> track_identifier;
RTCStatsMember<std::string> media_source_id;
RTCStatsMember<bool> remote_source;
RTCStatsMember<bool> ended;
// TODO(https://crbug.com/webrtc/14173): Remove this obsolete metric.
RTCStatsMember<bool> detached;
// Enum type RTCMediaStreamTrackKind.
RTCStatsMember<std::string> kind;
RTCStatsMember<double> jitter_buffer_delay;
RTCStatsMember<uint64_t> jitter_buffer_emitted_count;
// Video-only members
RTCStatsMember<uint32_t> frame_width;
RTCStatsMember<uint32_t> frame_height;
RTCStatsMember<uint32_t> frames_sent;
RTCStatsMember<uint32_t> huge_frames_sent;
RTCStatsMember<uint32_t> frames_received;
RTCStatsMember<uint32_t> frames_decoded;
RTCStatsMember<uint32_t> frames_dropped;
// Audio-only members
RTCStatsMember<double> audio_level; // Receive-only
RTCStatsMember<double> total_audio_energy; // Receive-only
RTCStatsMember<double> echo_return_loss;
RTCStatsMember<double> echo_return_loss_enhancement;
RTCStatsMember<double> echo_likelihood; // RingRTC change to enable echo detection
RTCStatsMember<uint64_t> total_samples_received;
RTCStatsMember<double> total_samples_duration; // Receive-only
RTCStatsMember<uint64_t> concealed_samples;
RTCStatsMember<uint64_t> silent_concealed_samples;
RTCStatsMember<uint64_t> concealment_events;
RTCStatsMember<uint64_t> inserted_samples_for_deceleration;
RTCStatsMember<uint64_t> removed_samples_for_acceleration;
};
using RTCMediaStreamTrackStats [[deprecated("bugs.webrtc.org/14175")]] =
DEPRECATED_RTCMediaStreamTrackStats;
// https://w3c.github.io/webrtc-stats/#pcstats-dict*
class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats {
public:
@ -355,29 +185,24 @@ class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats {
};
// https://w3c.github.io/webrtc-stats/#streamstats-dict*
class RTC_EXPORT RTCRTPStreamStats : public RTCStats {
class RTC_EXPORT RTCRtpStreamStats : public RTCStats {
public:
WEBRTC_RTCSTATS_DECL();
RTCRTPStreamStats(const RTCRTPStreamStats& other);
~RTCRTPStreamStats() override;
RTCRtpStreamStats(const RTCRtpStreamStats& other);
~RTCRtpStreamStats() override;
RTCStatsMember<uint32_t> ssrc;
RTCStatsMember<std::string> kind;
// Obsolete: track_id
RTCStatsMember<std::string> track_id;
RTCStatsMember<std::string> transport_id;
RTCStatsMember<std::string> codec_id;
// Obsolete
RTCStatsMember<std::string> media_type; // renamed to kind.
protected:
RTCRTPStreamStats(std::string id, Timestamp timestamp);
RTCRtpStreamStats(std::string id, Timestamp timestamp);
};
// https://www.w3.org/TR/webrtc-stats/#receivedrtpstats-dict*
class RTC_EXPORT RTCReceivedRtpStreamStats : public RTCRTPStreamStats {
class RTC_EXPORT RTCReceivedRtpStreamStats : public RTCRtpStreamStats {
public:
WEBRTC_RTCSTATS_DECL();
@ -392,14 +217,14 @@ class RTC_EXPORT RTCReceivedRtpStreamStats : public RTCRTPStreamStats {
};
// https://www.w3.org/TR/webrtc-stats/#sentrtpstats-dict*
class RTC_EXPORT RTCSentRtpStreamStats : public RTCRTPStreamStats {
class RTC_EXPORT RTCSentRtpStreamStats : public RTCRtpStreamStats {
public:
WEBRTC_RTCSTATS_DECL();
RTCSentRtpStreamStats(const RTCSentRtpStreamStats& other);
~RTCSentRtpStreamStats() override;
RTCStatsMember<uint32_t> packets_sent;
RTCStatsMember<uint64_t> packets_sent;
RTCStatsMember<uint64_t> bytes_sent;
protected:
@ -407,16 +232,14 @@ class RTC_EXPORT RTCSentRtpStreamStats : public RTCRTPStreamStats {
};
// https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict*
class RTC_EXPORT RTCInboundRTPStreamStats final
class RTC_EXPORT RTCInboundRtpStreamStats final
: public RTCReceivedRtpStreamStats {
public:
WEBRTC_RTCSTATS_DECL();
RTCInboundRTPStreamStats(std::string id, Timestamp timestamp);
RTCInboundRTPStreamStats(const RTCInboundRTPStreamStats& other);
~RTCInboundRTPStreamStats() override;
// TODO(https://crbug.com/webrtc/14174): Implement trackIdentifier and kind.
RTCInboundRtpStreamStats(std::string id, Timestamp timestamp);
RTCInboundRtpStreamStats(const RTCInboundRtpStreamStats& other);
~RTCInboundRtpStreamStats() override;
RTCStatsMember<std::string> playout_id;
RTCStatsMember<std::string> track_identifier;
@ -428,6 +251,10 @@ class RTC_EXPORT RTCInboundRTPStreamStats final
RTCStatsMember<uint64_t> fec_packets_discarded;
RTCStatsMember<uint64_t> bytes_received;
RTCStatsMember<uint64_t> header_bytes_received;
// Inbound RTX stats. Only defined when RTX is used and it is therefore
// possible to distinguish retransmissions.
RTCStatsMember<uint64_t> retransmitted_packets_received;
RTCStatsMember<uint64_t> retransmitted_bytes_received;
RTCStatsMember<double> last_packet_received_timestamp;
RTCStatsMember<double> jitter_buffer_delay;
RTCStatsMember<double> jitter_buffer_target_delay;
@ -443,7 +270,7 @@ class RTC_EXPORT RTCInboundRTPStreamStats final
RTCStatsMember<double> total_audio_energy;
RTCStatsMember<double> total_samples_duration;
// Stats below are only implemented or defined for video.
RTCStatsMember<int32_t> frames_received;
RTCStatsMember<uint32_t> frames_received;
RTCStatsMember<uint32_t> frame_width;
RTCStatsMember<uint32_t> frame_height;
RTCStatsMember<double> frames_per_second;
@ -465,11 +292,9 @@ class RTC_EXPORT RTCInboundRTPStreamStats final
// Only populated if audio/video sync is enabled.
// TODO(https://crbug.com/webrtc/14177): Expose even if A/V sync is off?
RTCStatsMember<double> estimated_playout_timestamp;
// Only implemented for video.
// TODO(https://crbug.com/webrtc/14178): Also implement for audio.
RTCRestrictedStatsMember<std::string,
StatExposureCriteria::kHardwareCapability>
decoder_implementation;
// Only defined for video.
// In JavaScript, this is only exposed if HW exposure is allowed.
RTCStatsMember<std::string> decoder_implementation;
// FIR and PLI counts are only defined for |kind == "video"|.
RTCStatsMember<uint32_t> fir_count;
RTCStatsMember<uint32_t> pli_count;
@ -482,35 +307,34 @@ class RTC_EXPORT RTCInboundRTPStreamStats final
// TimingFrameInfo::ToString().
// TODO(https://crbug.com/webrtc/14586): Unship or standardize this metric.
RTCStatsMember<std::string> goog_timing_frame_info;
RTCRestrictedStatsMember<bool, StatExposureCriteria::kHardwareCapability>
power_efficient_decoder;
// Non-standard audio metrics.
RTCNonStandardStatsMember<uint64_t> jitter_buffer_flushes;
RTCNonStandardStatsMember<uint64_t> delayed_packet_outage_samples;
RTCNonStandardStatsMember<double> relative_packet_arrival_delay;
RTCNonStandardStatsMember<uint32_t> interruption_count;
RTCNonStandardStatsMember<double> total_interruption_duration;
// In JavaScript, this is only exposed if HW exposure is allowed.
RTCStatsMember<bool> power_efficient_decoder;
// The former googMinPlayoutDelayMs (in seconds).
RTCNonStandardStatsMember<double> min_playout_delay;
// The following metrics are NOT exposed to JavaScript. We should consider
// standardizing or removing them.
RTCStatsMember<uint64_t> jitter_buffer_flushes;
RTCStatsMember<uint64_t> delayed_packet_outage_samples;
RTCStatsMember<double> relative_packet_arrival_delay;
RTCStatsMember<uint32_t> interruption_count;
RTCStatsMember<double> total_interruption_duration;
RTCStatsMember<double> min_playout_delay;
};
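For illustration only (not part of this change), a minimal sketch of how the renamed RTCInboundRtpStreamStats is typically read out of an RTCStatsReport; `report` is assumed to be the rtc::scoped_refptr<const RTCStatsReport> delivered to a GetStats() callback, and the logging macro is assumed to be available at the call site.
// Editorial sketch, not part of this diff.
for (const RTCInboundRtpStreamStats* inbound :
     report->GetStatsOfType<RTCInboundRtpStreamStats>()) {
  if (inbound->frames_received.is_defined()) {
    RTC_LOG(LS_INFO) << "frames_received=" << *inbound->frames_received;
  }
}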
// https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict*
class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats {
class RTC_EXPORT RTCOutboundRtpStreamStats final
: public RTCSentRtpStreamStats {
public:
WEBRTC_RTCSTATS_DECL();
RTCOutboundRTPStreamStats(std::string id, Timestamp timestamp);
RTCOutboundRTPStreamStats(const RTCOutboundRTPStreamStats& other);
~RTCOutboundRTPStreamStats() override;
RTCOutboundRtpStreamStats(std::string id, Timestamp timestamp);
RTCOutboundRtpStreamStats(const RTCOutboundRtpStreamStats& other);
~RTCOutboundRtpStreamStats() override;
RTCStatsMember<std::string> media_source_id;
RTCStatsMember<std::string> remote_id;
RTCStatsMember<std::string> mid;
RTCStatsMember<std::string> rid;
RTCStatsMember<uint32_t> packets_sent;
RTCStatsMember<uint64_t> retransmitted_packets_sent;
RTCStatsMember<uint64_t> bytes_sent;
RTCStatsMember<uint64_t> header_bytes_sent;
RTCStatsMember<uint64_t> retransmitted_bytes_sent;
RTCStatsMember<double> target_bitrate;
@ -524,26 +348,24 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats {
RTCStatsMember<uint32_t> frames_sent;
RTCStatsMember<uint32_t> huge_frames_sent;
RTCStatsMember<double> total_packet_send_delay;
// Enum type RTCQualityLimitationReason
RTCStatsMember<std::string> quality_limitation_reason;
RTCStatsMember<std::map<std::string, double>> quality_limitation_durations;
// https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationresolutionchanges
RTCStatsMember<uint32_t> quality_limitation_resolution_changes;
// https://w3c.github.io/webrtc-provisional-stats/#dom-rtcoutboundrtpstreamstats-contenttype
RTCStatsMember<std::string> content_type;
// In JavaScript, this is only exposed if HW exposure is allowed.
// Only implemented for video.
// TODO(https://crbug.com/webrtc/14178): Implement for audio as well.
RTCRestrictedStatsMember<std::string,
StatExposureCriteria::kHardwareCapability>
encoder_implementation;
RTCStatsMember<std::string> encoder_implementation;
// FIR and PLI counts are only defined for |kind == "video"|.
RTCStatsMember<uint32_t> fir_count;
RTCStatsMember<uint32_t> pli_count;
RTCStatsMember<uint32_t> nack_count;
RTCStatsMember<uint64_t> qp_sum;
RTCStatsMember<bool> active;
RTCRestrictedStatsMember<bool, StatExposureCriteria::kHardwareCapability>
power_efficient_encoder;
// In JavaScript, this is only exposed if HW exposure is allowed.
RTCStatsMember<bool> power_efficient_encoder;
RTCStatsMember<std::string> scalability_mode;
};
@ -643,7 +465,6 @@ class RTC_EXPORT RTCTransportStats final : public RTCStats {
RTCStatsMember<uint64_t> bytes_received;
RTCStatsMember<uint64_t> packets_received;
RTCStatsMember<std::string> rtcp_transport_stats_id;
// Enum type RTCDtlsTransportState.
RTCStatsMember<std::string> dtls_state;
RTCStatsMember<std::string> selected_candidate_pair_id;
RTCStatsMember<std::string> local_certificate_id;

View file

@ -17,6 +17,7 @@ rtc_library("task_queue") {
sources = [ "task_queue_base.cc" ]
deps = [
"..:location",
"../../rtc_base:checks",
"../../rtc_base:macromagic",
"../../rtc_base/system:rtc_export",

View file

@ -14,6 +14,7 @@
#include <utility>
#include "absl/functional/any_invocable.h"
#include "api/location.h"
#include "api/units/time_delta.h"
#include "rtc_base/system/rtc_export.h"
#include "rtc_base/thread_annotations.h"
@ -62,7 +63,10 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase {
// Note that this guarantee does not apply to delayed tasks.
//
// May be called on any thread or task queue, including this task queue.
virtual void PostTask(absl::AnyInvocable<void() &&> task) = 0;
void PostTask(absl::AnyInvocable<void() &&> task,
const Location& location = Location::Current()) {
PostTaskImpl(std::move(task), PostTaskTraits{}, location);
}
// Prefer PostDelayedTask() over PostDelayedHighPrecisionTask() whenever
// possible.
@ -87,8 +91,13 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase {
// https://crbug.com/webrtc/13583 for more information.
//
// May be called on any thread or task queue, including this task queue.
virtual void PostDelayedTask(absl::AnyInvocable<void() &&> task,
TimeDelta delay) = 0;
void PostDelayedTask(absl::AnyInvocable<void() &&> task,
TimeDelta delay,
const Location& location = Location::Current()) {
PostDelayedTaskImpl(std::move(task), delay,
PostDelayedTaskTraits{.high_precision = false},
location);
}
// Prefer PostDelayedTask() over PostDelayedHighPrecisionTask() whenever
// possible.
@ -106,20 +115,28 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase {
// battery, when the timer precision can be as poor as 15 ms.
//
// May be called on any thread or task queue, including this task queue.
virtual void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> task,
TimeDelta delay) = 0;
void PostDelayedHighPrecisionTask(
absl::AnyInvocable<void() &&> task,
TimeDelta delay,
const Location& location = Location::Current()) {
PostDelayedTaskImpl(std::move(task), delay,
PostDelayedTaskTraits{.high_precision = true},
location);
}
// As specified by `precision`, calls either PostDelayedTask() or
// PostDelayedHighPrecisionTask().
void PostDelayedTaskWithPrecision(DelayPrecision precision,
absl::AnyInvocable<void() &&> task,
TimeDelta delay) {
void PostDelayedTaskWithPrecision(
DelayPrecision precision,
absl::AnyInvocable<void() &&> task,
TimeDelta delay,
const Location& location = Location::Current()) {
switch (precision) {
case DelayPrecision::kLow:
PostDelayedTask(std::move(task), delay);
PostDelayedTask(std::move(task), delay, location);
break;
case DelayPrecision::kHigh:
PostDelayedHighPrecisionTask(std::move(task), delay);
PostDelayedHighPrecisionTask(std::move(task), delay, location);
break;
}
}
@ -131,6 +148,18 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase {
bool IsCurrent() const { return Current() == this; }
protected:
// This is currently only present here to simplify introduction of future
// planned task queue changes.
struct PostTaskTraits {};
struct PostDelayedTaskTraits {
// If `high_precision` is false, tasks may execute within up to a 17 ms
// leeway in addition to OS timer precision. Otherwise the task should be
// limited to OS timer precision. See PostDelayedTask() and
// PostDelayedHighPrecisionTask() for more information.
bool high_precision = false;
};
class RTC_EXPORT CurrentTaskQueueSetter {
public:
explicit CurrentTaskQueueSetter(TaskQueueBase* task_queue);
@ -142,6 +171,20 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase {
TaskQueueBase* const previous_;
};
// Subclasses should implement this method to support the behavior defined in
// the PostTask and PostTaskTraits docs above.
virtual void PostTaskImpl(absl::AnyInvocable<void() &&> task,
const PostTaskTraits& traits,
const Location& location) = 0;
// Subclasses should implement this method to support the behavior defined in
// the PostDelayedTask/PostDelayedHighPrecisionTask and PostDelayedTaskTraits
// docs above.
virtual void PostDelayedTaskImpl(absl::AnyInvocable<void() &&> task,
TimeDelta delay,
const PostDelayedTaskTraits& traits,
const Location& location) = 0;
// Users of the TaskQueue should call Delete instead of directly deleting
// this object.
virtual ~TaskQueueBase() = default;
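To illustrate the new contract, a minimal hypothetical subclass sketch (not part of this change): the public PostTask/PostDelayedTask wrappers above forward to the Impl methods, so an implementation only needs to override the Impl pair plus Delete().
// Editorial sketch: a trivial queue that runs tasks inline. A real
// implementation would enqueue the task and honor `delay` and
// `traits.high_precision`; `location` can be used for tracing.
class InlineTaskQueue final : public TaskQueueBase {
 public:
  void Delete() override { delete this; }

 protected:
  void PostTaskImpl(absl::AnyInvocable<void() &&> task,
                    const PostTaskTraits& /*traits*/,
                    const Location& /*location*/) override {
    CurrentTaskQueueSetter set_current(this);
    std::move(task)();
  }
  void PostDelayedTaskImpl(absl::AnyInvocable<void() &&> task,
                           TimeDelta /*delay*/,
                           const PostDelayedTaskTraits& /*traits*/,
                           const Location& location) override {
    PostTaskImpl(std::move(task), PostTaskTraits{}, location);
  }
};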

View file

@ -177,14 +177,35 @@ TEST_P(TaskQueueTest, PostedUnexecutedClosureDestroyedOnTaskQueue) {
CreateTaskQueue(factory, "PostedUnexecutedClosureDestroyedOnTaskQueue");
TaskQueueBase* queue_ptr = queue.get();
queue->PostTask([] { SleepFor(TimeDelta::Millis(100)); });
// Give the task queue a chance to start executing the first lambda.
// Give the task queue a chance to start executing the first lambda.
SleepFor(TimeDelta::Millis(10));
// Then ensure the next lambda (which is likely not executing yet) is
// destroyed in the task queue context when the queue is deleted.
auto cleanup = absl::Cleanup(
[queue_ptr] { EXPECT_EQ(queue_ptr, TaskQueueBase::Current()); });
rtc::Event finished;
// Then ensure the next lambda (which is likely not executing yet) is
// destroyed in the task queue context when the queue is deleted.
auto cleanup = absl::Cleanup([queue_ptr, &finished] {
EXPECT_EQ(queue_ptr, TaskQueueBase::Current());
finished.Set();
});
queue->PostTask([cleanup = std::move(cleanup)] {});
queue = nullptr;
finished.Wait(TimeDelta::Seconds(1));
}
TEST_P(TaskQueueTest, PostedClosureDestroyedOnTaskQueue) {
std::unique_ptr<webrtc::TaskQueueFactory> factory = GetParam()(nullptr);
auto queue = CreateTaskQueue(factory, "PostedClosureDestroyedOnTaskQueue");
TaskQueueBase* queue_ptr = queue.get();
rtc::Event finished;
auto cleanup = absl::Cleanup([queue_ptr, &finished] {
EXPECT_EQ(queue_ptr, TaskQueueBase::Current());
finished.Set();
});
// The cleanup task may or may not have had time to execute when the task
// queue is destroyed. Regardless, the task should be destroyed on the
// queue.
queue->PostTask([cleanup = std::move(cleanup)] {});
queue = nullptr;
finished.Wait(TimeDelta::Seconds(1));
}
TEST_P(TaskQueueTest, PostedExecutedClosureDestroyedOnTaskQueue) {
@ -198,7 +219,7 @@ TEST_P(TaskQueueTest, PostedExecutedClosureDestroyedOnTaskQueue) {
EXPECT_EQ(queue_ptr, TaskQueueBase::Current());
finished.Set();
})] {});
finished.Wait(rtc::Event::kForever);
finished.Wait(TimeDelta::Seconds(1));
}
TEST_P(TaskQueueTest, PostAndReuse) {

View file

@ -20,15 +20,22 @@ namespace webrtc {
class MockTaskQueueBase : public TaskQueueBase {
public:
using TaskQueueBase::PostDelayedTaskTraits;
using TaskQueueBase::PostTaskTraits;
MOCK_METHOD(void, Delete, (), (override));
MOCK_METHOD(void, PostTask, (absl::AnyInvocable<void() &&>), (override));
MOCK_METHOD(void,
PostDelayedTask,
(absl::AnyInvocable<void() &&>, TimeDelta),
PostTaskImpl,
(absl::AnyInvocable<void() &&>,
const PostTaskTraits&,
const Location&),
(override));
MOCK_METHOD(void,
PostDelayedHighPrecisionTask,
(absl::AnyInvocable<void() &&>, TimeDelta),
PostDelayedTaskImpl,
(absl::AnyInvocable<void() &&>,
TimeDelta,
const PostDelayedTaskTraits&,
const Location&),
(override));
};
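Tests that previously set expectations on PostTask/PostDelayedTask now target the Impl methods instead; a hedged usage sketch (the traits types are made public above via the using declarations):
// Editorial sketch: run any task posted to the mock immediately.
MockTaskQueueBase task_queue;
EXPECT_CALL(task_queue, PostTaskImpl)
    .WillRepeatedly([](absl::AnyInvocable<void() &&> task,
                       const MockTaskQueueBase::PostTaskTraits&,
                       const Location&) { std::move(task)(); });
task_queue.PostTask([] { /* runs synchronously in this sketch */ });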

View file

@ -49,6 +49,11 @@ class FrameGeneratorInterface {
virtual void ChangeResolution(size_t width, size_t height) = 0;
virtual Resolution GetResolution() const = 0;
// Returns the frames per second this generator is supposed to provide
// according to its data source. Not all frame generators know the frames per
// second of the data source, in which case this method returns absl::nullopt.
virtual absl::optional<int> fps() const = 0;
};
} // namespace test

View file

@ -51,6 +51,11 @@ class MockDataChannelInterface
MOCK_METHOD(uint64_t, buffered_amount, (), (const, override));
MOCK_METHOD(void, Close, (), (override));
MOCK_METHOD(bool, Send, (const DataBuffer& buffer), (override));
MOCK_METHOD(void,
SendAsync,
(DataBuffer buffer,
absl::AnyInvocable<void(RTCError) &&> on_complete),
(override));
protected:
MockDataChannelInterface() = default;

View file

@ -65,6 +65,11 @@ class MockPeerConnectionFactoryInterface
CreateVideoTrack,
(const std::string&, VideoTrackSourceInterface*),
(override));
MOCK_METHOD(rtc::scoped_refptr<VideoTrackInterface>,
CreateVideoTrack,
(rtc::scoped_refptr<VideoTrackSourceInterface>,
absl::string_view),
(override));
MOCK_METHOD(rtc::scoped_refptr<AudioTrackInterface>,
CreateAudioTrack,
(const std::string&, AudioSourceInterface*),

View file

@ -68,18 +68,18 @@ class MockRtpTransceiver : public RtpTransceiverInterface {
(),
(const, override));
MOCK_METHOD(std::vector<RtpHeaderExtensionCapability>,
HeaderExtensionsToOffer,
GetHeaderExtensionsToNegotiate,
(),
(const, override));
MOCK_METHOD(std::vector<RtpHeaderExtensionCapability>,
HeaderExtensionsNegotiated,
GetNegotiatedHeaderExtensions,
(),
(const, override));
MOCK_METHOD(webrtc::RTCError,
SetOfferedRtpHeaderExtensions,
(rtc::ArrayView<const RtpHeaderExtensionCapability>
header_extensions_to_offer),
(override));
MOCK_METHOD(
webrtc::RTCError,
SetHeaderExtensionsToNegotiate,
(rtc::ArrayView<const RtpHeaderExtensionCapability> header_extensions),
(override));
};
} // namespace webrtc

View file

@ -0,0 +1,44 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_MOCK_TRANSFORMABLE_AUDIO_FRAME_H_
#define API_TEST_MOCK_TRANSFORMABLE_AUDIO_FRAME_H_
#include "api/frame_transformer_interface.h"
#include "test/gmock.h"
namespace webrtc {
class MockTransformableAudioFrame : public TransformableAudioFrameInterface {
public:
MOCK_METHOD(rtc::ArrayView<const uint8_t>, GetData, (), (const, override));
MOCK_METHOD(void, SetData, (rtc::ArrayView<const uint8_t>), (override));
MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override));
MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override));
MOCK_METHOD(uint32_t, GetSsrc, (), (const, override));
MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override));
MOCK_METHOD(RTPHeader&, GetHeader, (), (const override));
MOCK_METHOD(rtc::ArrayView<const uint32_t>,
GetContributingSources,
(),
(const override));
MOCK_METHOD(const absl::optional<uint16_t>,
SequenceNumber,
(),
(const, override));
MOCK_METHOD(TransformableFrameInterface::Direction,
GetDirection,
(),
(const, override));
};
} // namespace webrtc
#endif // API_TEST_MOCK_TRANSFORMABLE_AUDIO_FRAME_H_

View file

@ -26,11 +26,6 @@ class MockTransformableVideoFrame
MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override));
MOCK_METHOD(uint32_t, GetSsrc, (), (const, override));
MOCK_METHOD(bool, IsKeyFrame, (), (const, override));
MOCK_METHOD(std::vector<uint8_t>, GetAdditionalData, (), (const, override));
MOCK_METHOD(const webrtc::VideoFrameMetadata&,
GetMetadata,
(),
(const, override));
MOCK_METHOD(void,
SetMetadata,
(const webrtc::VideoFrameMetadata&),
@ -40,6 +35,7 @@ class MockTransformableVideoFrame
GetDirection,
(),
(const, override));
MOCK_METHOD(VideoFrameMetadata, Metadata, (), (const, override));
};
static_assert(!std::is_abstract_v<MockTransformableVideoFrame>, "");

View file

@ -27,7 +27,7 @@ namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
std::string SpecToString(VideoResolution::Spec spec) {
absl::string_view SpecToString(VideoResolution::Spec spec) {
switch (spec) {
case VideoResolution::Spec::kNone:
return "None";
@ -207,24 +207,22 @@ VideoConfig::VideoConfig(const VideoResolution& resolution)
}
VideoConfig::VideoConfig(size_t width, size_t height, int32_t fps)
: width(width), height(height), fps(fps) {}
VideoConfig::VideoConfig(std::string stream_label,
VideoConfig::VideoConfig(absl::string_view stream_label,
size_t width,
size_t height,
int32_t fps)
: width(width),
height(height),
fps(fps),
stream_label(std::move(stream_label)) {}
: width(width), height(height), fps(fps), stream_label(stream_label) {}
AudioConfig::AudioConfig(std::string stream_label)
: stream_label(std::move(stream_label)) {}
AudioConfig::AudioConfig(absl::string_view stream_label)
: stream_label(stream_label) {}
VideoCodecConfig::VideoCodecConfig(absl::string_view name)
: name(name), required_params() {}
VideoCodecConfig::VideoCodecConfig(std::string name)
: name(std::move(name)), required_params() {}
VideoCodecConfig::VideoCodecConfig(
std::string name,
absl::string_view name,
std::map<std::string, std::string> required_params)
: name(std::move(name)), required_params(std::move(required_params)) {}
: name(name), required_params(std::move(required_params)) {}
absl::optional<VideoResolution> VideoSubscription::GetMaxResolution(
rtc::ArrayView<const VideoConfig> video_configs) {

View file

@ -306,7 +306,7 @@ class VideoDumpOptions {
struct VideoConfig {
explicit VideoConfig(const VideoResolution& resolution);
VideoConfig(size_t width, size_t height, int32_t fps);
VideoConfig(std::string stream_label,
VideoConfig(absl::string_view stream_label,
size_t width,
size_t height,
int32_t fps);
@ -375,19 +375,13 @@ struct VideoConfig {
// Contains properties for audio in the call.
struct AudioConfig {
enum Mode {
kGenerated,
kFile,
};
AudioConfig() = default;
explicit AudioConfig(std::string stream_label);
explicit AudioConfig(absl::string_view stream_label);
// Have to be unique among all specified configs for all peers in the call.
// Will be auto generated if omitted.
absl::optional<std::string> stream_label;
Mode mode = kGenerated;
// Have to be specified only if mode = kFile
// If no file is specified, audio will be generated.
absl::optional<std::string> input_file_name;
// If specified the input stream will be also copied to specified file.
absl::optional<std::string> input_dump_file_name;
@ -405,8 +399,8 @@ struct AudioConfig {
};
struct VideoCodecConfig {
explicit VideoCodecConfig(std::string name);
VideoCodecConfig(std::string name,
explicit VideoCodecConfig(absl::string_view name);
VideoCodecConfig(absl::string_view name,
std::map<std::string, std::string> required_params);
// Next two fields are used to specify the concrete video codec that should be
// used in the test. The video codec will be negotiated in SDP during offer/

View file

@ -181,12 +181,12 @@ PeerConfigurer* PeerConfigurer::SetUseNetworkThreadAsWorkerThread() {
return this;
}
PeerConfigurer* PeerConfigurer::SetRtcEventLogPath(std::string path) {
params_->rtc_event_log_path = std::move(path);
PeerConfigurer* PeerConfigurer::SetRtcEventLogPath(absl::string_view path) {
params_->rtc_event_log_path = std::string(path);
return this;
}
PeerConfigurer* PeerConfigurer::SetAecDumpPath(std::string path) {
params_->aec_dump_path = std::move(path);
PeerConfigurer* PeerConfigurer::SetAecDumpPath(absl::string_view path) {
params_->aec_dump_path = std::string(path);
return this;
}
PeerConfigurer* PeerConfigurer::SetRTCConfiguration(

View file

@ -152,10 +152,10 @@ class PeerConfigurer {
// If set, an RTCEventLog will be saved in that location and it will be
// available for further analysis.
PeerConfigurer* SetRtcEventLogPath(std::string path);
PeerConfigurer* SetRtcEventLogPath(absl::string_view path);
// If set, an AEC dump will be saved in that location and it will be
// available for further analysis.
PeerConfigurer* SetAecDumpPath(std::string path);
PeerConfigurer* SetAecDumpPath(absl::string_view path);
PeerConfigurer* SetRTCConfiguration(
PeerConnectionInterface::RTCConfiguration configuration);
PeerConfigurer* SetRTCOfferAnswerOptions(

View file

@ -11,7 +11,7 @@ import("../../../webrtc.gni")
rtc_library("function_video_factory") {
visibility = [ "*" ]
testonly = true
public = [
sources = [
"function_video_decoder_factory.h",
"function_video_encoder_factory.h",
]
@ -25,7 +25,27 @@ rtc_library("function_video_factory") {
rtc_library("video_frame_writer") {
visibility = [ "*" ]
testonly = true
public = [ "video_frame_writer.h" ]
sources = [ "video_frame_writer.h" ]
deps = [ "../../video:video_frame" ]
}
rtc_library("test_video_track_source") {
visibility = [ "*" ]
testonly = true
sources = [
"test_video_track_source.cc",
"test_video_track_source.h",
]
deps = [
"../..:media_stream_interface",
"../..:sequence_checker",
"../../../rtc_base:checks",
"../../../rtc_base:macromagic",
"../../../rtc_base/system:no_unique_address",
"../../video:recordable_encoded_frame",
"../../video:video_frame",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}

6
api/test/video/DEPS Normal file
View file

@ -0,0 +1,6 @@
specific_include_rules = {
"test_video_track_source\.h": [
"+rtc_base/thread_annotations.h",
"+rtc_base/system/no_unique_address.h",
],
}

View file

@ -0,0 +1,55 @@
/*
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/video/test_video_track_source.h"
#include "api/media_stream_interface.h"
#include "api/sequence_checker.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "api/video/video_source_interface.h"
#include "rtc_base/checks.h"
namespace webrtc {
namespace test {
TestVideoTrackSource::TestVideoTrackSource(bool remote)
: state_(kInitializing), remote_(remote) {
worker_thread_checker_.Detach();
signaling_thread_checker_.Detach();
}
VideoTrackSourceInterface::SourceState TestVideoTrackSource::state() const {
RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
return state_;
}
void TestVideoTrackSource::SetState(SourceState new_state) {
RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
if (state_ != new_state) {
state_ = new_state;
FireOnChanged();
}
}
void TestVideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
source()->AddOrUpdateSink(sink, wants);
}
void TestVideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<VideoFrame>* sink) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
source()->RemoveSink(sink);
}
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,88 @@
/*
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_VIDEO_TEST_VIDEO_TRACK_SOURCE_H_
#define API_TEST_VIDEO_TEST_VIDEO_TRACK_SOURCE_H_
#include "absl/types/optional.h"
#include "api/media_stream_interface.h"
#include "api/notifier.h"
#include "api/sequence_checker.h"
#include "api/video/recordable_encoded_frame.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "api/video/video_source_interface.h"
#include "rtc_base/system/no_unique_address.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
namespace test {
// Video source that can be used as input for tests.
class TestVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
public:
explicit TestVideoTrackSource(bool remote);
~TestVideoTrackSource() override = default;
void SetState(SourceState new_state);
SourceState state() const override;
bool remote() const override { return remote_; }
bool is_screencast() const override { return false; }
absl::optional<bool> needs_denoising() const override {
return absl::nullopt;
}
bool GetStats(Stats* stats) override { return false; }
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
bool SupportsEncodedOutput() const override { return false; }
void GenerateKeyFrame() override {}
void AddEncodedSink(
rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override {}
void RemoveEncodedSink(
rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override {}
// Starts producing video.
virtual void Start() = 0;
// Stops producing video.
virtual void Stop() = 0;
virtual void SetScreencast(bool is_screencast) = 0;
// TODO(titovartem): make next 4 methods pure virtual.
virtual void SetEnableAdaptation(bool enable_adaptation) {}
virtual int GetFrameWidth() const { return 0; }
virtual int GetFrameHeight() const { return 0; }
virtual void OnOutputFormatRequest(int width,
int height,
const absl::optional<int>& max_fps) {}
protected:
virtual rtc::VideoSourceInterface<VideoFrame>* source() = 0;
private:
RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_;
RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_;
SourceState state_ RTC_GUARDED_BY(&signaling_thread_checker_);
const bool remote_;
};
} // namespace test
} // namespace webrtc
#endif // API_TEST_VIDEO_TEST_VIDEO_TRACK_SOURCE_H_
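A hypothetical subclass sketch (not part of this change) showing the minimum a test needs to provide on top of this base: Start/Stop implementations that flip the source state and a source() accessor returning the wrapped frame source.
// Editorial sketch; `wrapped` is any rtc::VideoSourceInterface<VideoFrame>
// owned elsewhere, e.g. a frame-generator based capturer.
class FakeTestVideoTrackSource : public webrtc::test::TestVideoTrackSource {
 public:
  explicit FakeTestVideoTrackSource(
      rtc::VideoSourceInterface<webrtc::VideoFrame>* wrapped)
      : TestVideoTrackSource(/*remote=*/false), wrapped_(wrapped) {}

  void Start() override { SetState(kLive); }
  void Stop() override { SetState(kMuted); }
  void SetScreencast(bool /*is_screencast*/) override {}

 protected:
  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override {
    return wrapped_;
  }

 private:
  rtc::VideoSourceInterface<webrtc::VideoFrame>* const wrapped_;
};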

View file

@ -0,0 +1,97 @@
/*
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/video_codec_stats.h"
namespace webrtc {
namespace test {
void VideoCodecStats::Stream::LogMetrics(
MetricsLogger* logger,
std::string test_case_name,
std::map<std::string, std::string> metadata) const {
logger->LogMetric("width", test_case_name, width, Unit::kCount,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric("height", test_case_name, height, Unit::kCount,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric(
"frame_size_bytes", test_case_name, frame_size_bytes, Unit::kBytes,
webrtc::test::ImprovementDirection::kNeitherIsBetter, metadata);
logger->LogMetric("keyframe", test_case_name, keyframe, Unit::kCount,
webrtc::test::ImprovementDirection::kSmallerIsBetter,
metadata);
logger->LogMetric("qp", test_case_name, qp, Unit::kUnitless,
webrtc::test::ImprovementDirection::kSmallerIsBetter,
metadata);
logger->LogMetric(
"encode_time_ms", test_case_name, encode_time_ms, Unit::kMilliseconds,
webrtc::test::ImprovementDirection::kSmallerIsBetter, metadata);
logger->LogMetric(
"decode_time_ms", test_case_name, decode_time_ms, Unit::kMilliseconds,
webrtc::test::ImprovementDirection::kSmallerIsBetter, metadata);
logger->LogMetric("target_bitrate_kbps", test_case_name, target_bitrate_kbps,
Unit::kKilobitsPerSecond,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric("target_framerate_fps", test_case_name,
target_framerate_fps, Unit::kHertz,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric("encoded_bitrate_kbps", test_case_name,
encoded_bitrate_kbps, Unit::kKilobitsPerSecond,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric("encoded_framerate_fps", test_case_name,
encoded_framerate_fps, Unit::kHertz,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric("bitrate_mismatch_pct", test_case_name,
bitrate_mismatch_pct, Unit::kPercent,
webrtc::test::ImprovementDirection::kSmallerIsBetter,
metadata);
logger->LogMetric("framerate_mismatch_pct", test_case_name,
framerate_mismatch_pct, Unit::kPercent,
webrtc::test::ImprovementDirection::kSmallerIsBetter,
metadata);
logger->LogMetric("transmission_time_ms", test_case_name,
transmission_time_ms, Unit::kMilliseconds,
webrtc::test::ImprovementDirection::kSmallerIsBetter,
metadata);
logger->LogMetric("psnr_y_db", test_case_name, psnr.y, Unit::kUnitless,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric("psnr_u_db", test_case_name, psnr.u, Unit::kUnitless,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
logger->LogMetric("psnr_v_db", test_case_name, psnr.v, Unit::kUnitless,
webrtc::test::ImprovementDirection::kBiggerIsBetter,
metadata);
}
} // namespace test
} // namespace webrtc

View file

@ -11,6 +11,7 @@
#ifndef API_TEST_VIDEO_CODEC_STATS_H_
#define API_TEST_VIDEO_CODEC_STATS_H_
#include <map>
#include <string>
#include <vector>
@ -19,6 +20,7 @@
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_logger.h"
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/frequency.h"
namespace webrtc {
@ -44,10 +46,10 @@ class VideoCodecStats {
int width = 0;
int height = 0;
int size_bytes = 0;
DataSize frame_size = DataSize::Zero();
bool keyframe = false;
absl::optional<int> qp = absl::nullopt;
absl::optional<int> base_spatial_idx = absl::nullopt;
absl::optional<int> qp;
absl::optional<int> base_spatial_idx;
Timestamp encode_start = Timestamp::Zero();
TimeDelta encode_time = TimeDelta::Zero();
@ -59,35 +61,46 @@ class VideoCodecStats {
double u = 0.0;
double v = 0.0;
};
absl::optional<Psnr> psnr = absl::nullopt;
absl::optional<Psnr> psnr;
absl::optional<DataRate> target_bitrate;
absl::optional<Frequency> target_framerate;
bool encoded = false;
bool decoded = false;
};
struct Stream {
int num_frames = 0;
int num_keyframes = 0;
SamplesStatsCounter width;
SamplesStatsCounter height;
SamplesStatsCounter size_bytes;
SamplesStatsCounter frame_size_bytes;
SamplesStatsCounter keyframe;
SamplesStatsCounter qp;
SamplesStatsCounter encode_time_us;
SamplesStatsCounter decode_time_us;
SamplesStatsCounter encode_time_ms;
SamplesStatsCounter decode_time_ms;
DataRate bitrate = DataRate::Zero();
Frequency framerate = Frequency::Zero();
int bitrate_mismatch_pct = 0;
int framerate_mismatch_pct = 0;
SamplesStatsCounter transmission_time_us;
SamplesStatsCounter target_bitrate_kbps;
SamplesStatsCounter target_framerate_fps;
SamplesStatsCounter encoded_bitrate_kbps;
SamplesStatsCounter encoded_framerate_fps;
SamplesStatsCounter bitrate_mismatch_pct;
SamplesStatsCounter framerate_mismatch_pct;
SamplesStatsCounter transmission_time_ms;
struct Psnr {
SamplesStatsCounter y;
SamplesStatsCounter u;
SamplesStatsCounter v;
} psnr;
// Logs `Stream` metrics to provided `MetricsLogger`.
void LogMetrics(MetricsLogger* logger,
std::string test_case_name,
std::map<std::string, std::string> metadata = {}) const;
};
virtual ~VideoCodecStats() = default;
@ -97,18 +110,8 @@ class VideoCodecStats {
virtual std::vector<Frame> Slice(
absl::optional<Filter> filter = absl::nullopt) const = 0;
// Returns video statistics aggregated for given `frames`. If `bitrate` is
// provided, also performs rate control analysis. If `framerate` is provided,
// also calculates frame rate mismatch.
virtual Stream Aggregate(
const std::vector<Frame>& frames,
absl::optional<DataRate> bitrate = absl::nullopt,
absl::optional<Frequency> framerate = absl::nullopt) const = 0;
// Logs `Stream` metrics to provided `MetricsLogger`.
virtual void LogMetrics(MetricsLogger* logger,
const Stream& stream,
std::string test_case_name) const = 0;
// Returns video statistics aggregated for given `frames`.
virtual Stream Aggregate(const std::vector<Frame>& frames) const = 0;
};
} // namespace test
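Tying the relocated LogMetrics() together with the simplified Aggregate(), a hedged usage sketch; `stats` (a VideoCodecStats produced by a tester run) and `logger` (a MetricsLogger*) are assumed to exist:
// Editorial sketch, not part of this diff.
std::vector<VideoCodecStats::Frame> frames = stats->Slice();
VideoCodecStats::Stream stream = stats->Aggregate(frames);
stream.LogMetrics(logger, /*test_case_name=*/"MyCodecTest",
                  /*metadata=*/{{"codec", "VP9"}});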

View file

@ -12,6 +12,7 @@
#define API_TEST_VIDEO_CODEC_TESTER_H_
#include <memory>
#include <string>
#include "absl/functional/any_invocable.h"
#include "absl/types/optional.h"
@ -46,10 +47,14 @@ class VideoCodecTester {
struct DecoderSettings {
PacingSettings pacing;
absl::optional<std::string> decoder_input_base_path;
absl::optional<std::string> decoder_output_base_path;
};
struct EncoderSettings {
PacingSettings pacing;
absl::optional<std::string> encoder_input_base_path;
absl::optional<std::string> encoder_output_base_path;
};
virtual ~VideoCodecTester() = default;
@ -88,6 +93,8 @@ class VideoCodecTester {
virtual ~Encoder() = default;
virtual void Initialize() = 0;
virtual void Encode(const VideoFrame& frame, EncodeCallback callback) = 0;
virtual void Flush() = 0;
@ -101,6 +108,8 @@ class VideoCodecTester {
virtual ~Decoder() = default;
virtual void Initialize() = 0;
virtual void Decode(const EncodedImage& frame, DecodeCallback callback) = 0;
virtual void Flush() = 0;

View file

@ -21,6 +21,7 @@
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
#include "api/video_codecs/video_encoder.h"
#include "rtc_base/checks.h"
namespace webrtc {
@ -150,6 +151,18 @@ class VideoQualityAnalyzerInterface
// call.
virtual void UnregisterParticipantInCall(absl::string_view peer_name) {}
// Informs analyzer that peer `receiver_peer_name` should not receive any
// stream from sender `sender_peer_name`.
// This method is a no-op if the sender or the receiver does not exist.
virtual void OnPauseAllStreamsFrom(absl::string_view sender_peer_name,
absl::string_view receiver_peer_name) {}
// Informs analyzer that peer `receiver_peer_name` is expected to receive all
// streams from `sender_peer_name`.
// This method is a no-op if the sender or the receiver does not exist.
virtual void OnResumeAllStreamsFrom(absl::string_view sender_peer_name,
absl::string_view receiver_peer_name) {}
// Tells the analyzer that the analysis is complete and that it should
// calculate final statistics.
virtual void Stop() {}
@ -158,6 +171,13 @@ class VideoQualityAnalyzerInterface
// frame ids space wraps around, then stream label for frame id may change.
// It will crash if the specified `frame_id` wasn't captured.
virtual std::string GetStreamLabel(uint16_t frame_id) = 0;
// Returns the sender peer name of the last stream where this frame was
// captured. The sender for this frame id may change when the frame ids wrap
// around. It will also crash if the specified `frame_id` wasn't captured.
virtual std::string GetSenderPeerName(uint16_t frame_id) const {
RTC_CHECK(false) << "Not implemented.";
}
};
} // namespace webrtc

View file

@ -155,6 +155,7 @@ if (rtc_include_tests) {
if (rtc_include_tests) {
rtc_source_set("mock_network_control") {
visibility = [ "*" ]
testonly = true
sources = [ "test/mock_network_control.h" ]
deps = [

View file

@ -233,6 +233,12 @@ struct NetworkControlUpdate {
NetworkControlUpdate();
NetworkControlUpdate(const NetworkControlUpdate&);
~NetworkControlUpdate();
bool has_updates() const {
return congestion_window.has_value() || pacer_config.has_value() ||
!probe_cluster_configs.empty() || target_rate.has_value();
}
absl::optional<DataSize> congestion_window;
absl::optional<PacerConfig> pacer_config;
std::vector<ProbeClusterConfig> probe_cluster_configs;
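The new has_updates() helper lets callers skip work when a controller callback produced nothing. A small hedged sketch; `controller` is a NetworkControllerInterface* and ApplyToPacerAndEncoder() is a hypothetical consumer of the update:
// Editorial sketch, not part of this diff.
NetworkControlUpdate update = controller->OnProcessInterval(process_interval);
if (update.has_updates()) {
  ApplyToPacerAndEncoder(update);  // Hypothetical helper.
}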

View file

@ -756,9 +756,10 @@ enum IceAttributeType {
STUN_ATTR_GOOG_MISC_INFO = 0xC059,
// Obsolete.
STUN_ATTR_GOOG_OBSOLETE_1 = 0xC05A,
STUN_ATTR_GOOG_CONNECTION_ID = 0xC05B, // Not yet implemented.
STUN_ATTR_GOOG_DELTA = 0xC05C, // Not yet implemented.
STUN_ATTR_GOOG_DELTA_ACK = 0xC05D, // Not yet implemented.
STUN_ATTR_GOOG_CONNECTION_ID = 0xC05B, // Not yet implemented.
STUN_ATTR_GOOG_DELTA = 0xC05C, // Not yet implemented.
STUN_ATTR_GOOG_DELTA_ACK = 0xC05D, // Not yet implemented.
STUN_ATTR_GOOG_DELTA_SYNC_REQ = 0xC05E, // Not yet implemented.
// MESSAGE-INTEGRITY truncated to 32-bit.
STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 = 0xC060,
};

View file

@ -16,6 +16,55 @@
namespace webrtc {
class MockNetworkControllerInterface : public NetworkControllerInterface {
public:
MOCK_METHOD(NetworkControlUpdate,
OnNetworkAvailability,
(NetworkAvailability),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnProcessInterval,
(ProcessInterval),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnNetworkRouteChange,
(NetworkRouteChange),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnRemoteBitrateReport,
(RemoteBitrateReport),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnRoundTripTimeUpdate,
(RoundTripTimeUpdate),
(override));
MOCK_METHOD(NetworkControlUpdate, OnSentPacket, (SentPacket), (override));
MOCK_METHOD(NetworkControlUpdate,
OnReceivedPacket,
(ReceivedPacket),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnStreamsConfig,
(StreamsConfig),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnTargetRateConstraints,
(TargetRateConstraints),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnTransportLossReport,
(TransportLossReport),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnTransportPacketsFeedback,
(TransportPacketsFeedback),
(override));
MOCK_METHOD(NetworkControlUpdate,
OnNetworkStateEstimate,
(NetworkStateEstimate),
(override));
};
class MockNetworkStateEstimator : public NetworkStateEstimator {
public:
MOCK_METHOD(absl::optional<NetworkStateEstimate>,

View file

@ -8,9 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/units/timestamp.h"
#include <limits>
#include "api/units/timestamp.h"
#include "test/gtest.h"
namespace webrtc {

View file

@ -125,6 +125,7 @@ rtc_source_set("recordable_encoded_frame") {
rtc_source_set("video_frame_type") {
visibility = [ "*" ]
sources = [ "video_frame_type.h" ]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_source_set("render_resolution") {
@ -372,3 +373,16 @@ rtc_library("frame_buffer_unittest") {
"../../test:test_support",
]
}
rtc_library("video_frame_metadata_unittest") {
testonly = true
sources = [ "video_frame_metadata_unittest.cc" ]
deps = [
":video_frame_metadata",
"../../api/video:video_frame",
"../../modules/video_coding:codec_globals_headers",
"../../test:test_support",
"../../video:video",
]
}

View file

@ -12,6 +12,7 @@
#include <memory>
#include "absl/base/attributes.h"
#include "absl/base/macros.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video_codecs/video_codec.h"
@ -30,13 +31,14 @@ class BuiltinVideoBitrateAllocatorFactory
std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
const VideoCodec& codec) override {
switch (codec.codecType) {
case kVideoCodecAV1:
case kVideoCodecVP9:
return std::make_unique<SvcRateAllocator>(codec);
default:
return std::make_unique<SimulcastRateAllocator>(codec);
// TODO(https://crbug.com/webrtc/14884): Update SvcRateAllocator to
// support simulcast and use it for VP9/AV1 simulcast as well.
if ((codec.codecType == kVideoCodecAV1 ||
codec.codecType == kVideoCodecVP9) &&
codec.numberOfSimulcastStreams <= 1) {
return std::make_unique<SvcRateAllocator>(codec);
}
return std::make_unique<SimulcastRateAllocator>(codec);
}
};
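The effect of the change above, as a hedged sketch: VP9 (or AV1) configured for simulcast now falls through to SimulcastRateAllocator, while single-stream VP9/AV1 keeps SvcRateAllocator.
// Editorial sketch, not part of this diff.
auto factory = CreateBuiltinVideoBitrateAllocatorFactory();
VideoCodec vp9 = {};
vp9.codecType = kVideoCodecVP9;
vp9.numberOfSimulcastStreams = 3;  // -> SimulcastRateAllocator
auto simulcast_allocator = factory->CreateVideoBitrateAllocator(vp9);
vp9.numberOfSimulcastStreams = 1;  // -> SvcRateAllocator
auto svc_allocator = factory->CreateVideoBitrateAllocator(vp9);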

View file

@ -152,4 +152,23 @@ void VideoFrameMetadata::SetCsrcs(std::vector<uint32_t> csrcs) {
csrcs_ = std::move(csrcs);
}
bool operator==(const VideoFrameMetadata& lhs, const VideoFrameMetadata& rhs) {
return lhs.frame_type_ == rhs.frame_type_ && lhs.width_ == rhs.width_ &&
lhs.height_ == rhs.height_ && lhs.rotation_ == rhs.rotation_ &&
lhs.content_type_ == rhs.content_type_ &&
lhs.frame_id_ == rhs.frame_id_ &&
lhs.spatial_index_ == rhs.spatial_index_ &&
lhs.temporal_index_ == rhs.temporal_index_ &&
lhs.frame_dependencies_ == rhs.frame_dependencies_ &&
lhs.decode_target_indications_ == rhs.decode_target_indications_ &&
lhs.is_last_frame_in_picture_ == rhs.is_last_frame_in_picture_ &&
lhs.simulcast_idx_ == rhs.simulcast_idx_ && lhs.codec_ == rhs.codec_ &&
lhs.codec_specifics_ == rhs.codec_specifics_ &&
lhs.ssrc_ == rhs.ssrc_ && lhs.csrcs_ == rhs.csrcs_;
}
bool operator!=(const VideoFrameMetadata& lhs, const VideoFrameMetadata& rhs) {
return !(lhs == rhs);
}
} // namespace webrtc

View file

@ -94,6 +94,11 @@ class RTC_EXPORT VideoFrameMetadata {
std::vector<uint32_t> GetCsrcs() const;
void SetCsrcs(std::vector<uint32_t> csrcs);
friend bool operator==(const VideoFrameMetadata& lhs,
const VideoFrameMetadata& rhs);
friend bool operator!=(const VideoFrameMetadata& lhs,
const VideoFrameMetadata& rhs);
private:
VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame;
int16_t width_ = 0;
@ -114,7 +119,7 @@ class RTC_EXPORT VideoFrameMetadata {
RTPVideoHeaderCodecSpecifics codec_specifics_;
// RTP info.
uint32_t ssrc_;
uint32_t ssrc_ = 0u;
std::vector<uint32_t> csrcs_;
};
} // namespace webrtc

View file

@ -0,0 +1,123 @@
/*
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/video_frame_metadata.h"
#include "api/video/video_frame.h"
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "test/gtest.h"
#include "video/video_receive_stream2.h"
namespace webrtc {
namespace {
RTPVideoHeaderH264 ExampleHeaderH264() {
NaluInfo nalu_info;
nalu_info.type = 1;
nalu_info.sps_id = 2;
nalu_info.pps_id = 3;
RTPVideoHeaderH264 header;
header.nalu_type = 4;
header.packetization_type = H264PacketizationTypes::kH264StapA;
header.nalus[0] = nalu_info;
header.nalus_length = 1;
header.packetization_mode = H264PacketizationMode::SingleNalUnit;
return header;
}
RTPVideoHeaderVP9 ExampleHeaderVP9() {
RTPVideoHeaderVP9 header;
header.InitRTPVideoHeaderVP9();
header.inter_pic_predicted = true;
header.flexible_mode = true;
header.beginning_of_frame = true;
header.end_of_frame = true;
header.ss_data_available = true;
header.non_ref_for_inter_layer_pred = true;
header.picture_id = 1;
header.max_picture_id = 2;
header.tl0_pic_idx = 3;
header.temporal_idx = 4;
header.spatial_idx = 5;
header.temporal_up_switch = true;
header.inter_layer_predicted = true;
header.gof_idx = 6;
header.num_ref_pics = 1;
header.pid_diff[0] = 8;
header.ref_picture_id[0] = 9;
header.num_spatial_layers = 1;
header.first_active_layer = 0;
header.spatial_layer_resolution_present = true;
header.width[0] = 12;
header.height[0] = 13;
header.end_of_picture = true;
header.gof.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1);
header.gof.pid_start = 14;
return header;
}
TEST(VideoFrameMetadataTest, H264MetadataEquality) {
RTPVideoHeaderH264 header = ExampleHeaderH264();
VideoFrameMetadata metadata_lhs;
metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);
VideoFrameMetadata metadata_rhs;
metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);
EXPECT_TRUE(metadata_lhs == metadata_rhs);
EXPECT_FALSE(metadata_lhs != metadata_rhs);
}
TEST(VideoFrameMetadataTest, H264MetadataInequality) {
RTPVideoHeaderH264 header = ExampleHeaderH264();
VideoFrameMetadata metadata_lhs;
metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);
VideoFrameMetadata metadata_rhs;
header.nalus[0].type = 17;
metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);
EXPECT_FALSE(metadata_lhs == metadata_rhs);
EXPECT_TRUE(metadata_lhs != metadata_rhs);
}
TEST(VideoFrameMetadataTest, VP9MetadataEquality) {
RTPVideoHeaderVP9 header = ExampleHeaderVP9();
VideoFrameMetadata metadata_lhs;
metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);
VideoFrameMetadata metadata_rhs;
metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);
EXPECT_TRUE(metadata_lhs == metadata_rhs);
EXPECT_FALSE(metadata_lhs != metadata_rhs);
}
TEST(VideoFrameMetadataTest, VP9MetadataInequality) {
RTPVideoHeaderVP9 header = ExampleHeaderVP9();
VideoFrameMetadata metadata_lhs;
metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);
VideoFrameMetadata metadata_rhs;
header.gof.pid_diff[0][0] = 42;
metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);
EXPECT_FALSE(metadata_lhs == metadata_rhs);
EXPECT_TRUE(metadata_lhs != metadata_rhs);
}
} // namespace
} // namespace webrtc

View file

@ -11,6 +11,8 @@
#ifndef API_VIDEO_VIDEO_FRAME_TYPE_H_
#define API_VIDEO_VIDEO_FRAME_TYPE_H_
#include "absl/strings/string_view.h"
namespace webrtc {
enum class VideoFrameType {
@ -21,6 +23,20 @@ enum class VideoFrameType {
kVideoFrameDelta = 4,
};
inline constexpr absl::string_view VideoFrameTypeToString(
VideoFrameType frame_type) {
if (frame_type == VideoFrameType::kEmptyFrame) {
return "empty";
}
if (frame_type == VideoFrameType::kVideoFrameKey) {
return "key";
}
if (frame_type == VideoFrameType::kVideoFrameDelta) {
return "delta";
}
return "";
}
} // namespace webrtc
#endif // API_VIDEO_VIDEO_FRAME_TYPE_H_
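A brief hedged usage note: because the helper is constexpr it can feed both logging and compile-time constants.
// Editorial sketch, not part of this diff.
constexpr absl::string_view kKeyName =
    VideoFrameTypeToString(VideoFrameType::kVideoFrameKey);  // "key"
RTC_LOG(LS_INFO) << "frame type: "
                 << VideoFrameTypeToString(VideoFrameType::kVideoFrameDelta);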

View file

@ -35,6 +35,7 @@ struct RTC_EXPORT VideoSinkWants {
VideoSinkWants();
VideoSinkWants(const VideoSinkWants&);
~VideoSinkWants();
// Tells the source whether the sink wants frames with rotation applied.
// By default, any rotation must be applied by the sink.
bool rotation_applied = false;
@ -84,8 +85,12 @@ struct RTC_EXPORT VideoSinkWants {
// This is the resolution requested by the user using RtpEncodingParameters.
absl::optional<FrameSize> requested_resolution;
// `active` : is (any) of the layers/sink(s) active.
bool is_active = true;
// `is_active` : Is this VideoSinkWants from an encoder that is encoding any
// layer. IF YES, it will affect how the VideoAdapter will choose to
// prioritize the OnOutputFormatRequest vs. requested_resolution. IF NO,
// VideoAdapter considers this VideoSinkWants a passive listener (e.g. a
// VideoRenderer or a VideoEncoder that is not currently actively encoding).
bool is_active = false;
// This sub-struct contains information computed by VideoBroadcaster
// that aggregates several VideoSinkWants (and sends them to

View file

@ -124,9 +124,6 @@ struct VideoPlayoutDelay {
}
};
// TODO(bugs.webrtc.org/7660): Old name, delete after downstream use is updated.
using PlayoutDelay = VideoPlayoutDelay;
} // namespace webrtc
#endif // API_VIDEO_VIDEO_TIMING_H_

View file

@ -139,9 +139,9 @@ rtc_library("builtin_video_encoder_factory") {
"../../api:scoped_refptr",
"../../media:codec",
"../../media:media_constants",
"../../media:rtc_encoder_simulcast_proxy",
"../../media:rtc_internal_video_codecs",
"../../media:rtc_media_base",
"../../media:rtc_simulcast_encoder_adapter",
"../../rtc_base:checks",
"../../rtc_base/system:rtc_export",
]

View file

@ -20,8 +20,8 @@
#include "api/video_codecs/video_encoder.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/engine/encoder_simulcast_proxy.h"
#include "media/engine/internal_encoder_factory.h"
#include "media/engine/simulcast_encoder_adapter.h"
#include "rtc_base/checks.h"
namespace webrtc {
@ -36,15 +36,17 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory {
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
const SdpVideoFormat& format) override {
// Try creating internal encoder.
std::unique_ptr<VideoEncoder> internal_encoder;
// Try creating an InternalEncoderFactory-backed SimulcastEncoderAdapter.
// The adapter has a passthrough mode for the case that simulcast is not
// used, so all responsibility can be delegated to it.
std::unique_ptr<VideoEncoder> encoder;
if (format.IsCodecInList(
internal_encoder_factory_->GetSupportedFormats())) {
internal_encoder = std::make_unique<EncoderSimulcastProxy>(
encoder = std::make_unique<SimulcastEncoderAdapter>(
internal_encoder_factory_.get(), format);
}
return internal_encoder;
return encoder;
}
std::vector<SdpVideoFormat> GetSupportedFormats() const override {

View file

@ -114,8 +114,8 @@ TEST(VideoDecoderFactoryTemplate, OpenH264) {
TEST(VideoDecoderFactoryTemplate, Dav1d) {
VideoDecoderFactoryTemplate<Dav1dDecoderTemplateAdapter> factory;
auto formats = factory.GetSupportedFormats();
EXPECT_THAT(formats.size(), 1);
EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "AV1"));
EXPECT_THAT(formats, Not(IsEmpty()));
EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "AV1")));
EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr));
}

View file

@ -22,8 +22,9 @@ using ::testing::Each;
using ::testing::Eq;
using ::testing::Field;
using ::testing::IsEmpty;
using ::testing::Ne;
using ::testing::IsNull;
using ::testing::Not;
using ::testing::NotNull;
using ::testing::UnorderedElementsAre;
namespace webrtc {
@ -68,8 +69,8 @@ struct BarEncoderTemplateAdapter {
TEST(VideoEncoderFactoryTemplate, OneTemplateAdapterCreateEncoder) {
VideoEncoderFactoryTemplate<FooEncoderTemplateAdapter> factory;
EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp));
EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), Eq(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), NotNull());
EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), IsNull());
}
TEST(VideoEncoderFactoryTemplate, OneTemplateAdapterCodecSupport) {
@ -97,11 +98,11 @@ TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersCreateEncoders) {
factory;
EXPECT_THAT(factory.GetSupportedFormats(),
UnorderedElementsAre(kFooSdp, kBarLowSdp, kBarHighSdp));
EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(kBarLowSdp), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(kBarHighSdp), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), Eq(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("Bar")), Eq(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), NotNull());
EXPECT_THAT(factory.CreateVideoEncoder(kBarLowSdp), NotNull());
EXPECT_THAT(factory.CreateVideoEncoder(kBarHighSdp), NotNull());
EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), IsNull());
EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("Bar")), NotNull());
}
TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersCodecSupport) {
@ -131,7 +132,7 @@ TEST(VideoEncoderFactoryTemplate, LibvpxVp8) {
EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "VP8"));
EXPECT_THAT(formats[0], Field(&SdpVideoFormat::scalability_modes,
Contains(ScalabilityMode::kL1T3)));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull());
}
TEST(VideoEncoderFactoryTemplate, LibvpxVp9) {
@ -141,7 +142,7 @@ TEST(VideoEncoderFactoryTemplate, LibvpxVp9) {
EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "VP9")));
EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::scalability_modes,
Contains(ScalabilityMode::kL3T3_KEY))));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull());
}
// TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build
@ -154,7 +155,7 @@ TEST(VideoEncoderFactoryTemplate, OpenH264) {
EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "H264")));
EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::scalability_modes,
Contains(ScalabilityMode::kL1T3))));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull());
}
#endif // defined(WEBRTC_USE_H264)
@ -165,7 +166,7 @@ TEST(VideoEncoderFactoryTemplate, LibaomAv1) {
EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "AV1"));
EXPECT_THAT(formats[0], Field(&SdpVideoFormat::scalability_modes,
Contains(ScalabilityMode::kL3T3_KEY)));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull());
}
} // namespace

View file

@ -796,6 +796,37 @@ TEST(SoftwareFallbackEncoderTest, ReportsHardwareAccelerated) {
EXPECT_FALSE(wrapper->GetEncoderInfo().is_hardware_accelerated);
}
TEST(SoftwareFallbackEncoderTest, ConfigureHardwareOnSecondAttempt) {
auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
EXPECT_CALL(*sw_encoder, GetEncoderInfo())
.WillRepeatedly(Return(GetEncoderInfoWithHardwareAccelerated(false)));
EXPECT_CALL(*hw_encoder, GetEncoderInfo())
.WillRepeatedly(Return(GetEncoderInfoWithHardwareAccelerated(true)));
std::unique_ptr<VideoEncoder> wrapper =
CreateVideoEncoderSoftwareFallbackWrapper(
std::unique_ptr<VideoEncoder>(sw_encoder),
std::unique_ptr<VideoEncoder>(hw_encoder));
EXPECT_TRUE(wrapper->GetEncoderInfo().is_hardware_accelerated);
// Initialize the encoder. When HW attempt fails we fallback to SW.
VideoCodec codec_ = {};
codec_.width = 100;
codec_.height = 100;
EXPECT_CALL(*hw_encoder, InitEncode(_, _))
.WillOnce(Return(WEBRTC_VIDEO_CODEC_ERR_PARAMETER));
EXPECT_CALL(*sw_encoder, InitEncode(_, _))
.WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
wrapper->InitEncode(&codec_, kSettings);
// When reconfiguring (Release+InitEncode) we should re-attempt HW.
wrapper->Release();
EXPECT_CALL(*hw_encoder, InitEncode(_, _))
.WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
wrapper->InitEncode(&codec_, kSettings);
}
class PreferTemporalLayersFallbackTest : public ::testing::Test {
public:
PreferTemporalLayersFallbackTest() {}

View file

@ -148,6 +148,15 @@ class RTC_EXPORT VideoCodec {
bool active;
unsigned int qpMax;
// The actual number of simulcast streams. This is <= 1 in singlecast (it can
// be 0 in old code paths), but it is also 1 in the {active,inactive,inactive}
// "single RTP simulcast" use case and the legacy kSVC use case. In all other
// cases this is the same as the number of encodings (which may include
// inactive encodings). In other words:
// - `numberOfSimulcastStreams <= 1` in singlecast and singlecast-like setups
// including legacy kSVC (encodings interpreted as spatial layers) or
// standard kSVC (1 active encoding).
// - `numberOfSimulcastStreams > 1` in simulcast of 2+ active encodings.
unsigned char numberOfSimulcastStreams;
SimulcastStream simulcastStream[kMaxSimulcastStreams];
SpatialLayer spatialLayers[kMaxSpatialLayers];

View file

@ -14,13 +14,17 @@
#include <memory>
#include <vector>
#include "api/video_codecs/av1_profile.h"
#include "api/video_codecs/sdp_video_format.h"
#include "modules/video_coding/codecs/av1/dav1d_decoder.h"
namespace webrtc {
struct Dav1dDecoderTemplateAdapter {
static std::vector<SdpVideoFormat> SupportedFormats() {
return {SdpVideoFormat("AV1")};
return {SdpVideoFormat("AV1"),
SdpVideoFormat(
"AV1", {{kAV1FmtpProfile,
AV1ProfileToString(AV1Profile::kProfile1).data()}})};
}
static std::unique_ptr<VideoDecoder> CreateDecoder(

View file

@ -11,6 +11,7 @@
#include "api/video_codecs/video_encoder.h"
#include <string.h>
#include <algorithm>
#include "rtc_base/checks.h"

View file

@ -17,6 +17,7 @@
#include "absl/algorithm/container.h"
#include "api/array_view.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/video_coding/svc/scalability_mode_util.h"
@ -51,7 +52,16 @@ class VideoEncoderFactoryTemplate : public VideoEncoderFactory {
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
const SdpVideoFormat& format) override {
return CreateVideoEncoderInternal<Ts...>(format);
// We fuzzy match the specified format for both valid and not so valid
// reasons. The valid reason is that there are many standardized codec
// specific fmtp parameters that have not been implemented, and in those
// cases we should not fail to instantiate an encoder just because we don't
// recognize the parameter. The not so valid reason is that we have started
// adding parameters completely unrelated to the SDP to the SdpVideoFormat.
// TODO(bugs.webrtc.org/13868): Remove FuzzyMatchSdpVideoFormat
absl::optional<SdpVideoFormat> matched =
FuzzyMatchSdpVideoFormat(GetSupportedFormats(), format);
return CreateVideoEncoderInternal<Ts...>(matched.value_or(format));
}
CodecSupport QueryCodecSupport(
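To make the fuzzy-match behavior concrete, a hedged sketch: a format whose fmtp carries a parameter the factory does not recognize still resolves to the adapter's supported format instead of failing encoder creation. The VP8 adapter is used here purely as an example.
// Editorial sketch, not part of this diff.
VideoEncoderFactoryTemplate<LibvpxVp8EncoderTemplateAdapter> factory;
std::unique_ptr<VideoEncoder> encoder = factory.CreateVideoEncoder(
    SdpVideoFormat("VP8", {{"unknown-fmtp-param", "1"}}));
// With fuzzy matching, the unknown parameter does not prevent creation.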

View file

@ -24,7 +24,7 @@ struct LibvpxVp9EncoderTemplateAdapter {
static std::unique_ptr<VideoEncoder> CreateEncoder(
const SdpVideoFormat& format) {
return VP9Encoder::Create(cricket::VideoCodec(format));
return VP9Encoder::Create(cricket::CreateVideoCodec(format));
}
static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) {

View file

@ -31,7 +31,7 @@ struct OpenH264EncoderTemplateAdapter {
static std::unique_ptr<VideoEncoder> CreateEncoder(
const SdpVideoFormat& format) {
#if defined(WEBRTC_USE_H264)
return H264Encoder::Create(cricket::VideoCodec(format));
return H264Encoder::Create(cricket::CreateVideoCodec(format));
#else
return nullptr;
#endif

Some files were not shown because too many files have changed in this diff.