Merge with 4183 (M85) (#25)

This commit is contained in:
Peter Thatcher 2020-09-01 14:43:30 -07:00 committed by GitHub
parent 96787db848
commit e502386463
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
812 changed files with 30848 additions and 17159 deletions

View file

@ -92,6 +92,8 @@ Raman Budny <budnyjj@gmail.com>
Stephan Hartmann <stha09@googlemail.com>
&yet LLC <*@andyet.com>
8x8 Inc. <*@sip-communicator.org>
8x8 Inc. <*@8x8.com>
Agora IO <*@agora.io>
ARM Holdings <*@arm.com>
BroadSoft Inc. <*@broadsoft.com>
@ -108,6 +110,7 @@ Opera Software ASA <*@opera.com>
Optical Tone Ltd <*@opticaltone.com>
Pengutronix e.K. <*@pengutronix.de>
RingCentral, Inc. <*@ringcentral.com>
Signal Messenger, LLC <*@signal.org>
Sinch AB <*@sinch.com>
Signal Messenger, LLC <*signal.org>
struktur AG <*@struktur.de>

View file

@ -265,6 +265,10 @@ config("common_config") {
defines += [ "WEBRTC_USE_H264" ]
}
if (rtc_use_absl_mutex) {
defines += [ "WEBRTC_ABSL_MUTEX" ]
}
if (rtc_disable_logging) {
defines += [ "RTC_DISABLE_LOGGING" ]
}
@ -581,6 +585,14 @@ if (rtc_include_tests) {
}
}
rtc_test("benchmarks") {
testonly = true
deps = [
"rtc_base/synchronization:mutex_benchmark",
"test:benchmark_main",
]
}
# This runs tests that must run in real time and therefore can take some
# time to execute. They are in a separate executable to avoid making the
# regular unittest suite too slow to run frequently.

162
DEPS
View file

@ -8,37 +8,37 @@ vars = {
# chromium waterfalls. More info at: crbug.com/570091.
'checkout_configuration': 'default',
'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"',
'chromium_revision': '8ffd72401d4e9b12d3b8979c8ef9549d32741e8c',
'chromium_revision': '4d95e6c77b6c37d8ea56bb81f14cb9c12a1cc1a3',
}
deps = {
# TODO(kjellander): Move this to be Android-only once the libevent dependency
# in base/third_party/libevent is solved.
'src/base':
'https://chromium.googlesource.com/chromium/src/base@e6c0c5b9adccfbc6d1f810cf15b300c3ce19107b',
'https://chromium.googlesource.com/chromium/src/base@2df7267880bf7d4086d55c0e56cd72c394bfda36',
'src/build':
'https://chromium.googlesource.com/chromium/src/build@f70e3b9685e03176b131ef03d185ba367e981c1d',
'https://chromium.googlesource.com/chromium/src/build@a03951acb996e9cea78b4ab575896bf1bfcd9668',
'src/buildtools':
'https://chromium.googlesource.com/chromium/src/buildtools@204a35a2a64f7179f8b76d7a0385653690839e21',
'https://chromium.googlesource.com/chromium/src/buildtools@1b066f021638735d72aa799ae6bc37e0b8963c67',
# Gradle 4.3-rc4. Used for testing Android Studio project generation for WebRTC.
'src/examples/androidtests/third_party/gradle': {
'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@89af43c4d0506f69980f00dde78c97b2f81437f8',
'condition': 'checkout_android',
},
'src/ios': {
'url': 'https://chromium.googlesource.com/chromium/src/ios@7b694bd9367de782f0ae61fa0c713d2a0745c412',
'url': 'https://chromium.googlesource.com/chromium/src/ios@9200aad36b240166dcf8d771b95102f8193e737f',
'condition': 'checkout_ios',
},
'src/testing':
'https://chromium.googlesource.com/chromium/src/testing@5a5fb44e80d7fa2a1bb7c86467d7217335e6bae8',
'https://chromium.googlesource.com/chromium/src/testing@502600d41a00af23dd09e02ea358061e3c951634',
'src/third_party':
'https://chromium.googlesource.com/chromium/src/third_party@57686d64cb3c949799993d7732c981c64d9d47f4',
'https://chromium.googlesource.com/chromium/src/third_party@e0df6e10adc084f88dda51c0cbab84645db6c135',
'src/buildtools/linux64': {
'packages': [
{
'package': 'gn/gn/linux-amd64',
'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
'version': 'git_revision:d0a6f072070988e7b038496c4e7d6c562b649732',
}
],
'dep_type': 'cipd',
@ -48,7 +48,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/mac-amd64',
'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
'version': 'git_revision:d0a6f072070988e7b038496c4e7d6c562b649732',
}
],
'dep_type': 'cipd',
@ -58,7 +58,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/windows-amd64',
'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
'version': 'git_revision:d0a6f072070988e7b038496c4e7d6c562b649732',
}
],
'dep_type': 'cipd',
@ -119,20 +119,20 @@ deps = {
},
'src/third_party/boringssl/src':
'https://boringssl.googlesource.com/boringssl.git@a810d82575ecbde26406fa583371f807f8721ed7',
'https://boringssl.googlesource.com/boringssl.git@88024df12147e56b6abd66b743ff441a0aaa09a8',
'src/third_party/breakpad/breakpad':
'https://chromium.googlesource.com/breakpad/breakpad.git@2ffe116322aa4373d408a72b665fa7fe7a504d4a',
'https://chromium.googlesource.com/breakpad/breakpad.git@2757a2c9c819fcae3784576aef0c8400c7ad06d7',
'src/third_party/catapult':
'https://chromium.googlesource.com/catapult.git@087cffcba472d70f3d0b1115d0b9100c365073d1',
'https://chromium.googlesource.com/catapult.git@2ad47493f833c5191f56c74d3f1aac10e7c105e8',
'src/third_party/ced/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5',
},
'src/third_party/colorama/src':
'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8',
'src/third_party/depot_tools':
'https://chromium.googlesource.com/chromium/tools/depot_tools.git@30ef5cb43761b8536b071a26ca59fca17e6a7de6',
'https://chromium.googlesource.com/chromium/tools/depot_tools.git@37e562110fa58a913b13ed2258f18449f90c6ad7',
'src/third_party/ffmpeg':
'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@587a3f48499df05d3c65f1529fd08b0783217b39',
'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@be66dc5fd0e3c53646107b2dc5d7594a869ebdc6',
'src/third_party/findbugs': {
'url': 'https://chromium.googlesource.com/chromium/deps/findbugs.git@4275d9ac8610db6b1bc9a5e887f97e41b33fac67',
'condition': 'checkout_android',
@ -143,9 +143,12 @@ deps = {
'condition': 'checkout_linux',
},
'src/third_party/freetype/src':
'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@3f70e6d20c82b28174096adcd0657b3c998b007b',
'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@62fea391fa9993f8c1d206a50080d690178ce518',
'src/third_party/harfbuzz-ng/src':
'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@100d40c827eb8336b2b671856f151275d47e71ad',
'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@e3af529e511ca492284cdd9f4584666b88a9e00f',
'src/third_party/google_benchmark/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@367119482ff4abc3d73e4a109b410090fc281337',
},
# WebRTC-only dependency (not present in Chromium).
'src/third_party/gtest-parallel':
'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@df0b4e476f98516cea7d593e5dbb0fca44f6ee7f',
@ -160,9 +163,9 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/googletest/src':
'https://chromium.googlesource.com/external/github.com/google/googletest.git@a09ea700d32bab83325aff9ff34d0582e50e3997',
'https://chromium.googlesource.com/external/github.com/google/googletest.git@4fe018038f87675c083d0cfb6a6b57c274fb1753',
'src/third_party/icu': {
'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@f2223961702f00a8833874b0560d615a2cc42738',
'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@630b884f84d1d5e92aeda3463dca99fe2befd30e',
},
'src/third_party/jdk': {
'packages': [
@ -192,13 +195,15 @@ deps = {
'src/third_party/libsrtp':
'https://chromium.googlesource.com/chromium/deps/libsrtp.git@650611720ecc23e0e6b32b0e3100f8b4df91696c',
'src/third_party/libaom/source/libaom':
'https://aomedia.googlesource.com/aom.git@c810066815b80dd1ac8ade15170ce962d6646368',
'https://aomedia.googlesource.com/aom.git@2aa13c436e4dc6d78e05d13e6be73a23b3810bd3',
'src/third_party/libunwindstack': {
'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@acf93761dc00ac67bd7534c4040699abed4f8d94',
'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@046920fc491aba67c6f6a750b4be7b835cff4e5b',
'condition': 'checkout_android',
},
'src/third_party/perfetto':
'https://android.googlesource.com/platform/external/perfetto.git@60cf022c0223b4c28424509dca35e347872c4832',
'src/third_party/libvpx/source/libvpx':
'https://chromium.googlesource.com/webm/libvpx.git@77960f37b3d328cf7552f6cd69a083f4005aed7b',
'https://chromium.googlesource.com/webm/libvpx.git@c1765573149e2c0fe2acabc224c0f9085b9e7f2b',
'src/third_party/libyuv':
'https://chromium.googlesource.com/libyuv/libyuv.git@6afd9becdf58822b1da6770598d8597c583ccfad',
'src/third_party/lss': {
@ -221,7 +226,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/r8',
'version': 'UAycWqc5QfELtJhhnoU4jQHjsyxPjRNyZ0EfvlojaY4C',
'version': 'gobCh01BNwJNyLHHNFUmLWSMaAbe4x3izuzBFzxQpDoC',
},
],
'condition': 'checkout_android',
@ -241,10 +246,6 @@ deps = {
'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@f172b30356d821d180fa4ecfa3e71c7274a32de4',
'condition': 'checkout_android',
},
'src/third_party/robolectric/robolectric': {
'url': 'https://chromium.googlesource.com/external/robolectric.git@f2df0efb033bb402399ebfb9bf58aefee5cced05',
'condition': 'checkout_android',
},
'src/third_party/ub-uiautomator/lib': {
'url': 'https://chromium.googlesource.com/chromium/third_party/ub-uiautomator.git@00270549ce3161ae72ceb24712618ea28b4f9434',
'condition': 'checkout_android',
@ -257,9 +258,9 @@ deps = {
'condition': 'checkout_win',
},
'src/tools':
'https://chromium.googlesource.com/chromium/src/tools@b64de32dc94866c1446065a6ce8703c856dd518b',
'https://chromium.googlesource.com/chromium/src/tools@050a4a5e267e98c79fe632d84bbc2fbaa4d22fd4',
'src/tools/swarming_client':
'https://chromium.googlesource.com/infra/luci/client-py.git@160b445a44e0daacf6f3f8570ca2707ec451f374',
'https://chromium.googlesource.com/infra/luci/client-py.git@4c095d04179dc725a300085ae21fe3b79900d072',
'src/third_party/accessibility_test_framework': {
'packages': [
@ -617,6 +618,17 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_annotation_annotation_experimental': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation_experimental',
'version': 'version:1.0.0-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_appcompat_appcompat': {
'packages': [
{
@ -709,7 +721,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_coordinatorlayout_coordinatorlayout',
'version': 'version:1.0.0-cr0',
'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@ -1039,7 +1051,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_recyclerview_recyclerview',
'version': 'version:1.0.0-cr0',
'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@ -1090,11 +1102,22 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib',
'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_test_espresso_espresso_core': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_core',
'version': 'version:3.1.0-cr0',
'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
@ -1105,7 +1128,29 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource',
'version': 'version:3.1.0-cr0',
'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_test_espresso_espresso_intents': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_intents',
'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_test_espresso_espresso_web': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_web',
'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
@ -1171,7 +1216,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_transition_transition',
'version': 'version:1.0.0-rc02-cr0',
'version': 'version:1.2.0-cr0',
},
],
'condition': 'checkout_android',
@ -1222,6 +1267,17 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_viewpager2_viewpager2': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_viewpager2_viewpager2',
'version': 'version:1.0.0-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/androidx_viewpager_viewpager': {
'packages': [
{
@ -1952,7 +2008,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material',
'version': 'version:1.0.0-rc02-cr0',
'version': 'version:1.2.0-alpha06-cr0',
},
],
'condition': 'checkout_android',
@ -2216,7 +2272,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite',
'version': 'version:3.11.4-cr0',
'version': 'version:3.12.2-cr0',
},
],
'condition': 'checkout_android',
@ -2520,6 +2576,17 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup',
'version': 'version:1.2.1-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_checkerframework_checker_compat_qual': {
'packages': [
{
@ -2861,6 +2928,28 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_robolectric_shadows_multidex': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_multidex',
'version': 'version:4.3.1-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_robolectric_shadows_playservices': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices',
'version': 'version:4.3.1-cr0',
},
],
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'src/third_party/android_deps/libs/org_robolectric_utils': {
'packages': [
{
@ -3140,6 +3229,7 @@ include_rules = [
"+absl/algorithm/container.h",
"+absl/base/attributes.h",
"+absl/base/config.h",
"+absl/base/const_init.h",
"+absl/base/macros.h",
"+absl/container/inlined_vector.h",
"+absl/memory/memory.h",

View file

@ -47,10 +47,13 @@ CPPLINT_BLACKLIST = [
# - build/c++11 : Rvalue ref checks are unreliable (false positives),
# include file and feature blacklists are
# google3-specific.
# - runtime/references : Mutable references are not banned by the Google
# C++ style guide anymore (starting from May 2020).
# - whitespace/operators: Same as above (doesn't seem sufficient to eliminate
# all move-related errors).
BLACKLIST_LINT_FILTERS = [
'-build/c++11',
'-runtime/references',
'-whitespace/operators',
]
@ -94,15 +97,20 @@ LEGACY_API_DIRS = (
API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:]
# TARGET_RE matches a GN target, and extracts the target name and the contents.
TARGET_RE = re.compile(r'(?P<indent>\s*)\w+\("(?P<target_name>\w+)"\) {'
r'(?P<target_contents>.*?)'
r'(?P=indent)}',
re.MULTILINE | re.DOTALL)
TARGET_RE = re.compile(
r'(?P<indent>\s*)(?P<target_type>\w+)\("(?P<target_name>\w+)"\) {'
r'(?P<target_contents>.*?)'
r'(?P=indent)}',
re.MULTILINE | re.DOTALL)
# SOURCES_RE matches a block of sources inside a GN target.
SOURCES_RE = re.compile(r'sources \+?= \[(?P<sources>.*?)\]',
re.MULTILINE | re.DOTALL)
# DEPS_RE matches a block of sources inside a GN target.
DEPS_RE = re.compile(r'\bdeps \+?= \[(?P<deps>.*?)\]',
re.MULTILINE | re.DOTALL)
# FILE_PATH_RE matchies a file path.
FILE_PATH_RE = re.compile(r'"(?P<file_path>(\w|\/)+)(?P<extension>\.\w+)"')
@ -338,6 +346,37 @@ def CheckNoSourcesAbove(input_api, gn_files, output_api):
return []
def CheckAbseilDependencies(input_api, gn_files, output_api):
"""Checks that Abseil dependencies are declared in `absl_deps`."""
absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL)
target_types_to_check = [
'rtc_library',
'rtc_source_set',
'rtc_static_library',
'webrtc_fuzzer_test',
]
error_msg = ('Abseil dependencies in target "%s" (file: %s) '
'should be moved to the "absl_deps" parameter.')
errors = []
for gn_file in gn_files:
gn_file_content = input_api.ReadFile(gn_file)
for target_match in TARGET_RE.finditer(gn_file_content):
target_type = target_match.group('target_type')
target_name = target_match.group('target_name')
target_contents = target_match.group('target_contents')
if target_type in target_types_to_check:
for deps_match in DEPS_RE.finditer(target_contents):
deps = deps_match.group('deps').splitlines()
for dep in deps:
if re.search(absl_re, dep):
errors.append(
output_api.PresubmitError(error_msg % (target_name,
gn_file.LocalPath())))
break # no need to warn more than once per target
return errors
def CheckNoMixingSources(input_api, gn_files, output_api):
"""Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target.
@ -577,6 +616,7 @@ def CheckGnChanges(input_api, output_api):
if gn_files:
result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api))
result.extend(CheckNoMixingSources(input_api, gn_files, output_api))
result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files,
output_api))
result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))

View file

@ -51,3 +51,13 @@ At branch-heads/4147:
- Support for 5G in network stack
- Remove SRTP_AES128_CM_SHA1_80
- Allow forInjectable APM?
At branch-heads/4183:
- Video adapation refactored
- Refactored audio frame length (ptime) adapation
- Support for audio RED payload type added
- Support for audio RED payload type added
- Frame Marking RTP header extension removed
- MediaTransportInterface and DatagramTransportInterface removed
- Switch to using absl::MutexLock
- RtpRtcp module moved to RtpRtcpInterface + RtpRtcpModule2

View file

@ -23,9 +23,11 @@ adds the first use.
* `absl::variant` and related stuff from `absl/types/variant.h`.
* The functions in `absl/algorithm/algorithm.h` and
`absl/algorithm/container.h`.
* `absl/base/const_init.h` for mutex initialization.
* The macros in `absl/base/attributes.h`, `absl/base/config.h` and
`absl/base/macros.h`.
## **Disallowed**
### `absl::make_unique`
@ -34,7 +36,7 @@ adds the first use.
### `absl::Mutex`
*Use `rtc::CriticalSection` instead.*
*Use `webrtc::Mutex` instead.*
Chromium has a ban on new static initializers, and `absl::Mutex` uses
one. To make `absl::Mutex` available, we would need to nicely ask the
@ -61,3 +63,12 @@ has decided if they will change `absl::Span` to match.
These are optimized for speed, not binary size. Even `StrCat` calls
with a modest number of arguments can easily add several hundred bytes
to the binary.
## How to depend on Abseil
For build targets `rtc_library`, `rtc_source_set` and `rtc_static_library`,
dependencies on Abseil need to be listed in `absl_deps` instead of `deps`.
This is needed in order to support the Abseil component build in Chromium. In
such build mode, WebRTC will depend on a unique Abseil build target what will
generate a shared library.

View file

@ -71,8 +71,8 @@ rtc_library("rtp_headers") {
"..:webrtc_common",
"units:timestamp",
"video:video_rtp_headers",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtp_packet_info") {
@ -90,8 +90,8 @@ rtc_library("rtp_packet_info") {
"..:webrtc_common",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("media_stream_interface") {
@ -111,8 +111,8 @@ rtc_library("media_stream_interface") {
"../rtc_base/system:rtc_export",
"video:recordable_encoded_frame",
"video:video_frame",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("libjingle_peerconnection_api") {
@ -167,12 +167,14 @@ rtc_library("libjingle_peerconnection_api") {
":media_stream_interface",
":network_state_predictor_api",
":packet_socket_factory",
":priority",
":rtc_error",
":rtc_stats_api",
":rtp_packet_info",
":rtp_parameters",
":rtp_transceiver_direction",
":scoped_refptr",
"adaptation:resource_adaptation_api",
"audio:audio_mixer_api",
"audio_codecs:audio_codecs_api",
"crypto:frame_decryptor_interface",
@ -182,23 +184,15 @@ rtc_library("libjingle_peerconnection_api") {
"rtc_event_log",
"task_queue",
"transport:bitrate_settings",
"transport:datagram_transport_interface",
"transport:enums",
"transport:network_control",
"transport:webrtc_key_value_config",
"transport/media:audio_interfaces",
"transport/media:media_transport_interface",
"transport/media:video_interfaces",
"transport/rtp:rtp_source",
"units:data_rate",
"units:timestamp",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
# Basically, don't add stuff here. You might break sensitive downstream
# targets like pnacl. API should not depend on anything outside of this
@ -213,6 +207,12 @@ rtc_library("libjingle_peerconnection_api") {
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
rtc_source_set("frame_transformer_interface") {
@ -222,6 +222,7 @@ rtc_source_set("frame_transformer_interface") {
":scoped_refptr",
"../rtc_base:refcount",
"video:encoded_frame",
"video:video_frame_metadata",
]
}
@ -236,8 +237,8 @@ rtc_library("rtc_error") {
"../rtc_base:logging",
"../rtc_base:macromagic",
"../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("packet_socket_factory") {
@ -273,7 +274,6 @@ rtc_source_set("video_quality_test_fixture_api") {
"../test:video_test_common",
"transport:bitrate_settings",
"transport:network_control",
"transport/media:media_transport_interface",
"video_codecs:video_codecs_api",
]
}
@ -284,11 +284,15 @@ rtc_source_set("video_quality_analyzer_api") {
sources = [ "test/video_quality_analyzer_interface.h" ]
deps = [
":array_view",
":stats_observer_interface",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
"video_codecs:video_codecs_api",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@ -303,6 +307,10 @@ rtc_source_set("rtp_transceiver_direction") {
sources = [ "rtp_transceiver_direction.h" ]
}
rtc_source_set("priority") {
sources = [ "priority.h" ]
}
rtc_library("rtp_parameters") {
visibility = [ "*" ]
sources = [
@ -313,18 +321,21 @@ rtc_library("rtp_parameters") {
]
deps = [
":array_view",
":priority",
":rtp_transceiver_direction",
"../rtc_base:checks",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
if (is_android) {
java_cpp_enum("rtp_parameters_enums") {
sources = [ "rtp_parameters.h" ]
java_cpp_enum("priority_enums") {
sources = [ "priority.h" ]
}
}
@ -344,11 +355,9 @@ rtc_source_set("stats_observer_interface") {
testonly = true
sources = [ "test/stats_observer_interface.h" ]
deps = [
# For api/stats_types.h
":libjingle_peerconnection_api",
":rtp_parameters",
]
deps = [ ":rtc_stats_api" ]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_source_set("peer_connection_quality_test_fixture_api") {
@ -371,14 +380,16 @@ rtc_source_set("peer_connection_quality_test_fixture_api") {
":stats_observer_interface",
":video_quality_analyzer_api",
"../media:rtc_media_base",
"../rtc_base:deprecation",
"../rtc_base:rtc_base",
"rtc_event_log",
"task_queue",
"transport:network_control",
"transport/media:media_transport_interface",
"units:time_delta",
"video:video_frame",
"video_codecs:video_codecs_api",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@ -393,8 +404,8 @@ rtc_source_set("frame_generator_api") {
deps = [
":scoped_refptr",
"video:video_frame",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("test_dependency_factory") {
@ -407,7 +418,7 @@ rtc_library("test_dependency_factory") {
deps = [
":video_quality_test_fixture_api",
"../rtc_base:checks",
"../rtc_base:thread_checker",
"../rtc_base:platform_thread_types",
]
}
@ -471,8 +482,8 @@ rtc_library("create_frame_generator") {
"../rtc_base:checks",
"../system_wrappers",
"../test:frame_generator_impl",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("create_peer_connection_quality_test_frame_generator") {
@ -542,8 +553,8 @@ rtc_library("audio_options_api") {
":array_view",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("transport_api") {
@ -570,8 +581,8 @@ rtc_source_set("simulated_network_api") {
deps = [
"../rtc_base",
"../rtc_base:criticalsection",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
# TODO(srte): Move to network_emulation sub directory.
@ -704,6 +715,8 @@ if (rtc_include_tests) {
"../modules/audio_coding:neteq_test_factory",
"../rtc_base:checks",
"neteq:neteq_api",
]
absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/strings",
@ -854,6 +867,7 @@ if (rtc_include_tests) {
}
rtc_source_set("mock_peerconnectioninterface") {
visibility = [ "*" ]
testonly = true
sources = [ "test/mock_peerconnectioninterface.h" ]
@ -863,6 +877,17 @@ if (rtc_include_tests) {
]
}
rtc_source_set("mock_peer_connection_factory_interface") {
visibility = [ "*" ]
testonly = true
sources = [ "test/mock_peer_connection_factory_interface.h" ]
deps = [
":libjingle_peerconnection_api",
"../test:test_support",
]
}
rtc_source_set("mock_rtp") {
testonly = true
sources = [
@ -876,6 +901,16 @@ if (rtc_include_tests) {
]
}
rtc_source_set("mock_transformable_video_frame") {
testonly = true
sources = [ "test/mock_transformable_video_frame.h" ]
deps = [
":frame_transformer_interface",
"../test:test_support",
]
}
rtc_source_set("mock_video_bitrate_allocator") {
testonly = true
sources = [ "test/mock_video_bitrate_allocator.h" ]
@ -933,39 +968,6 @@ if (rtc_include_tests) {
]
}
rtc_source_set("fake_media_transport") {
testonly = true
sources = [
"test/fake_datagram_transport.h",
"test/fake_media_transport.h",
]
deps = [
"../rtc_base:checks",
"transport:datagram_transport_interface",
"transport/media:media_transport_interface",
"//third_party/abseil-cpp/absl/algorithm:container",
]
}
rtc_library("loopback_media_transport") {
testonly = true
sources = [
"test/loopback_media_transport.cc",
"test/loopback_media_transport.h",
]
deps = [
"../rtc_base",
"../rtc_base:checks",
"transport:datagram_transport_interface",
"transport/media:media_transport_interface",
"//third_party/abseil-cpp/absl/algorithm:container",
]
}
rtc_library("create_time_controller") {
visibility = [ "*" ]
testonly = true
@ -996,7 +998,6 @@ if (rtc_include_tests) {
"rtp_parameters_unittest.cc",
"scoped_refptr_unittest.cc",
"test/create_time_controller_unittest.cc",
"test/loopback_media_transport_unittest.cc",
]
deps = [
@ -1004,7 +1005,6 @@ if (rtc_include_tests) {
":create_time_controller",
":function_view",
":libjingle_peerconnection_api",
":loopback_media_transport",
":rtc_error",
":rtc_event_log_output_file",
":rtp_packet_info",
@ -1035,13 +1035,13 @@ if (rtc_include_tests) {
":dummy_peer_connection",
":fake_frame_decryptor",
":fake_frame_encryptor",
":fake_media_transport",
":loopback_media_transport",
":mock_audio_mixer",
":mock_frame_decryptor",
":mock_frame_encryptor",
":mock_peer_connection_factory_interface",
":mock_peerconnectioninterface",
":mock_rtp",
":mock_transformable_video_frame",
":mock_video_bitrate_allocator",
":mock_video_bitrate_allocator_factory",
":mock_video_codec_factory",

View file

@ -115,11 +115,6 @@ specific_include_rules = {
"+rtc_base/ref_count.h",
],
"media_transport_interface\.h": [
"+rtc_base/copy_on_write_buffer.h", # As used by datachannelinterface.h
"+rtc_base/network_route.h",
],
"packet_socket_factory\.h": [
"+rtc_base/proxy_info.h",
"+rtc_base/async_packet_socket.h",

23
api/adaptation/BUILD.gn Normal file
View file

@ -0,0 +1,23 @@
# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved.
#
# Use of this source code is governed by a BSD - style license
# that can be found in the LICENSE file in the root of the source
# tree.An additional intellectual property rights grant can be found
# in the file PATENTS.All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../webrtc.gni")
rtc_source_set("resource_adaptation_api") {
visibility = [ "*" ]
sources = [
"resource.cc",
"resource.h",
]
deps = [
"../../api:scoped_refptr",
"../../rtc_base:refcount",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
]
}

7
api/adaptation/DEPS Normal file
View file

@ -0,0 +1,7 @@
specific_include_rules = {
"resource\.h": [
# ref_count.h is a public_deps of rtc_base_approved. Necessary because of
# rtc::RefCountInterface.
"+rtc_base/ref_count.h",
],
}

View file

@ -0,0 +1,30 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/adaptation/resource.h"
namespace webrtc {
const char* ResourceUsageStateToString(ResourceUsageState usage_state) {
switch (usage_state) {
case ResourceUsageState::kOveruse:
return "kOveruse";
case ResourceUsageState::kUnderuse:
return "kUnderuse";
}
}
ResourceListener::~ResourceListener() {}
Resource::Resource() {}
Resource::~Resource() {}
} // namespace webrtc

67
api/adaptation/resource.h Normal file
View file

@ -0,0 +1,67 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_ADAPTATION_RESOURCE_H_
#define API_ADAPTATION_RESOURCE_H_
#include <string>
#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
class Resource;
enum class ResourceUsageState {
// Action is needed to minimze the load on this resource.
kOveruse,
// Increasing the load on this resource is desired, if possible.
kUnderuse,
};
RTC_EXPORT const char* ResourceUsageStateToString(
ResourceUsageState usage_state);
class RTC_EXPORT ResourceListener {
public:
virtual ~ResourceListener();
virtual void OnResourceUsageStateMeasured(
rtc::scoped_refptr<Resource> resource,
ResourceUsageState usage_state) = 0;
};
// A Resource monitors an implementation-specific resource. It may report
// kOveruse or kUnderuse when resource usage is high or low enough that we
// should perform some sort of mitigation to fulfil the resource's constraints.
//
// The methods on this interface are invoked on the adaptation task queue.
// Resource usage measurements may be performed on an any task queue.
//
// The Resource is reference counted to prevent use-after-free when posting
// between task queues. As such, the implementation MUST NOT make any
// assumptions about which task queue Resource is destructed on.
class RTC_EXPORT Resource : public rtc::RefCountInterface {
public:
Resource();
// Destruction may happen on any task queue.
~Resource() override;
virtual std::string Name() const = 0;
// The |listener| may be informed of resource usage measurements on any task
// queue, but not after this method is invoked with the null argument.
virtual void SetResourceListener(ResourceListener* listener) = 0;
};
} // namespace webrtc
#endif // API_ADAPTATION_RESOURCE_H_

View file

@ -38,7 +38,7 @@ void CallFixed(ArrayView<T, N> av) {}
} // namespace
TEST(ArrayViewTest, TestConstructFromPtrAndArray) {
TEST(ArrayViewDeathTest, TestConstructFromPtrAndArray) {
char arr[] = "Arrr!";
const char carr[] = "Carrr!";
EXPECT_EQ(6u, Call<const char>(arr));
@ -409,7 +409,7 @@ TEST(FixArrayViewTest, TestSwapFixed) {
// swap(x, w); // Compile error, because different sizes.
}
TEST(ArrayViewTest, TestIndexing) {
TEST(ArrayViewDeathTest, TestIndexing) {
char arr[] = "abcdefg";
ArrayView<char> x(arr);
const ArrayView<char> y(arr);

View file

@ -61,8 +61,8 @@ rtc_library("aec3_config_json") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_json",
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/strings",
]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("aec3_factory") {

View file

@ -11,6 +11,8 @@
#include "api/audio/audio_frame.h"
#include <string.h>
#include <algorithm>
#include <utility>
#include "rtc_base/checks.h"
#include "rtc_base/time_utils.h"
@ -22,6 +24,28 @@ AudioFrame::AudioFrame() {
static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes");
}
void swap(AudioFrame& a, AudioFrame& b) {
using std::swap;
swap(a.timestamp_, b.timestamp_);
swap(a.elapsed_time_ms_, b.elapsed_time_ms_);
swap(a.ntp_time_ms_, b.ntp_time_ms_);
swap(a.samples_per_channel_, b.samples_per_channel_);
swap(a.sample_rate_hz_, b.sample_rate_hz_);
swap(a.num_channels_, b.num_channels_);
swap(a.channel_layout_, b.channel_layout_);
swap(a.speech_type_, b.speech_type_);
swap(a.vad_activity_, b.vad_activity_);
swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_);
swap(a.packet_infos_, b.packet_infos_);
const size_t length_a = a.samples_per_channel_ * a.num_channels_;
const size_t length_b = b.samples_per_channel_ * b.num_channels_;
RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples);
RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples);
std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_);
swap(a.muted_, b.muted_);
swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_);
}
void AudioFrame::Reset() {
ResetWithoutMuting();
muted_ = true;

View file

@ -14,6 +14,8 @@
#include <stddef.h>
#include <stdint.h>
#include <utility>
#include "api/audio/channel_layout.h"
#include "api/rtp_packet_infos.h"
#include "rtc_base/constructor_magic.h"
@ -58,6 +60,8 @@ class AudioFrame {
AudioFrame();
friend void swap(AudioFrame& a, AudioFrame& b);
// Resets all members to their default state.
void Reset();
// Same as Reset(), but leaves mute state unchanged. Muting a frame requires

View file

@ -133,4 +133,54 @@ TEST(AudioFrameTest, CopyFrom) {
EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples)));
}
TEST(AudioFrameTest, SwapFrames) {
AudioFrame frame1, frame2;
int16_t samples1[kNumChannelsMono * kSamplesPerChannel];
for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
samples1[i] = i;
}
frame1.UpdateFrame(kTimestamp, samples1, kSamplesPerChannel, kSampleRateHz,
AudioFrame::kPLC, AudioFrame::kVadActive,
kNumChannelsMono);
frame1.set_absolute_capture_timestamp_ms(12345678);
const auto frame1_channel_layout = frame1.channel_layout();
int16_t samples2[(kNumChannelsMono + 1) * (kSamplesPerChannel + 1)];
for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
++i) {
samples2[i] = 1000 + i;
}
frame2.UpdateFrame(kTimestamp + 1, samples2, kSamplesPerChannel + 1,
kSampleRateHz + 1, AudioFrame::kNormalSpeech,
AudioFrame::kVadPassive, kNumChannelsMono + 1);
const auto frame2_channel_layout = frame2.channel_layout();
swap(frame1, frame2);
EXPECT_EQ(kTimestamp + 1, frame1.timestamp_);
ASSERT_EQ(kSamplesPerChannel + 1, frame1.samples_per_channel_);
EXPECT_EQ(kSampleRateHz + 1, frame1.sample_rate_hz_);
EXPECT_EQ(AudioFrame::kNormalSpeech, frame1.speech_type_);
EXPECT_EQ(AudioFrame::kVadPassive, frame1.vad_activity_);
ASSERT_EQ(kNumChannelsMono + 1, frame1.num_channels_);
for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
++i) {
EXPECT_EQ(samples2[i], frame1.data()[i]);
}
EXPECT_FALSE(frame1.absolute_capture_timestamp_ms());
EXPECT_EQ(frame2_channel_layout, frame1.channel_layout());
EXPECT_EQ(kTimestamp, frame2.timestamp_);
ASSERT_EQ(kSamplesPerChannel, frame2.samples_per_channel_);
EXPECT_EQ(kSampleRateHz, frame2.sample_rate_hz_);
EXPECT_EQ(AudioFrame::kPLC, frame2.speech_type_);
EXPECT_EQ(AudioFrame::kVadActive, frame2.vad_activity_);
ASSERT_EQ(kNumChannelsMono, frame2.num_channels_);
for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
EXPECT_EQ(samples1[i], frame2.data()[i]);
}
EXPECT_EQ(12345678, frame2.absolute_capture_timestamp_ms());
EXPECT_EQ(frame1_channel_layout, frame2.channel_layout());
}
} // namespace webrtc

View file

@ -38,6 +38,8 @@ rtc_library("audio_codecs_api") {
"../../rtc_base:sanitizer",
"../../rtc_base/system:rtc_export",
"../units:time_delta",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View file

@ -25,6 +25,8 @@ rtc_library("audio_encoder_L16") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -42,6 +44,8 @@ rtc_library("audio_decoder_L16") {
"../../../modules/audio_coding:pcm16b",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View file

@ -25,6 +25,8 @@ rtc_library("audio_encoder_g711") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -42,6 +44,8 @@ rtc_library("audio_decoder_g711") {
"../../../modules/audio_coding:g711",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View file

@ -31,6 +31,8 @@ rtc_library("audio_encoder_g722") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -48,6 +50,8 @@ rtc_library("audio_decoder_g722") {
"../../../modules/audio_coding:g722",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View file

@ -30,6 +30,8 @@ rtc_library("audio_encoder_ilbc") {
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -46,6 +48,8 @@ rtc_library("audio_decoder_ilbc") {
"..:audio_codecs_api",
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View file

@ -68,6 +68,8 @@ rtc_library("audio_encoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -85,6 +87,8 @@ rtc_library("audio_decoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -102,6 +106,8 @@ rtc_library("audio_encoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -119,6 +125,8 @@ rtc_library("audio_decoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View file

@ -23,8 +23,8 @@ rtc_library("audio_encoder_opus_config") {
deps = [
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
if (rtc_opus_variable_complexity) {
defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ]
@ -49,6 +49,8 @@ rtc_library("audio_encoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -66,6 +68,8 @@ rtc_library("audio_decoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -82,8 +86,8 @@ rtc_library("audio_encoder_multiopus") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
"../opus:audio_encoder_opus_config",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_decoder_multiopus") {
@ -99,6 +103,8 @@ rtc_library("audio_decoder_multiopus") {
"../../../modules/audio_coding:webrtc_multiopus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",

View file

@ -75,6 +75,8 @@ struct RTC_EXPORT AudioOptions {
// and check if any other AudioOptions members are unused.
absl::optional<bool> combined_audio_video_bwe;
// Enable audio network adaptor.
// TODO(webrtc:11717): Remove this API in favor of adaptivePtime in
// RtpEncodingParameters.
absl::optional<bool> audio_network_adaptor;
// Config string for audio network adaptor.
absl::optional<std::string> audio_network_adaptor_config;

View file

@ -20,6 +20,7 @@
#include <string>
#include "absl/types/optional.h"
#include "api/priority.h"
#include "api/rtc_error.h"
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
@ -61,6 +62,9 @@ struct DataChannelInit {
// The stream id, or SID, for SCTP data channels. -1 if unset (see above).
int id = -1;
// https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member
absl::optional<Priority> priority;
};
// At the JavaScript level, data can be passed in as a string or a blob, so
@ -154,6 +158,7 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface {
// If negotiated in-band, this ID will be populated once the DTLS role is
// determined, and until then this will return -1.
virtual int id() const = 0;
virtual Priority priority() const { return Priority::kLow; }
virtual DataState state() const = 0;
// When state is kClosed, and the DataChannel was not closed using
// the closing procedure, returns the error information about the closing.

View file

@ -16,6 +16,7 @@
#include "api/scoped_refptr.h"
#include "api/video/encoded_frame.h"
#include "api/video/video_frame_metadata.h"
#include "rtc_base/ref_count.h"
namespace webrtc {
@ -48,6 +49,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface {
// TODO(bugs.webrtc.org/11380) remove from interface once
// webrtc::RtpDescriptorAuthentication is exposed in api/.
virtual std::vector<uint8_t> GetAdditionalData() const = 0;
virtual const VideoFrameMetadata& GetMetadata() const = 0;
};
// Extends the TransformableFrameInterface to expose audio-specific information.

View file

@ -23,8 +23,8 @@ rtc_source_set("neteq_api") {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
"../audio_codecs:audio_codecs_api",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("custom_neteq_factory") {
@ -56,8 +56,8 @@ rtc_source_set("neteq_controller_api") {
":tick_timer",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("default_neteq_controller_factory") {

View file

@ -30,7 +30,8 @@ std::string NetEq::Config::ToString() const {
<< ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate="
<< (enable_fast_accelerate ? "true" : "false")
<< ", enable_muted_state=" << (enable_muted_state ? "true" : "false")
<< ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false");
<< ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false")
<< ", extra_output_delay_ms=" << extra_output_delay_ms;
return ss.str();
}

View file

@ -138,6 +138,10 @@ class NetEq {
bool enable_rtx_handling = false;
absl::optional<AudioCodecPairId> codec_pair_id;
bool for_test_no_time_stretching = false; // Use only for testing.
// Adds extra delay to the output of NetEq, without affecting jitter or
// loss behavior. This is mainly for testing. Value must be a non-negative
// multiple of 10 ms.
int extra_output_delay_ms = 0;
};
enum ReturnCodes { kOK = 0, kFail = -1 };

View file

@ -73,6 +73,7 @@
#include <string>
#include <vector>
#include "api/adaptation/resource.h"
#include "api/async_resolver_factory.h"
#include "api/audio/audio_mixer.h"
#include "api/audio_codecs/audio_decoder_factory.h"
@ -103,7 +104,6 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/bitrate_settings.h"
#include "api/transport/enums.h"
#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "api/turn_customizer.h"
@ -614,34 +614,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// correctly. This flag will be deprecated soon. Do not rely on it.
bool active_reset_srtp_params = false;
// DEPRECATED. Do not use. This option is ignored by peer connection.
// TODO(webrtc:9719): Delete this option.
bool use_media_transport = false;
// DEPRECATED. Do not use. This option is ignored by peer connection.
// TODO(webrtc:9719): Delete this option.
bool use_media_transport_for_data_channels = false;
// If MediaTransportFactory is provided in PeerConnectionFactory, this flag
// informs PeerConnection that it should use the DatagramTransportInterface
// for packets instead DTLS. It's invalid to set it to |true| if the
// MediaTransportFactory wasn't provided.
absl::optional<bool> use_datagram_transport;
// If MediaTransportFactory is provided in PeerConnectionFactory, this flag
// informs PeerConnection that it should use the DatagramTransport's
// implementation of DataChannelTransportInterface for data channels instead
// of SCTP-DTLS.
absl::optional<bool> use_datagram_transport_for_data_channels;
// If true, this PeerConnection will only use datagram transport for data
// channels when receiving an incoming offer that includes datagram
// transport parameters. It will not request use of a datagram transport
// when it creates the initial, outgoing offer.
// This setting only applies when |use_datagram_transport_for_data_channels|
// is true.
absl::optional<bool> use_datagram_transport_for_data_channels_receive_only;
// Defines advanced optional cryptographic settings related to SRTP and
// frame encryption for native WebRTC. Setting this will overwrite any
// settings set in PeerConnectionFactory (which is deprecated).
@ -1137,6 +1109,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
return absl::nullopt;
}
// When a resource is overused, the PeerConnection will try to reduce the load
// on the sysem, for example by reducing the resolution or frame rate of
// encoded streams. The Resource API allows injecting platform-specific usage
// measurements. The conditions to trigger kOveruse or kUnderuse are up to the
// implementation.
// TODO(hbos): Make pure virtual when implemented by downstream projects.
virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) {}
// Start RtcEventLog using an existing output-sink. Takes ownership of
// |output| and passes it on to Call, which will take the ownership. If the
// operation fails the output will be closed and deallocated. The event log
@ -1351,7 +1331,6 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final {
std::unique_ptr<NetworkStatePredictorFactoryInterface>
network_state_predictor_factory;
std::unique_ptr<NetworkControllerFactoryInterface> network_controller_factory;
std::unique_ptr<MediaTransportFactory> media_transport_factory;
std::unique_ptr<NetEqFactory> neteq_factory;
std::unique_ptr<WebRtcKeyValueConfig> trials;
};

View file

@ -138,6 +138,7 @@ PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state)
PROXY_METHOD0(PeerConnectionState, peer_connection_state)
PROXY_METHOD0(IceGatheringState, ice_gathering_state)
PROXY_METHOD0(absl::optional<bool>, can_trickle_ice_candidates)
PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr<Resource>)
PROXY_METHOD2(bool,
StartRtcEventLog,
std::unique_ptr<RtcEventLogOutput>,

26
api/priority.h Normal file
View file

@ -0,0 +1,26 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_PRIORITY_H_
#define API_PRIORITY_H_
namespace webrtc {
// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
enum class Priority {
kVeryLow,
kLow,
kMedium,
kHigh,
};
} // namespace webrtc
#endif // API_PRIORITY_H_

View file

@ -55,6 +55,7 @@
#include <memory>
#include <string>
#include <tuple>
#include <type_traits>
#include <utility>
#include "api/scoped_refptr.h"
@ -396,6 +397,16 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler {
return call.Marshal(RTC_FROM_HERE, worker_thread_); \
}
// For use when returning purely const state (set during construction).
// Use with caution. This method should only be used when the return value will
// always be the same.
#define BYPASS_PROXY_CONSTMETHOD0(r, method) \
r method() const override { \
static_assert(!std::is_pointer<r>::value, "Type is a pointer"); \
static_assert(!std::is_reference<r>::value, "Type is a reference"); \
return c_->method(); \
}
} // namespace webrtc
#endif // API_PROXY_H_

View file

@ -141,14 +141,16 @@ TEST_F(RtcEventLogOutputFileTest, AllowReasonableFileSizeLimits) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
TEST_F(RtcEventLogOutputFileTest, WritingToInactiveFileForbidden) {
class RtcEventLogOutputFileDeathTest : public RtcEventLogOutputFileTest {};
TEST_F(RtcEventLogOutputFileDeathTest, WritingToInactiveFileForbidden) {
RtcEventLogOutputFile output_file(output_file_name_, 2);
ASSERT_FALSE(output_file.Write("abc"));
ASSERT_FALSE(output_file.IsActive());
EXPECT_DEATH(output_file.Write("abc"), "");
}
TEST_F(RtcEventLogOutputFileTest, DisallowUnreasonableFileSizeLimits) {
TEST_F(RtcEventLogOutputFileDeathTest, DisallowUnreasonableFileSizeLimits) {
// Keeping in a temporary unique_ptr to make it clearer that the death is
// triggered by construction, not destruction.
std::unique_ptr<RtcEventLogOutputFile> output_file;

View file

@ -26,9 +26,7 @@ RTPHeaderExtension::RTPHeaderExtension()
videoRotation(kVideoRotation_0),
hasVideoContentType(false),
videoContentType(VideoContentType::UNSPECIFIED),
has_video_timing(false),
has_frame_marking(false),
frame_marking({false, false, false, false, false, 0xFF, 0, 0}) {}
has_video_timing(false) {}
RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) =
default;

View file

@ -21,10 +21,9 @@
#include "api/units/timestamp.h"
#include "api/video/color_space.h"
#include "api/video/video_content_type.h"
#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
#include "common_types.h" // NOLINT(build/include)
#include "common_types.h" // NOLINT (build/include)
namespace webrtc {
@ -143,9 +142,6 @@ struct RTPHeaderExtension {
bool has_video_timing;
VideoSendTiming video_timing;
bool has_frame_marking;
FrameMarking frame_marking;
PlayoutDelay playout_delay = {-1, -1};
// For identification of a stream when ssrc is not signaled. See

View file

@ -18,6 +18,20 @@
namespace webrtc {
const char* DegradationPreferenceToString(
DegradationPreference degradation_preference) {
switch (degradation_preference) {
case DegradationPreference::DISABLED:
return "disabled";
case DegradationPreference::MAINTAIN_FRAMERATE:
return "maintain-framerate";
case DegradationPreference::MAINTAIN_RESOLUTION:
return "maintain-resolution";
case DegradationPreference::BALANCED:
return "balanced";
}
}
const double kDefaultBitratePriority = 1.0;
RtcpFeedback::RtcpFeedback() = default;
@ -105,7 +119,6 @@ constexpr char RtpExtension::kAbsoluteCaptureTimeUri[];
constexpr char RtpExtension::kVideoRotationUri[];
constexpr char RtpExtension::kVideoContentTypeUri[];
constexpr char RtpExtension::kVideoTimingUri[];
constexpr char RtpExtension::kFrameMarkingUri[];
constexpr char RtpExtension::kGenericFrameDescriptorUri00[];
constexpr char RtpExtension::kDependencyDescriptorUri[];
constexpr char RtpExtension::kTransportSequenceNumberUri[];
@ -144,7 +157,6 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) {
uri == webrtc::RtpExtension::kVideoContentTypeUri ||
uri == webrtc::RtpExtension::kVideoTimingUri ||
uri == webrtc::RtpExtension::kMidUri ||
uri == webrtc::RtpExtension::kFrameMarkingUri ||
uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00 ||
uri == webrtc::RtpExtension::kDependencyDescriptorUri ||
uri == webrtc::RtpExtension::kColorSpaceUri ||

View file

@ -20,6 +20,7 @@
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/media_types.h"
#include "api/priority.h"
#include "api/rtp_transceiver_direction.h"
#include "rtc_base/system/rtc_export.h"
@ -91,15 +92,10 @@ enum class DegradationPreference {
BALANCED,
};
RTC_EXPORT extern const double kDefaultBitratePriority;
RTC_EXPORT const char* DegradationPreferenceToString(
DegradationPreference degradation_preference);
// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
enum class Priority {
kVeryLow,
kLow,
kMedium,
kHigh,
};
RTC_EXPORT extern const double kDefaultBitratePriority;
struct RTC_EXPORT RtcpFeedback {
RtcpFeedbackType type = RtcpFeedbackType::CCM;
@ -226,7 +222,7 @@ struct RTC_EXPORT RtpHeaderExtensionCapability {
bool preferred_encrypt = false;
// The direction of the extension. The kStopped value is only used with
// RtpTransceiverInterface::header_extensions_offered() and
// RtpTransceiverInterface::HeaderExtensionsToOffer() and
// SetOfferedRtpHeaderExtensions().
RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv;
@ -314,10 +310,6 @@ struct RTC_EXPORT RtpExtension {
static constexpr char kVideoTimingUri[] =
"http://www.webrtc.org/experiments/rtp-hdrext/video-timing";
// Header extension for video frame marking.
static constexpr char kFrameMarkingUri[] =
"http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07";
// Experimental codec agnostic frame descriptor.
static constexpr char kGenericFrameDescriptorUri00[] =
"http://www.webrtc.org/experiments/rtp-hdrext/"
@ -481,6 +473,10 @@ struct RTC_EXPORT RtpEncodingParameters {
// Called "encodingId" in ORTC.
std::string rid;
// Allow dynamic frame length changes for audio:
// https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime
bool adaptive_ptime = false;
bool operator==(const RtpEncodingParameters& o) const {
return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority &&
network_priority == o.network_priority &&
@ -489,7 +485,8 @@ struct RTC_EXPORT RtpEncodingParameters {
max_framerate == o.max_framerate &&
num_temporal_layers == o.num_temporal_layers &&
scale_resolution_down_by == o.scale_resolution_down_by &&
active == o.active && rid == o.rid;
active == o.active && rid == o.rid &&
adaptive_ptime == o.adaptive_ptime;
}
bool operator!=(const RtpEncodingParameters& o) const {
return !(*this == o);

View file

@ -41,4 +41,10 @@ RtpTransceiverInterface::HeaderExtensionsToOffer() const {
return {};
}
webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions(
rtc::ArrayView<const RtpHeaderExtensionCapability>
header_extensions_to_offer) {
return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION);
}
} // namespace webrtc

View file

@ -133,6 +133,13 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface {
virtual std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer()
const;
// The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation
// so that it negotiates use of header extensions which are not kStopped.
// https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
virtual webrtc::RTCError SetOfferedRtpHeaderExtensions(
rtc::ArrayView<const RtpHeaderExtensionCapability>
header_extensions_to_offer);
protected:
~RtpTransceiverInterface() override = default;
};

View file

@ -319,6 +319,14 @@ class RTCStatsMember : public RTCStatsMemberInterface {
std::string ValueToString() const override;
std::string ValueToJson() const override;
template <typename U>
inline T ValueOrDefault(U default_value) const {
if (is_defined()) {
return *(*this);
}
return default_value;
}
// Assignment operators.
T& operator=(const T& value) {
value_ = value;

View file

@ -134,7 +134,7 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats {
RTCStatsMember<std::string> label;
RTCStatsMember<std::string> protocol;
RTCStatsMember<int32_t> datachannelid;
RTCStatsMember<int32_t> data_channel_identifier;
// TODO(hbos): Support enum types? "RTCStatsMember<RTCDataChannelState>"?
RTCStatsMember<std::string> state;
RTCStatsMember<uint32_t> messages_sent;

View file

@ -21,6 +21,8 @@ rtc_library("task_queue") {
"../../rtc_base:checks",
"../../rtc_base:macromagic",
"../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/base:config",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/strings",
@ -51,6 +53,8 @@ rtc_library("task_queue_test") {
deps = [
"../../../webrtc_overrides:webrtc_component",
"../../test:test_support",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
@ -62,6 +66,8 @@ rtc_library("task_queue_test") {
"../../rtc_base:timeutils",
"../../rtc_base/task_utils:to_queued_task",
"../../test:test_support",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]

View file

@ -37,9 +37,11 @@ TEST_P(TaskQueueTest, PostAndCheckCurrent) {
rtc::Event event;
auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent");
// We're not running a task, so there shouldn't be a current queue.
// We're not running a task, so |queue| shouldn't be current.
// Note that because rtc::Thread also supports the TQ interface and
// TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that
// means that TaskQueueBase::Current() will still return a valid value.
EXPECT_FALSE(queue->IsCurrent());
EXPECT_FALSE(TaskQueueBase::Current());
queue->PostTask(ToQueuedTask([&event, &queue] {
EXPECT_TRUE(queue->IsCurrent());
@ -269,5 +271,10 @@ TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) {
EXPECT_TRUE(done.Wait(1000));
}
// TaskQueueTest is a set of tests for any implementation of the TaskQueueBase.
// Tests are instantiated next to the concrete implementation(s).
// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#creating-value-parameterized-abstract-tests
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TaskQueueTest);
} // namespace
} // namespace webrtc

View file

@ -17,6 +17,12 @@
namespace webrtc {
namespace test {
int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
int argc,
char* argv[]) {
return AudioprocFloatImpl(std::move(audio_processing), argc, argv);
}
int AudioprocFloat(std::unique_ptr<AudioProcessingBuilder> ap_builder,
int argc,
char* argv[]) {

View file

@ -19,6 +19,22 @@
namespace webrtc {
namespace test {
// This is an interface for the audio processing simulation utility. This
// utility can be used to simulate the audioprocessing module using a recording
// (either an AEC dump or wav files), and generate the output as a wav file.
// Any audio_processing object specified in the input is used for the
// simulation. The optional |audio_processing| object provides the
// AudioProcessing instance that is used during the simulation. Note that when
// the audio_processing object is specified all functionality that relies on
// using the AudioProcessingBuilder is deactivated, since the AudioProcessing
// object is already created and the builder is not used in the simulation. It
// is needed to pass the command line flags as |argc| and |argv|, so these can
// be interpreted properly by the utility. To see a list of all supported
// command line flags, run the executable with the '--help' flag.
int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
int argc,
char* argv[]);
// This is an interface for the audio processing simulation utility. This
// utility can be used to simulate the audioprocessing module using a recording
// (either an AEC dump or wav files), and generate the output as a wav file.

View file

@ -29,14 +29,14 @@
#include "api/test/fake_frame_decryptor.h"
#include "api/test/fake_frame_encryptor.h"
#include "api/test/fake_media_transport.h"
#include "api/test/loopback_media_transport.h"
#include "api/test/mock_audio_mixer.h"
#include "api/test/mock_frame_decryptor.h"
#include "api/test/mock_frame_encryptor.h"
#include "api/test/mock_peer_connection_factory_interface.h"
#include "api/test/mock_peerconnectioninterface.h"
#include "api/test/mock_rtpreceiver.h"
#include "api/test/mock_rtpsender.h"
#include "api/test/mock_transformable_video_frame.h"
#include "api/test/mock_video_bitrate_allocator.h"
#include "api/test/mock_video_bitrate_allocator_factory.h"
#include "api/test/mock_video_decoder.h"

View file

@ -1,4 +1,3 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
@ -18,6 +17,7 @@
namespace webrtc {
// Returns a non-null NetworkEmulationManager instance.
std::unique_ptr<NetworkEmulationManager> CreateNetworkEmulationManager(
TimeMode mode = TimeMode::kRealTime);

View file

@ -25,6 +25,7 @@ namespace webrtc_pc_e2e {
// During the test Alice will be caller and Bob will answer the call.
// |test_case_name| is a name of test case, that will be used for all metrics
// reporting.
// Returns a non-null PeerConnectionE2EQualityTestFixture instance.
std::unique_ptr<PeerConnectionE2EQualityTestFixture>
CreatePeerConnectionE2EQualityTestFixture(
std::string test_case_name,

View file

@ -35,13 +35,17 @@ std::unique_ptr<CallFactoryInterface> CreateTimeControllerBasedCallFactory(
explicit TimeControllerBasedCallFactory(TimeController* time_controller)
: time_controller_(time_controller) {}
Call* CreateCall(const Call::Config& config) override {
return Call::Create(config, time_controller_->GetClock(),
time_controller_->CreateProcessThread("CallModules"),
if (!module_thread_) {
module_thread_ = SharedModuleThread::Create(
"CallModules", [this]() { module_thread_ = nullptr; });
}
return Call::Create(config, time_controller_->GetClock(), module_thread_,
time_controller_->CreateProcessThread("Pacer"));
}
private:
TimeController* time_controller_;
rtc::scoped_refptr<SharedModuleThread> module_thread_;
};
return std::make_unique<TimeControllerBasedCallFactory>(time_controller);
}

View file

@ -1,121 +0,0 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
#define API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
#include <cstddef>
#include <string>
#include "api/transport/datagram_transport_interface.h"
#include "api/transport/media/media_transport_interface.h"
namespace webrtc {
// Maxmum size of datagrams sent by |FakeDatagramTransport|.
constexpr size_t kMaxFakeDatagramSize = 1000;
// Fake datagram transport. Does not support making an actual connection
// or sending data. Only used for tests that need to stub out a transport.
class FakeDatagramTransport : public DatagramTransportInterface {
public:
FakeDatagramTransport(
const MediaTransportSettings& settings,
std::string transport_parameters,
const std::function<bool(absl::string_view, absl::string_view)>&
are_parameters_compatible)
: settings_(settings),
transport_parameters_(transport_parameters),
are_parameters_compatible_(are_parameters_compatible) {}
~FakeDatagramTransport() override { RTC_DCHECK(!state_callback_); }
void Connect(rtc::PacketTransportInternal* packet_transport) override {
packet_transport_ = packet_transport;
}
CongestionControlInterface* congestion_control() override {
return nullptr; // Datagram interface doesn't provide this yet.
}
void SetTransportStateCallback(
MediaTransportStateCallback* callback) override {
state_callback_ = callback;
}
RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) override {
return RTCError::OK();
}
size_t GetLargestDatagramSize() const override {
return kMaxFakeDatagramSize;
}
void SetDatagramSink(DatagramSinkInterface* sink) override {}
std::string GetTransportParameters() const override {
if (settings_.remote_transport_parameters) {
return *settings_.remote_transport_parameters;
}
return transport_parameters_;
}
RTCError SetRemoteTransportParameters(
absl::string_view remote_parameters) override {
if (are_parameters_compatible_(GetTransportParameters(),
remote_parameters)) {
return RTCError::OK();
}
return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
"Incompatible remote transport parameters");
}
RTCError OpenChannel(int channel_id) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCError SendData(int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCError CloseChannel(int channel_id) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
void SetDataSink(DataChannelSink* /*sink*/) override {}
bool IsReadyToSend() const override { return false; }
rtc::PacketTransportInternal* packet_transport() { return packet_transport_; }
void set_state(webrtc::MediaTransportState state) {
if (state_callback_) {
state_callback_->OnStateChanged(state);
}
}
const MediaTransportSettings& settings() { return settings_; }
private:
const MediaTransportSettings settings_;
const std::string transport_parameters_;
const std::function<bool(absl::string_view, absl::string_view)>
are_parameters_compatible_;
rtc::PacketTransportInternal* packet_transport_ = nullptr;
MediaTransportStateCallback* state_callback_ = nullptr;
};
} // namespace webrtc
#endif // API_TEST_FAKE_DATAGRAM_TRANSPORT_H_

View file

@ -1,74 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_FAKE_MEDIA_TRANSPORT_H_
#define API_TEST_FAKE_MEDIA_TRANSPORT_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
#include "api/test/fake_datagram_transport.h"
#include "api/transport/media/media_transport_interface.h"
namespace webrtc {
// Fake media transport factory creates fake media transport.
// Also creates fake datagram transport, since both media and datagram
// transports are created by |MediaTransportFactory|.
class FakeMediaTransportFactory : public MediaTransportFactory {
public:
explicit FakeMediaTransportFactory(
const absl::optional<std::string>& transport_offer = "")
: transport_offer_(transport_offer) {}
~FakeMediaTransportFactory() = default;
std::string GetTransportName() const override { return "fake"; }
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
CreateDatagramTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings) override {
return std::unique_ptr<DatagramTransportInterface>(
new FakeDatagramTransport(settings, transport_offer_.value_or(""),
transport_parameters_comparison_));
}
void set_transport_parameters_comparison(
std::function<bool(absl::string_view, absl::string_view)> comparison) {
transport_parameters_comparison_ = std::move(comparison);
}
private:
const absl::optional<std::string> transport_offer_;
std::function<bool(absl::string_view, absl::string_view)>
transport_parameters_comparison_ =
[](absl::string_view local, absl::string_view remote) {
return local == remote;
};
};
} // namespace webrtc
#endif // API_TEST_FAKE_MEDIA_TRANSPORT_H_

View file

@ -1,373 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/loopback_media_transport.h"
#include <memory>
#include "absl/algorithm/container.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
namespace {
constexpr size_t kLoopbackMaxDatagramSize = 1200;
class WrapperDatagramTransport : public DatagramTransportInterface {
public:
explicit WrapperDatagramTransport(DatagramTransportInterface* wrapped)
: wrapped_(wrapped) {}
// Datagram transport overrides.
void Connect(rtc::PacketTransportInternal* packet_transport) override {
return wrapped_->Connect(packet_transport);
}
CongestionControlInterface* congestion_control() override {
return wrapped_->congestion_control();
}
void SetTransportStateCallback(
MediaTransportStateCallback* callback) override {
return wrapped_->SetTransportStateCallback(callback);
}
RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) override {
return wrapped_->SendDatagram(data, datagram_id);
}
size_t GetLargestDatagramSize() const override {
return wrapped_->GetLargestDatagramSize();
}
void SetDatagramSink(DatagramSinkInterface* sink) override {
return wrapped_->SetDatagramSink(sink);
}
std::string GetTransportParameters() const override {
return wrapped_->GetTransportParameters();
}
RTCError SetRemoteTransportParameters(absl::string_view parameters) override {
return wrapped_->SetRemoteTransportParameters(parameters);
}
// Data channel overrides.
RTCError OpenChannel(int channel_id) override {
return wrapped_->OpenChannel(channel_id);
}
RTCError SendData(int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) override {
return wrapped_->SendData(channel_id, params, buffer);
}
RTCError CloseChannel(int channel_id) override {
return wrapped_->CloseChannel(channel_id);
}
void SetDataSink(DataChannelSink* sink) override {
wrapped_->SetDataSink(sink);
}
bool IsReadyToSend() const override { return wrapped_->IsReadyToSend(); }
private:
DatagramTransportInterface* wrapped_;
};
} // namespace
WrapperMediaTransportFactory::WrapperMediaTransportFactory(
DatagramTransportInterface* wrapped_datagram_transport)
: wrapped_datagram_transport_(wrapped_datagram_transport) {}
WrapperMediaTransportFactory::WrapperMediaTransportFactory(
MediaTransportFactory* wrapped)
: wrapped_factory_(wrapped) {}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
WrapperMediaTransportFactory::CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
WrapperMediaTransportFactory::CreateDatagramTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
created_transport_count_++;
if (wrapped_factory_) {
return wrapped_factory_->CreateDatagramTransport(network_thread, settings);
}
return {
std::make_unique<WrapperDatagramTransport>(wrapped_datagram_transport_)};
}
std::string WrapperMediaTransportFactory::GetTransportName() const {
if (wrapped_factory_) {
return wrapped_factory_->GetTransportName();
}
return "wrapped-transport";
}
int WrapperMediaTransportFactory::created_transport_count() const {
return created_transport_count_;
}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
WrapperMediaTransportFactory::CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
MediaTransportPair::MediaTransportPair(rtc::Thread* thread)
: first_datagram_transport_(thread),
second_datagram_transport_(thread),
first_factory_(&first_datagram_transport_),
second_factory_(&second_datagram_transport_) {
first_datagram_transport_.Connect(&second_datagram_transport_);
second_datagram_transport_.Connect(&first_datagram_transport_);
}
MediaTransportPair::~MediaTransportPair() = default;
MediaTransportPair::LoopbackDataChannelTransport::LoopbackDataChannelTransport(
rtc::Thread* thread)
: thread_(thread) {}
MediaTransportPair::LoopbackDataChannelTransport::
~LoopbackDataChannelTransport() {
RTC_CHECK(data_sink_ == nullptr);
}
void MediaTransportPair::LoopbackDataChannelTransport::Connect(
LoopbackDataChannelTransport* other) {
other_ = other;
}
RTCError MediaTransportPair::LoopbackDataChannelTransport::OpenChannel(
int channel_id) {
// No-op. No need to open channels for the loopback.
return RTCError::OK();
}
RTCError MediaTransportPair::LoopbackDataChannelTransport::SendData(
int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_,
[this, channel_id, params, buffer] {
other_->OnData(channel_id, params.type, buffer);
});
return RTCError::OK();
}
RTCError MediaTransportPair::LoopbackDataChannelTransport::CloseChannel(
int channel_id) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, channel_id] {
other_->OnRemoteCloseChannel(channel_id);
rtc::CritScope lock(&sink_lock_);
if (data_sink_) {
data_sink_->OnChannelClosed(channel_id);
}
});
return RTCError::OK();
}
void MediaTransportPair::LoopbackDataChannelTransport::SetDataSink(
DataChannelSink* sink) {
rtc::CritScope lock(&sink_lock_);
data_sink_ = sink;
if (data_sink_ && ready_to_send_) {
data_sink_->OnReadyToSend();
}
}
bool MediaTransportPair::LoopbackDataChannelTransport::IsReadyToSend() const {
rtc::CritScope lock(&sink_lock_);
return ready_to_send_;
}
void MediaTransportPair::LoopbackDataChannelTransport::FlushAsyncInvokes() {
invoker_.Flush(thread_);
}
void MediaTransportPair::LoopbackDataChannelTransport::OnData(
int channel_id,
DataMessageType type,
const rtc::CopyOnWriteBuffer& buffer) {
rtc::CritScope lock(&sink_lock_);
if (data_sink_) {
data_sink_->OnDataReceived(channel_id, type, buffer);
}
}
void MediaTransportPair::LoopbackDataChannelTransport::OnRemoteCloseChannel(
int channel_id) {
rtc::CritScope lock(&sink_lock_);
if (data_sink_) {
data_sink_->OnChannelClosing(channel_id);
data_sink_->OnChannelClosed(channel_id);
}
}
void MediaTransportPair::LoopbackDataChannelTransport::OnReadyToSend(
bool ready_to_send) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, ready_to_send] {
rtc::CritScope lock(&sink_lock_);
ready_to_send_ = ready_to_send;
// Propagate state to data channel sink, if present.
if (data_sink_ && ready_to_send_) {
data_sink_->OnReadyToSend();
}
});
}
MediaTransportPair::LoopbackDatagramTransport::LoopbackDatagramTransport(
rtc::Thread* thread)
: thread_(thread), dc_transport_(thread) {}
void MediaTransportPair::LoopbackDatagramTransport::Connect(
LoopbackDatagramTransport* other) {
other_ = other;
dc_transport_.Connect(&other->dc_transport_);
}
void MediaTransportPair::LoopbackDatagramTransport::Connect(
rtc::PacketTransportInternal* packet_transport) {
if (state_after_connect_) {
SetState(*state_after_connect_);
}
}
CongestionControlInterface*
MediaTransportPair::LoopbackDatagramTransport::congestion_control() {
return nullptr;
}
void MediaTransportPair::LoopbackDatagramTransport::SetTransportStateCallback(
MediaTransportStateCallback* callback) {
RTC_DCHECK_RUN_ON(thread_);
state_callback_ = callback;
if (state_callback_) {
state_callback_->OnStateChanged(state_);
}
}
RTCError MediaTransportPair::LoopbackDatagramTransport::SendDatagram(
rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) {
rtc::CopyOnWriteBuffer buffer;
buffer.SetData(data.data(), data.size());
invoker_.AsyncInvoke<void>(
RTC_FROM_HERE, thread_, [this, datagram_id, buffer = std::move(buffer)] {
RTC_DCHECK_RUN_ON(thread_);
other_->DeliverDatagram(std::move(buffer));
if (sink_) {
DatagramAck ack;
ack.datagram_id = datagram_id;
ack.receive_timestamp = Timestamp::Micros(rtc::TimeMicros());
sink_->OnDatagramAcked(ack);
}
});
return RTCError::OK();
}
size_t MediaTransportPair::LoopbackDatagramTransport::GetLargestDatagramSize()
const {
return kLoopbackMaxDatagramSize;
}
void MediaTransportPair::LoopbackDatagramTransport::SetDatagramSink(
DatagramSinkInterface* sink) {
RTC_DCHECK_RUN_ON(thread_);
sink_ = sink;
}
std::string
MediaTransportPair::LoopbackDatagramTransport::GetTransportParameters() const {
return transport_parameters_;
}
RTCError
MediaTransportPair::LoopbackDatagramTransport::SetRemoteTransportParameters(
absl::string_view remote_parameters) {
RTC_DCHECK_RUN_ON(thread_);
if (transport_parameters_comparison_(GetTransportParameters(),
remote_parameters)) {
return RTCError::OK();
}
return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
"Incompatible remote transport parameters");
}
RTCError MediaTransportPair::LoopbackDatagramTransport::OpenChannel(
int channel_id) {
return dc_transport_.OpenChannel(channel_id);
}
RTCError MediaTransportPair::LoopbackDatagramTransport::SendData(
int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
return dc_transport_.SendData(channel_id, params, buffer);
}
RTCError MediaTransportPair::LoopbackDatagramTransport::CloseChannel(
int channel_id) {
return dc_transport_.CloseChannel(channel_id);
}
void MediaTransportPair::LoopbackDatagramTransport::SetDataSink(
DataChannelSink* sink) {
dc_transport_.SetDataSink(sink);
}
bool MediaTransportPair::LoopbackDatagramTransport::IsReadyToSend() const {
return dc_transport_.IsReadyToSend();
}
void MediaTransportPair::LoopbackDatagramTransport::SetState(
MediaTransportState state) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, state] {
RTC_DCHECK_RUN_ON(thread_);
state_ = state;
if (state_callback_) {
state_callback_->OnStateChanged(state_);
}
});
dc_transport_.OnReadyToSend(state == MediaTransportState::kWritable);
}
void MediaTransportPair::LoopbackDatagramTransport::SetStateAfterConnect(
MediaTransportState state) {
state_after_connect_ = state;
}
void MediaTransportPair::LoopbackDatagramTransport::FlushAsyncInvokes() {
dc_transport_.FlushAsyncInvokes();
}
void MediaTransportPair::LoopbackDatagramTransport::DeliverDatagram(
rtc::CopyOnWriteBuffer buffer) {
RTC_DCHECK_RUN_ON(thread_);
if (sink_) {
sink_->OnDatagramReceived(buffer);
}
}
} // namespace webrtc

View file

@ -1,269 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
#define API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "api/transport/datagram_transport_interface.h"
#include "api/transport/media/media_transport_interface.h"
#include "rtc_base/async_invoker.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_checker.h"
namespace webrtc {
// Wrapper used to hand out unique_ptrs to loopback media
// transport without ownership changes to the underlying
// transport.
// It works in two modes:
// It can either wrap a factory, or it can wrap an existing interface.
// In the former mode, it delegates the work to the wrapped factory.
// In the latter mode, it always returns static instance of the transport
// interface.
//
// Example use:
// Factory wrap_static_interface = Wrapper(media_transport_interface);
// Factory wrap_factory = Wrapper(wrap_static_interface);
// The second factory may be created multiple times, and ownership may be passed
// to the client. The first factory counts the number of invocations of
// CreateMediaTransport();
class WrapperMediaTransportFactory : public MediaTransportFactory {
public:
explicit WrapperMediaTransportFactory(
DatagramTransportInterface* wrapped_datagram_transport);
explicit WrapperMediaTransportFactory(MediaTransportFactory* wrapped);
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override;
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override;
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
CreateDatagramTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings) override;
std::string GetTransportName() const override;
int created_transport_count() const;
private:
DatagramTransportInterface* wrapped_datagram_transport_ = nullptr;
MediaTransportFactory* wrapped_factory_ = nullptr;
int created_transport_count_ = 0;
};
// Contains two MediaTransportsInterfaces that are connected to each other.
// Currently supports audio only.
class MediaTransportPair {
public:
struct Stats {
int sent_audio_frames = 0;
int received_audio_frames = 0;
int sent_video_frames = 0;
int received_video_frames = 0;
};
explicit MediaTransportPair(rtc::Thread* thread);
~MediaTransportPair();
DatagramTransportInterface* first_datagram_transport() {
return &first_datagram_transport_;
}
DatagramTransportInterface* second_datagram_transport() {
return &second_datagram_transport_;
}
std::unique_ptr<MediaTransportFactory> first_factory() {
return std::make_unique<WrapperMediaTransportFactory>(&first_factory_);
}
std::unique_ptr<MediaTransportFactory> second_factory() {
return std::make_unique<WrapperMediaTransportFactory>(&second_factory_);
}
void SetState(MediaTransportState state) {
first_datagram_transport_.SetState(state);
second_datagram_transport_.SetState(state);
}
void SetFirstState(MediaTransportState state) {
first_datagram_transport_.SetState(state);
}
void SetSecondStateAfterConnect(MediaTransportState state) {
second_datagram_transport_.SetState(state);
}
void SetFirstDatagramTransportParameters(const std::string& params) {
first_datagram_transport_.set_transport_parameters(params);
}
void SetSecondDatagramTransportParameters(const std::string& params) {
second_datagram_transport_.set_transport_parameters(params);
}
void SetFirstDatagramTransportParametersComparison(
std::function<bool(absl::string_view, absl::string_view)> comparison) {
first_datagram_transport_.set_transport_parameters_comparison(
std::move(comparison));
}
void SetSecondDatagramTransportParametersComparison(
std::function<bool(absl::string_view, absl::string_view)> comparison) {
second_datagram_transport_.set_transport_parameters_comparison(
std::move(comparison));
}
void FlushAsyncInvokes() {
first_datagram_transport_.FlushAsyncInvokes();
second_datagram_transport_.FlushAsyncInvokes();
}
int first_factory_transport_count() const {
return first_factory_.created_transport_count();
}
int second_factory_transport_count() const {
return second_factory_.created_transport_count();
}
private:
class LoopbackDataChannelTransport : public DataChannelTransportInterface {
public:
explicit LoopbackDataChannelTransport(rtc::Thread* thread);
~LoopbackDataChannelTransport() override;
void Connect(LoopbackDataChannelTransport* other);
RTCError OpenChannel(int channel_id) override;
RTCError SendData(int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) override;
RTCError CloseChannel(int channel_id) override;
bool IsReadyToSend() const override;
void SetDataSink(DataChannelSink* sink) override;
void OnReadyToSend(bool ready_to_send);
void FlushAsyncInvokes();
private:
void OnData(int channel_id,
DataMessageType type,
const rtc::CopyOnWriteBuffer& buffer);
void OnRemoteCloseChannel(int channel_id);
rtc::Thread* const thread_;
rtc::CriticalSection sink_lock_;
DataChannelSink* data_sink_ RTC_GUARDED_BY(sink_lock_) = nullptr;
bool ready_to_send_ RTC_GUARDED_BY(sink_lock_) = false;
LoopbackDataChannelTransport* other_;
rtc::AsyncInvoker invoker_;
};
class LoopbackDatagramTransport : public DatagramTransportInterface {
public:
explicit LoopbackDatagramTransport(rtc::Thread* thread);
void Connect(LoopbackDatagramTransport* other);
// Datagram transport overrides.
void Connect(rtc::PacketTransportInternal* packet_transport) override;
CongestionControlInterface* congestion_control() override;
void SetTransportStateCallback(
MediaTransportStateCallback* callback) override;
RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) override;
size_t GetLargestDatagramSize() const override;
void SetDatagramSink(DatagramSinkInterface* sink) override;
std::string GetTransportParameters() const override;
RTCError SetRemoteTransportParameters(
absl::string_view remote_parameters) override;
// Data channel overrides.
RTCError OpenChannel(int channel_id) override;
RTCError SendData(int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) override;
RTCError CloseChannel(int channel_id) override;
void SetDataSink(DataChannelSink* sink) override;
bool IsReadyToSend() const override;
// Loopback-specific functionality.
void SetState(MediaTransportState state);
// When Connect() is called, the datagram transport will enter this state.
// This is useful for mimicking zero-RTT connectivity, for example.
void SetStateAfterConnect(MediaTransportState state);
void FlushAsyncInvokes();
void set_transport_parameters(const std::string& value) {
transport_parameters_ = value;
}
void set_transport_parameters_comparison(
std::function<bool(absl::string_view, absl::string_view)> comparison) {
thread_->Invoke<void>(
RTC_FROM_HERE, [this, comparison = std::move(comparison)] {
RTC_DCHECK_RUN_ON(thread_);
transport_parameters_comparison_ = std::move(comparison);
});
}
private:
void DeliverDatagram(rtc::CopyOnWriteBuffer buffer);
rtc::Thread* thread_;
LoopbackDataChannelTransport dc_transport_;
MediaTransportState state_ RTC_GUARDED_BY(thread_) =
MediaTransportState::kPending;
DatagramSinkInterface* sink_ RTC_GUARDED_BY(thread_) = nullptr;
MediaTransportStateCallback* state_callback_ RTC_GUARDED_BY(thread_) =
nullptr;
LoopbackDatagramTransport* other_;
std::string transport_parameters_;
std::function<bool(absl::string_view, absl::string_view)>
transport_parameters_comparison_ RTC_GUARDED_BY(thread_) =
[](absl::string_view a, absl::string_view b) { return a == b; };
absl::optional<MediaTransportState> state_after_connect_;
rtc::AsyncInvoker invoker_;
};
LoopbackDatagramTransport first_datagram_transport_;
LoopbackDatagramTransport second_datagram_transport_;
WrapperMediaTransportFactory first_factory_;
WrapperMediaTransportFactory second_factory_;
};
} // namespace webrtc
#endif // API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_

View file

@ -1,201 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/loopback_media_transport.h"
#include <algorithm>
#include <memory>
#include <vector>
#include "test/gmock.h"
namespace webrtc {
namespace {
class MockMediaTransportAudioSinkInterface
: public MediaTransportAudioSinkInterface {
public:
MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedAudioFrame));
};
class MockMediaTransportVideoSinkInterface
: public MediaTransportVideoSinkInterface {
public:
MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedVideoFrame));
};
class MockMediaTransportKeyFrameRequestCallback
: public MediaTransportKeyFrameRequestCallback {
public:
MOCK_METHOD1(OnKeyFrameRequested, void(uint64_t));
};
class MockDataChannelSink : public DataChannelSink {
public:
MOCK_METHOD3(OnDataReceived,
void(int, DataMessageType, const rtc::CopyOnWriteBuffer&));
MOCK_METHOD1(OnChannelClosing, void(int));
MOCK_METHOD1(OnChannelClosed, void(int));
MOCK_METHOD0(OnReadyToSend, void());
};
class MockStateCallback : public MediaTransportStateCallback {
public:
MOCK_METHOD1(OnStateChanged, void(MediaTransportState));
};
} // namespace
TEST(LoopbackMediaTransport, DataDeliveredToSink) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
MockDataChannelSink sink;
transport_pair.first_datagram_transport()->SetDataSink(&sink);
const int channel_id = 1;
EXPECT_CALL(
sink, OnDataReceived(
channel_id, DataMessageType::kText,
::testing::Property<rtc::CopyOnWriteBuffer, const char*>(
&rtc::CopyOnWriteBuffer::cdata, ::testing::StrEq("foo"))));
SendDataParams params;
params.type = DataMessageType::kText;
rtc::CopyOnWriteBuffer buffer("foo");
transport_pair.second_datagram_transport()->SendData(channel_id, params,
buffer);
transport_pair.FlushAsyncInvokes();
transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
TEST(LoopbackMediaTransport, CloseDeliveredToSink) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
MockDataChannelSink first_sink;
transport_pair.first_datagram_transport()->SetDataSink(&first_sink);
MockDataChannelSink second_sink;
transport_pair.second_datagram_transport()->SetDataSink(&second_sink);
const int channel_id = 1;
{
::testing::InSequence s;
EXPECT_CALL(second_sink, OnChannelClosing(channel_id));
EXPECT_CALL(second_sink, OnChannelClosed(channel_id));
EXPECT_CALL(first_sink, OnChannelClosed(channel_id));
}
transport_pair.first_datagram_transport()->CloseChannel(channel_id);
transport_pair.FlushAsyncInvokes();
transport_pair.first_datagram_transport()->SetDataSink(nullptr);
transport_pair.second_datagram_transport()->SetDataSink(nullptr);
}
TEST(LoopbackMediaTransport, InitialStateDeliveredWhenCallbackSet) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
MockStateCallback state_callback;
EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
transport_pair.first_datagram_transport()->SetTransportStateCallback(
&state_callback);
});
transport_pair.FlushAsyncInvokes();
}
TEST(LoopbackMediaTransport, ChangedStateDeliveredWhenCallbackSet) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
transport_pair.SetState(MediaTransportState::kWritable);
transport_pair.FlushAsyncInvokes();
MockStateCallback state_callback;
EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
transport_pair.first_datagram_transport()->SetTransportStateCallback(
&state_callback);
});
transport_pair.FlushAsyncInvokes();
}
TEST(LoopbackMediaTransport, StateChangeDeliveredToCallback) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
MockStateCallback state_callback;
EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
transport_pair.first_datagram_transport()->SetTransportStateCallback(
&state_callback);
});
transport_pair.SetState(MediaTransportState::kWritable);
transport_pair.FlushAsyncInvokes();
}
TEST(LoopbackMediaTransport, NotReadyToSendWhenDataSinkSet) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
MockDataChannelSink data_channel_sink;
EXPECT_CALL(data_channel_sink, OnReadyToSend()).Times(0);
transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
transport_pair.FlushAsyncInvokes();
transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
TEST(LoopbackMediaTransport, ReadyToSendWhenDataSinkSet) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
transport_pair.SetState(MediaTransportState::kWritable);
transport_pair.FlushAsyncInvokes();
MockDataChannelSink data_channel_sink;
EXPECT_CALL(data_channel_sink, OnReadyToSend());
transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
transport_pair.FlushAsyncInvokes();
transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
TEST(LoopbackMediaTransport, StateChangeDeliveredToDataSink) {
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
thread->Start();
MediaTransportPair transport_pair(thread.get());
MockDataChannelSink data_channel_sink;
EXPECT_CALL(data_channel_sink, OnReadyToSend());
transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
transport_pair.SetState(MediaTransportState::kWritable);
transport_pair.FlushAsyncInvokes();
transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
} // namespace webrtc

View file

@ -19,12 +19,9 @@ namespace test {
class MockAudioMixer : public AudioMixer {
public:
MOCK_METHOD(bool, AddSource, (Source * audio_source), (override));
MOCK_METHOD(void, RemoveSource, (Source * audio_source), (override));
MOCK_METHOD(void,
Mix,
(size_t number_of_channels, AudioFrame* audio_frame_for_mixing),
(override));
MOCK_METHOD(bool, AddSource, (Source*), (override));
MOCK_METHOD(void, RemoveSource, (Source*), (override));
MOCK_METHOD(void, Mix, (size_t number_of_channels, AudioFrame*), (override));
};
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,75 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
#define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
#include <memory>
#include <string>
#include "api/peer_connection_interface.h"
#include "test/gmock.h"
namespace webrtc {
class MockPeerConnectionFactoryInterface final
: public rtc::RefCountedObject<webrtc::PeerConnectionFactoryInterface> {
public:
rtc::scoped_refptr<MockPeerConnectionFactoryInterface> Create() {
return new MockPeerConnectionFactoryInterface();
}
MOCK_METHOD(void, SetOptions, (const Options&), (override));
MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
CreatePeerConnection,
(const PeerConnectionInterface::RTCConfiguration&,
PeerConnectionDependencies),
(override));
MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
CreatePeerConnection,
(const PeerConnectionInterface::RTCConfiguration&,
std::unique_ptr<cricket::PortAllocator>,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
PeerConnectionObserver*),
(override));
MOCK_METHOD(RtpCapabilities,
GetRtpSenderCapabilities,
(cricket::MediaType),
(const override));
MOCK_METHOD(RtpCapabilities,
GetRtpReceiverCapabilities,
(cricket::MediaType),
(const override));
MOCK_METHOD(rtc::scoped_refptr<MediaStreamInterface>,
CreateLocalMediaStream,
(const std::string&),
(override));
MOCK_METHOD(rtc::scoped_refptr<AudioSourceInterface>,
CreateAudioSource,
(const cricket::AudioOptions&),
(override));
MOCK_METHOD(rtc::scoped_refptr<VideoTrackInterface>,
CreateVideoTrack,
(const std::string&, VideoTrackSourceInterface*),
(override));
MOCK_METHOD(rtc::scoped_refptr<AudioTrackInterface>,
CreateAudioTrack,
(const std::string&, AudioSourceInterface*),
(override));
MOCK_METHOD(bool, StartAecDump, (FILE*, int64_t), (override));
MOCK_METHOD(void, StopAecDump, (), (override));
protected:
MockPeerConnectionFactoryInterface() = default;
};
} // namespace webrtc
#endif // API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_

View file

@ -0,0 +1,38 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
#define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
#include <vector>
#include "api/frame_transformer_interface.h"
#include "test/gmock.h"
namespace webrtc {
class MockTransformableVideoFrame
: public webrtc::TransformableVideoFrameInterface {
public:
MOCK_METHOD(rtc::ArrayView<const uint8_t>, GetData, (), (const override));
MOCK_METHOD(void, SetData, (rtc::ArrayView<const uint8_t> data), (override));
MOCK_METHOD(uint32_t, GetTimestamp, (), (const override));
MOCK_METHOD(uint32_t, GetSsrc, (), (const, override));
MOCK_METHOD(bool, IsKeyFrame, (), (const, override));
MOCK_METHOD(std::vector<uint8_t>, GetAdditionalData, (), (const, override));
MOCK_METHOD(const webrtc::VideoFrameMetadata&,
GetMetadata,
(),
(const, override));
};
} // namespace webrtc
#endif // API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_

View file

@ -26,7 +26,7 @@ class MockVideoBitrateAllocatorFactory
CreateVideoBitrateAllocator,
(const VideoCodec&),
(override));
MOCK_METHOD(void, Die, (), ());
MOCK_METHOD(void, Die, ());
};
} // namespace webrtc

View file

@ -27,12 +27,12 @@ class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory {
MOCK_METHOD(std::vector<webrtc::SdpVideoFormat>,
GetSupportedFormats,
(),
(const override));
(const, override));
MOCK_METHOD(std::unique_ptr<webrtc::VideoDecoder>,
CreateVideoDecoder,
(const webrtc::SdpVideoFormat&),
(override));
MOCK_METHOD(void, Die, (), ());
MOCK_METHOD(void, Die, ());
};
} // namespace webrtc

View file

@ -23,8 +23,8 @@ class MockEncodedImageCallback : public EncodedImageCallback {
MOCK_METHOD(Result,
OnEncodedImage,
(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation),
const CodecSpecificInfo*,
const RTPFragmentationHeader*),
(override));
MOCK_METHOD(void, OnDroppedFrame, (DropReason reason), (override));
};
@ -33,31 +33,41 @@ class MockVideoEncoder : public VideoEncoder {
public:
MOCK_METHOD(void,
SetFecControllerOverride,
(FecControllerOverride * fec_controller_override),
(FecControllerOverride*),
(override));
MOCK_METHOD(int32_t,
InitEncode,
(const VideoCodec* codecSettings,
int32_t numberOfCores,
size_t maxPayloadSize),
(const VideoCodec*, int32_t numberOfCores, size_t maxPayloadSize),
(override));
MOCK_METHOD(int32_t,
InitEncode,
(const VideoCodec*, const VideoEncoder::Settings& settings),
(override));
MOCK_METHOD2(InitEncode,
int32_t(const VideoCodec* codecSettings,
const VideoEncoder::Settings& settings));
MOCK_METHOD2(Encode,
int32_t(const VideoFrame& inputImage,
const std::vector<VideoFrameType>* frame_types));
MOCK_METHOD1(RegisterEncodeCompleteCallback,
int32_t(EncodedImageCallback* callback));
MOCK_METHOD0(Release, int32_t());
MOCK_METHOD0(Reset, int32_t());
MOCK_METHOD1(SetRates, void(const RateControlParameters& parameters));
MOCK_METHOD1(OnPacketLossRateUpdate, void(float packet_loss_rate));
MOCK_METHOD1(OnRttUpdate, void(int64_t rtt_ms));
MOCK_METHOD1(OnLossNotification,
void(const LossNotification& loss_notification));
MOCK_CONST_METHOD0(GetEncoderInfo, EncoderInfo(void));
MOCK_METHOD(int32_t,
Encode,
(const VideoFrame& inputImage,
const std::vector<VideoFrameType>*),
(override));
MOCK_METHOD(int32_t,
RegisterEncodeCompleteCallback,
(EncodedImageCallback*),
(override));
MOCK_METHOD(int32_t, Release, (), (override));
MOCK_METHOD(void,
SetRates,
(const RateControlParameters& parameters),
(override));
MOCK_METHOD(void,
OnPacketLossRateUpdate,
(float packet_loss_rate),
(override));
MOCK_METHOD(void, OnRttUpdate, (int64_t rtt_ms), (override));
MOCK_METHOD(void,
OnLossNotification,
(const LossNotification& loss_notification),
(override));
MOCK_METHOD(EncoderInfo, GetEncoderInfo, (), (const, override));
};
} // namespace webrtc

View file

@ -27,17 +27,17 @@ class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory {
MOCK_METHOD(std::vector<SdpVideoFormat>,
GetSupportedFormats,
(),
(const override));
(const, override));
MOCK_METHOD(CodecInfo,
QueryVideoEncoder,
(const SdpVideoFormat&),
(const override));
(const, override));
MOCK_METHOD(std::unique_ptr<VideoEncoder>,
CreateVideoEncoder,
(const SdpVideoFormat&),
(override));
MOCK_METHOD(void, Die, (), ());
MOCK_METHOD(void, Die, ());
};
} // namespace webrtc

View file

@ -23,6 +23,6 @@ rtc_library("network_emulation") {
"../../units:data_rate",
"../../units:data_size",
"../../units:timestamp",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}

View file

@ -33,13 +33,13 @@
#include "api/test/simulated_network.h"
#include "api/test/stats_observer_interface.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/units/time_delta.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "media/base/media_constants.h"
#include "rtc_base/deprecation.h"
#include "rtc_base/network.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "rtc_base/ssl_certificate.h"
@ -287,8 +287,6 @@ class PeerConnectionE2EQualityTestFixture {
virtual PeerConfigurer* SetNetworkControllerFactory(
std::unique_ptr<NetworkControllerFactoryInterface>
network_controller_factory) = 0;
virtual PeerConfigurer* SetMediaTransportFactory(
std::unique_ptr<MediaTransportFactory> media_transport_factory) = 0;
virtual PeerConfigurer* SetVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> video_encoder_factory) = 0;
virtual PeerConfigurer* SetVideoDecoderFactory(
@ -337,6 +335,9 @@ class PeerConnectionE2EQualityTestFixture {
PeerConnectionInterface::RTCConfiguration configuration) = 0;
// Set bitrate parameters on PeerConnection. This constraints will be
// applied to all summed RTP streams for this peer.
virtual PeerConfigurer* SetBitrateSettings(
BitrateSettings bitrate_settings) = 0;
RTC_DEPRECATED
virtual PeerConfigurer* SetBitrateParameters(
PeerConnectionInterface::BitrateParameters bitrate_params) = 0;
};
@ -448,6 +449,12 @@ class PeerConnectionE2EQualityTestFixture {
virtual void AddPeer(rtc::Thread* network_thread,
rtc::NetworkManager* network_manager,
rtc::FunctionView<void(PeerConfigurer*)> configurer) = 0;
// Runs the media quality test, which includes setting up the call with
// configured participants, running it according to provided |run_params| and
// terminating it properly at the end. During call duration media quality
// metrics are gathered, which are then reported to stdout and (if configured)
// to the json/protobuf output file through the WebRTC perf test results
// reporting system.
virtual void Run(RunParams run_params) = 0;
// Returns real test duration - the time of test execution measured during

View file

@ -11,9 +11,8 @@
#ifndef API_TEST_STATS_OBSERVER_INTERFACE_H_
#define API_TEST_STATS_OBSERVER_INTERFACE_H_
#include <string>
#include "api/stats_types.h"
#include "absl/strings/string_view.h"
#include "api/stats/rtc_stats_report.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@ -25,8 +24,9 @@ class StatsObserverInterface {
// Method called when stats reports are available for the PeerConnection
// identified by |pc_label|.
virtual void OnStatsReports(const std::string& pc_label,
const StatsReports& reports) = 0;
virtual void OnStatsReports(
absl::string_view pc_label,
const rtc::scoped_refptr<const RTCStatsReport>& report) = 0;
};
} // namespace webrtc_pc_e2e

View file

@ -14,22 +14,24 @@
#include <utility>
#include "rtc_base/checks.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/platform_thread_types.h"
namespace webrtc {
namespace {
// This checks everything in this file gets called on the same thread. It's
// static because it needs to look at the static methods too.
rtc::ThreadChecker* GetThreadChecker() {
static rtc::ThreadChecker checker;
return &checker;
bool IsValidTestDependencyFactoryThread() {
const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef();
return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef());
}
} // namespace
std::unique_ptr<TestDependencyFactory> TestDependencyFactory::instance_ =
nullptr;
const TestDependencyFactory& TestDependencyFactory::GetInstance() {
RTC_DCHECK(GetThreadChecker()->IsCurrent());
RTC_DCHECK(IsValidTestDependencyFactoryThread());
if (instance_ == nullptr) {
instance_ = std::make_unique<TestDependencyFactory>();
}
@ -38,14 +40,14 @@ const TestDependencyFactory& TestDependencyFactory::GetInstance() {
void TestDependencyFactory::SetInstance(
std::unique_ptr<TestDependencyFactory> instance) {
RTC_DCHECK(GetThreadChecker()->IsCurrent());
RTC_DCHECK(IsValidTestDependencyFactoryThread());
RTC_CHECK(instance_ == nullptr);
instance_ = std::move(instance);
}
std::unique_ptr<VideoQualityTestFixtureInterface::InjectionComponents>
TestDependencyFactory::CreateComponents() const {
RTC_DCHECK(GetThreadChecker()->IsCurrent());
RTC_DCHECK(IsValidTestDependencyFactoryThread());
return nullptr;
}

View file

@ -14,7 +14,9 @@
#include <memory>
#include <string>
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/test/stats_observer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
@ -76,42 +78,65 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface {
// calculations. Analyzer can perform simple calculations on the calling
// thread in each method, but should remember, that it is the same thread,
// that is used in video pipeline.
virtual void Start(std::string test_case_name, int max_threads_count) {}
virtual void Start(std::string test_case_name,
rtc::ArrayView<const std::string> peer_names,
int max_threads_count) {}
// Will be called when frame was generated from the input stream.
// |peer_name| is name of the peer on which side frame was captured.
// Returns frame id, that will be set by framework to the frame.
virtual uint16_t OnFrameCaptured(const std::string& stream_label,
virtual uint16_t OnFrameCaptured(absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) = 0;
// Will be called before calling the encoder.
virtual void OnFramePreEncode(const VideoFrame& frame) {}
// |peer_name| is name of the peer on which side frame came to encoder.
virtual void OnFramePreEncode(absl::string_view peer_name,
const VideoFrame& frame) {}
// Will be called for each EncodedImage received from encoder. Single
// VideoFrame can produce multiple EncodedImages. Each encoded image will
// have id from VideoFrame.
virtual void OnFrameEncoded(uint16_t frame_id,
// |peer_name| is name of the peer on which side frame was encoded.
virtual void OnFrameEncoded(absl::string_view peer_name,
uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) {}
// Will be called for each frame dropped by encoder.
virtual void OnFrameDropped(EncodedImageCallback::DropReason reason) {}
// |peer_name| is name of the peer on which side frame drop was detected.
virtual void OnFrameDropped(absl::string_view peer_name,
EncodedImageCallback::DropReason reason) {}
// Will be called before calling the decoder.
virtual void OnFramePreDecode(uint16_t frame_id,
// |peer_name| is name of the peer on which side frame was received.
virtual void OnFramePreDecode(absl::string_view peer_name,
uint16_t frame_id,
const EncodedImage& encoded_image) {}
// Will be called after decoding the frame.
virtual void OnFrameDecoded(const VideoFrame& frame,
// |peer_name| is name of the peer on which side frame was decoded.
virtual void OnFrameDecoded(absl::string_view peer_name,
const VideoFrame& frame,
const DecoderStats& stats) {}
// Will be called when frame will be obtained from PeerConnection stack.
virtual void OnFrameRendered(const VideoFrame& frame) {}
// |peer_name| is name of the peer on which side frame was rendered.
virtual void OnFrameRendered(absl::string_view peer_name,
const VideoFrame& frame) {}
// Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
virtual void OnEncoderError(const VideoFrame& frame, int32_t error_code) {}
// |peer_name| is name of the peer on which side error acquired.
virtual void OnEncoderError(absl::string_view peer_name,
const VideoFrame& frame,
int32_t error_code) {}
// Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
virtual void OnDecoderError(uint16_t frame_id, int32_t error_code) {}
// |peer_name| is name of the peer on which side error acquired.
virtual void OnDecoderError(absl::string_view peer_name,
uint16_t frame_id,
int32_t error_code) {}
// Will be called every time new stats reports are available for the
// Peer Connection identified by |pc_label|.
void OnStatsReports(const std::string& pc_label,
const StatsReports& stats_reports) override {}
void OnStatsReports(
absl::string_view pc_label,
const rtc::scoped_refptr<const RTCStatsReport>& report) override {}
// Tells analyzer that analysis complete and it should calculate final
// statistics.

View file

@ -14,10 +14,8 @@ rtc_library("bitrate_settings") {
"bitrate_settings.cc",
"bitrate_settings.h",
]
deps = [
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
deps = [ "../../rtc_base/system:rtc_export" ]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("enums") {
@ -41,6 +39,8 @@ rtc_library("network_control") {
"../units:data_size",
"../units:time_delta",
"../units:timestamp",
]
absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -49,10 +49,8 @@ rtc_library("network_control") {
rtc_source_set("webrtc_key_value_config") {
visibility = [ "*" ]
sources = [ "webrtc_key_value_config.h" ]
deps = [
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/strings",
]
deps = [ "../../rtc_base/system:rtc_export" ]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("field_trial_based_config") {
@ -64,26 +62,20 @@ rtc_library("field_trial_based_config") {
deps = [
":webrtc_key_value_config",
"../../system_wrappers:field_trial",
"//third_party/abseil-cpp/absl/strings",
]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
# TODO(nisse): Rename?
rtc_source_set("datagram_transport_interface") {
visibility = [ "*" ]
sources = [
"congestion_control_interface.h",
"data_channel_transport_interface.h",
"datagram_transport_interface.h",
]
sources = [ "data_channel_transport_interface.h" ]
deps = [
":network_control",
"..:array_view",
"..:rtc_error",
"../../rtc_base:rtc_base_approved",
"../units:data_rate",
"../units:timestamp",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("goog_cc") {

View file

@ -1,75 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media and datagram transports.
#ifndef API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
#define API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
#include <memory>
#include <string>
#include <utility>
#include "api/transport/network_control.h"
#include "api/units/data_rate.h"
namespace webrtc {
// TODO(nisse): Defined together with MediaTransportInterface. But we should use
// types that aren't tied to media, so that MediaTransportInterface can depend
// on CongestionControlInterface, but not the other way around.
// api/transport/network_control.h may be a reasonable place.
class MediaTransportRttObserver;
struct MediaTransportAllocatedBitrateLimits;
struct MediaTransportTargetRateConstraints;
// Defines congestion control feedback interface for media and datagram
// transports.
class CongestionControlInterface {
public:
virtual ~CongestionControlInterface() = default;
// Updates allocation limits.
virtual void SetAllocatedBitrateLimits(
const MediaTransportAllocatedBitrateLimits& limits) = 0;
// Sets starting rate.
virtual void SetTargetBitrateLimits(
const MediaTransportTargetRateConstraints& target_rate_constraints) = 0;
// Intended for receive side. AddRttObserver registers an observer to be
// called for each RTT measurement, typically once per ACK. Before media
// transport is destructed the observer must be unregistered.
//
// TODO(sukhanov): Looks like AddRttObserver and RemoveRttObserver were
// never implemented for media transport, so keeping noop implementation.
virtual void AddRttObserver(MediaTransportRttObserver* observer) {}
virtual void RemoveRttObserver(MediaTransportRttObserver* observer) {}
// Adds a target bitrate observer. Before media transport is destructed
// the observer must be unregistered (by calling
// RemoveTargetTransferRateObserver).
// A newly registered observer will be called back with the latest recorded
// target rate, if available.
virtual void AddTargetTransferRateObserver(
TargetTransferRateObserver* observer) = 0;
// Removes an existing |observer| from observers. If observer was never
// registered, an error is logged and method does nothing.
virtual void RemoveTargetTransferRateObserver(
TargetTransferRateObserver* observer) = 0;
// Returns the last known target transfer rate as reported to the above
// observers.
virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate() = 0;
};
} // namespace webrtc
#endif // API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_

View file

@ -35,8 +35,8 @@ enum class DataMessageType {
// sent reliably and in-order, even if the data channel is configured for
// unreliable delivery.
struct SendDataParams {
SendDataParams();
SendDataParams(const SendDataParams&);
SendDataParams() = default;
SendDataParams(const SendDataParams&) = default;
DataMessageType type = DataMessageType::kText;

View file

@ -1,151 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media and datagram transports.
#ifndef API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
#define API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
#include <memory>
#include <string>
#include <utility>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/rtc_error.h"
#include "api/transport/congestion_control_interface.h"
#include "api/transport/data_channel_transport_interface.h"
#include "api/units/data_rate.h"
#include "api/units/timestamp.h"
namespace rtc {
class PacketTransportInternal;
} // namespace rtc
namespace webrtc {
class MediaTransportStateCallback;
typedef int64_t DatagramId;
struct DatagramAck {
// |datagram_id| is same as passed in
// DatagramTransportInterface::SendDatagram.
DatagramId datagram_id;
// The timestamp at which the remote peer received the identified datagram,
// according to that peer's clock.
Timestamp receive_timestamp = Timestamp::MinusInfinity();
};
// All sink methods are called on network thread.
class DatagramSinkInterface {
public:
virtual ~DatagramSinkInterface() {}
// Called when new packet is received.
virtual void OnDatagramReceived(rtc::ArrayView<const uint8_t> data) = 0;
// Called when datagram is actually sent (datragram can be delayed due
// to congestion control or fusing). |datagram_id| is same as passed in
// DatagramTransportInterface::SendDatagram.
virtual void OnDatagramSent(DatagramId datagram_id) = 0;
// Called when datagram is ACKed.
virtual void OnDatagramAcked(const DatagramAck& datagram_ack) = 0;
// Called when a datagram is lost.
virtual void OnDatagramLost(DatagramId datagram_id) = 0;
};
// Datagram transport allows to send and receive unreliable packets (datagrams)
// and receive feedback from congestion control (via
// CongestionControlInterface). The idea is to send RTP packets as datagrams and
// have underlying implementation of datagram transport to use QUIC datagram
// protocol.
class DatagramTransportInterface : public DataChannelTransportInterface {
public:
virtual ~DatagramTransportInterface() = default;
// Connect the datagram transport to the ICE transport.
// The implementation must be able to ignore incoming packets that don't
// belong to it.
virtual void Connect(rtc::PacketTransportInternal* packet_transport) = 0;
// Returns congestion control feedback interface or nullptr if datagram
// transport does not implement congestion control.
//
// Note that right now datagram transport is used without congestion control,
// but we plan to use it in the future.
virtual CongestionControlInterface* congestion_control() = 0;
// Sets a state observer callback. Before datagram transport is destroyed, the
// callback must be unregistered by setting it to nullptr.
// A newly registered callback will be called with the current state.
// Datagram transport does not invoke this callback concurrently.
virtual void SetTransportStateCallback(
MediaTransportStateCallback* callback) = 0;
// Start asynchronous send of datagram. The status returned by this method
// only pertains to the synchronous operations (e.g. serialization /
// packetization), not to the asynchronous operation.
//
// Datagrams larger than GetLargestDatagramSize() will fail and return error.
//
// Datagrams are sent in FIFO order.
//
// |datagram_id| is only used in ACK/LOST notifications in
// DatagramSinkInterface and does not need to be unique.
virtual RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) = 0;
// Returns maximum size of datagram message, does not change.
// TODO(sukhanov): Because value may be undefined before connection setup
// is complete, consider returning error when called before connection is
// established. Currently returns hardcoded const, because integration
// prototype may call before connection is established.
virtual size_t GetLargestDatagramSize() const = 0;
// Sets packet sink. Sink must be unset by calling
// SetDataTransportSink(nullptr) before the data transport is destroyed or
// before new sink is set.
virtual void SetDatagramSink(DatagramSinkInterface* sink) = 0;
// Retrieves transport parameters for this datagram transport. May be called
// on either client- or server-perspective transports.
//
// For servers, the parameters represent what kind of connections and data the
// server is prepared to accept. This is generally a superset of acceptable
// parameters.
//
// For clients, the parameters echo the server configuration used to create
// the client, possibly removing any fields or parameters which the client
// does not understand.
virtual std::string GetTransportParameters() const = 0;
// Sets remote transport parameters. |remote_params| is a serialized string
// of opaque parameters, understood by the datagram transport implementation.
// Returns an error if |remote_params| are not compatible with this transport.
//
// TODO(mellem): Make pure virtual. The default implementation maintains
// original negotiation behavior (negotiation falls back to RTP if the
// remote datagram transport fails to echo exactly the local parameters).
virtual RTCError SetRemoteTransportParameters(
absl::string_view remote_params) {
if (remote_params == GetTransportParameters()) {
return RTCError::OK();
}
return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
"Local and remote transport parameters do not match");
}
};
} // namespace webrtc
#endif // API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_

View file

@ -1,52 +0,0 @@
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../../webrtc.gni")
rtc_library("media_transport_interface") {
visibility = [ "*" ]
sources = [
"media_transport_config.cc",
"media_transport_config.h",
"media_transport_interface.cc",
"media_transport_interface.h",
]
deps = [
":audio_interfaces",
":video_interfaces",
"..:datagram_transport_interface",
"..:network_control",
"../..:array_view",
"../..:rtc_error",
"../../..:webrtc_common",
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:stringutils",
"../../units:data_rate",
"//third_party/abseil-cpp/absl/types:optional",
]
}
rtc_library("audio_interfaces") {
visibility = [ "*" ]
sources = [
"audio_transport.cc",
"audio_transport.h",
]
deps = [ "../..:array_view" ]
}
rtc_library("video_interfaces") {
visibility = [ "*" ]
sources = [
"video_transport.cc",
"video_transport.h",
]
deps = [ "../../video:encoded_image" ]
}

View file

@ -1,54 +0,0 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#include "api/transport/media/audio_transport.h"
#include <utility>
namespace webrtc {
MediaTransportEncodedAudioFrame::~MediaTransportEncodedAudioFrame() {}
MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
int sampling_rate_hz,
int starting_sample_index,
int samples_per_channel,
int sequence_number,
FrameType frame_type,
int payload_type,
std::vector<uint8_t> encoded_data)
: sampling_rate_hz_(sampling_rate_hz),
starting_sample_index_(starting_sample_index),
samples_per_channel_(samples_per_channel),
sequence_number_(sequence_number),
frame_type_(frame_type),
payload_type_(payload_type),
encoded_data_(std::move(encoded_data)) {}
MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
const MediaTransportEncodedAudioFrame&) = default;
MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
MediaTransportEncodedAudioFrame&&) = default;
MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
const MediaTransportEncodedAudioFrame&) = default;
MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
MediaTransportEncodedAudioFrame&&) = default;
} // namespace webrtc

View file

@ -1,120 +0,0 @@
/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#ifndef API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
#define API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
#include <vector>
#include "api/array_view.h"
namespace webrtc {
// Represents encoded audio frame in any encoding (type of encoding is opaque).
// To avoid copying of encoded data use move semantics when passing by value.
class MediaTransportEncodedAudioFrame final {
public:
enum class FrameType {
// Normal audio frame (equivalent to webrtc::kAudioFrameSpeech).
kSpeech,
// DTX frame (equivalent to webrtc::kAudioFrameCN).
kDiscontinuousTransmission,
// TODO(nisse): Mis-spelled version, update users, then delete.
kDiscountinuousTransmission = kDiscontinuousTransmission,
};
MediaTransportEncodedAudioFrame(
// Audio sampling rate, for example 48000.
int sampling_rate_hz,
// Starting sample index of the frame, i.e. how many audio samples were
// before this frame since the beginning of the call or beginning of time
// in one channel (the starting point should not matter for NetEq). In
// WebRTC it is used as a timestamp of the frame.
// TODO(sukhanov): Starting_sample_index is currently adjusted on the
// receiver side in RTP path. Non-RTP implementations should preserve it.
// For NetEq initial offset should not matter so we should consider fixing
// RTP path.
int starting_sample_index,
// Number of audio samples in audio frame in 1 channel.
int samples_per_channel,
// Sequence number of the frame in the order sent, it is currently
// required by NetEq, but we can fix NetEq, because starting_sample_index
// should be enough.
int sequence_number,
// If audio frame is a speech or discontinued transmission.
FrameType frame_type,
// Opaque payload type. In RTP codepath payload type is stored in RTP
// header. In other implementations it should be simply passed through the
// wire -- it's needed for decoder.
int payload_type,
// Vector with opaque encoded data.
std::vector<uint8_t> encoded_data);
~MediaTransportEncodedAudioFrame();
MediaTransportEncodedAudioFrame(const MediaTransportEncodedAudioFrame&);
MediaTransportEncodedAudioFrame& operator=(
const MediaTransportEncodedAudioFrame& other);
MediaTransportEncodedAudioFrame& operator=(
MediaTransportEncodedAudioFrame&& other);
MediaTransportEncodedAudioFrame(MediaTransportEncodedAudioFrame&&);
// Getters.
int sampling_rate_hz() const { return sampling_rate_hz_; }
int starting_sample_index() const { return starting_sample_index_; }
int samples_per_channel() const { return samples_per_channel_; }
int sequence_number() const { return sequence_number_; }
int payload_type() const { return payload_type_; }
FrameType frame_type() const { return frame_type_; }
rtc::ArrayView<const uint8_t> encoded_data() const { return encoded_data_; }
private:
int sampling_rate_hz_;
int starting_sample_index_;
int samples_per_channel_;
// TODO(sukhanov): Refactor NetEq so we don't need sequence number.
// Having sample_index and samples_per_channel should be enough.
int sequence_number_;
FrameType frame_type_;
int payload_type_;
std::vector<uint8_t> encoded_data_;
};
// Interface for receiving encoded audio frames from MediaTransportInterface
// implementations.
class MediaTransportAudioSinkInterface {
public:
virtual ~MediaTransportAudioSinkInterface() = default;
// Called when new encoded audio frame is received.
virtual void OnData(uint64_t channel_id,
MediaTransportEncodedAudioFrame frame) = 0;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_

View file

@ -1,29 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/transport/media/media_transport_config.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
MediaTransportConfig::MediaTransportConfig(size_t rtp_max_packet_size)
: rtp_max_packet_size(rtp_max_packet_size) {
RTC_DCHECK_GT(rtp_max_packet_size, 0);
}
std::string MediaTransportConfig::DebugString() const {
rtc::StringBuilder result;
result << "{rtp_max_packet_size: " << rtp_max_packet_size.value_or(0) << "}";
return result.Release();
}
} // namespace webrtc

View file

@ -1,38 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
#include <memory>
#include <string>
#include <utility>
#include "absl/types/optional.h"
namespace webrtc {
// Media transport config is made available to both transport and audio / video
// layers, but access to individual interfaces should not be open without
// necessity.
struct MediaTransportConfig {
// Default constructor for no-media transport scenarios.
MediaTransportConfig() = default;
// Constructor for datagram transport scenarios.
explicit MediaTransportConfig(size_t rtp_max_packet_size);
std::string DebugString() const;
// If provided, limits RTP packet size (excludes ICE, IP or network overhead).
absl::optional<size_t> rtp_max_packet_size;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_

View file

@ -1,108 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#include "api/transport/media/media_transport_interface.h"
#include <cstdint>
#include <utility>
#include "api/transport/datagram_transport_interface.h"
namespace webrtc {
MediaTransportSettings::MediaTransportSettings() = default;
MediaTransportSettings::MediaTransportSettings(const MediaTransportSettings&) =
default;
MediaTransportSettings& MediaTransportSettings::operator=(
const MediaTransportSettings&) = default;
MediaTransportSettings::~MediaTransportSettings() = default;
SendDataParams::SendDataParams() = default;
SendDataParams::SendDataParams(const SendDataParams&) = default;
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
MediaTransportFactory::CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return std::unique_ptr<MediaTransportInterface>(nullptr);
}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
MediaTransportFactory::CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return std::unique_ptr<MediaTransportInterface>(nullptr);
}
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
MediaTransportFactory::CreateDatagramTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return std::unique_ptr<DatagramTransportInterface>(nullptr);
}
std::string MediaTransportFactory::GetTransportName() const {
return "";
}
MediaTransportInterface::MediaTransportInterface() = default;
MediaTransportInterface::~MediaTransportInterface() = default;
absl::optional<std::string>
MediaTransportInterface::GetTransportParametersOffer() const {
return absl::nullopt;
}
void MediaTransportInterface::Connect(
rtc::PacketTransportInternal* packet_transport) {}
void MediaTransportInterface::SetKeyFrameRequestCallback(
MediaTransportKeyFrameRequestCallback* callback) {}
absl::optional<TargetTransferRate>
MediaTransportInterface::GetLatestTargetTransferRate() {
return absl::nullopt;
}
void MediaTransportInterface::AddNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback) {}
void MediaTransportInterface::RemoveNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback) {}
void MediaTransportInterface::SetFirstAudioPacketReceivedObserver(
AudioPacketReceivedObserver* observer) {}
void MediaTransportInterface::AddTargetTransferRateObserver(
TargetTransferRateObserver* observer) {}
void MediaTransportInterface::RemoveTargetTransferRateObserver(
TargetTransferRateObserver* observer) {}
void MediaTransportInterface::AddRttObserver(
MediaTransportRttObserver* observer) {}
void MediaTransportInterface::RemoveRttObserver(
MediaTransportRttObserver* observer) {}
size_t MediaTransportInterface::GetAudioPacketOverhead() const {
return 0;
}
void MediaTransportInterface::SetAllocatedBitrateLimits(
const MediaTransportAllocatedBitrateLimits& limits) {}
} // namespace webrtc

View file

@ -1,320 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
#include <memory>
#include <string>
#include <utility>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/rtc_error.h"
#include "api/transport/data_channel_transport_interface.h"
#include "api/transport/media/audio_transport.h"
#include "api/transport/media/video_transport.h"
#include "api/transport/network_control.h"
#include "api/units/data_rate.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/network_route.h"
namespace rtc {
class PacketTransportInternal;
class Thread;
} // namespace rtc
namespace webrtc {
class DatagramTransportInterface;
class RtcEventLog;
class AudioPacketReceivedObserver {
public:
virtual ~AudioPacketReceivedObserver() = default;
// Invoked for the first received audio packet on a given channel id.
// It will be invoked once for each channel id.
virtual void OnFirstAudioPacketReceived(int64_t channel_id) = 0;
};
// Used to configure stream allocations.
struct MediaTransportAllocatedBitrateLimits {
DataRate min_pacing_rate = DataRate::Zero();
DataRate max_padding_bitrate = DataRate::Zero();
DataRate max_total_allocated_bitrate = DataRate::Zero();
};
// Used to configure target bitrate constraints.
// If the value is provided, the constraint is updated.
// If the value is omitted, the value is left unchanged.
struct MediaTransportTargetRateConstraints {
absl::optional<DataRate> min_bitrate;
absl::optional<DataRate> max_bitrate;
absl::optional<DataRate> starting_bitrate;
};
// A collection of settings for creation of media transport.
struct MediaTransportSettings final {
MediaTransportSettings();
MediaTransportSettings(const MediaTransportSettings&);
MediaTransportSettings& operator=(const MediaTransportSettings&);
~MediaTransportSettings();
// Group calls are not currently supported, in 1:1 call one side must set
// is_caller = true and another is_caller = false.
bool is_caller;
// Must be set if a pre-shared key is used for the call.
// TODO(bugs.webrtc.org/9944): This should become zero buffer in the distant
// future.
absl::optional<std::string> pre_shared_key;
// If present, this is a config passed from the caller to the answerer in the
// offer. Each media transport knows how to understand its own parameters.
absl::optional<std::string> remote_transport_parameters;
// If present, provides the event log that media transport should use.
// Media transport does not own it. The lifetime of |event_log| will exceed
// the lifetime of the instance of MediaTransportInterface instance.
RtcEventLog* event_log = nullptr;
};
// Callback to notify about network route changes.
class MediaTransportNetworkChangeCallback {
public:
virtual ~MediaTransportNetworkChangeCallback() = default;
// Called when the network route is changed, with the new network route.
virtual void OnNetworkRouteChanged(
const rtc::NetworkRoute& new_network_route) = 0;
};
// State of the media transport. Media transport begins in the pending state.
// It transitions to writable when it is ready to send media. It may transition
// back to pending if the connection is blocked. It may transition to closed at
// any time. Closed is terminal: a transport will never re-open once closed.
enum class MediaTransportState {
kPending,
kWritable,
kClosed,
};
// Callback invoked whenever the state of the media transport changes.
class MediaTransportStateCallback {
public:
virtual ~MediaTransportStateCallback() = default;
// Invoked whenever the state of the media transport changes.
virtual void OnStateChanged(MediaTransportState state) = 0;
};
// Callback for RTT measurements on the receive side.
// TODO(nisse): Related interfaces: CallStatsObserver and RtcpRttStats. It's
// somewhat unclear what type of measurement is needed. It's used to configure
// NACK generation and playout buffer. Either raw measurement values or recent
// maximum would make sense for this use. Need consolidation of RTT signalling.
class MediaTransportRttObserver {
public:
virtual ~MediaTransportRttObserver() = default;
// Invoked when a new RTT measurement is available, typically once per ACK.
virtual void OnRttUpdated(int64_t rtt_ms) = 0;
};
// Media transport interface for sending / receiving encoded audio/video frames
// and receiving bandwidth estimate update from congestion control.
class MediaTransportInterface : public DataChannelTransportInterface {
public:
MediaTransportInterface();
virtual ~MediaTransportInterface();
// Retrieves callers config (i.e. media transport offer) that should be passed
// to the callee, before the call is connected. Such config is opaque to SDP
// (sdp just passes it through). The config is a binary blob, so SDP may
// choose to use base64 to serialize it (or any other approach that guarantees
// that the binary blob goes through). This should only be called for the
// caller's perspective.
//
// This may return an unset optional, which means that the given media
// transport is not supported / disabled and shouldn't be reported in SDP.
//
// It may also return an empty string, in which case the media transport is
// supported, but without any extra settings.
// TODO(psla): Make abstract.
virtual absl::optional<std::string> GetTransportParametersOffer() const;
// Connect the media transport to the ICE transport.
// The implementation must be able to ignore incoming packets that don't
// belong to it.
// TODO(psla): Make abstract.
virtual void Connect(rtc::PacketTransportInternal* packet_transport);
// Start asynchronous send of audio frame. The status returned by this method
// only pertains to the synchronous operations (e.g.
// serialization/packetization), not to the asynchronous operation.
virtual RTCError SendAudioFrame(uint64_t channel_id,
MediaTransportEncodedAudioFrame frame) = 0;
// Start asynchronous send of video frame. The status returned by this method
// only pertains to the synchronous operations (e.g.
// serialization/packetization), not to the asynchronous operation.
virtual RTCError SendVideoFrame(
uint64_t channel_id,
const MediaTransportEncodedVideoFrame& frame) = 0;
// Used by video sender to be notified on key frame requests.
virtual void SetKeyFrameRequestCallback(
MediaTransportKeyFrameRequestCallback* callback);
// Requests a keyframe for the particular channel (stream). The caller should
// check that the keyframe is not present in a jitter buffer already (i.e.
// don't request a keyframe if there is one that you will get from the jitter
// buffer in a moment).
virtual RTCError RequestKeyFrame(uint64_t channel_id) = 0;
// Sets audio sink. Sink must be unset by calling SetReceiveAudioSink(nullptr)
// before the media transport is destroyed or before new sink is set.
virtual void SetReceiveAudioSink(MediaTransportAudioSinkInterface* sink) = 0;
// Registers a video sink. Before destruction of media transport, you must
// pass a nullptr.
virtual void SetReceiveVideoSink(MediaTransportVideoSinkInterface* sink) = 0;
// Adds a target bitrate observer. Before media transport is destructed
// the observer must be unregistered (by calling
// RemoveTargetTransferRateObserver).
// A newly registered observer will be called back with the latest recorded
// target rate, if available.
virtual void AddTargetTransferRateObserver(
TargetTransferRateObserver* observer);
// Removes an existing |observer| from observers. If observer was never
// registered, an error is logged and method does nothing.
virtual void RemoveTargetTransferRateObserver(
TargetTransferRateObserver* observer);
// Sets audio packets observer, which gets informed about incoming audio
// packets. Before destruction, the observer must be unregistered by setting
// nullptr.
//
// This method may be temporary, when the multiplexer is implemented (or
// multiplexer may use it to demultiplex channel ids).
virtual void SetFirstAudioPacketReceivedObserver(
AudioPacketReceivedObserver* observer);
// Intended for receive side. AddRttObserver registers an observer to be
// called for each RTT measurement, typically once per ACK. Before media
// transport is destructed the observer must be unregistered.
virtual void AddRttObserver(MediaTransportRttObserver* observer);
virtual void RemoveRttObserver(MediaTransportRttObserver* observer);
// Returns the last known target transfer rate as reported to the above
// observers.
virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate();
// Gets the audio packet overhead in bytes. Returned overhead does not include
// transport overhead (ipv4/6, turn channeldata, tcp/udp, etc.).
// If the transport is capable of fusing packets together, this overhead
// might not be a very accurate number.
// TODO(nisse): Deprecated.
virtual size_t GetAudioPacketOverhead() const;
// Corresponding observers for audio and video overhead. Before destruction,
// the observers must be unregistered by setting nullptr.
// Registers an observer for network change events. If the network route is
// already established when the callback is added, |callback| will be called
// immediately with the current network route. Before media transport is
// destroyed, the callback must be removed.
virtual void AddNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback);
virtual void RemoveNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback);
// Sets a state observer callback. Before media transport is destroyed, the
// callback must be unregistered by setting it to nullptr.
// A newly registered callback will be called with the current state.
// Media transport does not invoke this callback concurrently.
virtual void SetMediaTransportStateCallback(
MediaTransportStateCallback* callback) = 0;
// Updates allocation limits.
// TODO(psla): Make abstract when downstream implementation implement it.
virtual void SetAllocatedBitrateLimits(
const MediaTransportAllocatedBitrateLimits& limits);
// Sets starting rate.
// TODO(psla): Make abstract when downstream implementation implement it.
virtual void SetTargetBitrateLimits(
const MediaTransportTargetRateConstraints& target_rate_constraints) {}
// TODO(sukhanov): RtcEventLogs.
};
// If media transport factory is set in peer connection factory, it will be
// used to create media transport for sending/receiving encoded frames and
// this transport will be used instead of default RTP/SRTP transport.
//
// Currently Media Transport negotiation is not supported in SDP.
// If application is using media transport, it must negotiate it before
// setting media transport factory in peer connection.
class MediaTransportFactory {
public:
virtual ~MediaTransportFactory() = default;
// Creates media transport.
// - Does not take ownership of packet_transport or network_thread.
// - Does not support group calls, in 1:1 call one side must set
// is_caller = true and another is_caller = false.
virtual RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
CreateMediaTransport(rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings);
// Creates a new Media Transport in a disconnected state. If the media
// transport for the caller is created, one can then call
// MediaTransportInterface::GetTransportParametersOffer on that new instance.
// TODO(psla): Make abstract.
virtual RTCErrorOr<std::unique_ptr<webrtc::MediaTransportInterface>>
CreateMediaTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings);
// Creates a new Datagram Transport in a disconnected state. If the datagram
// transport for the caller is created, one can then call
// DatagramTransportInterface::GetTransportParametersOffer on that new
// instance.
//
// TODO(sukhanov): Consider separating media and datagram transport factories.
// TODO(sukhanov): Move factory to a separate .h file.
virtual RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
CreateDatagramTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings);
// Gets a transport name which is supported by the implementation.
// Different factories should return different transport names, and at runtime
// it will be checked that different names were used.
// For example, "rtp" or "generic" may be returned by two different
// implementations.
// The value returned by this method must never change in the lifetime of the
// factory.
// TODO(psla): Make abstract.
virtual std::string GetTransportName() const;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_

View file

@ -1,56 +0,0 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#include "api/transport/media/video_transport.h"
#include <utility>
namespace webrtc {
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame() = default;
MediaTransportEncodedVideoFrame::~MediaTransportEncodedVideoFrame() = default;
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
int64_t frame_id,
std::vector<int64_t> referenced_frame_ids,
int payload_type,
const webrtc::EncodedImage& encoded_image)
: payload_type_(payload_type),
encoded_image_(encoded_image),
frame_id_(frame_id),
referenced_frame_ids_(std::move(referenced_frame_ids)) {}
MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
const MediaTransportEncodedVideoFrame&) = default;
MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
MediaTransportEncodedVideoFrame&&) = default;
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
const MediaTransportEncodedVideoFrame& o)
: MediaTransportEncodedVideoFrame() {
*this = o;
}
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
MediaTransportEncodedVideoFrame&& o)
: MediaTransportEncodedVideoFrame() {
*this = std::move(o);
}
} // namespace webrtc

View file

@ -1,101 +0,0 @@
/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#ifndef API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
#define API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
#include <vector>
#include "api/video/encoded_image.h"
namespace webrtc {
// Represents encoded video frame, along with the codec information.
class MediaTransportEncodedVideoFrame final {
public:
MediaTransportEncodedVideoFrame(int64_t frame_id,
std::vector<int64_t> referenced_frame_ids,
int payload_type,
const webrtc::EncodedImage& encoded_image);
~MediaTransportEncodedVideoFrame();
MediaTransportEncodedVideoFrame(const MediaTransportEncodedVideoFrame&);
MediaTransportEncodedVideoFrame& operator=(
const MediaTransportEncodedVideoFrame& other);
MediaTransportEncodedVideoFrame& operator=(
MediaTransportEncodedVideoFrame&& other);
MediaTransportEncodedVideoFrame(MediaTransportEncodedVideoFrame&&);
int payload_type() const { return payload_type_; }
const webrtc::EncodedImage& encoded_image() const { return encoded_image_; }
int64_t frame_id() const { return frame_id_; }
const std::vector<int64_t>& referenced_frame_ids() const {
return referenced_frame_ids_;
}
// Hack to workaround lack of ownership of the EncodedImage buffer. If we
// don't already own the underlying data, make a copy.
void Retain() { encoded_image_.Retain(); }
private:
MediaTransportEncodedVideoFrame();
int payload_type_;
// The buffer is not always owned by the encoded image. On the sender it means
// that it will need to make a copy using the Retain() method, if it wants to
// deliver it asynchronously.
webrtc::EncodedImage encoded_image_;
// Frame id uniquely identifies a frame in a stream. It needs to be unique in
// a given time window (i.e. technically unique identifier for the lifetime of
// the connection is not needed, but you need to guarantee that remote side
// got rid of the previous frame_id if you plan to reuse it).
//
// It is required by a remote jitter buffer, and is the same as
// EncodedFrame::id::picture_id.
//
// This data must be opaque to the media transport, and media transport should
// itself not make any assumptions about what it is and its uniqueness.
int64_t frame_id_;
// A single frame might depend on other frames. This is set of identifiers on
// which the current frame depends.
std::vector<int64_t> referenced_frame_ids_;
};
// Interface for receiving encoded video frames from MediaTransportInterface
// implementations.
class MediaTransportVideoSinkInterface {
public:
virtual ~MediaTransportVideoSinkInterface() = default;
// Called when new encoded video frame is received.
virtual void OnData(uint64_t channel_id,
MediaTransportEncodedVideoFrame frame) = 0;
};
// Interface for video sender to be notified of received key frame request.
class MediaTransportKeyFrameRequestCallback {
public:
virtual ~MediaTransportKeyFrameRequestCallback() = default;
// Called when a key frame request is received on the transport.
virtual void OnKeyFrameRequested(uint64_t channel_id) = 0;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_

View file

@ -14,15 +14,20 @@ rtc_source_set("rtp_source") {
deps = [
"../../../api:rtp_headers",
"../../../rtc_base:checks",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("dependency_descriptor") {
visibility = [ "*" ]
sources = [ "dependency_descriptor.h" ]
deps = [
sources = [
"dependency_descriptor.cc",
"dependency_descriptor.h",
]
deps = [ "../../../rtc_base:checks" ]
absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}

View file

@ -0,0 +1,54 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/transport/rtp/dependency_descriptor.h"
#include "absl/container/inlined_vector.h"
#include "absl/strings/string_view.h"
#include "rtc_base/checks.h"
namespace webrtc {
constexpr int DependencyDescriptor::kMaxSpatialIds;
constexpr int DependencyDescriptor::kMaxTemporalIds;
constexpr int DependencyDescriptor::kMaxTemplates;
constexpr int DependencyDescriptor::kMaxDecodeTargets;
namespace webrtc_impl {
absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
absl::string_view symbols) {
absl::InlinedVector<DecodeTargetIndication, 10> dtis;
dtis.reserve(symbols.size());
for (char symbol : symbols) {
DecodeTargetIndication indication;
switch (symbol) {
case '-':
indication = DecodeTargetIndication::kNotPresent;
break;
case 'D':
indication = DecodeTargetIndication::kDiscardable;
break;
case 'R':
indication = DecodeTargetIndication::kRequired;
break;
case 'S':
indication = DecodeTargetIndication::kSwitch;
break;
default:
RTC_NOTREACHED();
}
dtis.push_back(indication);
}
return dtis;
}
} // namespace webrtc_impl
} // namespace webrtc

View file

@ -13,10 +13,12 @@
#include <stdint.h>
#include <initializer_list>
#include <memory>
#include <vector>
#include "absl/container/inlined_vector.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
namespace webrtc {
@ -52,6 +54,13 @@ enum class DecodeTargetIndication {
};
struct FrameDependencyTemplate {
// Setters are named briefly to chain them when building the template.
FrameDependencyTemplate& S(int spatial_layer);
FrameDependencyTemplate& T(int temporal_layer);
FrameDependencyTemplate& Dtis(absl::string_view dtis);
FrameDependencyTemplate& FrameDiffs(std::initializer_list<int> diffs);
FrameDependencyTemplate& ChainDiffs(std::initializer_list<int> diffs);
friend bool operator==(const FrameDependencyTemplate& lhs,
const FrameDependencyTemplate& rhs) {
return lhs.spatial_id == rhs.spatial_id &&
@ -90,6 +99,11 @@ struct FrameDependencyStructure {
};
struct DependencyDescriptor {
static constexpr int kMaxSpatialIds = 4;
static constexpr int kMaxTemporalIds = 8;
static constexpr int kMaxDecodeTargets = 32;
static constexpr int kMaxTemplates = 64;
bool first_packet_in_frame = true;
bool last_packet_in_frame = true;
int frame_number = 0;
@ -99,6 +113,37 @@ struct DependencyDescriptor {
std::unique_ptr<FrameDependencyStructure> attached_structure;
};
// Below are implementation details.
namespace webrtc_impl {
absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
absl::string_view indication_symbols);
} // namespace webrtc_impl
inline FrameDependencyTemplate& FrameDependencyTemplate::S(int spatial_layer) {
this->spatial_id = spatial_layer;
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::T(int temporal_layer) {
this->temporal_id = temporal_layer;
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::Dtis(
absl::string_view dtis) {
this->decode_target_indications =
webrtc_impl::StringToDecodeTargetIndications(dtis);
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::FrameDiffs(
std::initializer_list<int> diffs) {
this->frame_diffs.assign(diffs.begin(), diffs.end());
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::ChainDiffs(
std::initializer_list<int> diffs) {
this->chain_diffs.assign(diffs.begin(), diffs.end());
return *this;
}
} // namespace webrtc
#endif // API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_

View file

@ -18,11 +18,16 @@ namespace webrtc {
class MockNetworkStateEstimator : public NetworkStateEstimator {
public:
MOCK_METHOD0(GetCurrentEstimate, absl::optional<NetworkStateEstimate>());
MOCK_METHOD1(OnTransportPacketsFeedback,
void(const TransportPacketsFeedback&));
MOCK_METHOD1(OnReceivedPacket, void(const PacketResult&));
MOCK_METHOD1(OnRouteChange, void(const NetworkRouteChange&));
MOCK_METHOD(absl::optional<NetworkStateEstimate>,
GetCurrentEstimate,
(),
(override));
MOCK_METHOD(void,
OnTransportPacketsFeedback,
(const TransportPacketsFeedback&),
(override));
MOCK_METHOD(void, OnReceivedPacket, (const PacketResult&), (override));
MOCK_METHOD(void, OnRouteChange, (const NetworkRouteChange&), (override));
};
} // namespace webrtc

View file

@ -175,7 +175,7 @@ TEST(UnitConversionTest, DataRateAndDataSizeAndFrequency) {
EXPECT_EQ((rate_b / freq_a).bytes(), kBitsPerSecond / kHertz / 8);
}
TEST(UnitConversionTest, DivisionFailsOnLargeSize) {
TEST(UnitConversionDeathTest, DivisionFailsOnLargeSize) {
// Note that the failure is expected since the current implementation is
// implemented in a way that does not support division of large sizes. If
// the implementation is changed, this test can safely be removed.

View file

@ -21,7 +21,6 @@ rtc_library("video_rtp_headers") {
"hdr_metadata.h",
"video_content_type.cc",
"video_content_type.h",
"video_frame_marking.h",
"video_rotation.h",
"video_timing.cc",
"video_timing.h",
@ -31,8 +30,8 @@ rtc_library("video_rtp_headers") {
"..:array_view",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_frame") {
@ -56,8 +55,8 @@ rtc_library("video_frame") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (is_android) {
@ -141,8 +140,8 @@ rtc_library("encoded_image") {
"../../rtc_base:deprecation",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("encoded_frame") {
@ -173,8 +172,8 @@ rtc_library("video_bitrate_allocation") {
"../../rtc_base:safe_conversions",
"../../rtc_base:stringutils",
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_bitrate_allocator") {
@ -209,8 +208,8 @@ rtc_source_set("video_stream_decoder") {
"../task_queue",
"../units:time_delta",
"../video_codecs:video_codecs_api",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_stream_decoder_create") {
@ -237,7 +236,10 @@ rtc_library("video_adaptation") {
"video_adaptation_reason.h",
]
deps = [ "../../rtc_base:checks" ]
deps = [
"../../rtc_base:checks",
"../../rtc_base:stringutils",
]
}
rtc_source_set("video_stream_encoder") {
@ -256,10 +258,29 @@ rtc_source_set("video_stream_encoder") {
":video_codec_constants",
":video_frame",
"..:rtp_parameters",
"..:scoped_refptr",
"../:fec_controller_api",
"../:rtp_parameters",
"../adaptation:resource_adaptation_api",
"../units:data_rate",
"../video_codecs:video_codecs_api",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("video_frame_metadata") {
visibility = [ "*" ]
sources = [
"video_frame_metadata.cc",
"video_frame_metadata.h",
]
deps = [
"..:array_view",
"../../modules/rtp_rtcp:rtp_video_header",
"../transport/rtp:dependency_descriptor",
]
absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@ -299,16 +320,21 @@ rtc_library("builtin_video_bitrate_allocator_factory") {
"../../modules/video_coding:video_coding_utility",
"../../modules/video_coding:webrtc_vp9_helpers",
"../video_codecs:video_codecs_api",
"//third_party/abseil-cpp/absl/base:core_headers",
]
absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
}
if (rtc_include_tests) {
rtc_library("video_unittests") {
testonly = true
sources = [ "video_stream_decoder_create_unittest.cc" ]
sources = [
"video_frame_metadata_unittest.cc",
"video_stream_decoder_create_unittest.cc",
]
deps = [
":video_frame_metadata",
":video_stream_decoder_create",
"../../modules/rtp_rtcp:rtp_video_header",
"../../test:test_support",
"../task_queue:default_task_queue_factory",
"../video_codecs:builtin_video_decoder_factory",

View file

@ -1,3 +1,4 @@
brandtr@webrtc.org
magjed@webrtc.org
nisse@webrtc.org

View file

@ -21,8 +21,8 @@ rtc_library("rtc_api_video_unittests") {
"..:video_frame",
"..:video_rtp_headers",
"../../../test:test_support",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("mock_recordable_encoded_frame") {

View file

@ -17,13 +17,18 @@
namespace webrtc {
class MockRecordableEncodedFrame : public RecordableEncodedFrame {
public:
MOCK_CONST_METHOD0(encoded_buffer,
rtc::scoped_refptr<const EncodedImageBufferInterface>());
MOCK_CONST_METHOD0(color_space, absl::optional<webrtc::ColorSpace>());
MOCK_CONST_METHOD0(codec, VideoCodecType());
MOCK_CONST_METHOD0(is_key_frame, bool());
MOCK_CONST_METHOD0(resolution, EncodedResolution());
MOCK_CONST_METHOD0(render_time, Timestamp());
MOCK_METHOD(rtc::scoped_refptr<const EncodedImageBufferInterface>,
encoded_buffer,
(),
(const, override));
MOCK_METHOD(absl::optional<webrtc::ColorSpace>,
color_space,
(),
(const, override));
MOCK_METHOD(VideoCodecType, codec, (), (const, override));
MOCK_METHOD(bool, is_key_frame, (), (const, override));
MOCK_METHOD(EncodedResolution, resolution, (), (const, override));
MOCK_METHOD(Timestamp, render_time, (), (const, override));
};
} // namespace webrtc
#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_

View file

@ -10,6 +10,8 @@
#include "api/video/video_adaptation_counters.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
bool VideoAdaptationCounters::operator==(
@ -30,4 +32,11 @@ VideoAdaptationCounters VideoAdaptationCounters::operator+(
fps_adaptations + other.fps_adaptations);
}
std::string VideoAdaptationCounters::ToString() const {
rtc::StringBuilder ss;
ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations
<< " }";
return ss.Release();
}
} // namespace webrtc

View file

@ -11,6 +11,8 @@
#ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
#define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
#include <string>
#include "rtc_base/checks.h"
namespace webrtc {
@ -33,6 +35,8 @@ struct VideoAdaptationCounters {
VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const;
std::string ToString() const;
int resolution_adaptations;
int fps_adaptations;
};

View file

@ -0,0 +1,28 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/video_frame_metadata.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
namespace webrtc {
// Copies the subset of RTP video header fields that the insertable-streams
// metadata API exposes. Frame id, layer indices, dependencies and decode
// target indications exist only when the header carries a generic frame
// descriptor; otherwise those members keep their defaults.
VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header)
    : width_(header.width), height_(header.height) {
  if (!header.generic.has_value()) {
    return;
  }
  const auto& generic = *header.generic;
  frame_id_ = generic.frame_id;
  spatial_index_ = generic.spatial_index;
  temporal_index_ = generic.temporal_index;
  frame_dependencies_ = generic.dependencies;
  decode_target_indications_ = generic.decode_target_indications;
}
} // namespace webrtc

View file

@ -0,0 +1,59 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_
#define API_VIDEO_VIDEO_FRAME_METADATA_H_
#include <cstdint>
#include "absl/container/inlined_vector.h"
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/transport/rtp/dependency_descriptor.h"
namespace webrtc {
struct RTPVideoHeader;
// A subset of metadata from the RTP video header, exposed in insertable
// streams API. Constructed from an RTPVideoHeader; generic-descriptor
// dependent fields (frame id, layer indices, dependencies, decode target
// indications) are only populated when the header has a generic descriptor.
class VideoFrameMetadata {
 public:
  explicit VideoFrameMetadata(const RTPVideoHeader& header);
  VideoFrameMetadata(const VideoFrameMetadata&) = default;
  VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default;

  uint16_t GetWidth() const { return width_; }
  uint16_t GetHeight() const { return height_; }
  absl::optional<int64_t> GetFrameId() const { return frame_id_; }
  int GetSpatialIndex() const { return spatial_index_; }
  int GetTemporalIndex() const { return temporal_index_; }

  rtc::ArrayView<const int64_t> GetFrameDependencies() const {
    return frame_dependencies_;
  }

  rtc::ArrayView<const DecodeTargetIndication> GetDecodeTargetIndications()
      const {
    return decode_target_indications_;
  }

 private:
  // Stored unsigned to match the getters' return type and the unsigned RTP
  // header dimensions they are copied from; a signed 16-bit member would
  // mangle values above 32767 via implementation-defined narrowing.
  uint16_t width_;
  uint16_t height_;
  absl::optional<int64_t> frame_id_;
  int spatial_index_ = 0;
  int temporal_index_ = 0;
  absl::InlinedVector<int64_t, 5> frame_dependencies_;
  absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications_;
};
} // namespace webrtc
#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_

View file

@ -0,0 +1,120 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/video_frame_metadata.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace {
using ::testing::ElementsAre;
using ::testing::IsEmpty;
// Width set on the RTP video header must be reported unchanged by the getter.
TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) {
  RTPVideoHeader video_header;
  video_header.width = 1280u;
  VideoFrameMetadata metadata(video_header);
  EXPECT_EQ(metadata.GetWidth(), video_header.width);
}
// Height set on the RTP video header must be reported unchanged by the getter.
TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) {
  RTPVideoHeader video_header;
  video_header.height = 720u;
  VideoFrameMetadata metadata(video_header);
  EXPECT_EQ(metadata.GetHeight(), video_header.height);
}
// Frame id from the generic descriptor is exposed through GetFrameId().
TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) {
  RTPVideoHeader video_header;
  RTPVideoHeader::GenericDescriptorInfo& generic =
      video_header.generic.emplace();
  generic.frame_id = 10;
  VideoFrameMetadata metadata(video_header);
  EXPECT_EQ(metadata.GetFrameId().value(), 10);
}
// Without a generic descriptor, GetFrameId() stays unset (nullopt).
TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) {
  RTPVideoHeader video_header;
  VideoFrameMetadata metadata(video_header);
  ASSERT_FALSE(video_header.generic);
  EXPECT_EQ(metadata.GetFrameId(), absl::nullopt);
}
// Spatial index from the generic descriptor is exposed through
// GetSpatialIndex().
TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) {
  RTPVideoHeader video_header;
  RTPVideoHeader::GenericDescriptorInfo& generic =
      video_header.generic.emplace();
  generic.spatial_index = 2;
  VideoFrameMetadata metadata(video_header);
  EXPECT_EQ(metadata.GetSpatialIndex(), 2);
}
// Without a generic descriptor, the spatial index defaults to 0.
TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) {
  RTPVideoHeader video_header;
  VideoFrameMetadata metadata(video_header);
  ASSERT_FALSE(video_header.generic);
  EXPECT_EQ(metadata.GetSpatialIndex(), 0);
}
// Temporal index from the generic descriptor is exposed through
// GetTemporalIndex().
TEST(VideoFrameMetadata, GetTemporalIndexReturnsCorrectValue) {
  RTPVideoHeader video_header;
  RTPVideoHeader::GenericDescriptorInfo& generic =
      video_header.generic.emplace();
  generic.temporal_index = 3;
  VideoFrameMetadata metadata(video_header);
  EXPECT_EQ(metadata.GetTemporalIndex(), 3);
}
// Without a generic descriptor, the temporal index defaults to 0.
TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) {
  RTPVideoHeader video_header;
  VideoFrameMetadata metadata(video_header);
  ASSERT_FALSE(video_header.generic);
  EXPECT_EQ(metadata.GetTemporalIndex(), 0);
}
// Frame dependencies from the generic descriptor are exposed in order through
// GetFrameDependencies().
TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) {
  RTPVideoHeader video_header;
  RTPVideoHeader::GenericDescriptorInfo& generic =
      video_header.generic.emplace();
  generic.dependencies = {5, 6, 7};
  VideoFrameMetadata metadata(video_header);
  EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7));
}
// Without a generic descriptor, the dependency list is empty.
TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) {
  RTPVideoHeader video_header;
  VideoFrameMetadata metadata(video_header);
  ASSERT_FALSE(video_header.generic);
  EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty());
}
// Decode target indications from the generic descriptor are exposed through
// GetDecodeTargetIndications().
TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) {
  RTPVideoHeader video_header;
  RTPVideoHeader::GenericDescriptorInfo& generic =
      video_header.generic.emplace();
  generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
  VideoFrameMetadata metadata(video_header);
  EXPECT_THAT(metadata.GetDecodeTargetIndications(),
              ElementsAre(DecodeTargetIndication::kSwitch));
}
// Without a generic descriptor, the decode target indication list is empty.
TEST(VideoFrameMetadata,
     DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) {
  RTPVideoHeader video_header;
  VideoFrameMetadata metadata(video_header);
  ASSERT_FALSE(video_header.generic);
  EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty());
}
} // namespace
} // namespace webrtc

View file

@ -13,8 +13,10 @@
#include <vector>
#include "api/adaptation/resource.h"
#include "api/fec_controller_override.h"
#include "api/rtp_parameters.h" // For DegradationPreference.
#include "api/scoped_refptr.h"
#include "api/units/data_rate.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video/video_sink_interface.h"
@ -49,6 +51,15 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface<VideoFrame> {
int min_transmit_bitrate_bps) = 0;
};
// If the resource is overusing, the VideoStreamEncoder will try to reduce
// resolution or frame rate until no resource is overusing.
// TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor
// is moved to Call this method could be deleted altogether in favor of
// Call-level APIs only.
virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) = 0;
virtual std::vector<rtc::scoped_refptr<Resource>>
GetAdaptationResources() = 0;
// Sets the source that will provide video frames to the VideoStreamEncoder's
// OnFrame method. |degradation_preference| control whether or not resolution
// or frame rate may be reduced. The VideoStreamEncoder registers itself with

View file

@ -49,6 +49,8 @@ rtc_library("video_codecs_api") {
"../video:video_codec_constants",
"../video:video_frame",
"../video:video_rtp_headers",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
@ -100,8 +102,8 @@ rtc_library("builtin_video_encoder_factory") {
"../../media:rtc_media_base",
"../../rtc_base:checks",
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/strings",
]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("vp8_temporal_layers_factory") {
@ -148,6 +150,8 @@ rtc_library("rtc_software_fallback_wrappers") {
"../video:video_bitrate_allocation",
"../video:video_frame",
"../video:video_rtp_headers",
]
absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",

View file

@ -40,5 +40,6 @@ if (rtc_include_tests) {
"../../video:video_rtp_headers",
"//testing/gtest",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}

View file

@ -19,7 +19,7 @@
#include "absl/types/optional.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_codec_type.h"
#include "common_types.h" // NOLINT(build/include)
#include "common_types.h" // NOLINT(build/include_directory)
#include "rtc_base/system/rtc_export.h"
namespace webrtc {

View file

@ -19,8 +19,8 @@ rtc_source_set("voip_api") {
deps = [
"..:array_view",
"../audio_codecs:audio_codecs_api",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("voip_engine_factory") {

Some files were not shown because too many files have changed in this diff Show more