Merge upstream 'v2.31.2'

This commit is contained in:
Oscar Mira 2023-09-28 16:52:30 +02:00
commit 3beb2e5f4f
62 changed files with 1809 additions and 617 deletions

View file

@ -5,6 +5,13 @@ on:
tags:
- 'v*'
permissions:
contents: read
env:
CARGO_TERM_COLOR: always
NDK_VERSION: '25.2.9519653'
jobs:
publish:
name: Publish
@ -14,11 +21,6 @@ jobs:
- name: Free up disk space
run: sudo rm -rf /usr/share/dotnet /opt/ghc
- uses: actions/checkout@v3
- name: Validate Gradle Wrapper
uses: gradle/wrapper-validation-action@v1
- name: Install protoc
run: sudo apt-get update && sudo apt-get install -y protobuf-compiler
@ -31,16 +33,25 @@ jobs:
env:
DEPOT_TOOLS_GIT_HASH: 3f3e2f789e27d41d3c20a555fd60a39ebd631e15
- uses: actions/checkout@v3
- name: Install Rust targets
run: rustup toolchain install $(cat rust-toolchain) --profile minimal --target aarch64-linux-android,armv7-linux-androideabi,x86_64-linux-android
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
distribution: temurin
java-version: 17
cache: gradle
- name: Prepare workspace for Android
run: bin/prepare-workspace android
env:
DEPOT_TOOLS_UPDATE: 0
- name: Build library
run: bin/build-aar --arch arm arm64 x64 --release-build
run: ANDROID_NDK_HOME="$ANDROID_HOME/ndk/$NDK_VERSION" bin/build-aar --arch arm arm64 x64 --release-build
env:
OVERRIDE_VERSION: ${{ github.ref_name }}
@ -54,7 +65,7 @@ jobs:
path: out/release/libs/
- name: Publish to GitHub Packages
run: ./bin/build-aar --arch arm arm64 x64 --release-build --publish
run: ANDROID_NDK_HOME="$ANDROID_HOME/ndk/$NDK_VERSION" ./bin/build-aar --arch arm arm64 x64 --release-build --publish
env:
OVERRIDE_VERSION: ${{ github.ref_name }}
GITHUB_TOKEN: ${{ secrets.PUBLISH_PAT }}

View file

@ -5,6 +5,9 @@ on:
schedule:
- cron: '0 0 * * *'
permissions:
contents: read
jobs:
sync:
name: Sync

View file

@ -8,6 +8,9 @@ on:
- '.github/FUNDING.yml'
- '.github/ISSUE_TEMPLATE/**'
permissions:
contents: read
jobs:
code_formatting:
name: Code Formatting
@ -72,6 +75,14 @@ jobs:
grep -q '"prebuildChecksum": ""' package.json
working-directory: src/node
wrapper_validation:
name: Validate Gradle wrapper
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Run wrapper validation
uses: gradle/wrapper-validation-action@v1
tests:
name: Tests
runs-on: ubuntu-latest

View file

@ -1,5 +1,43 @@
# Changelog
## v2.31.2
- Update webrtc to 5845c
- Update the hardcoded PulseAudio device name to "Signal Calling"
- Add more audio control and safe defaults
- Add accessor for bandwidth estimate
- Update webrtc to 5845d
- Disable early initialization of recording
- Generate license files for WebRTC builds
- call-sim: Add test iterations and mos averaging
- Add more audio configuration and control
- Improve builds on GitHub Actions
- Build webrtc on AWS for android, ios, linux, mac
## v2.31.1
- Update tag for build automation
## v2.31.0
- Group Calls: Separate PeekInfo device counts on in/excluding pending devices
- Desktop: Migrate to deviceCountIncluding/ExcludingPendingDevices as well
- Update to WebRTC m116
- Desktop: Use stack arrays for JS arguments rather than vectors
- Build improvements; Support more build automation
- Log improvements
## v2.30.0
- Add JoinState.PENDING, for call link calls with admin approval

View file

@ -8,7 +8,7 @@
Pod::Spec.new do |s|
s.name = "SignalRingRTC"
s.version = "2.30.0"
s.version = "2.31.2"
s.summary = "A Swift & Objective-C library used by the Signal iOS app for WebRTC interactions."
s.description = <<-DESC
@ -22,7 +22,7 @@ Pod::Spec.new do |s|
s.social_media_url = 'https://twitter.com/signalapp'
# Newer versions of Xcode don't correctly handle command-line testing on older simulators.
s.platform = :ios, ENV.include?('RINGRTC_POD_TESTING') ? '14' : '12.2'
s.platform = :ios, ENV.include?('RINGRTC_POD_TESTING') ? '14' : '13'
s.requires_arc = true
s.swift_version = '5'

View file

@ -35,6 +35,8 @@ JAR_FILES = [
]
WEBRTC_SO_LIBS = ['libringrtc_rffi.so']
SO_LIBS = WEBRTC_SO_LIBS + ['libringrtc.so']
# Android NDK used in webrtc/src/third_party/android_toolchain/.../ndk-version.h
NDK_REVISION = '25.2.9519653'
class Project(enum.Flag):
WEBRTC = enum.auto()
@ -110,9 +112,6 @@ def ParseArgs():
parser.add_argument('--webrtc-version',
required=True,
help='WebRTC version')
parser.add_argument('--use-webrtc-ndk',
action='store_true',
help='''Use WebRTC's vendored NDK (and SDK) to build RingRTC''')
parser.add_argument('--extra-gradle-args',
nargs='*', default=[],
help='Additional gradle arguments')
@ -175,16 +174,9 @@ def GetOutputDir(build_dir, debug_build):
def GetGradleBuildDir(build_dir):
return os.path.join(build_dir, 'gradle')
def RunSdkmanagerLicenses(webrtc_src_dir, dry_run):
executable = os.path.join(webrtc_src_dir, 'third_party', 'android_sdk', 'public',
'cmdline-tools', 'latest', 'bin', 'sdkmanager')
cmd = [ executable, '--licenses' ]
RunCmd(dry_run, cmd)
def BuildArch(dry_run, project_dir, webrtc_src_dir, build_dir, arch, debug_build,
use_webrtc_ndk,
extra_gn_args, extra_gn_flags, extra_ninja_flags, extra_cargo_flags,
jobs, build_projects):
jobs, build_projects, publish_to_maven):
logging.info('Building: {} ...'.format(arch))
@ -216,10 +208,14 @@ def BuildArch(dry_run, project_dir, webrtc_src_dir, build_dir, arch, debug_build
RunCmd(dry_run, ninja_args, cwd=webrtc_src_dir)
if Project.RINGRTC in build_projects:
if use_webrtc_ndk:
ndk_dir = os.path.join(webrtc_src_dir, 'third_party', 'android_ndk')
else:
ndk_dir = os.environ['ANDROID_NDK_HOME']
with open(os.path.join(ndk_dir, 'source.properties'), "r") as f:
kvs = {}
for line in f.readlines():
key, value = line.split("=")
kvs[key.strip()] = value.strip()
if kvs['Pkg.Revision'] != NDK_REVISION and publish_to_maven:
raise Exception('Android NDK must be ' + NDK_REVISION)
ndk_host_os = platform.system().lower()
ndk_toolchain_dir = os.path.join(
@ -309,6 +305,17 @@ def GetAndroidApiLevel(arch):
else:
return 21
def CollectWebrtcLicenses(dry_run, project_dir, webrtc_src_dir, build_dir, debug_build, archs):
    # Generate a combined license file for the WebRTC build by invoking
    # WebRTC's own tools_webrtc/libs/generate_licenses.py (under vpython3)
    # from the WebRTC source tree, pointing it at every per-arch output dir.
    # NOTE(review): project_dir is currently unused in this function.
    assert len(NINJA_TARGETS) == 1, 'need to make this a loop'
    md_gen_args = [
        'vpython3',
        os.path.join('tools_webrtc', 'libs', 'generate_licenses.py'),
        '--target',
        NINJA_TARGETS[0],
        build_dir,
    ] + [GetArchBuildDir(build_dir, arch, debug_build) for arch in archs]
    RunCmd(dry_run, md_gen_args, cwd=webrtc_src_dir)
def ArchiveWebrtc(dry_run, build_dir, debug_build, archs, webrtc_version):
build_mode = 'debug' if debug_build else 'release'
archive_name = f'webrtc-{webrtc_version}-android-{build_mode}.tar.bz2'
@ -332,16 +339,23 @@ def ArchiveWebrtc(dry_run, build_dir, debug_build, archs, webrtc_version):
logging.debug(' Adding lib: {} (unstripped) ...'.format(lib))
add(os.path.join(output_arch_rel_path, 'lib.unstripped', lib))
logging.debug(' Adding acknowledgments file')
add('LICENSE.md')
def CreateLibs(dry_run, project_dir, webrtc_src_dir, build_dir, archs, output,
debug_build, unstripped, use_webrtc_ndk,
debug_build, unstripped,
extra_gn_args, extra_gn_flags, extra_ninja_flags,
extra_cargo_flags, jobs, build_projects, webrtc_version):
extra_cargo_flags, jobs, build_projects, webrtc_version,
publish_to_maven):
for arch in archs:
BuildArch(dry_run, project_dir, webrtc_src_dir, build_dir, arch,
debug_build, use_webrtc_ndk,
debug_build,
extra_gn_args, extra_gn_flags, extra_ninja_flags, extra_cargo_flags,
jobs, build_projects)
jobs, build_projects, publish_to_maven)
if Project.WEBRTC in build_projects:
CollectWebrtcLicenses(dry_run, project_dir, webrtc_src_dir, build_dir, debug_build, archs)
if Project.WEBRTC_ARCHIVE in build_projects:
ArchiveWebrtc(dry_run, build_dir, debug_build, archs, webrtc_version)
@ -383,8 +397,8 @@ def CreateLibs(dry_run, project_dir, webrtc_src_dir, build_dir, archs, output,
os.path.basename(lib)))
def PerformBuild(dry_run, extra_gradle_args, version, webrtc_version,
gradle_dir, publish,
use_webrtc_ndk, build_projects,
gradle_dir, publish_to_maven,
build_projects,
install_local, install_dir, project_dir, webrtc_src_dir, build_dir,
archs, output, debug_build, release_build, unstripped,
extra_gn_args, extra_gn_flags, extra_ninja_flags,
@ -400,10 +414,6 @@ def PerformBuild(dry_run, extra_gradle_args, version, webrtc_version,
if release_build:
build_types = build_types + ['release']
if use_webrtc_ndk:
os.environ['ANDROID_SDK_ROOT'] = os.path.join(
webrtc_src_dir, 'third_party', 'android_sdk', 'public')
gradle_build_dir = GetGradleBuildDir(build_dir)
shutil.rmtree(gradle_build_dir, ignore_errors=True)
gradle_exec = [
@ -430,9 +440,10 @@ def PerformBuild(dry_run, extra_gradle_args, version, webrtc_version,
"-PwebrtcJar={}/libwebrtc.jar".format(lib_dir),
]
CreateLibs(dry_run, project_dir, webrtc_src_dir, build_dir,
archs, output, build_debug, unstripped, use_webrtc_ndk,
archs, output, build_debug, unstripped,
extra_gn_args, extra_gn_flags, extra_ninja_flags,
extra_cargo_flags, jobs, build_projects, webrtc_version)
extra_cargo_flags, jobs, build_projects, webrtc_version,
publish_to_maven)
if Project.AAR not in build_projects:
return
@ -447,7 +458,7 @@ def PerformBuild(dry_run, extra_gradle_args, version, webrtc_version,
gradle_exec.append('installArchives')
if publish is True:
if publish_to_maven:
gradle_exec.append('uploadArchives')
gradle_exec.extend(extra_gradle_args)
@ -512,28 +523,23 @@ def main():
if args.clean is True:
for arch in DEFAULT_ARCHS:
rm_dir = GetArchBuildRoot(build_dir, arch)
clean_dir(GetArchBuildRoot(build_dir, arch), args.dry_run)
clean_dir(GetGradleBuildDir(build_dir), args.dry_run)
for dir in ('debug', 'release', 'javadoc', 'rustdoc', 'rust-lint'):
clean_dir(os.path.join(build_dir, dir), args.dry_run)
return 0
if args.use_webrtc_ndk:
# This is potentially still useful for a non-WebRTC NDK,
# but trying to find the location of the sdkmanager tool is more trouble than it's worth,
# especially when most people install SDKs and NDKs through Android Studio.
RunSdkmanagerLicenses(args.webrtc_src_dir, args.dry_run)
if args.publish is True:
if args.debug_build is True:
print('ERROR: Only the release build can be uploaded')
return 1
publish_to_maven = args.publish
PerformBuild(args.dry_run, args.extra_gradle_args, args.publish_version, args.webrtc_version,
args.gradle_dir,
args.publish,
args.use_webrtc_ndk, build_projects,
publish_to_maven,
build_projects,
args.install_local, args.install_dir,
args.project_dir, args.webrtc_src_dir, build_dir, args.arch, args.output,
args.debug_build, args.release_build, args.unstripped, args.extra_gn_args,

View file

@ -79,7 +79,7 @@ then
exit 1
fi
export MACOSX_DEPLOYMENT_TARGET="10.10"
export MACOSX_DEPLOYMENT_TARGET="10.13"
# Build WebRTC.
(

View file

@ -82,7 +82,7 @@ then
exit 1
fi
export MACOSX_DEPLOYMENT_TARGET="10.10"
export MACOSX_DEPLOYMENT_TARGET="10.13"
# Build WebRTC.
(

View file

@ -94,7 +94,7 @@ case "$TARGET_ARCH" in
;;
esac
export MACOSX_DEPLOYMENT_TARGET="10.10"
export MACOSX_DEPLOYMENT_TARGET="10.13"
# Build WebRTC.
if [ "${BUILD_WHAT}" = "all" ] || [ "${BUILD_WHAT}" = "webrtc" ]
@ -103,21 +103,25 @@ then
WEBRTC_ARGS="target_cpu=\"${GN_ARCH}\" rtc_build_examples=false rtc_build_tools=false rtc_include_tests=false rtc_enable_protobuf=false rtc_use_x11=false rtc_enable_sctp=false rtc_libvpx_build_vp9=true rtc_include_ilbc=false"
if [ "${BUILD_TYPE}" = "debug" ]
if [ "${BUILD_TYPE}" = "release" ]
then
(cd src/webrtc/src && gn gen -C "${OUTPUT_DIR}"/debug "--args=${WEBRTC_ARGS}" && ninja -C "${OUTPUT_DIR}"/debug)
else
# Build with debug line tables, but not full debug info.
(cd src/webrtc/src && gn gen -C "${OUTPUT_DIR}"/release "--args=${WEBRTC_ARGS} is_debug=false symbol_level=1" && ninja -C "${OUTPUT_DIR}"/release)
WEBRTC_ARGS="${WEBRTC_ARGS} is_debug=false symbol_level=1"
fi
(
cd src/webrtc/src
gn gen -C "${OUTPUT_DIR}/${BUILD_TYPE}" "--args=${WEBRTC_ARGS}"
ninja -C "${OUTPUT_DIR}/${BUILD_TYPE}" webrtc
tools_webrtc/libs/generate_licenses.py --target :webrtc "${OUTPUT_DIR}/${BUILD_TYPE}" "${OUTPUT_DIR}/${BUILD_TYPE}"
)
if [ -n "${ARCHIVE_WEBRTC}" ]
then
STATIC_LIB_PATH="${BUILD_TYPE}"/obj/webrtc.lib
if [ ! -e "${OUTPUT_DIR}/${STATIC_LIB_PATH}" ]; then
STATIC_LIB_PATH="${BUILD_TYPE}"/obj/libwebrtc.a
fi
tar -c --auto-compress --dereference -f "${OUTPUT_DIR}"/webrtc-"${WEBRTC_VERSION}"-"${HOST_PLATFORM}"-"${TARGET_ARCH}"-${BUILD_TYPE}.tar.bz2 -C "${OUTPUT_DIR}" "${STATIC_LIB_PATH}"
tar -c --auto-compress --dereference -f "${OUTPUT_DIR}"/webrtc-"${WEBRTC_VERSION}"-"${HOST_PLATFORM}"-"${TARGET_ARCH}"-${BUILD_TYPE}.tar.bz2 -C "${OUTPUT_DIR}" "${STATIC_LIB_PATH}" "${BUILD_TYPE}/LICENSE.md"
fi
fi

View file

@ -82,7 +82,7 @@ then
exit 1
fi
export MACOSX_DEPLOYMENT_TARGET="10.10"
export MACOSX_DEPLOYMENT_TARGET="10.13"
# Build WebRTC.
(

View file

@ -121,7 +121,7 @@ webrtc_arch_for_host_sim() {
}
# The Rust compiler expects this to always be in the form "major.minor".
export IPHONEOS_DEPLOYMENT_TARGET="12.2"
export IPHONEOS_DEPLOYMENT_TARGET="13.0"
# The WebRTC part of the build resulting in WebRTC.xcframework.
if [ "${BUILD_WHAT}" = "all" ] || [ "${BUILD_WHAT}" = "webrtc" ]
@ -135,7 +135,7 @@ then
fi
EXTRA_GN_ARGS="rtc_build_examples=false rtc_build_tools=false rtc_include_tests=false rtc_enable_protobuf=false rtc_enable_sctp=false rtc_libvpx_build_vp9=false rtc_include_ilbc=false"
(cd "${WEBRTC_SRC_DIR}" && ./tools_webrtc/ios/build_ios_libs.py -o "${OUTPUT_DIR}/${BUILD_TYPE}/" --build_config ${BUILD_TYPE} --arch "${ARCHS[@]}" --extra-gn-args "${EXTRA_GN_ARGS}")
(cd "${WEBRTC_SRC_DIR}" && ./tools_webrtc/ios/build_ios_libs.py -o "${OUTPUT_DIR}/${BUILD_TYPE}/" --build_config ${BUILD_TYPE} --arch "${ARCHS[@]}" --deployment-target "${IPHONEOS_DEPLOYMENT_TARGET}" --extra-gn-args "${EXTRA_GN_ARGS}")
"${BIN_DIR}/print_build_env.py" \
--webrtc-version="${WEBRTC_VERSION}" \
@ -163,9 +163,11 @@ then
s.vendored_frameworks = "'${BUILD_TYPE}'/WebRTC.xcframework"
end' >"${OUTPUT_DIR}"/WebRTCForTesting.podspec
"${BIN_DIR}"/convert_webrtc_acknowledgments.py -f plist "${OUTPUT_DIR}"/${BUILD_TYPE}/WebRTC.xcframework/LICENSE.md > "${OUTPUT_DIR}"/${BUILD_TYPE}/acknowledgments.plist
if [[ -n "${ARCHIVE_WEBRTC}" ]]
then
tar -c --auto-compress --no-mac-metadata -f "${OUTPUT_DIR}/webrtc-${WEBRTC_VERSION}-ios-${BUILD_TYPE}.tar.bz2" -C "${OUTPUT_DIR}" WebRTCForTesting.podspec "${BUILD_TYPE}/WebRTC.xcframework"
tar -c --auto-compress --no-mac-metadata -f "${OUTPUT_DIR}/webrtc-${WEBRTC_VERSION}-ios-${BUILD_TYPE}.tar.bz2" -C "${OUTPUT_DIR}" WebRTCForTesting.podspec "${BUILD_TYPE}/WebRTC.xcframework" "${BUILD_TYPE}/acknowledgments.plist"
fi
fi

235
bin/build-webrtc-on-aws.py Executable file
View file

@ -0,0 +1,235 @@
#!/usr/bin/env python3
#
# Copyright 2023 Signal Messenger, LLC
# SPDX-License-Identifier: AGPL-3.0-only
#
"""
This script builds webrtc artifacts for a specified target on preconfigured AWS EC2 instances
"""
try:
import argparse
import boto3
import botocore
import fabric
import paramiko
import time
except ImportError as e:
raise ImportError(str(e) + '- required module not found')
# Platforms this script accepts; 'windows' is listed but main() rejects it
# as not implemented.
TARGET_PLATFORMS = ['android', 'ios', 'linux', 'mac', 'windows']

# Name tag of the preconfigured EC2 build instance used for each target.
PLATFORM_EC2_INSTANCE_TAG = {
    'android': 'webrtc-build-linux',
    'ios': 'webrtc-build-macos',
    'linux': 'webrtc-build-linux',
    'mac': 'webrtc-build-macos',
    'windows': 'webrtc-build-windows'
}

# SSH login user for each instance tag — presumably the AMI default users;
# confirm against the actual instance configuration.
EC2_INSTANCE_TAG_USER = {
    'webrtc-build-linux': 'ubuntu',
    'webrtc-build-macos': 'ec2-user'
}

# Name tag of the dedicated host that mac instances are placed on.
EC2_DEDICATED_MAC_HOST_TAG = 'webrtc-mac-host'
def parse_args():
    """Parse command-line options for an AWS-hosted WebRTC build."""
    parser = argparse.ArgumentParser(
        description='Build webrtc on preconfigured AWS EC2 instances')
    # Declarative option table; order matters only for --help output.
    option_specs = [
        ('--aws-profile',
         dict(required=True,
              help='AWS profile name. Look for [profile <NAME>] in ~/.aws/config')),
        ('--aws-region',
         dict(required=True,
              help='AWS region. Example: us-east-2')),
        ('--aws-identity-file',
         dict(required=True,
              help='AWS EC2 keypair pem file path.')),
        ('--keep-running',
         dict(action='store_true',
              help='Keep ec2 instance running. Default: false')),
        ('--target',
         dict(required=True,
              help='build target: ' + ', '.join(TARGET_PLATFORMS))),
        ('--webrtc',
         dict(required=True,
              help='WebRTC version tag. Example: 5615d')),
    ]
    for flag, kwargs in option_specs:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
def get_ec2_instance(ec2_tag: str):
    """Return the single EC2 instance whose Name tag equals ec2_tag.

    Raises:
        RuntimeError: if no instance, or more than one instance, carries the tag.
    """
    ec2 = boto3.resource('ec2')
    custom_filter = [{
        'Name': 'tag:Name', 'Values': [ec2_tag]}]
    # Materialize the collection exactly once. The previous code called
    # list(instances.all()) three separate times, re-enumerating the boto3
    # collection (and its underlying DescribeInstances API calls) each time.
    matches = list(ec2.instances.filter(Filters=custom_filter).all())
    if len(matches) == 0:
        raise RuntimeError(ec2_tag + " not found.")
    elif len(matches) > 1:
        raise RuntimeError(ec2_tag + ": too many instances with this tag found.")
    return matches[0]
def place_dedicated_host(instance, host_id: str):
    """Pin the given EC2 instance to a specific dedicated host."""
    placement = {
        'Affinity': 'host',
        'HostId': host_id,
        'InstanceId': instance.id,
        'Tenancy': 'host',
    }
    boto3.client('ec2').modify_instance_placement(**placement)
def start_new_host(instance):
    """Allocate one mac2.metal dedicated host in the instance's availability
    zone and return its host id."""
    # Put the host in the same AZ as the instance that will be placed on it.
    zone = instance.network_interfaces[0].subnet.availability_zone
    name_tag = {
        'Key': 'Name',
        'Value': EC2_DEDICATED_MAC_HOST_TAG,
    }
    response = boto3.client('ec2').allocate_hosts(
        AvailabilityZone=zone,
        InstanceType='mac2.metal',
        Quantity=1,
        TagSpecifications=[
            {
                'ResourceType': 'dedicated-host',
                'Tags': [name_tag],
            },
        ],
        HostMaintenance='off',
    )
    return response['HostIds'][0]
def setup_dedicated_host(instance):
    # Ensure a dedicated host tagged EC2_DEDICATED_MAC_HOST_TAG exists and
    # that `instance` is placed on it, allocating a new mac2.metal host when
    # none is found. Polls while a freshly allocated host is still 'pending'.
    try:
        # Check whether a dedicated host exists
        first = True
        while True:
            response = boto3.client('ec2').describe_hosts(Filters=[
                {
                    'Name': 'tag:Name',
                    'Values': [
                        EC2_DEDICATED_MAC_HOST_TAG,
                    ]
                },
            ])
            if len(response['Hosts']) == 0:
                # No tagged host yet: allocate one and pin the instance to it.
                host_id = start_new_host(instance)
                place_dedicated_host(instance, host_id)
            elif len(response['Hosts'][0]['Instances']) == 0 or \
                    response['Hosts'][0]['Instances'][0]['InstanceId'] != instance.id:
                # A host exists but this instance is not (yet) placed on it.
                host = response['Hosts'][0]
                if host['State'] == 'pending':
                    # Host still being provisioned: print the wait notice only
                    # once, then poll again after a short sleep.
                    if first:
                        first = False
                        print(f'Waiting for host {host["HostId"]} to become available...')
                    time.sleep(10)
                    continue
                else:
                    place_dedicated_host(instance, host['HostId'])
            # Host exists and the instance is placed (or just got placed): done.
            break
    except botocore.exceptions.ClientError as error:
        if error.response['Error']['Code'] == 'InvalidHostID.NotFound':
            # The tagged host disappeared (e.g. was released): allocate a
            # replacement and place the instance on it.
            host_id = start_new_host(instance)
            place_dedicated_host(instance, host_id)
        else:
            raise error
def start_ec2_instance(target: str, ec2_tag: str):
    # Start (or restart) the tagged build instance and block until it passes
    # the EC2 reachability status check. Returns the boto3 Instance resource.
    instance = get_ec2_instance(ec2_tag)
    # Set up a dedicated host for mac builds
    if target == 'mac' or target == 'ios':
        # Allocate dedicated host
        setup_dedicated_host(instance)
    # Wait for instance to be stopped or running
    if instance.state['Name'] != 'stopped' and instance.state['Name'] != 'running':
        # Instance is in a transitional state (pending/stopping/...): force a
        # clean stop so the start() below is issued from a known state.
        print(f'{instance} state: {instance.state["Name"]}')
        print(f'{instance} stopping...')
        instance.stop()
        waiter = boto3.client('ec2').get_waiter('instance_stopped')
        waiter.wait(
            InstanceIds=[instance.id]
        )
    # start() on an already-running instance is a no-op for EC2.
    instance.start()
    print(f'{instance} initializing...')
    # Wait until the instance passes its reachability check, not merely until
    # it is 'running', so SSH connections made afterwards succeed.
    waiter = boto3.client('ec2').get_waiter('instance_status_ok')
    waiter.wait(
        InstanceIds=[instance.id],
        Filters=[
            {
                "Name": "instance-status.reachability",
                "Values": [
                    "passed"
                ]
            }
        ]
    )
    print(f'{instance} ready')
    return instance
def stop_ec2_instance(instance):
    # Request a stop. This does not wait for the 'stopped' state, so the
    # instance may still be shutting down when the message is printed.
    instance.stop()
    print(f'{instance} stopped')
def build_webrtc(target, hostname, user, identity_filepath, webrtc):
    # Run a clean WebRTC release build on the remote instance over SSH and
    # download the resulting archive(s) to the current directory.
    with fabric.Connection(hostname,
                           user,
                           connect_kwargs={"pkey": paramiko.RSAKey.from_private_key_file(identity_filepath)}) as conn:
        # Clone ringrtc
        conn.run('rm -rf ringrtc')
        conn.run('git clone --depth 1 https://github.com/signalapp/ringrtc.git')
        # Set webrtc version
        if target == 'android' or target == 'linux':
            # Linux hosts: GNU sed edits in place with a bare -i.
            conn.run(f'sed -i "/webrtc.version=/ s/=.*/={webrtc}/" ringrtc/config/version.properties')
        else:
            # macOS hosts: BSD sed requires an explicit (empty) backup suffix after -i.
            conn.run('sed -i \'\' "/webrtc.version=/ s/=.*/={}/" ringrtc/config/version.properties'.format(webrtc))
        # Build target and download resulting artifacts
        conn.run(f'cd ringrtc; ./bin/build-webrtc.py --target {target} --release')
        if target == 'android' or target == 'ios':
            # Mobile targets produce a single multi-arch archive.
            conn.get(f'ringrtc/out/webrtc-{webrtc}-{target}-release.tar.bz2')
        elif target == 'linux' or target == 'mac':
            # Desktop targets produce one archive per architecture.
            conn.get(f'ringrtc/out/webrtc-{webrtc}-{target}-x64-release.tar.bz2')
            conn.get(f'ringrtc/out_arm/webrtc-{webrtc}-{target}-arm64-release.tar.bz2')
def main() -> None:
    """Entry point: provision the right EC2 instance and build webrtc on it."""
    args = parse_args()

    # Every boto3 call below uses this default session's profile and region.
    boto3.setup_default_session(profile_name=args.aws_profile, region_name=args.aws_region)

    if args.target == 'windows':
        raise Exception(f'{args.target} not implemented')

    # Bring up the preconfigured builder for this platform.
    tag = PLATFORM_EC2_INSTANCE_TAG[args.target]
    instance = start_ec2_instance(args.target, tag)

    # Run the build remotely and fetch the artifacts.
    print(f'Building webrtc on instance {instance.id}')
    build_webrtc(args.target,
                 instance.public_dns_name,
                 EC2_INSTANCE_TAG_USER[tag],
                 args.aws_identity_file,
                 args.webrtc)

    # Shut the builder down unless the operator asked to keep it alive.
    if args.keep_running:
        print(f'EC2 instance {instance.id} will keep running')
    else:
        stop_ec2_instance(instance)


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,142 @@
#!/usr/bin/env python3
#
# Copyright 2023 Signal Messenger, LLC
# SPDX-License-Identifier: AGPL-3.0-only
#
import argparse
import html
import plistlib
import sys
from collections.abc import Iterable
from typing import Dict, Tuple, TextIO
def parse(input: TextIO, filename: str) -> Dict[str, str]:
    """Parse a WebRTC-generated LICENSE.md into {dependency name: license text}.

    The expected format, repeated for each library, is:
        # webrtc
        ```
        LICENSE INFO GOES HERE
        POSSIBLY SEVERAL LINES OF IT
        ```

    Raises Exception with a "filename:line: message" prefix on malformed input.
    """
    result = dict()
    line_number = 0

    def readline() -> str:
        # Wrap input.readline() so errors can report an accurate line number.
        nonlocal line_number
        line_number += 1
        return input.readline()

    def fatal(msg: str) -> None:
        # Bug fix: the filename parameter was previously ignored and errors
        # always said "(unknown)"; include the real name so failures are
        # attributable when several license files are merged.
        raise Exception(f'{filename}:{line_number}: {msg}')

    while True:
        line = readline()
        if not line:
            # EOF
            break
        line = line.strip()
        if not line:
            # Ignore blank lines between dependencies.
            continue
        # Expect a dependency line.
        if not line.startswith('# '):
            fatal(f'unexpected line: "{line}"')
        name = line[2:]
        # Expect the start of a fenced license.
        line = readline()
        if line != '```\n':
            if not line:
                fatal('unexpected end of file')
            line = line.strip()
            if not line:
                fatal('unexpected blank line')
            fatal(f'unexpected line: "{line}"')
        license = ''
        while True:
            line = readline()
            if line == '' or line == '```\n':
                # Break on the end fence or on EOF.
                break
            # These probably shouldn't be escaped in the first place, but they are.
            license += html.unescape(line)
        result[name] = license
    return result
def print_as_markdown(deps: Iterable[Tuple[str, str]]) -> None:
    """Emit the dependencies in the same fenced-Markdown format as the input,
    one heading + license block per dependency, separated by blank lines."""
    for name, license in deps:
        # The license text already carries its own trailing newline, so the
        # closing fence follows it directly; print() adds the blank separator.
        print(f'# {name}\n```\n{license}```\n')
def print_as_plist(deps: Iterable[Tuple[str, str]]) -> None:
    """Emit the dependencies as an iOS Settings-bundle acknowledgments plist.

    We're trying to match the format in Signal-iOS/Signal/Settings.bundle/Acknowledgements.plist
    which comes from <https://developer.apple.com/library/archive/documentation/PreferenceSettings/Conceptual/SettingsApplicationSchemaReference/Introduction/Introduction.html>.
    """
    def group(title: str, footer: str) -> dict:
        # One PSGroupSpecifier entry per section.
        return {
            'FooterText': footer,
            'Title': title,
            'Type': 'PSGroupSpecifier',
        }

    # Header section, then one section per dependency, then a trailer.
    specifiers = [group('Acknowledgments', 'RingRTC depends on the WebRTC project')]
    specifiers.extend(group(name, license) for name, license in deps)
    specifiers.append(group('', 'Generated in RingRTC'))

    plistlib.dump(fp=sys.stdout.buffer, value={
        'PreferenceSpecifiers': specifiers,
        'StringsTable': 'Acknowledgments',
        'Title': 'Acknowledgments',
    })
    sys.stdout.flush()
def main() -> None:
    # Merge one or more WebRTC LICENSE.md files and emit them in the requested
    # format: Markdown, an iOS Settings plist, or (with no -f) just the names.
    parser = argparse.ArgumentParser(
        description='Convert WebRTC LICENSE.md to other formats')
    parser.add_argument('-f', '--format', choices=['md', 'plist'],
                        help='Output format (Markdown or iOS Settings.plist)')
    parser.add_argument('files', nargs='+', metavar='LICENSE.md',
                        help='License file(s) generated by WebRTC')
    args = parser.parse_args()
    dependencies = dict()
    for path in args.files:
        with open(path) as f:
            # Later files override duplicate entries from earlier ones.
            dependencies.update(parse(f, path))
    # Sort the dependencies, but always put WebRTC first.
    # (Raises KeyError if no input file contained a 'webrtc' entry.)
    webrtc_dep = dependencies.pop('webrtc')
    sorted_dependencies = [('webrtc', webrtc_dep)] + sorted(dependencies.items())
    if args.format == 'md':
        # Same as input format, but merges multiple files.
        print_as_markdown(sorted_dependencies)
    elif args.format == 'plist':
        print_as_plist(sorted_dependencies)
    else:
        # No format requested: just list the dependency names.
        print([name for (name, _) in sorted_dependencies])


if __name__ == '__main__':
    main()

View file

@ -18,14 +18,14 @@ from typing import BinaryIO
UNVERIFIED_DOWNLOAD_NAME = "unverified.tmp"
PREBUILD_CHECKSUMS = {
'android': 'c36342532242fb17758cb5c390dfe3b9e6653cb9674d7abe21c470ba4ef96531',
'ios': '056fca009076f343fe50f2ada9001c66ea5e23c64f17b29ae6142495053bee03',
'linux-arm64': '934e14088c7bf2cb99f2e4a1c9afd7486794c02b97130a45643663e9a6643b1e',
'linux-x64': 'f14e31f083f26db098853ab0e79e449a39a8fa368059b03441a0c4a5eb5d2d28',
'mac-arm64': '89e14117b6f8479e7e66372c3a987bd773e966110beaa283e1e734524c9450c4',
'mac-x64': '6b82d7e4667ee838e75ca1535ea9ca40aefc9547c9709295e8bbf882d6bd8d80',
'windows-arm64': '4cd4828471d0ad642e4d5c59034a3607369d8410db5e2a124f901fc41059158d',
'windows-x64': '09ab97830d79e1910e59aec49c6d2a15dfa4baf60159f8a69101a25b7a3ca5ee',
'android': '5c39f1084dff122afdd8578c7137dff05b33fd8fd4f140cfbe66c87242207ad4',
'ios': 'dab63cc6731e5ea224415f75c5c3e3a08ae2c19be0795e1fa00f18494fe3b5c2',
'linux-arm64': '98c10d65548e2d21e82d62012dd362f972bad50e8323e7014ae9e4024a60ea75',
'linux-x64': 'cdefb83b31faeebe66884b29c991e03b401c55dfc0db4623aa0a54e00519cbb8',
'mac-arm64': '60e4369b6757ea47909484522d77c5b0adb1d8fe65c01adbcf2dd5c3dc013020',
'mac-x64': '6e5ef9a1e7d4404ff6b2fa6609411a272573360e4c76222e635f9262d5b48aba',
'windows-arm64': 'f89bf4806d6ad213ab74d7c7414d975e528077a013edc37776e2193bf8e7edfc',
'windows-x64': 'c0302fb70dae2c12f73001c20601c05091e207f2972ffdcd337c2f8296ec0db7',
}

View file

@ -6,6 +6,25 @@ buildscript {
}
}
plugins {
id "io.github.gradle-nexus.publish-plugin" version "1.3.0"
}
def versionProperties = new Properties()
file("config/version.properties").withInputStream { versionProperties.load(it) }
if (!project.hasProperty("ringrtcVersion")) {
ext.ringrtcVersion =
"${versionProperties.getProperty("ringrtc.version.major")}." +
"${versionProperties.getProperty("ringrtc.version.minor")}." +
"${versionProperties.getProperty("ringrtc.version.revision")}"
}
ext.webrtc_version = "${versionProperties.getProperty("webrtc.version")}"
group = "im.molly"
version = ringrtcVersion
task clean(type: Delete) {
delete rootProject.buildDir
}

View file

@ -114,13 +114,28 @@ pub async fn chop_audio_and_analyze(
}
}
let stats = Stats {
config: StatsConfig {
title: format!(
"MOS Over Time ({}sec)",
let (title, chart_name) = if speech {
(
format!(
"MOS Speech Over Time ({}sec)",
chopped_audio_result.reference_time_secs
),
chart_name: format!("{}.artifacts.mos.svg", client_name),
format!("{}.artifacts.mos_s.svg", client_name),
)
} else {
(
format!(
"MOS Audio Over Time ({}sec)",
chopped_audio_result.reference_time_secs
),
format!("{}.artifacts.mos_a.svg", client_name),
)
};
let stats = Stats {
config: StatsConfig {
title,
chart_name,
x_label: "Test Seconds".to_string(),
y_label: "MOS".to_string(),
x_max: Some(chopped_audio_result.degraded_time_secs as f32 + 5.0),
@ -130,7 +145,11 @@ pub async fn chop_audio_and_analyze(
data,
};
test_results.mos = AnalysisReportMos::Series(stats);
if speech {
test_results.mos_s = AnalysisReportMos::Series(Box::new(stats));
} else {
test_results.mos_a = AnalysisReportMos::Series(Box::new(stats));
}
Ok(())
}
@ -160,7 +179,11 @@ pub async fn get_audio_and_analyze(
))
.await?
{
test_results.mos = AnalysisReportMos::Single(mos);
if speech {
test_results.mos_s = AnalysisReportMos::Single(mos);
} else {
test_results.mos_a = AnalysisReportMos::Single(mos);
}
}
Ok(())

View file

@ -9,7 +9,8 @@ use std::{fmt, path::Path, time::Duration};
/// display of most tracked `dimensions` that are available.
#[allow(dead_code)]
pub enum ChartDimension {
Mos,
MosSpeech,
MosAudio,
ContainerCpuUsage,
ContainerMemUsage,
@ -58,7 +59,8 @@ pub enum ChartDimension {
impl ChartDimension {
pub fn get_title_and_y_label(&self) -> (&'static str, &'static str) {
match self {
ChartDimension::Mos => ("MOS", "MOS"),
ChartDimension::MosSpeech => ("MOS Speech", "MOS"),
ChartDimension::MosAudio => ("MOS Audio", "MOS"),
ChartDimension::ContainerCpuUsage => ("CPU Usage", "%"),
ChartDimension::ContainerMemUsage => ("Memory Usage", "MiB"),
ChartDimension::ContainerTxBitrate => ("TX Bitrate", "kbps"),
@ -115,7 +117,8 @@ impl ChartDimension {
pub fn get_name(&self) -> &'static str {
match self {
ChartDimension::Mos => "mos",
ChartDimension::MosSpeech => "mos_speech",
ChartDimension::MosAudio => "mos_audio",
ChartDimension::ContainerCpuUsage => "container_cpu_usage",
ChartDimension::ContainerMemUsage => "container_mem_usage",
ChartDimension::ContainerTxBitrate => "container_tx_bitrate",
@ -179,6 +182,8 @@ pub struct TestCaseConfig {
/// A flag to control recording of packet capture. Enabling this results in a `tcpdump.pcap`
/// file among the generated artifacts for the test.
pub tcp_dump: bool,
/// The number of times to run the test case.
pub iterations: u16,
}
impl Default for TestCaseConfig {
@ -189,6 +194,7 @@ impl Default for TestCaseConfig {
client_a_config: Default::default(),
client_b_config: Default::default(),
tcp_dump: false,
iterations: 1,
}
}
}
@ -255,13 +261,29 @@ impl Default for CallConfig {
}
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum AudioAnalysisType {
/// Uses the wideband speech mode of visqol. Setting this option yields 16kHz/mono
/// copies of all wav artifacts.
Speech,
/// Uses the audio mode of the analysis tool.
Audio,
/// Opus audio bandwidth selection for the call-sim audio configuration.
///
/// NOTE(review): the discriminants appear to mirror the libopus constants
/// (OPUS_AUTO and the OPUS_BANDWIDTH_* values) so they can be handed to the
/// encoder directly — confirm against the libopus version in use.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(i32)]
pub enum AudioBandwidth {
    // Constants in libopus.
    Auto = -1000,
    Full = 1105,
    SuperWide = 1104,
    Wide = 1103,
    Medium = 1102,
    Narrow = 1101,
}
impl fmt::Display for AudioBandwidth {
    /// Renders the bandwidth as its lowercase, hyphenated human-readable name.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            AudioBandwidth::Auto => "auto",
            AudioBandwidth::Full => "full",
            AudioBandwidth::SuperWide => "super-wide",
            AudioBandwidth::Wide => "wide",
            AudioBandwidth::Medium => "medium",
            AudioBandwidth::Narrow => "narrow",
        };
        write!(f, "{}", name)
    }
}
#[allow(dead_code)]
@ -284,24 +306,34 @@ pub enum AudioAnalysisMode {
pub struct AudioConfig {
/// The name (without path or extension) of the audio file to use as source material.
pub input_name: String,
/// The desired packet size, the amount of time in each packet.
pub packet_size_ms: u32,
/// The initial encoding bitrate to use (or only bitrate to use if there is no adaptation).
pub start_bitrate_bps: u16,
/// The minimum encoding bitrate to use if there is adaptation.
pub min_bitrate_bps: u16,
/// The maximum encoding bitrate to use if there is adaptation.
pub max_bitrate_bps: u16,
/// The initial desired packet size, the amount of time in each packet.
pub initial_packet_size_ms: i32,
/// The minimum packet size. Used only in adaptive scenarios.
pub min_packet_size_ms: i32,
/// The maximum packet size. Used only in adaptive scenarios.
pub max_packet_size_ms: i32,
/// The initial audio encoding bitrate.
pub initial_bitrate_bps: i32,
/// The minimum audio encoding bitrate. Used only in adaptive scenarios.
pub min_bitrate_bps: i32,
/// The maximum encoding bitrate. Used only in adaptive scenarios.
pub max_bitrate_bps: i32,
/// The Opus bandwidth value to use (Auto is the default).
pub bandwidth: AudioBandwidth,
/// The Opus complexity value to use.
pub complexity: u8,
pub complexity: i32,
/// The adaptation method to use. 0 means no adaptation (the default).
pub adaptation: i32,
/// Flag to enable the Opus constant bitrate mode.
pub enable_cbr: bool,
/// Flag to enable the Opus DTX.
pub enable_dtx: bool,
/// Flag to enable the Opus in-band FEC.
pub enable_fec: bool,
/// Flag to enable the to enable transport-wide congestion control for audio.
pub enable_tcc_audio: bool,
/// Flag to enable transport-wide congestion control for audio.
pub enable_tcc: bool,
/// Flag to enable redundant packets to be sent for audio.
pub enable_red: bool,
/// Flag to enable WebRTC's high pass filter.
pub enable_high_pass_filter: bool,
/// Flag to enable WebRTC's acoustic echo cancellation.
@ -311,10 +343,14 @@ pub struct AudioConfig {
/// Flag to enable WebRTC's automatic gain control.
pub enable_agc: bool,
/// The maximum number of packets the jitter buffer can hold.
pub jitter_buffer_max_packets: u16,
/// The type of audio analysis to be performed (usually speech).
pub analysis_type: AudioAnalysisType,
/// The mechanism to use when analyzing audio.
pub jitter_buffer_max_packets: i32,
/// How often RTCP reports should be sent. Subject to jitter applied by WebRTC.
pub rtcp_report_interval_ms: i32,
/// Whether or not speech (wideband) analysis should be performed.
pub speech_analysis: bool,
/// Whether or not audio (fullband) analysis should be performed.
pub audio_analysis: bool,
/// The mechanism to use when analyzing speech/audio.
pub analysis_mode: AudioAnalysisMode,
/// Sometimes spectrogram generation takes too long, so we might want to disable it.
pub generate_spectrogram: bool,
@ -332,15 +368,20 @@ impl Default for AudioConfig {
fn default() -> Self {
Self {
input_name: "silence".to_string(),
packet_size_ms: 20,
start_bitrate_bps: 32000,
min_bitrate_bps: 20000,
initial_packet_size_ms: 20,
min_packet_size_ms: 20,
max_packet_size_ms: 20,
initial_bitrate_bps: 32000,
min_bitrate_bps: 16000,
max_bitrate_bps: 32000,
bandwidth: AudioBandwidth::Auto,
complexity: 9,
adaptation: 0,
enable_cbr: true,
enable_dtx: true,
enable_fec: true,
enable_tcc_audio: false,
enable_tcc: false,
enable_red: false,
enable_high_pass_filter: true,
// Default tests now disable AEC in order to prevent random timing delays
// from causing double-talk and thus attenuating valid audio.
@ -348,7 +389,9 @@ impl Default for AudioConfig {
enable_ns: true,
enable_agc: true,
jitter_buffer_max_packets: 200,
analysis_type: AudioAnalysisType::Speech,
rtcp_report_interval_ms: 5000,
speech_analysis: true,
audio_analysis: false,
analysis_mode: AudioAnalysisMode::Normal,
generate_spectrogram: true,
}

View file

@ -682,25 +682,42 @@ pub async fn start_cli(
args.push("--allowed-bitrate-kbps".to_string());
args.push(format!("{}", call_config.allowed_bitrate_kbps));
args.push("--packet-size-ms".to_string());
args.push(format!("{}", call_config.audio.packet_size_ms));
args.push(format!(
"--initial-packet-size-ms={}",
call_config.audio.initial_packet_size_ms
));
args.push(format!(
"--min-packet-size-ms={}",
call_config.audio.min_packet_size_ms
));
args.push(format!(
"--max-packet-size-ms={}",
call_config.audio.max_packet_size_ms
));
args.push("--default-bitrate-bps".to_string());
args.push(format!("{}", call_config.audio.start_bitrate_bps));
args.push(format!(
"--initial-bitrate-bps={}",
call_config.audio.initial_bitrate_bps
));
args.push(format!(
"--min-bitrate-bps={}",
call_config.audio.min_bitrate_bps
));
args.push(format!(
"--max-bitrate-bps={}",
call_config.audio.max_bitrate_bps
));
args.push("--min-bitrate-bps".to_string());
args.push(format!("{}", call_config.audio.min_bitrate_bps));
args.push("--max-bitrate-bps".to_string());
args.push(format!("{}", call_config.audio.max_bitrate_bps));
args.push("--complexity".to_string());
args.push(format!("{}", call_config.audio.complexity));
args.push(format!("--bandwidth={}", call_config.audio.bandwidth));
args.push(format!("--complexity={}", call_config.audio.complexity));
args.push(format!("--adaptation={}", call_config.audio.adaptation));
args.push(format!("--cbr={}", call_config.audio.enable_cbr));
args.push(format!("--dtx={}", call_config.audio.enable_dtx));
args.push(format!("--fec={}", call_config.audio.enable_fec));
args.push(format!("--tcc={}", call_config.audio.enable_tcc_audio));
args.push(format!("--tcc={}", call_config.audio.enable_tcc));
args.push(format!("--red={}", call_config.audio.enable_red));
args.push(format!("--vp9={}", call_config.video.enable_vp9));
@ -732,6 +749,11 @@ pub async fn start_cli(
call_config.audio.jitter_buffer_max_packets
));
args.push(format!(
"--audio-rtcp-report-interval-ms={}",
call_config.audio.rtcp_report_interval_ms
));
if let Some(input_video_file) = input_video_file {
args.push(format!("--input-video-file=/media/{}", input_video_file));
}

View file

@ -75,7 +75,7 @@ async fn run_example(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "example".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
vec![TestCaseConfig {
@ -99,7 +99,7 @@ async fn run_baseline_over_all_profiles(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "baseline_over_all_profiles".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
vec![TestCaseConfig {
@ -130,7 +130,7 @@ async fn run_dtx_tests_with_loss(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "dtx_tests_with_loss".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
vec![
@ -197,7 +197,7 @@ async fn run_example_with_relay(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "example_with_relay".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
vec![
@ -305,11 +305,11 @@ async fn run_ptime_analysis(test: &mut Test) -> Result<()> {
test.preprocess_sounds(vec!["speaker_a", "speaker_b"])
.await?;
let test_cases = [20, 40, 60, 120].map(|packet_size_ms| TestCaseConfig {
test_case_name: format!("ptime_{packet_size_ms}"),
let test_cases = [20, 40, 60, 120].map(|initial_packet_size_ms| TestCaseConfig {
test_case_name: format!("ptime_{initial_packet_size_ms}"),
client_a_config: CallConfig {
audio: AudioConfig {
packet_size_ms,
initial_packet_size_ms,
..Default::default()
},
..Default::default()
@ -317,7 +317,7 @@ async fn run_ptime_analysis(test: &mut Test) -> Result<()> {
.with_audio_input_name("normal_phrasing"),
client_b_config: CallConfig {
audio: AudioConfig {
packet_size_ms,
initial_packet_size_ms,
..Default::default()
},
..Default::default()
@ -329,7 +329,7 @@ async fn run_ptime_analysis(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "ptime_over_loss".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
test_cases.clone().into(),
@ -347,7 +347,7 @@ async fn run_ptime_analysis(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "ptime_over_bandwidth".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
test_cases.into(),
@ -373,7 +373,7 @@ async fn run_video_send_over_bandwidth(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "video_send_over_bandwidth".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
vec![TestCaseConfig {
@ -419,7 +419,7 @@ async fn run_video_compare_vp8_vs_vp9(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "video_compare_vp8_vs_vp9".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
vec![
@ -489,13 +489,13 @@ async fn run_video_compare_vp8_vs_vp9(test: &mut Test) -> Result<()> {
// Uses a 12 second reference audio file so that the resulting 240 second session recording
// can be chopped evenly and MOS calculated for each 12-second audio segment.
async fn run_changing_bandwidth_audio_test(test: &mut Test) -> Result<()> {
let test_cases = [20, 60, 120].map(|packet_size_ms| TestCaseConfig {
test_case_name: format!("ptime_{packet_size_ms}"),
let test_cases = [20, 60, 120].map(|initial_packet_size_ms| TestCaseConfig {
test_case_name: format!("ptime_{initial_packet_size_ms}"),
length_seconds: 240,
client_a_config: CallConfig {
audio: AudioConfig {
input_name: "normal_12s".to_string(),
packet_size_ms,
initial_packet_size_ms,
analysis_mode: AudioAnalysisMode::Chopped,
generate_spectrogram: false,
..Default::default()
@ -505,7 +505,7 @@ async fn run_changing_bandwidth_audio_test(test: &mut Test) -> Result<()> {
client_b_config: CallConfig {
audio: AudioConfig {
input_name: "normal_12s".to_string(),
packet_size_ms,
initial_packet_size_ms,
analysis_mode: AudioAnalysisMode::Chopped,
generate_spectrogram: false,
..Default::default()
@ -518,7 +518,7 @@ async fn run_changing_bandwidth_audio_test(test: &mut Test) -> Result<()> {
test.run(
GroupConfig {
group_name: "changing_bandwidth_audio_test".to_string(),
chart_dimensions: vec![ChartDimension::Mos],
chart_dimensions: vec![ChartDimension::MosSpeech],
x_labels: &[],
},
test_cases.into(),

View file

@ -6,7 +6,7 @@
use anyhow::{anyhow, Result};
use plotly::{
color::NamedColor,
common::{Font, Line, Marker, Mode, Title},
common::{Font, Line, LineShape, Marker, Mode, Title},
layout::{Axis, AxisType, BarMode, Margin},
Bar, ImageFormat, Layout, Plot, Scatter,
};
@ -22,7 +22,7 @@ use crate::test::{GroupRun, Sound, TestCase, TestResults};
type ChartPoint = (f32, f32);
#[derive(Debug, Default)]
#[derive(Debug)]
pub struct StatsConfig {
pub title: String,
pub chart_name: String,
@ -34,6 +34,26 @@ pub struct StatsConfig {
pub y_min: Option<f32>,
/// By default, charts will use the StatsData.overall_max + 10% for y_max.
pub y_max: Option<f32>,
/// Line presentation, the default is `Linear`, which connects each point to the
/// next. Some charts look better with the `Hv` type, which maintains its value until
/// the next point along the x-axis.
pub line_shape: LineShape,
}
impl Default for StatsConfig {
fn default() -> Self {
Self {
title: "".to_string(),
chart_name: "".to_string(),
x_label: "".to_string(),
y_label: "".to_string(),
x_min: Option::None,
x_max: Option::None,
y_min: Option::None,
y_max: Option::None,
line_shape: LineShape::Linear,
}
}
}
/// Our current standard value for ignoring the first "5 seconds" of garbage data.
@ -68,6 +88,9 @@ pub struct StatsData {
pub min: f32,
pub max: f32,
pub ave: f32,
/// Track the max inserted index in case data is aperiodic.
pub max_index: f32,
}
impl Default for StatsData {
@ -83,6 +106,7 @@ impl Default for StatsData {
min: f32::MAX,
max: 0.0,
ave: 0.0,
max_index: 0.0,
}
}
}
@ -107,10 +131,9 @@ impl StatsData {
self.period = period;
}
// Push data to the next periodic index and update statistics.
pub fn push(&mut self, value: f32) {
self.points
.push((((self.points.len() + 1) as f32) * self.period, value));
/// Push data to an arbitrary index and update statistics.
pub fn push_with_index(&mut self, index: f32, value: f32) {
self.points.push((index, value));
if self.points.len() > self.filter_min && self.points.len() <= self.filter_max {
self.sum += value as f64;
@ -122,6 +145,12 @@ impl StatsData {
// To ensure good ranges for charting, we need to keep the overall min/max.
self.overall_min = self.overall_min.min(value);
self.overall_max = self.overall_max.max(value);
self.max_index = self.max_index.max(index);
}
/// Push data to the next periodic index and update statistics.
pub fn push(&mut self, value: f32) {
self.push_with_index(((self.points.len() + 1) as f32) * self.period, value);
}
}
@ -139,7 +168,7 @@ pub enum AnalysisReportMos {
/// There is a single mos value available.
Single(f32),
/// There is a stats collection of mos values available.
Series(Stats),
Series(Box<Stats>),
}
impl AnalysisReportMos {
@ -155,7 +184,8 @@ impl AnalysisReportMos {
#[derive(Debug)]
pub struct AnalysisReport {
pub mos: AnalysisReportMos,
pub mos_s: AnalysisReportMos,
pub mos_a: AnalysisReportMos,
pub vmaf: Option<f32>,
}
@ -199,7 +229,8 @@ impl AnalysisReport {
// There isn't much to build for audio, now that its only item, mos, is pre-calculated.
pub async fn build(
mos: AnalysisReportMos,
mos_s: AnalysisReportMos,
mos_a: AnalysisReportMos,
video_analysis_file_name: Option<&str>,
) -> Result<Self> {
let vmaf = if let Some(video_analysis_file_name) = video_analysis_file_name {
@ -208,7 +239,7 @@ impl AnalysisReport {
None
};
Ok(Self { mos, vmaf })
Ok(Self { mos_s, mos_a, vmaf })
}
}
@ -375,6 +406,12 @@ pub struct VideoReceiveStatsTransfer {
pub key_frames_decoded: StatsData,
}
#[derive(Debug, Default)]
pub struct AudioAdaptationTransfer {
pub bitrate: StatsData,
pub packet_length: StatsData,
}
#[derive(Debug)]
pub struct ConnectionStats {
pub timestamp_us: Vec<u64>,
@ -433,6 +470,12 @@ pub struct VideoReceiveStats {
pub key_frames_decoded_stats: Stats,
}
#[derive(Debug, Default)]
pub struct AudioAdaptation {
pub bitrate_stats: Stats,
pub packet_length_stats: Stats,
}
#[derive(Debug)]
pub struct ClientLogReport {
pub connection_stats: ConnectionStats,
@ -440,6 +483,7 @@ pub struct ClientLogReport {
pub video_send_stats: VideoSendStats,
pub audio_receive_stats: AudioReceiveStats,
pub video_receive_stats: VideoReceiveStats,
pub audio_adaptation: AudioAdaptation,
}
impl ClientLogReport {
@ -451,6 +495,7 @@ impl ClientLogReport {
VideoSendStatsTransfer,
AudioReceiveStatsTransfer,
VideoReceiveStatsTransfer,
AudioAdaptationTransfer,
)> {
// Look through the file and pull out RingRTC logs, particularly the `stats!` details.
let file = File::open(file_name).await?;
@ -481,11 +526,20 @@ impl ClientLogReport {
r".*ringrtc_stats!,video,recv,(?P<ssrc>\d+),(?P<packets_per_second>[-+]?[0-9]*\.?[0-9]+),(?P<packet_loss>[-+]?[0-9]*\.?[0-9]+)%,(?P<bitrate>[0-9]+)bps,(?P<framerate>[0-9]*\.?[0-9]+)fps,(?P<key_frames_decoded>\d+),(?P<decode_time_per_frame>[0-9]*\.?[0-9]+)ms,(?P<resolution>\d+x\d+)",
)?;
// Example: ringrtc_adapt!,audio,240,18000,60
let re_adaptation_line = Regex::new(
r".*ringrtc_adapt!,audio,(?P<time>\d+),(?P<bitrate>\d+),(?P<packet_length>\d+)",
)?;
let mut connection_stats = ConnectionStatsTransfer::default();
let mut audio_send_stats = AudioSendStatsTransfer::default();
let mut video_send_stats = VideoSendStatsTransfer::default();
let mut audio_receive_stats = AudioReceiveStatsTransfer::default();
let mut video_receive_stats = VideoReceiveStatsTransfer::default();
let mut audio_adaptation_stats = AudioAdaptationTransfer {
bitrate: StatsData::new_skip_n(0),
packet_length: StatsData::new_skip_n(0),
};
let mut lines = reader.lines();
while let Some(line) = lines.next_line().await? {
@ -567,6 +621,7 @@ impl ClientLogReport {
video_send_stats
.remote_round_trip_time
.push(f32::from_str(&cap["remote_round_trip_time"])?);
continue;
}
if let Some(cap) = re_audio_receive_line.captures(&line) {
@ -609,6 +664,17 @@ impl ClientLogReport {
.push(f32::from_str(&cap["key_frames_decoded"])?);
continue;
}
if let Some(cap) = re_adaptation_line.captures(&line) {
let time_index = f32::from_str(&cap["time"])?;
audio_adaptation_stats
.bitrate
.push_with_index(time_index, f32::from_str(&cap["bitrate"])? / 1000.0);
audio_adaptation_stats
.packet_length
.push_with_index(time_index, f32::from_str(&cap["packet_length"])?);
continue;
}
}
Ok((
@ -617,6 +683,7 @@ impl ClientLogReport {
video_send_stats,
audio_receive_stats,
video_receive_stats,
audio_adaptation_stats,
))
}
@ -627,6 +694,7 @@ impl ClientLogReport {
video_send_stats,
audio_receive_stats,
video_receive_stats,
audio_adaptation,
) = ClientLogReport::parse(file_name).await?;
// We assume that all entries in the stats vectors are in sync.
@ -1060,12 +1128,44 @@ impl ClientLogReport {
key_frames_decoded_stats,
};
let bitrate_stats = Stats {
config: StatsConfig {
title: "Adaptation Bitrate Changes".to_string(),
chart_name: format!("{}.audio.adaptation.bitrate.svg", client_name),
x_label: "Test Seconds".to_string(),
y_label: "Kbps".to_string(),
x_max: Some(audio_adaptation.bitrate.max_index + 5.0),
line_shape: LineShape::Hv,
..Default::default()
},
data: audio_adaptation.bitrate,
};
let packet_length_stats = Stats {
config: StatsConfig {
title: "Adaptation Packet Length Changes".to_string(),
chart_name: format!("{}.audio.adaptation.packet_length.svg", client_name),
x_label: "Test Seconds".to_string(),
y_label: "milliseconds".to_string(),
x_max: Some(audio_adaptation.packet_length.max_index + 5.0),
line_shape: LineShape::Hv,
..Default::default()
},
data: audio_adaptation.packet_length,
};
let audio_adaptation = AudioAdaptation {
bitrate_stats,
packet_length_stats,
};
Ok(Self {
connection_stats,
audio_send_stats,
video_send_stats,
audio_receive_stats,
video_receive_stats,
audio_adaptation,
})
}
}
@ -1086,6 +1186,11 @@ pub struct Report {
pub analysis_report: AnalysisReport,
pub docker_stats_report: DockerStatsReport,
pub client_log_report: ClientLogReport,
/// If there is no video being tested, don't generate charts or columns for it.
pub show_video: bool,
/// Keep track of how many iterations were assigned for the test case.
pub iterations: u16,
}
impl Report {
@ -1096,7 +1201,8 @@ impl Report {
test_results: TestResults,
) -> Result<Self> {
let analysis_report = AnalysisReport::build(
test_results.mos,
test_results.mos_s,
test_results.mos_a,
test_case
.client_b
.output_yuv
@ -1135,15 +1241,12 @@ impl Report {
analysis_report,
docker_stats_report,
client_log_report,
show_video: test_case_config.client_a_config.video.input_name.is_some()
|| test_case_config.client_b_config.video.input_name.is_some(),
iterations: test_case_config.iterations,
};
test_report
.create_charts(
&test_case.test_path,
test_case_config.client_a_config.video.input_name.is_some()
|| test_case_config.client_b_config.video.input_name.is_some(),
)
.await;
test_report.create_charts(&test_case.test_path).await;
Ok(test_report)
}
@ -1216,7 +1319,12 @@ impl Report {
let trace = Scatter::new(x_trace, y_trace)
.mode(Mode::LinesMarkers)
.marker(Marker::new().size(marker_size))
.line(Line::new().color(NamedColor::SteelBlue).width(2.0));
.line(
Line::new()
.color(NamedColor::SteelBlue)
.width(2.0)
.shape(stats.config.line_shape.clone()),
);
let x_min = stats.config.x_min.unwrap_or(0.0);
let x_max = stats.config.x_max.unwrap_or({
@ -1266,12 +1374,13 @@ impl Report {
);
}
pub async fn create_charts(&self, test_path: &str, show_video_charts: bool) {
pub async fn create_charts(&self, test_path: &str) {
let connection_stats = &self.client_log_report.connection_stats;
let audio_send_stats = &self.client_log_report.audio_send_stats;
let audio_receive_stats = &self.client_log_report.audio_receive_stats;
let video_send_stats = &self.client_log_report.video_send_stats;
let video_receive_stats = &self.client_log_report.video_receive_stats;
let audio_adaptation = &self.client_log_report.audio_adaptation;
let mut line_chart_stats = vec![
&self.docker_stats_report.cpu_usage,
@ -1293,9 +1402,11 @@ impl Report {
&audio_receive_stats.jitter_stats,
&audio_receive_stats.audio_energy_stats,
&audio_receive_stats.jitter_buffer_delay_stats,
&audio_adaptation.bitrate_stats,
&audio_adaptation.packet_length_stats,
];
if show_video_charts {
if self.show_video {
line_chart_stats.append(&mut vec![
&video_send_stats.packets_per_second_stats,
&video_send_stats.average_packet_size_stats,
@ -1318,7 +1429,11 @@ impl Report {
]);
}
if let AnalysisReportMos::Series(stats) = &self.analysis_report.mos {
if let AnalysisReportMos::Series(stats) = &self.analysis_report.mos_s {
line_chart_stats.push(stats);
}
if let AnalysisReportMos::Series(stats) = &self.analysis_report.mos_a {
line_chart_stats.push(stats);
}
@ -1347,15 +1462,26 @@ impl Report {
set_name,
&self.report_name,
&self.client_name,
self.analysis_report.mos.get_mos_for_display(),
self.analysis_report.mos_s.get_mos_for_display(),
self.analysis_report.mos_a.get_mos_for_display(),
)
.as_bytes(),
);
buf.extend_from_slice(html.network_config_section(network_configs).as_bytes());
buf.extend_from_slice(html.call_config_section(test_case_config).as_bytes());
if let AnalysisReportMos::Series(stats) = &self.analysis_report.mos {
let audio_core_stats = Self::build_stats_rows(&html, &[stats]);
let mut audio_core_stats: Vec<&Stats> = vec![];
if let AnalysisReportMos::Series(stats) = &self.analysis_report.mos_s {
audio_core_stats.push(stats);
}
if let AnalysisReportMos::Series(stats) = &self.analysis_report.mos_a {
audio_core_stats.push(stats);
}
if !audio_core_stats.is_empty() {
let audio_core_stats = Self::build_stats_rows(&html, &audio_core_stats);
buf.extend_from_slice(
html.accordion_section(
"audioCore",
@ -1410,6 +1536,28 @@ impl Report {
.as_bytes(),
);
if test_case_config.client_b_config.audio.adaptation > 0 {
let audio_adaptation = &self.client_log_report.audio_adaptation;
let audio_adaptation = Self::build_stats_rows(
&html,
&[
&audio_adaptation.bitrate_stats,
&audio_adaptation.packet_length_stats,
],
);
buf.extend_from_slice(
html.accordion_section(
"audioAdaptation",
vec![HtmlAccordionItem {
label: "Audio Adaptation".to_string(),
body: audio_adaptation,
collapsed: true,
}],
)
.as_bytes(),
);
}
let connection_stats = &self.client_log_report.connection_stats;
let connection_stats = Self::build_stats_rows(
&html,
@ -1582,9 +1730,14 @@ impl Report {
/// Return the stats value (the average) for the given dimension.
fn get_stats_value_for_chart(report: &Report, chart_dimension: &ChartDimension) -> f32 {
match chart_dimension {
ChartDimension::Mos => report
ChartDimension::MosSpeech => report
.analysis_report
.mos
.mos_s
.get_mos_for_display()
.unwrap_or(0f32),
ChartDimension::MosAudio => report
.analysis_report
.mos_a
.get_mos_for_display()
.unwrap_or(0f32),
ChartDimension::ContainerCpuUsage => report.docker_stats_report.cpu_usage.data.ave,
@ -2018,6 +2171,143 @@ impl Report {
}
}
/// A summary row can represent a single record, an aggregate item, or an aggregate average,
/// calculated from 2 or more aggregate items.
#[derive(Clone, Copy, Eq, PartialEq)]
enum SummaryRowType {
Single,
Aggregate,
AggregateItem,
}
/// A convenience struct for tracking the averaged values for a row in the summary report. This
/// is particularly useful when aggregating values from several rows.
#[derive(Clone, Copy)]
struct SummaryRow {
pub audio_send_packet_size: f32,
pub audio_send_packet_rate: f32,
pub audio_send_bitrate: f32,
pub audio_receive_packet_rate: f32,
pub audio_receive_bitrate: f32,
pub audio_receive_loss: f32,
pub container_cpu: f32,
pub container_memory: f32,
pub container_tx_bitrate: f32,
pub container_rx_bitrate: f32,
pub mos_s: Option<f32>,
pub mos_a: Option<f32>,
pub vmaf: Option<f32>,
pub row_type: SummaryRowType,
pub row_index: usize,
}
impl SummaryRow {
pub fn new(report: &Report) -> Self {
Self {
audio_send_packet_size: report
.client_log_report
.audio_send_stats
.average_packet_size_stats
.data
.ave,
audio_send_packet_rate: report
.client_log_report
.audio_send_stats
.packets_per_second_stats
.data
.ave,
audio_send_bitrate: report
.client_log_report
.audio_send_stats
.bitrate_stats
.data
.ave,
audio_receive_packet_rate: report
.client_log_report
.audio_receive_stats
.packets_per_second_stats
.data
.ave,
audio_receive_bitrate: report
.client_log_report
.audio_receive_stats
.bitrate_stats
.data
.ave,
audio_receive_loss: report
.client_log_report
.audio_receive_stats
.packet_loss_stats
.data
.ave,
container_cpu: report.docker_stats_report.cpu_usage.data.ave,
container_memory: report.docker_stats_report.mem_usage.data.ave,
container_tx_bitrate: report.docker_stats_report.tx_bitrate.data.ave,
container_rx_bitrate: report.docker_stats_report.rx_bitrate.data.ave,
mos_s: report.analysis_report.mos_s.get_mos_for_display(),
mos_a: report.analysis_report.mos_a.get_mos_for_display(),
vmaf: report.analysis_report.vmaf,
row_type: SummaryRowType::Single,
row_index: 0,
}
}
pub fn new_aggregate(report: &Report) -> Self {
let mut aggregate = Self::new(report);
aggregate.row_type = SummaryRowType::Aggregate;
aggregate
}
pub fn set_aggregate_item(&mut self, row_index: usize) {
self.row_type = SummaryRowType::AggregateItem;
self.row_index = row_index;
}
/// Update the aggregated averages for all values.
pub fn update(&mut self, new: &Self, count: usize) {
let new_average = |old_value: f32, new_value: f32| -> f32 {
(old_value * (count as f32 - 1f32) + new_value) / count as f32
};
if count > 1 {
self.audio_send_packet_size =
new_average(self.audio_send_packet_size, new.audio_send_packet_size);
self.audio_send_packet_rate =
new_average(self.audio_send_packet_rate, new.audio_send_packet_rate);
self.audio_send_bitrate = new_average(self.audio_send_bitrate, new.audio_send_bitrate);
self.audio_receive_packet_rate = new_average(
self.audio_receive_packet_rate,
new.audio_receive_packet_rate,
);
self.audio_receive_bitrate =
new_average(self.audio_receive_bitrate, new.audio_receive_bitrate);
self.audio_receive_loss = new_average(self.audio_receive_loss, new.audio_receive_loss);
self.container_cpu = new_average(self.container_cpu, new.container_cpu);
self.container_memory = new_average(self.container_memory, new.container_memory);
self.container_tx_bitrate =
new_average(self.container_tx_bitrate, new.container_tx_bitrate);
self.container_rx_bitrate =
new_average(self.container_rx_bitrate, new.container_rx_bitrate);
// We expect mos and vmaf to always be there or never.
if let (Some(mos_s), Some(new_mos_s)) = (self.mos_s, new.mos_s) {
self.mos_s = Some(new_average(mos_s, new_mos_s));
}
if let (Some(mos_a), Some(new_mos_a)) = (self.mos_a, new.mos_a) {
self.mos_a = Some(new_average(mos_a, new_mos_a));
}
if let (Some(vmaf), Some(new_vmaf)) = (self.vmaf, new.vmaf) {
self.vmaf = Some(new_average(vmaf, new_vmaf));
}
}
}
}
pub struct HtmlAccordionItem {
label: String,
body: String,
@ -2114,12 +2404,32 @@ impl Html {
buf
}
fn get_text_emphasis_for_mos(mos_s: Option<f32>, mos_a: Option<f32>) -> &'static str {
let weight = match (mos_s, mos_a) {
(Some(mos_s), Some(mos_a)) => (mos_s + mos_a) / 2.0,
(Some(mos_s), None) => mos_s,
(None, Some(mos_a)) => mos_a,
(None, None) => 0.0,
};
if weight > 4.0 {
"table-success"
} else if weight > 3.5 {
"table-warning"
} else if weight > 0.0 {
"table-danger"
} else {
""
}
}
pub fn report_heading(
&self,
set_name: &str,
test_name: &str,
client_name: &str,
mos: Option<f32>,
mos_s: Option<f32>,
mos_a: Option<f32>,
) -> String {
let mut buf = String::new();
@ -2132,26 +2442,20 @@ impl Html {
buf.push_str("</div>\n");
buf.push_str("<div class=\"col-md-6\">\n");
match mos {
None => {
buf.push_str("<h2 class=\"text-right\">MOS: None</h2>");
}
Some(mos) => {
let text_emphasis = if mos > 4.0 {
"text-success"
} else if mos > 3.5 {
"text-warning"
} else {
"text-danger"
};
let mos_s_string = mos_s
.map(|mos| format!("{:.3}", mos))
.unwrap_or_else(|| "None".to_string());
let mos_a_string = mos_a
.map(|mos| format!("{:.3}", mos))
.unwrap_or_else(|| "None".to_string());
let _ = writeln!(
buf,
"<h2 class=\"text-right {}\">MOS: {:.3}</h2>",
text_emphasis, mos
"<h2 class=\"text-right {}\">MOS_S: {}/MOS_A: {}</h2>",
Html::get_text_emphasis_for_mos(mos_s, mos_a),
mos_s_string,
mos_a_string
);
}
}
buf.push_str("</div>\n");
buf.push_str("</div>\n");
@ -2368,6 +2672,131 @@ impl Html {
buf
}
fn summary_report_row(
&self,
group_name: &str,
report: &Report,
summary_row: &SummaryRow,
iteration_count_for_group: usize,
) -> String {
let mut buf = String::new();
let table_emphasis = Html::get_text_emphasis_for_mos(summary_row.mos_s, summary_row.mos_a);
match summary_row.row_type {
SummaryRowType::Single => {
let _ = writeln!(
buf,
r#"<tr class="{} clickable" onclick="window.location='{}/{}/report.html'">"#,
table_emphasis, group_name, report.report_name
);
}
SummaryRowType::Aggregate => {
let _ = writeln!(
buf,
r#"<tr class="{}" data-bs-toggle="collapse" data-bs-target=".{}_{}_collapsed">"#,
table_emphasis, group_name, iteration_count_for_group
);
}
SummaryRowType::AggregateItem => {
let _ = writeln!(
buf,
r#"<tr class="{} clickable w-auto small fw-light collapse {}_{}_collapsed" onclick="window.location='{}/{}_{}/report.html'">"#,
table_emphasis,
group_name,
iteration_count_for_group,
group_name,
report.report_name,
summary_row.row_index
);
}
}
let indent = if summary_row.row_type == SummaryRowType::AggregateItem {
"&nbsp;&nbsp"
} else {
""
};
let _ = writeln!(buf, "<td>{}{}</td>", indent, report.test_case_name);
let _ = writeln!(buf, "<td>{}{}</td>", indent, report.sound_name);
let _ = writeln!(buf, "<td>{}{}</td>", indent, report.video_name);
let _ = writeln!(
buf,
"<td>{}{}</td>",
indent,
report.network_profile.get_name()
);
let _ = writeln!(
buf,
"<td>{}{:.0}</td>",
indent, summary_row.audio_send_packet_size
);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.audio_send_packet_rate
);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.audio_send_bitrate
);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.audio_receive_packet_rate
);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.audio_receive_bitrate
);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.audio_receive_loss
);
let _ = writeln!(buf, "<td>{}{:.2}</td>", indent, summary_row.container_cpu);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.container_memory
);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.container_tx_bitrate
);
let _ = writeln!(
buf,
"<td>{}{:.2}</td>",
indent, summary_row.container_rx_bitrate
);
if let Some(mos) = summary_row.mos_s {
let _ = writeln!(buf, "<td>{}{:.3}</td>", indent, mos);
} else {
buf.push_str("<td></td>\n");
}
if let Some(mos) = summary_row.mos_a {
let _ = writeln!(buf, "<td>{}{:.3}</td>", indent, mos);
} else {
buf.push_str("<td></td>\n");
}
if let Some(vmaf) = summary_row.vmaf {
let _ = writeln!(buf, "<td>{}{:.3}</td>", indent, vmaf);
} else {
buf.push_str("<td></td>\n");
}
buf.push_str("</tr>\n");
buf
}
pub fn summary_report_section(
&self,
reports: &Vec<Result<Report>>,
@ -2385,7 +2814,7 @@ impl Html {
buf.push_str("<th colspan=\"3\">Client Send Stats (average)</th>\n");
buf.push_str("<th colspan=\"3\">Client Receive Stats (average)</th>\n");
buf.push_str("<th colspan=\"4\">Container Stats (average)</th>\n");
buf.push_str("<th rowspan=\"2\">MOS</th>\n");
buf.push_str("<th colspan=\"2\">MOS</th>\n");
buf.push_str("<th rowspan=\"2\">VMAF</th>\n");
buf.push_str("</tr>\n");
buf.push_str("<tr>\n");
@ -2403,126 +2832,91 @@ impl Html {
buf.push_str("<th>Mem</th>\n");
buf.push_str("<th>TX Bitrate</th>\n");
buf.push_str("<th>RX Bitrate</th>\n");
buf.push_str("<th>Speech</th>\n");
buf.push_str("<th>Audio</th>\n");
buf.push_str("</tr>\n");
buf.push_str("</thead>\n");
buf.push_str("<tbody>\n");
let mut summary_rows: Vec<SummaryRow> = vec![];
let mut aggregate_summary_row: Option<SummaryRow> = None;
// Keep track of the number of iterable items there are for the group so that we
// can make sure class names are unique.
let mut iteration_count_for_group = 0;
for result in reports {
// Each report will result in a row in the summary. Each row can be either for
// a specific test case or an aggregate of several iterations, and then the
// actual aggregated items themselves. The aggregated items are hidden by default.
match result {
Ok(report) => {
let table_emphasis = match report.analysis_report.mos.get_mos_for_display() {
Some(mos) => {
if mos > 4.0 {
"table-success"
} else if mos > 3.5 {
"table-warning"
let mut current_summary_row = SummaryRow::new(report);
if report.iterations > 1 {
if !summary_rows.is_empty() {
// We are already aggregating the test iterations.
if let Some(aggregate) = &mut aggregate_summary_row {
aggregate.update(&current_summary_row, summary_rows.len() + 1);
current_summary_row.set_aggregate_item(summary_rows.len() + 1);
summary_rows.push(current_summary_row);
if summary_rows.len() == report.iterations as usize {
// This is the end. Show the aggregate summary row first. Use
// the current report for naming.
buf.push_str(&self.summary_report_row(
group_name,
report,
aggregate,
iteration_count_for_group,
));
// Show all the iterations.
summary_rows.iter().for_each(|summary_line| {
buf.push_str(&self.summary_report_row(
group_name,
report,
summary_line,
iteration_count_for_group,
));
});
summary_rows.clear();
aggregate_summary_row = None;
iteration_count_for_group += 1;
}
} else {
"table-danger"
// This would be a bad state, warn and reset.
println!(
"There are summary_lines but averaged_summary_line is None!"
);
summary_rows.clear();
aggregate_summary_row = None;
iteration_count_for_group += 1;
}
}
None => "",
};
let _ = writeln!(buf, "<tr class=\"{} clickable\" onclick=\"window.location='{}/{}/report.html'\">", table_emphasis, group_name, report.report_name);
let _ = writeln!(buf, "<td>{}</td>", report.test_case_name);
let _ = writeln!(buf, "<td>{}</td>", report.sound_name);
let _ = writeln!(buf, "<td>{}</td>", report.video_name);
let _ = writeln!(buf, "<td>{}</td>", report.network_profile.get_name());
let _ = writeln!(
buf,
"<td>{:.0}</td>",
report
.client_log_report
.audio_send_stats
.average_packet_size_stats
.data
.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report
.client_log_report
.audio_send_stats
.packets_per_second_stats
.data
.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report
.client_log_report
.audio_send_stats
.bitrate_stats
.data
.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report
.client_log_report
.audio_receive_stats
.packets_per_second_stats
.data
.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report
.client_log_report
.audio_receive_stats
.bitrate_stats
.data
.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report
.client_log_report
.audio_receive_stats
.packet_loss_stats
.data
.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report.docker_stats_report.cpu_usage.data.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report.docker_stats_report.mem_usage.data.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report.docker_stats_report.tx_bitrate.data.ave
);
let _ = writeln!(
buf,
"<td>{:.2}</td>",
report.docker_stats_report.rx_bitrate.data.ave
);
if let Some(mos) = report.analysis_report.mos.get_mos_for_display() {
let _ = writeln!(buf, "<td>{:.3}</td>", mos);
} else {
buf.push_str("<td></td>");
// This is the first of N iterations to track.
// Make a new aggregate for all rows in the test iteration.
aggregate_summary_row = Some(SummaryRow::new_aggregate(report));
// Set the current row as the first iteration item.
current_summary_row.set_aggregate_item(1);
// Add the current row to our list for display once all rows in the
// test iterations are aggregated.
summary_rows.push(current_summary_row);
}
if let Some(vmaf) = report.analysis_report.vmaf {
let _ = writeln!(buf, "<td>{:.3}</td>", vmaf);
} else {
buf.push_str("<td></td>");
// Display the report normally, one measurement for the line.
buf.push_str(&self.summary_report_row(
group_name,
report,
&current_summary_row,
iteration_count_for_group,
));
}
buf.push_str("</tr>\n");
}
Err(err) => {
buf.push_str("<tr class=\"table-dark\">\n");

View file

@ -27,8 +27,7 @@ use tower::timeout::Timeout;
use crate::audio::{chop_audio_and_analyze, get_audio_and_analyze};
use crate::common::{
AudioAnalysisMode, AudioAnalysisType, GroupConfig, NetworkConfigWithOffset, NetworkProfile,
TestCaseConfig,
AudioAnalysisMode, GroupConfig, NetworkConfigWithOffset, NetworkProfile, TestCaseConfig,
};
use crate::docker::{
analyze_audio, analyze_video, clean_network, clean_up, convert_mp4_to_yuv, convert_raw_to_wav,
@ -56,7 +55,10 @@ pub struct Client<'a> {
/// record and pass some things along as we create them.
#[derive(Default)]
pub struct TestResults {
pub mos: AnalysisReportMos,
/// MOS analysis using the speech model (wideband).
pub mos_s: AnalysisReportMos,
/// MOS analysis using the audio model (fullband).
pub mos_a: AnalysisReportMos,
}
pub struct TestCase<'a> {
@ -198,8 +200,6 @@ impl Test {
test_case_config: &TestCaseConfig,
network_configs: &[NetworkConfigWithOffset],
) -> Result<()> {
println!("\nPreparing for test: {}", test_case.report_name);
create_network().await?;
start_signaling_server().await?;
@ -274,7 +274,7 @@ impl Test {
// We wait for both clients to indicate that they are ready and already
// registered with the relay server.
if !done && event.ready_count == 2 {
println!("\nRunning test: {}", test_case.report_name);
println!("\nRunning test...");
let mut network_configs = network_configs.iter();
let mut timed_config_next = network_configs.next();
@ -454,7 +454,7 @@ impl Test {
)
.await?;
if test_case_config.client_a_config.audio.analysis_type == AudioAnalysisType::Speech {
if test_case_config.client_a_config.audio.speech_analysis {
convert_wav_to_16khz_mono(
&test_case.test_path,
&test_case.client_a.output_wav,
@ -471,52 +471,74 @@ impl Test {
)
.await?;
let analyze_client_b_as_speech =
test_case_config.client_b_config.audio.analysis_type == AudioAnalysisType::Speech;
let client_b_output_wav = if analyze_client_b_as_speech {
if test_case_config.client_b_config.audio.speech_analysis {
convert_wav_to_16khz_mono(
&test_case.test_path,
&test_case.client_b.output_wav,
&test_case.client_b.output_wav_speech,
)
.await?;
test_case.client_b.output_wav_speech.to_string()
} else {
test_case.client_b.output_wav.to_string()
};
}
match test_case_config.client_b_config.audio.analysis_mode {
AudioAnalysisMode::None => {
// Do nothing, no analysis is requested.
}
AudioAnalysisMode::Normal => {
if test_case_config.client_b_config.audio.speech_analysis {
get_audio_and_analyze(
&test_case.test_path,
&client_b_output_wav,
&test_case.client_b.output_wav_speech,
&self.set_path,
&test_case.client_a.sound.wav(analyze_client_b_as_speech),
&test_case.client_a.sound.wav(true),
test_case.client_b.sound.analysis_extension(),
analyze_client_b_as_speech,
true,
&mut test_results,
)
.await?;
}
AudioAnalysisMode::Chopped => {
chop_audio_and_analyze(
if test_case_config.client_b_config.audio.audio_analysis {
get_audio_and_analyze(
&test_case.test_path,
&client_b_output_wav,
&test_case.client_b.output_wav,
&self.set_path,
&test_case.client_a.sound.wav(analyze_client_b_as_speech),
&test_case.client_a.sound.wav(false),
test_case.client_b.sound.analysis_extension(),
test_case.client_b.name,
analyze_client_b_as_speech,
false,
&mut test_results,
)
.await?;
}
}
AudioAnalysisMode::Chopped => {
if test_case_config.client_b_config.audio.speech_analysis {
chop_audio_and_analyze(
&test_case.test_path,
&test_case.client_b.output_wav_speech,
&self.set_path,
&test_case.client_a.sound.wav(true),
test_case.client_b.sound.analysis_extension(),
test_case.client_b.name,
true,
&mut test_results,
)
.await?;
}
if test_case_config.client_b_config.audio.audio_analysis {
chop_audio_and_analyze(
&test_case.test_path,
&test_case.client_b.output_wav,
&self.set_path,
&test_case.client_a.sound.wav(false),
test_case.client_b.sound.analysis_extension(),
test_case.client_b.name,
false,
&mut test_results,
)
.await?;
}
}
}
if test_case_config.client_b_config.audio.generate_spectrogram {
generate_spectrogram(
@ -839,6 +861,7 @@ impl Test {
}
for network_profile in &profiles {
for i in 1..=test.iterations {
let report_name = format!(
"{}-{}-{}",
test.test_case_name,
@ -846,14 +869,21 @@ impl Test {
network_profile.get_name()
);
let test_case_path = format!(
let test_case_path = if test.iterations > 1 {
println!("\nRunning test case: {}, iteration: {}", report_name, i);
format!(
"{}/{}/{}_{}",
self.set_path, group_config.group_name, report_name, i
)
} else {
println!("\nRunning test case: {}", report_name);
format!(
"{}/{}/{}",
self.set_path, group_config.group_name, report_name
);
)
};
fs::create_dir_all(test_case_path.clone())?;
println!("\nRunning test case: {}", report_name);
let test_case = TestCase {
report_name,
test_path: test_case_path,
@ -893,6 +923,7 @@ impl Test {
);
}
}
}
// Push the group of test case reports in with the test config itself for reporting.
self.group_runs.push(GroupRun {

View file

@ -1,5 +1,5 @@
webrtc.version=5615e-1
webrtc.version=5845d-1
ringrtc.version.major=2
ringrtc.version.minor=30
ringrtc.version.revision=0
ringrtc.version.minor=31
ringrtc.version.revision=2

Binary file not shown.

View file

@ -1,7 +1,7 @@
# Note: Check https://gradle.org/release-checksums/ before updating wrapper or distribution
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionSha256Sum=8cc27038d5dbd815759851ba53e70cf62e481b87494cc97cfd97982ada5ba634
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
distributionSha256Sum=ff7bf6a86f09b9b2c40bb8f48b25fc19cf2b2664fd1d220cd7ab833ec758d0d7
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

18
gradlew vendored
View file

@ -55,7 +55,7 @@
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
@ -80,10 +80,10 @@ do
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
@ -143,12 +143,16 @@ fi
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
@ -205,6 +209,12 @@ set -- \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.

15
gradlew.bat vendored
View file

@ -14,7 +14,7 @@
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@ -25,7 +25,8 @@
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal

View file

@ -1065,6 +1065,7 @@ public class CallManager {
* <ul>
* <li>401: the room does not exist (and this is the wrong API to create a new room)
* <li>403: the admin passkey is incorrect
* <li>409: the room is currently in use, so restrictions cannot be changed at the moment
* </ul>
*
* This request is idempotent; if it fails due to a network issue, it is safe to retry.
@ -1163,7 +1164,7 @@ public class CallManager {
if (result.isSuccess()) {
handler.handleResponse(result.getValue());
} else {
handler.handleResponse(new PeekInfo(Collections.emptyList(), null, null, null, 0, Collections.emptyList()));
handler.handleResponse(new PeekInfo(Collections.emptyList(), null, null, null, 0, 0, Collections.emptyList()));
}
});
ringrtcPeekGroupCall(nativeCallManager, requestId, sfuUrl, membershipProof, Util.serializeFromGroupMemberInfo(groupMembers));

View file

@ -43,7 +43,9 @@ public final class PeekInfo {
@Nullable
private final Long maxDevices;
private final long deviceCount;
private final long deviceCountIncludingPendingDevices;
private final long deviceCountExcludingPendingDevices;
@NonNull
private final List<UUID> pendingUsers;
@ -52,14 +54,16 @@ public final class PeekInfo {
@Nullable UUID creator,
@Nullable String eraId,
@Nullable Long maxDevices,
long deviceCount,
long deviceCountIncludingPendingDevices,
long deviceCountExcludingPendingDevices,
@NonNull List<UUID> pendingUsers
) {
this.joinedMembers = joinedMembers;
this.creator = creator;
this.eraId = eraId;
this.maxDevices = maxDevices;
this.deviceCount = deviceCount;
this.deviceCountIncludingPendingDevices = deviceCountIncludingPendingDevices;
this.deviceCountExcludingPendingDevices = deviceCountExcludingPendingDevices;
this.pendingUsers = pendingUsers;
}
@ -69,7 +73,8 @@ public final class PeekInfo {
@Nullable byte[] creator,
@Nullable String eraId,
@Nullable Long maxDevices,
long deviceCount,
long deviceCountIncludingPendingDevices,
long deviceCountExcludingPendingDevices,
@NonNull List<byte[]> rawPendingUsers
) {
Log.i(TAG, "fromNative(): joinedMembers.size = " + rawJoinedMembers.size());
@ -84,7 +89,7 @@ public final class PeekInfo {
pendingUsers.add(Util.getUuidFromBytes(pendingUser));
}
return new PeekInfo(joinedMembers, creator == null ? null : Util.getUuidFromBytes(creator), eraId, maxDevices, deviceCount, pendingUsers);
return new PeekInfo(joinedMembers, creator == null ? null : Util.getUuidFromBytes(creator), eraId, maxDevices, deviceCountIncludingPendingDevices, deviceCountExcludingPendingDevices, pendingUsers);
}
@NonNull
@ -107,8 +112,18 @@ public final class PeekInfo {
return maxDevices;
}
/** @deprecated Use {@link #getDeviceCountIncludingPendingDevices()} or {@link #getDeviceCountExcludingPendingDevices()} as appropriate */
@Deprecated
public long getDeviceCount() {
return deviceCount;
return deviceCountIncludingPendingDevices;
}
public long getDeviceCountIncludingPendingDevices() {
return deviceCountIncludingPendingDevices;
}
public long getDeviceCountExcludingPendingDevices() {
return deviceCountExcludingPendingDevices;
}
@NonNull

View file

@ -1,5 +1,5 @@
plugins {
id 'com.android.library' version '7.0.0'
id 'com.android.library' version '8.0.2'
id 'maven-publish'
}
@ -9,22 +9,14 @@ repositories {
mavenLocal()
}
def versionProperties = new Properties()
file("../../config/version.properties").withInputStream { versionProperties.load(it) }
if (!project.hasProperty("ringrtcVersion")) {
ext.ringrtcVersion =
"${versionProperties.getProperty("ringrtc.version.major")}." +
"${versionProperties.getProperty("ringrtc.version.minor")}." +
"${versionProperties.getProperty("ringrtc.version.revision")}"
}
ext.debug_jni_lib_dirs = project.hasProperty("debugRingrtcLibDir") ? [debugRingrtcLibDir] : ["jniLibs"]
ext.release_jni_lib_dirs = project.hasProperty("releaseRingrtcLibDir") ? [releaseRingrtcLibDir] : ["jniLibs"]
ext.webrtc_jar = project.hasProperty("webrtcJar") ? webrtcJar : "libs/libwebrtc.jar"
android {
compileSdk 30
namespace 'org.signal.ringrtc'
compileSdk 33
defaultConfig {
minSdk 21
@ -54,11 +46,15 @@ android {
// Libraries are already stripped if necessary when linked.
doNotStrip "**/*.so"
}
publishing {
singleVariant("release")
}
}
dependencies {
api files(webrtc_jar)
api 'androidx.annotation:annotation:1.2.0'
api 'androidx.annotation:annotation:1.4.0'
coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:1.1.6'
androidTestImplementation "androidx.test:runner:1.4.0"
androidTestImplementation "androidx.test:core:1.4.0"
@ -81,13 +77,6 @@ task javadoc(type: Javadoc) {
afterEvaluate {
publishing {
publications {
debug(MavenPublication) {
from components.debug
groupId = 'im.molly'
artifactId = archivesBaseName
version = "${ringrtcVersion}-DEBUG"
}
mavenJava(MavenPublication) {
from components.release
@ -143,13 +132,11 @@ afterEvaluate {
}
}
ext.webrtc_version = "${versionProperties.getProperty("webrtc.version")}"
task version {
group 'Info'
description = 'Prints the versions as read from the version config file.'
doLast {
println "RingRTC version: " + version
println "RingRTC version: " + ringrtcVersion
println "WebRTC version : " + project.webrtc_version
}
}

View file

@ -235,7 +235,8 @@ public class CallLinksTest extends CallTestBase {
callManager.peekCallLinkCall("sfu.example", new byte[] { 1, 2, 3 }, EXAMPLE_KEY, result -> {
errors.checkThat(result.getStatus(), is((short)200));
errors.checkThat(result.getValue().getEraId(), is((String)null));
errors.checkThat(result.getValue().getDeviceCount(), is(0L));
errors.checkThat(result.getValue().getDeviceCountIncludingPendingDevices(), is(0L));
errors.checkThat(result.getValue().getDeviceCountExcludingPendingDevices(), is(0L));
latch.countDown();
});

View file

@ -39,7 +39,8 @@ public class CallManagerTest extends CallTestBase {
CountDownLatch latch = new CountDownLatch(1);
callManager.peekGroupCall("sfu.example", new byte[] { 1, 2, 3 }, new ArrayList<>(), result -> {
errors.checkThat(result.getEraId(), is((String)null));
errors.checkThat(result.getDeviceCount(), is(0L));
errors.checkThat(result.getDeviceCountIncludingPendingDevices(), is(0L));
errors.checkThat(result.getDeviceCountExcludingPendingDevices(), is(0L));
latch.countDown();
});
@ -91,7 +92,8 @@ public class CallManagerTest extends CallTestBase {
CountDownLatch latch = new CountDownLatch(1);
callManager.peekGroupCall("sfu.example", new byte[] { 1, 2, 3 }, Arrays.asList(members), result -> {
errors.checkThat(result.getEraId(), is("mesozoic"));
errors.checkThat(result.getDeviceCount(), is(7L));
errors.checkThat(result.getDeviceCountIncludingPendingDevices(), is(7L));
errors.checkThat(result.getDeviceCountExcludingPendingDevices(), is(3L));
errors.checkThat(result.getMaxDevices(), is(20L));
errors.checkThat(result.getCreator(), is(user1));
errors.checkThat(

View file

@ -3,4 +3,4 @@
* Copyright 2019-2021 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
-->
<manifest package="org.signal.ringrtc" />
<manifest />

View file

@ -459,7 +459,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 12.2;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
@ -520,7 +520,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 12.2;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;

View file

@ -940,7 +940,7 @@ func callManagerInterfaceGroupCallRingUpdate(object: UnsafeMutableRawPointer?, g
}
@available(iOSApplicationExtension, unavailable)
func callManagerInterfaceHandlePeekChanged(object: UnsafeMutableRawPointer?, clientId: UInt32, joinedMembers: AppUuidArray, creator: AppByteSlice, eraId: AppByteSlice, maxDevices: AppOptionalUInt32, deviceCount: UInt32, pendingUsers: AppUuidArray) {
func callManagerInterfaceHandlePeekChanged(object: UnsafeMutableRawPointer?, clientId: UInt32, joinedMembers: AppUuidArray, creator: AppByteSlice, eraId: AppByteSlice, maxDevices: AppOptionalUInt32, deviceCountIncludingPendingDevices: UInt32, deviceCountExcludingPendingDevices: UInt32, pendingUsers: AppUuidArray) {
guard let object = object else {
owsFailDebug("object was unexpectedly nil")
return
@ -973,7 +973,7 @@ func callManagerInterfaceHandlePeekChanged(object: UnsafeMutableRawPointer?, cli
finalMaxDevices = maxDevices.value
}
let peekInfo = PeekInfo(joinedMembers: finalJoinedMembers, creator: creator.toUUID(), eraId: eraId.asString(), maxDevices: finalMaxDevices, deviceCount: deviceCount, pendingUsers: finalPendingUsers)
let peekInfo = PeekInfo(joinedMembers: finalJoinedMembers, creator: creator.toUUID(), eraId: eraId.asString(), maxDevices: finalMaxDevices, deviceCountIncludingPendingDevices: deviceCountIncludingPendingDevices, deviceCountExcludingPendingDevices: deviceCountExcludingPendingDevices, pendingUsers: finalPendingUsers)
obj.handlePeekChanged(clientId: clientId, peekInfo: peekInfo)
}

View file

@ -116,7 +116,8 @@ public struct PeekInfo {
public let creator: UUID?
public let eraId: String?
public let maxDevices: UInt32?
public let deviceCount: UInt32
public let deviceCountIncludingPendingDevices: UInt32
public let deviceCountExcludingPendingDevices: UInt32
public let pendingUsers: [UUID]
static func fromRtc(_ rtcPeekInfo: rtc_sfu_PeekInfo) -> Self {
@ -125,10 +126,14 @@ public struct PeekInfo {
creator: rtcPeekInfo.creator.toUUID(),
eraId: rtcPeekInfo.era_id.toString(),
maxDevices: rtcPeekInfo.max_devices.asUInt32(),
deviceCount: rtcPeekInfo.device_count,
deviceCountIncludingPendingDevices: rtcPeekInfo.device_count_including_pending_devices,
deviceCountExcludingPendingDevices: rtcPeekInfo.device_count_excluding_pending_devices,
pendingUsers: rtcPeekInfo.pending_users.toUUIDs()
)
}
@available(*, deprecated, message: "use 'deviceCountIncludingPendingDevices' or 'deviceCountExcludingPendingDevices' as appropriate")
public var deviceCount: UInt32 { deviceCountIncludingPendingDevices }
}
extension rtc_UserIds {
@ -320,6 +325,7 @@ public class SFUClient {
/// Possible failure codes include:
/// - 401: the room does not exist (and this is the wrong API to create a new room)
/// - 403: the admin passkey is incorrect
/// - 409: the room is currently in use, so restrictions cannot be changed at the moment
///
/// This request is idempotent; if it fails due to a network issue, it is safe to retry.
///

View file

@ -263,7 +263,8 @@ final class CallLinkTests: XCTestCase {
.done { result in
XCTAssertNil(result.errorStatusCode)
XCTAssertNil(result.peekInfo.eraId)
XCTAssertEqual(0, result.peekInfo.deviceCount)
XCTAssertEqual(0, result.peekInfo.deviceCountIncludingPendingDevices)
XCTAssertEqual(0, result.peekInfo.deviceCountExcludingPendingDevices)
callbackCompleted.fulfill()
}
@ -285,7 +286,8 @@ final class CallLinkTests: XCTestCase {
.done { result in
XCTAssertEqual(PeekInfo.expiredCallLinkStatus, result.errorStatusCode)
XCTAssertNil(result.peekInfo.eraId)
XCTAssertEqual(0, result.peekInfo.deviceCount)
XCTAssertEqual(0, result.peekInfo.deviceCountIncludingPendingDevices)
XCTAssertEqual(0, result.peekInfo.deviceCountExcludingPendingDevices)
callbackCompleted.fulfill()
}
@ -307,7 +309,8 @@ final class CallLinkTests: XCTestCase {
.done { result in
XCTAssertEqual(PeekInfo.invalidCallLinkStatus, result.errorStatusCode)
XCTAssertNil(result.peekInfo.eraId)
XCTAssertEqual(0, result.peekInfo.deviceCount)
XCTAssertEqual(0, result.peekInfo.deviceCountIncludingPendingDevices)
XCTAssertEqual(0, result.peekInfo.deviceCountExcludingPendingDevices)
callbackCompleted.fulfill()
}

View file

@ -3241,7 +3241,8 @@ class SignalRingRTCTests: XCTestCase {
XCTAssertNil(result.errorStatusCode)
let peekInfo = result.peekInfo
XCTAssertEqual(peekInfo.eraId, "mesozoic")
XCTAssertEqual(peekInfo.deviceCount, 7)
XCTAssertEqual(peekInfo.deviceCountIncludingPendingDevices, 7)
XCTAssertEqual(peekInfo.deviceCountExcludingPendingDevices, 3)
XCTAssertEqual(peekInfo.maxDevices, 20)
XCTAssertEqual(peekInfo.creator, user1)
XCTAssertEqual(Set(peekInfo.joinedMembers), [user1, user2]);

View file

@ -1,6 +1,6 @@
{
"name": "@signalapp/ringrtc",
"version": "2.30.0",
"version": "2.31.2",
"description": "Signal Messenger voice and video calling library.",
"main": "dist/index.js",
"types": "dist/index.d.ts",

View file

@ -178,7 +178,10 @@ export interface PeekInfo {
creator?: GroupCallUserId;
eraId?: string;
maxDevices?: number;
/** @deprecated Use {@link #deviceCountIncludingPendingDevices} and {@link #deviceCountExcludingPendingDevices} as appropriate */
deviceCount: number;
deviceCountIncludingPendingDevices: number;
deviceCountExcludingPendingDevices: number;
pendingUsers: Array<GroupCallUserId>;
}
@ -939,6 +942,7 @@ export class RingRTCType {
* Possible failure codes include:
* - 401: the room does not exist (and this is the wrong API to create a new room)
* - 403: the admin passkey is incorrect
* - 409: the room is currently in use, so restrictions cannot be changed at the moment
*
* This request is idempotent; if it fails due to a network issue, it is safe to retry.
*
@ -1114,7 +1118,13 @@ export class RingRTCType {
if (result.success) {
return result.value;
} else {
return { devices: [], deviceCount: 0, pendingUsers: [] };
return {
devices: [],
deviceCount: 0,
deviceCountIncludingPendingDevices: 0,
deviceCountExcludingPendingDevices: 0,
pendingUsers: [],
};
}
});
}

View file

@ -425,7 +425,8 @@ describe('RingRTC', () => {
);
const peekInfo = await peekResponse;
assert.equal(peekInfo.eraId, 'mesozoic');
assert.equal(peekInfo.deviceCount, 7);
assert.equal(peekInfo.deviceCountIncludingPendingDevices, 7);
assert.equal(peekInfo.deviceCountExcludingPendingDevices, 3);
assert.equal(peekInfo.maxDevices, 20);
assert.isTrue(peekInfo.creator?.equals(Buffer.of(0x11, 0x11, 0x11, 0x11)));
assert.deepEqual(peekInfo.devices, [
@ -818,7 +819,8 @@ describe('RingRTC', () => {
const state = await callLinkResponse;
if (state.success) {
assert.isUndefined(state.value.eraId);
assert.equal(state.value.deviceCount, 0);
assert.equal(state.value.deviceCountIncludingPendingDevices, 0);
assert.equal(state.value.deviceCountExcludingPendingDevices, 0);
} else {
assert.fail('should have succeeded');
}

2
src/rust/Cargo.lock generated
View file

@ -1460,7 +1460,7 @@ dependencies = [
[[package]]
name = "ringrtc"
version = "2.30.0"
version = "2.31.2"
dependencies = [
"aes",
"aes-gcm-siv",

View file

@ -5,7 +5,7 @@
[package]
name = "ringrtc"
version = "2.30.0"
version = "2.31.2"
authors = ["Calling Team <callingteam@signal.org>"]
edition = "2021"
description = "A Rust interface for WebRTC"

View file

@ -1704,7 +1704,9 @@ impl AndroidPlatform {
Some(era_id) => env.new_string(era_id)?.into(),
};
let jni_max_devices = self.get_optional_u32_long_object(env, peek_info.max_devices)?;
let jni_device_count = peek_info.device_count() as jlong;
let jni_device_count_including_pending =
peek_info.device_count_including_pending_devices() as jlong;
let jni_device_count_excluding_pending = peek_info.devices.len() as jlong;
let pending_users = peek_info.unique_pending_users();
let pending_user_list = jni_new_arraylist(env, pending_users.len())?;
@ -1729,7 +1731,8 @@ impl AndroidPlatform {
jni_creator => [byte],
jni_era_id => java.lang.String,
jni_max_devices => java.lang.Long,
jni_device_count => long,
jni_device_count_including_pending => long,
jni_device_count_excluding_pending => long,
pending_user_list => java.util.List,
) -> org.signal.ringrtc.PeekInfo);
let result = env.call_static_method(

View file

@ -8,13 +8,16 @@ mod support {
}
use support::http_client;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::io::Write;
use std::time::{Duration, SystemTime};
use rand::SeedableRng;
use ringrtc::lite::call_links::{CallLinkRootKey, CallLinkState, CallLinkUpdateRequest};
use ringrtc::lite::http;
use ringrtc::lite::call_links::{
CallLinkRestrictions, CallLinkRootKey, CallLinkState, CallLinkUpdateRequest,
};
use ringrtc::lite::http::{self, Client};
use zkgroup::call_links::CallLinkSecretParams;
struct Log;
@ -61,6 +64,30 @@ fn start_of_today_in_epoch_seconds() -> zkgroup::Timestamp {
now.as_secs() - remainder
}
fn issue_and_present_auth_credential(
server_zkparams: &zkgroup::generic_server_params::GenericServerSecretParams,
public_zkparams: &zkgroup::generic_server_params::GenericServerPublicParams,
root_key: &CallLinkRootKey,
) -> zkgroup::call_links::CallLinkAuthCredentialPresentation {
let timestamp = start_of_today_in_epoch_seconds();
let auth_credential = zkgroup::call_links::CallLinkAuthCredentialResponse::issue_credential(
USER_ID,
timestamp,
server_zkparams,
rand::random(),
)
.receive(USER_ID, timestamp, public_zkparams)
.unwrap();
let call_link_zkparams = CallLinkSecretParams::derive_from_root_key(&root_key.bytes());
auth_credential.present(
USER_ID,
timestamp,
public_zkparams,
&call_link_zkparams,
rand::random(),
)
}
fn show_result(result: Result<CallLinkState, http::ResponseStatus>) {
match result {
Ok(state) => println!("{state:#?}"),
@ -71,9 +98,9 @@ fn show_result(result: Result<CallLinkState, http::ResponseStatus>) {
fn main() {
let args: Vec<String> = std::env::args().collect();
let url = args
let url: &'static str = args
.get(1)
.map(String::as_str)
.map(|s| &*Box::leak(s.clone().into_boxed_str()))
.unwrap_or("http://localhost:8090");
let zkparams_base64 = args.get(2).map(String::as_str).unwrap_or(DEFAULT_ZKPARAMS);
let server_zkparams: zkgroup::generic_server_params::GenericServerSecretParams =
@ -104,10 +131,13 @@ help - show this message
create <id> - create a new link
read <id> - fetch the current state of a link
set-title <id> <new-title> - change the title of a link
admin-approval <id> (on|off) - turn on/off admin approval for a link
reset-expiration <id> - reset a link's expiration (if the server has this enabled)
root-key <id> - print the root key for a link
exit - quit
<id> can be any word you want; it is hashed to produce a root key.
The admin passkey for any created links is a constant {ADMIN_PASSKEY:?}.
"
);
prompt("> ");
@ -151,24 +181,10 @@ exit - quit
}
["read", id] => {
let root_key = root_key_from_id(id);
let timestamp = start_of_today_in_epoch_seconds();
let auth_credential =
zkgroup::call_links::CallLinkAuthCredentialResponse::issue_credential(
USER_ID,
timestamp,
let auth_credential_presentation = issue_and_present_auth_credential(
&server_zkparams,
rand::random(),
)
.receive(USER_ID, timestamp, &public_zkparams)
.unwrap();
let call_link_zkparams =
CallLinkSecretParams::derive_from_root_key(&root_key.bytes());
let auth_credential_presentation = auth_credential.present(
USER_ID,
timestamp,
&public_zkparams,
&call_link_zkparams,
rand::random(),
&root_key,
);
ringrtc::lite::call_links::read_call_link(
&http_client,
@ -181,24 +197,10 @@ exit - quit
["set-title", id, new_title] => {
let root_key = root_key_from_id(id);
let encrypted_name = root_key.encrypt(new_title.as_bytes(), rand::thread_rng());
let timestamp = start_of_today_in_epoch_seconds();
let auth_credential =
zkgroup::call_links::CallLinkAuthCredentialResponse::issue_credential(
USER_ID,
timestamp,
let auth_credential_presentation = issue_and_present_auth_credential(
&server_zkparams,
rand::random(),
)
.receive(USER_ID, timestamp, &public_zkparams)
.unwrap();
let call_link_zkparams =
CallLinkSecretParams::derive_from_root_key(&root_key.bytes());
let auth_credential_presentation = auth_credential.present(
USER_ID,
timestamp,
&public_zkparams,
&call_link_zkparams,
rand::random(),
&root_key,
);
ringrtc::lite::call_links::update_call_link(
&http_client,
@ -213,6 +215,83 @@ exit - quit
Box::new(show_result),
);
}
["admin-approval", id, on_or_off @ ("on" | "off")] => {
let root_key = root_key_from_id(id);
let auth_credential_presentation = issue_and_present_auth_credential(
&server_zkparams,
&public_zkparams,
&root_key,
);
let restrictions = if *on_or_off == "on" {
CallLinkRestrictions::AdminApproval
} else {
CallLinkRestrictions::None
};
ringrtc::lite::call_links::update_call_link(
&http_client,
url,
root_key,
&bincode::serialize(&auth_credential_presentation).unwrap(),
&CallLinkUpdateRequest {
admin_passkey: ADMIN_PASSKEY,
restrictions: Some(restrictions),
..CallLinkUpdateRequest::default()
},
Box::new(show_result),
);
}
["reset-expiration", id] => {
let root_key = root_key_from_id(id);
let auth_credential_presentation = issue_and_present_auth_credential(
&server_zkparams,
&public_zkparams,
&root_key,
);
// This is a testing-only API, so RingRTC doesn't implement it for us.
// Manually construct the request here.
let http_client_inner = http_client.clone();
http_client.send_request(
http::Request {
method: http::Method::Post,
url: format!("{url}/v1/call-link/reset-expiration"),
headers: HashMap::from_iter([
(
"Authorization".to_string(),
ringrtc::lite::call_links::auth_header_from_auth_credential(
&bincode::serialize(&auth_credential_presentation).unwrap(),
),
),
(
"X-Room-Id".to_string(),
hex::encode(root_key.derive_room_id()),
),
]),
body: None,
},
Box::new(move |response| match response {
Some(response) if response.status.is_success() => {
// Do a regular read to show the update.
// zkgroup sin: we're reusing a presentation.
// But this is a testing client only.
ringrtc::lite::call_links::read_call_link(
&http_client_inner,
url,
root_key,
&bincode::serialize(&auth_credential_presentation).unwrap(),
Box::new(show_result),
)
}
Some(response) => {
println!("failed: {}", response.status);
prompt("\n> ");
}
None => {
println!("request failed");
prompt("\n> ");
}
}),
)
}
["root-key", id] => {
let root_key = root_key_from_id(id);
println!("{}\n", root_key.to_formatted_string());

View file

@ -81,25 +81,37 @@ struct Args {
#[arg(long, default_value = "2000", value_parser = clap::value_parser!(u16).range(30..))]
allowed_bitrate_kbps: u16,
/// The target bitrate to encode audio at. When tcc is enabled, this is the initial bitrate.
#[arg(long, default_value = "40000", value_parser = clap::value_parser!(u16).range(500..))]
default_bitrate_bps: u16,
/// The initial bitrate for encoding audio.
#[arg(long, default_value = "32000", value_parser = clap::value_parser!(i32).range(500..))]
initial_bitrate_bps: i32,
/// The minimum bitrate to encode audio at. This is only used when tcc is enabled.
#[arg(long, default_value = "20000", value_parser = clap::value_parser!(u16).range(500..))]
min_bitrate_bps: u16,
/// The minimum bitrate for encoding audio.
#[arg(long, default_value = "16000", value_parser = clap::value_parser!(i32).range(500..))]
min_bitrate_bps: i32,
/// The maximum bitrate to encode audio at. This is only used when tcc is enabled.
#[arg(long, default_value = "40000", value_parser = clap::value_parser!(u16).range(500..))]
max_bitrate_bps: u16,
/// The maximum bitrate for encoding audio.
#[arg(long, default_value = "32000", value_parser = clap::value_parser!(i32).range(500..))]
max_bitrate_bps: i32,
/// The encoding bandwidth for audio.
#[arg(long, default_value_t = AudioBandwidth::Auto, value_enum)]
bandwidth: AudioBandwidth,
/// The encoding complexity for audio.
#[arg(long, default_value = "9", value_parser = clap::value_parser!(u16).range(0..=10))]
complexity: u16,
#[arg(long, default_value = "9", value_parser = clap::value_parser!(i32).range(0..=10))]
complexity: i32,
/// The length of an audio frame size (ptime).
/// The size of an audio frame (ptime).
#[arg(long, default_value = "20", value_parser = clap::builder::PossibleValuesParser::new(["20", "40", "60", "80", "100", "120"]))]
packet_size_ms: String,
initial_packet_size_ms: String,
/// The minimum size of an audio frame (ptime).
#[arg(long, default_value = "20", value_parser = clap::builder::PossibleValuesParser::new(["20", "40", "60", "80", "100", "120"]))]
min_packet_size_ms: String,
/// The maximum size of an audio frame (ptime).
#[arg(long, default_value = "20", value_parser = clap::builder::PossibleValuesParser::new(["20", "40", "60", "80", "100", "120"]))]
max_packet_size_ms: String,
/// Whether to use CBR for encoding audio. False means VBR.
#[arg(long, action = clap::ArgAction::Set, default_value = "true")]
@ -113,11 +125,19 @@ struct Args {
#[arg(long, action = clap::ArgAction::Set, default_value = "true")]
fec: bool,
/// Whether to use adaptation when encoding audio. Set to 0 to disable (default).
#[arg(long, default_value_t = 0)]
adaptation: i32,
/// Whether to enable transport-cc feedback for audio. This will allow the bitrate to vary
/// between `min_bitrate_bps` and `max_bitrate_bps` when using CBR.
#[arg(long, action = clap::ArgAction::Set, default_value = "false")]
tcc: bool,
/// Whether to enable redundant packets for audio.
#[arg(long, action = clap::ArgAction::Set, default_value = "false")]
red: bool,
/// Whether to enable the VP9 codec for video.
#[arg(long, action = clap::ArgAction::Set, default_value = "true")]
vp9: bool,
@ -159,7 +179,10 @@ struct Args {
force_relay: bool,
#[arg(long, default_value = "200")]
audio_jitter_buffer_max_packets: u16,
audio_jitter_buffer_max_packets: i32,
#[arg(long, default_value = "5000")]
audio_rtcp_report_interval_ms: i32,
}
fn main() -> Result<()> {
@ -221,18 +244,26 @@ fn main() -> Result<()> {
agc_enabled: args.agc,
},
audio_encoder_config: AudioEncoderConfig {
packet_size_ms: args.packet_size_ms.parse().expect("validated by clap"),
bandwidth: AudioBandwidth::Auto,
start_bitrate_bps: args.default_bitrate_bps,
initial_packet_size_ms: args
.initial_packet_size_ms
.parse()
.expect("validated by clap"),
min_packet_size_ms: args.min_packet_size_ms.parse().expect("validated by clap"),
max_packet_size_ms: args.max_packet_size_ms.parse().expect("validated by clap"),
initial_bitrate_bps: args.initial_bitrate_bps,
min_bitrate_bps: args.min_bitrate_bps,
max_bitrate_bps: args.max_bitrate_bps,
bandwidth: args.bandwidth,
complexity: args.complexity,
adaptation: args.adaptation,
enable_cbr: args.cbr,
enable_dtx: args.dtx,
enable_fec: args.fec,
},
enable_tcc_audio: args.tcc,
enable_red_audio: args.red,
audio_jitter_buffer_max_packets: args.audio_jitter_buffer_max_packets as isize,
audio_rtcp_report_interval_ms: args.audio_rtcp_report_interval_ms as isize,
enable_vp9: args.vp9,
};

View file

@ -78,7 +78,7 @@ impl group_call::Observer for Observer {
"Peek info changed to creator: {:?}, era: {:?} devices: {:?}/{:?} {:?}",
peek_info.creator,
peek_info.era_id,
peek_info.device_count(),
peek_info.device_count_including_pending_devices(),
peek_info.max_devices,
peek_info.devices,
);

View file

@ -763,7 +763,9 @@ pub struct CallConfig {
pub audio_config: AudioConfig,
pub audio_encoder_config: AudioEncoderConfig,
pub enable_tcc_audio: bool,
pub enable_red_audio: bool,
pub audio_jitter_buffer_max_packets: isize,
pub audio_rtcp_report_interval_ms: isize,
pub enable_vp9: bool,
}
@ -777,7 +779,9 @@ impl Default for CallConfig {
audio_config: Default::default(),
audio_encoder_config: Default::default(),
enable_tcc_audio: false,
enable_red_audio: false,
audio_jitter_buffer_max_packets: 50,
audio_rtcp_report_interval_ms: 5000,
enable_vp9: true,
}
}

View file

@ -696,22 +696,22 @@ where
peer_connection.set_local_description(observer.as_ref(), offer);
observer.get_result()?;
// Setup RTP data support before we set remote description to make sure we can handle
// the "accepted" message before we get ICE Connected. Warning: we're holding the
// lock to webrtc_data while we block on the WebRTC network thread, so we need to
// make sure we don't grab the webrtc_data lock in handle_rtp_received.
peer_connection.receive_rtp(RTP_DATA_PAYLOAD_TYPE)?;
let observer = create_ssd_observer();
peer_connection.set_remote_description(observer.as_ref(), answer);
// on_add_stream and on_ice_connected can all happen while
// SetRemoteDescription is happening. But none of those will be processed
// until start_fsm() is called below.
observer.get_result()?;
// Don't enable until the call is accepted.
// Don't enable outgoing media until the call is accepted.
peer_connection.set_outgoing_media_enabled(false);
// But do start incoming RTP right away so that we can receive the
// "accepted" message.
// Warning: we're holding the lock to webrtc_data while we
// block on the WebRTC network thread, so we need to make
// sure we don't grab the webrtc_data lock in
// handle_rtp_received.
peer_connection.receive_rtp(RTP_DATA_PAYLOAD_TYPE)?;
peer_connection.set_incoming_media_enabled(true);
peer_connection.configure_audio_encoders(&self.call_config.audio_encoder_config);
@ -831,7 +831,7 @@ where
return Err(RingRtcError::UnknownSignaledProtocolVersion.into());
};
// Don't enable incoming RTP until accepted.
// Don't enable incoming media until the call is accepted.
// This should be done before we set local description to make sure
// we don't get ICE connected really fast and allow any packets through.
peer_connection.set_incoming_media_enabled(false);
@ -843,15 +843,12 @@ where
// But it won't be processed until start_fsm() is called below.
observer.get_result()?;
// Don't enable until call is accepted.
// Don't enable outgoing media until the call is accepted.
peer_connection.set_outgoing_media_enabled(false);
// No RTP will be processed/received until
// peer_connection.set_incoming_media_enabled(true).
// Warning: we're holding the lock to webrtc_data while we
// block on the WebRTC network thread, so we need to make
// sure we don't grab the webrtc_data lock in
// handle_rtp_received.
// Setup RTP data support. Warning: we're holding the lock to webrtc_data while
// we block on the WebRTC network thread, so we need to make sure we don't grab
// the webrtc_data lock in handle_rtp_received.
peer_connection.receive_rtp(RTP_DATA_PAYLOAD_TYPE)?;
peer_connection.configure_audio_encoders(&self.call_config.audio_encoder_config);

View file

@ -1045,12 +1045,14 @@ impl Client {
let local_ice_ufrag = random_alphanumeric(4);
let local_ice_pwd = random_alphanumeric(22);
let audio_jitter_buffer_max_packets = 50;
let audio_rtcp_report_interval_ms = 5000;
let ice_server = IceServer::none();
let peer_connection = peer_connection_factory
.create_peer_connection(
peer_connection_observer,
pcf::RffiPeerConnectionKind::GroupCall,
audio_jitter_buffer_max_packets,
audio_rtcp_report_interval_ms,
&ice_server,
outgoing_audio_track,
outgoing_video_track,
@ -1424,8 +1426,8 @@ impl Client {
}
JoinState::NotJoined(ring_id) => {
if let Some(peek_info) = &state.last_peek_info {
if peek_info.device_count() >= peek_info.max_devices.unwrap_or(u32::MAX) as usize {
info!("Ending group call client because there are {}/{} devices in the call.", peek_info.device_count(), peek_info.max_devices.unwrap());
if peek_info.device_count_including_pending_devices() >= peek_info.max_devices.unwrap_or(u32::MAX) as usize {
info!("Ending group call client because there are {}/{} devices in the call.", peek_info.device_count_including_pending_devices(), peek_info.max_devices.unwrap());
Self::end(state, EndReason::HasMaxDevices);
return;
}
@ -4211,7 +4213,7 @@ mod tests {
owned_state.creator = peek_info.creator.clone();
owned_state.era_id = peek_info.era_id.clone();
owned_state.max_devices = peek_info.max_devices;
owned_state.device_count = peek_info.device_count();
owned_state.device_count = peek_info.device_count_including_pending_devices();
self.peek_changed.set();
}

View file

@ -157,7 +157,7 @@ fn redact_ice_password(text: Cow<'_, str>) -> Cow<'_, str> {
// - IPv4-mapped IPv6 addresses (section 2.1 of rfc2765)
// - IPv4-translated addresses (section 2.1 of rfc2765)
//
// To make the above easier to understand, the following "pseudo" code replicates the RE:
// To make the below easier to understand, the following "pseudo" code replicates the RE:
//
// IPV4SEG = (25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])
// IPV4ADDR = (IPV4SEG\.){3,3}IPV4SEG
@ -179,20 +179,26 @@ fn redact_ice_password(text: Cow<'_, str>) -> Cow<'_, str> {
#[cfg(any(not(debug_assertions), test))]
fn redact_ipv6(text: Cow<'_, str>) -> Cow<'_, str> {
let re = regex_aot::regex!("\
[Ff][Ee]80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|\
(::)?([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|\
([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|\
([0-9a-fA-F]{1,4}:){1,1}(:[0-9a-fA-F]{1,4}){1,6}|\
([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|\
([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|\
([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|\
([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|\
([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|\
// Differences from the above description:
// - IPV4SEG is simplified to [0-9]{1,3}
// - IPV4ADDR allows a final "x" segment
// - Any IPV6SEG after "::" may be "x", or any segment in a "full" non-zero-compressed address
let re = regex_aot::regex!(
"\
[Ff][Ee]80:(:(x|[0-9a-fA-F]{0,4})){0,4}%[0-9a-zA-Z]{1,}|\
(::)?([0-9a-fA-F]{1,4}:){1,4}:([0-9]{1,3}\\.){3,3}(x|[0-9]{1,3})|\
((x|[0-9a-fA-F]{1,4}):){7,7}(x|[0-9a-fA-F]{1,4})|\
([0-9a-fA-F]{1,4}:){1,1}(:(x|[0-9a-fA-F]{1,4})){1,6}|\
([0-9a-fA-F]{1,4}:){1,2}(:(x|[0-9a-fA-F]{1,4})){1,5}|\
([0-9a-fA-F]{1,4}:){1,3}(:(x|[0-9a-fA-F]{1,4})){1,4}|\
([0-9a-fA-F]{1,4}:){1,4}(:(x|[0-9a-fA-F]{1,4})){1,3}|\
([0-9a-fA-F]{1,4}:){1,5}(:(x|[0-9a-fA-F]{1,4})){1,2}|\
([0-9a-fA-F]{1,4}:){1,6}:(x|[0-9a-fA-F]{1,4})|\
([0-9a-fA-F]{1,4}:){1,7}:|\
::([fF]{4}(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|\
:((:[0-9a-fA-F]{1,4}){1,7}|:)\
");
::([fF]{4}(:0{1,4}){0,1}:){0,1}([0-9]{1,3}\\.){3,3}(x|[0-9]{1,3})|\
:((:(x|[0-9a-fA-F]{1,4})){1,7}|:)\
"
);
replace_all(text, re, "[REDACTED ipv6]")
}
@ -223,7 +229,7 @@ fn replace_all<'a>(
#[cfg(any(not(debug_assertions), test))]
fn redact_ipv4(text: Cow<'_, str>) -> Cow<'_, str> {
let re = regex_aot::regex!("(((25[0-5])|(2[0-4][0-9])|([0-1][0-9]{2,2})|([0-9]{1,2}))\\.){3,3}((25[0-5])|(2[0-4][0-9])|([0-1][0-9]{2,2})|([0-9]{1,2}))");
let re = regex_aot::regex!("([0-9]{1,3}\\.){3,3}(x|[0-9]{1,3})");
replace_all(text, re, "[REDACTED ipv4]")
}
@ -410,6 +416,12 @@ mod tests {
"::ffff:255.255.255.255",
"2001:db8:3:4::192.0.2.33",
"64:ff9b::192.0.2.33",
"aaaa:bbbb:cccc:x:x:x:x:x",
"1:2::x:x:x:x:x",
"1::x:x",
"::x",
"::196.168.50.x",
"::ffff:196.168.50.x",
];
let prefix = ["", "text", "text ", "<", "@"];
@ -459,6 +471,7 @@ mod tests {
"002.8.122.2",
"2.168.122.9",
"92.168.122.250",
"192.168.50.x",
];
let prefix = ["", "text", "text ", "<", "@"];

View file

@ -395,7 +395,7 @@ impl CallEndpoint {
let observer = js_object.as_ref().to_inner(cx);
let method_name = "processEvents";
let method = observer.get::<JsFunction, _, _>(cx, method_name)?;
method.call(cx, observer, Vec::<Handle<JsValue>>::new())?;
method.call(cx, observer, [])?;
Ok(())
}) {
Ok(_) => {}
@ -565,7 +565,11 @@ fn to_js_peek_info<'a>(
None => cx.undefined().upcast(),
Some(max_devices) => cx.number(*max_devices).upcast(),
};
let device_count: Handle<JsValue> = cx.number(peek_info.device_count() as u32).upcast();
let device_count_including_pending_devices: Handle<JsValue> = cx
.number(peek_info.device_count_including_pending_devices() as u32)
.upcast();
let device_count_excluding_pending_devices: Handle<JsValue> =
cx.number(peek_info.devices.len() as u32).upcast();
let pending_users = peek_info.unique_pending_users();
let js_pending_users = JsArray::new(cx, pending_users.len() as u32);
@ -579,7 +583,18 @@ fn to_js_peek_info<'a>(
js_info.set(cx, "creator", js_creator)?;
js_info.set(cx, "eraId", era_id)?;
js_info.set(cx, "maxDevices", max_devices)?;
js_info.set(cx, "deviceCount", device_count)?;
js_info.set(
cx,
"deviceCountIncludingPendingDevices",
device_count_including_pending_devices,
)?;
js_info.set(
cx,
"deviceCountExcludingPendingDevices",
device_count_excluding_pending_devices,
)?;
// For backwards compatibility.
js_info.set(cx, "deviceCount", device_count_including_pending_devices)?;
js_info.set(cx, "pendingUsers", js_pending_users)?;
Ok(js_info)
}
@ -2006,18 +2021,19 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
let this = cx.this();
let observer = this.get::<JsObject, _, _>(&mut cx, "observer")?;
{
let log_entries = std::mem::take(&mut *LOG_MESSAGES.lock().expect("lock log messages"));
let method = observer.get::<JsFunction, _, _>(&mut cx, "onLogMessage")?;
for log_entry in log_entries {
let method_name = "onLogMessage";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.number(log_entry.level).upcast(),
cx.string(log_entry.file).upcast(),
cx.number(log_entry.line).upcast(),
cx.string(log_entry.message).upcast(),
];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
}
let events: Vec<Event> = with_call_endpoint(&mut cx, |endpoint| {
endpoint.events_receiver.try_iter().collect()
@ -2096,7 +2112,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
),
};
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
let args = vec![
let args = [
cx.string(peer_id).upcast(),
cx.number(maybe_device_id.unwrap_or(0) as f64).upcast(),
create_id_arg(&mut cx, call_id.as_u64()),
@ -2110,7 +2126,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
Event::CallState(peer_id, call_id, CallState::Incoming(call_media_type)) => {
let method_name = "onStartIncomingCall";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.string(peer_id).upcast(),
create_id_arg(&mut cx, call_id.as_u64()),
cx.boolean(call_media_type == CallMediaType::Video).upcast(),
@ -2121,7 +2137,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
Event::CallState(peer_id, call_id, CallState::Outgoing(_call_media_type)) => {
let method_name = "onStartOutgoingCall";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.string(peer_id).upcast(),
create_id_arg(&mut cx, call_id.as_u64()),
];
@ -2155,7 +2171,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
EndReason::ReceivedOfferExpired { age } => age,
_ => Duration::ZERO,
};
let args = vec![
let args = [
cx.string(peer_id).upcast(),
create_id_arg(&mut cx, call_id.as_u64()),
cx.string(reason_string).upcast(),
@ -2189,7 +2205,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
CallState::Outgoing(_) => "outgoing",
CallState::Ended(_) => "ended",
};
let args = vec![
let args = [
cx.string(peer_id).upcast(),
cx.string(state_string).upcast(),
];
@ -2216,16 +2232,14 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
}
let method_name = "onRemoteVideoEnabled";
let args: Vec<Handle<JsValue>> =
vec![cx.string(peer_id).upcast(), cx.boolean(enabled).upcast()];
let args = [cx.string(peer_id).upcast(), cx.boolean(enabled).upcast()];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
Event::RemoteSharingScreenChange(peer_id, enabled) => {
let method_name = "onRemoteSharingScreen";
let args: Vec<Handle<JsValue>> =
vec![cx.string(peer_id).upcast(), cx.boolean(enabled).upcast()];
let args = [cx.string(peer_id).upcast(), cx.boolean(enabled).upcast()];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
@ -2236,7 +2250,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
received_level,
} => {
let method_name = "onAudioLevels";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.string(peer_id).upcast(),
cx.number(captured_level).upcast(),
cx.number(received_level).upcast(),
@ -2273,7 +2287,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
js_body.upcast()
}
};
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.number(request_id).upcast(),
cx.string(url).upcast(),
cx.number(http_method).upcast(),
@ -2293,7 +2307,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
let recipient_uuid = to_js_buffer(&mut cx, &recipient_uuid);
let message = to_js_buffer(&mut cx, &message);
let urgency = cx.number(urgency as i32).upcast();
let args: Vec<Handle<JsValue>> = vec![recipient_uuid, message, urgency];
let args = [recipient_uuid, message, urgency];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
@ -2307,7 +2321,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
let group_id = to_js_buffer(&mut cx, &group_id);
let message = to_js_buffer(&mut cx, &message);
let urgency = cx.number(urgency as i32).upcast();
let args: Vec<Handle<JsValue>> = vec![group_id, message, urgency];
let args = [group_id, message, urgency];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
@ -2316,7 +2330,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
Event::GroupUpdate(GroupUpdate::RequestMembershipProof(client_id)) => {
let method_name = "requestMembershipProof";
let args: Vec<Handle<JsValue>> = vec![cx.number(client_id).upcast()];
let args = [cx.number(client_id).upcast()];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
@ -2324,7 +2338,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
Event::GroupUpdate(GroupUpdate::RequestGroupMembers(client_id)) => {
let method_name = "requestGroupMembers";
let args: Vec<Handle<JsValue>> = vec![cx.number(client_id).upcast()];
let args = [cx.number(client_id).upcast()];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
@ -2335,7 +2349,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
)) => {
let method_name = "handleConnectionStateChanged";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.number(client_id).upcast(),
cx.number(connection_state as i32).upcast(),
];
@ -2357,7 +2371,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
Event::GroupUpdate(GroupUpdate::JoinStateChanged(client_id, join_state)) => {
let method_name = "handleJoinStateChanged";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.number(client_id).upcast(),
cx.number(join_state.ordinal()).upcast(),
match join_state {
@ -2485,7 +2499,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
js_remote_device_states.set(&mut cx, i as u32, js_remote_device_state)?;
}
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.number(client_id).upcast(),
js_remote_device_states.upcast(),
];
@ -2500,8 +2514,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
let method_name = "handlePeekChanged";
let js_info = to_js_peek_info(&mut cx, peek_info)?;
let args: Vec<Handle<JsValue>> =
vec![cx.number(client_id).upcast(), js_info.upcast()];
let args = [cx.number(client_id).upcast(), js_info.upcast()];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
@ -2520,15 +2533,14 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
let method_name = "handlePeekResponse";
let args: Vec<Handle<JsValue>> =
vec![cx.number(request_id).upcast(), js_status.upcast(), js_info];
let args = [cx.number(request_id).upcast(), js_status.upcast(), js_info];
let method = observer.get::<JsFunction, _, _>(&mut cx, method_name)?;
method.call(&mut cx, observer, args)?;
}
Event::GroupUpdate(GroupUpdate::Ended(client_id, reason)) => {
let method_name = "handleEnded";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.number(client_id).upcast(),
cx.number(reason as i32).upcast(),
];
@ -2575,7 +2587,7 @@ fn processEvents(mut cx: FunctionContext) -> JsResult<JsValue> {
}
let method_name = "handleAudioLevels";
let args: Vec<Handle<JsValue>> = vec![
let args = [
cx.number(client_id).upcast(),
cx.number(captured_level).upcast(),
js_received_levels.upcast(),

View file

@ -472,7 +472,8 @@ pub struct AppInterface {
creator: AppByteSlice,
eraId: AppByteSlice,
maxDevices: AppOptionalUInt32,
deviceCount: u32,
deviceCountIncludingPendingDevices: u32,
deviceCountExcludingPendingDevices: u32,
pendingUsers: AppUuidArray,
),
///

View file

@ -680,7 +680,9 @@ impl Platform for IosPlatform {
let app_era_id = app_slice_from_str(peek_info.era_id.as_ref());
let app_max_devices = app_option_from_u32(peek_info.max_devices);
let device_count = peek_info.device_count() as u32;
let device_count_including_pending_devices =
peek_info.device_count_including_pending_devices() as u32;
let device_count_excluding_pending_devices = peek_info.devices.len() as u32;
(self.app_interface.handlePeekChanged)(
self.app_interface.object,
@ -689,7 +691,8 @@ impl Platform for IosPlatform {
app_creator,
app_era_id,
app_max_devices,
device_count,
device_count_including_pending_devices,
device_count_excluding_pending_devices,
app_pending_users_array,
);
}

View file

@ -53,7 +53,7 @@ impl PeekInfo {
/// The number of devices currently joined (including the local device, any pending devices, and
/// unknown users).
pub fn device_count(&self) -> usize {
pub fn device_count_including_pending_devices(&self) -> usize {
self.devices.len() + self.pending_devices.len()
}
}
@ -645,7 +645,10 @@ pub mod ios {
creator: rtc_Bytes::from_or_default(peek_info.creator.as_ref()),
era_id: rtc_String::from_or_default(peek_info.era_id.as_ref()),
max_devices: rtc_OptionalU32::from_or_default(peek_info.max_devices),
device_count: peek_info.device_count() as u32,
device_count_including_pending_devices: peek_info
.device_count_including_pending_devices()
as u32,
device_count_excluding_pending_devices: peek_info.devices.len() as u32,
pending_users: rtc_UserIds::from(&rtc_pending_users),
},
};
@ -667,7 +670,8 @@ pub mod ios {
creator: rtc_Bytes<'a>,
era_id: rtc_String<'a>,
max_devices: rtc_OptionalU32,
device_count: u32,
device_count_including_pending_devices: u32,
device_count_excluding_pending_devices: u32,
joined_members: rtc_UserIds<'a>,
pending_users: rtc_UserIds<'a>,
}

View file

@ -433,6 +433,7 @@ impl Platform for NativePlatform {
pc_observer,
kind,
connection.call_config().audio_jitter_buffer_max_packets,
connection.call_config().audio_rtcp_report_interval_ms,
&context.ice_server,
context.outgoing_audio_track.clone(),
Some(context.outgoing_video_track.clone()),
@ -914,7 +915,7 @@ impl Platform for NativePlatform {
client_id,
peek_info.era_id,
peek_info.max_devices,
peek_info.device_count()
peek_info.device_count_including_pending_devices()
);
let result = self.send_group_update(GroupUpdate::PeekChanged {

View file

@ -62,6 +62,7 @@ extern "C" {
observer: webrtc::ptr::Borrowed<RffiPeerConnectionObserver>,
kind: RffiPeerConnectionKind,
audio_jitter_buffer_max_packets: isize,
audio_rtcp_report_interval_ms: isize,
ice_server: RffiIceServer,
outgoing_audio_track: webrtc::ptr::BorrowedRc<RffiAudioTrack>,
outgoing_video_track: webrtc::ptr::BorrowedRc<RffiVideoTrack>,

View file

@ -87,6 +87,7 @@ extern "C" {
offer: bool,
v4: webrtc::ptr::Borrowed<RffiConnectionParametersV4>,
enable_tcc_audio: bool,
enable_red_audio: bool,
enable_vp9: bool,
) -> webrtc::ptr::Owned<RffiSessionDescription>;

View file

@ -350,56 +350,63 @@ impl Clone for Box<dyn VideoSink> {
#[repr(C)]
#[derive(Clone, Debug)]
pub struct RffiAudioEncoderConfig {
packet_size_ms: u32,
initial_packet_size_ms: i32,
min_packet_size_ms: i32,
max_packet_size_ms: i32,
bandwidth: i32,
start_bitrate_bps: i32,
initial_bitrate_bps: i32,
min_bitrate_bps: i32,
max_bitrate_bps: i32,
bandwidth: i32,
complexity: i32,
enable_vbr: i32,
enable_dtx: i32,
enable_fec: i32,
adaptation: i32,
enable_cbr: bool,
enable_dtx: bool,
enable_fec: bool,
}
// A nice form of RffiAudioEncoderConfig
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AudioEncoderConfig {
// AKA ptime or frame size
// Valid sizes: 10, 20, 40, 60, 120
// Default is 20ms
pub packet_size_ms: u32,
// Valid sizes: 10, 20, 40, 60, 80, 100, 120
pub initial_packet_size_ms: i32,
pub min_packet_size_ms: i32,
pub max_packet_size_ms: i32,
// Valid range: 6000-510000
pub initial_bitrate_bps: i32,
pub min_bitrate_bps: i32,
pub max_bitrate_bps: i32,
// Default is Auto
pub bandwidth: AudioBandwidth,
// Valid range: 6000-510000
// Default is to start at 32000 and move between 16000 and 32000.
pub start_bitrate_bps: u16,
pub min_bitrate_bps: u16,
pub max_bitrate_bps: u16,
// Valid range: 0-10 (10 most complex)
// Default is 9.
pub complexity: u16,
// Default is true.
pub complexity: i32,
pub adaptation: i32,
pub enable_cbr: bool,
// Default is true.
pub enable_dtx: bool,
// Default is true.
pub enable_fec: bool,
}
impl Default for AudioEncoderConfig {
fn default() -> Self {
Self {
packet_size_ms: 60,
initial_packet_size_ms: 60,
min_packet_size_ms: 60,
max_packet_size_ms: 60,
initial_bitrate_bps: 32000,
min_bitrate_bps: 32000,
max_bitrate_bps: 32000,
bandwidth: AudioBandwidth::Auto,
start_bitrate_bps: 32000,
min_bitrate_bps: 16000,
max_bitrate_bps: 32000,
complexity: 9,
adaptation: 0,
enable_cbr: true,
enable_dtx: true,
enable_fec: true,
@ -410,21 +417,24 @@ impl Default for AudioEncoderConfig {
impl From<&AudioEncoderConfig> for RffiAudioEncoderConfig {
fn from(config: &AudioEncoderConfig) -> Self {
Self {
packet_size_ms: config.packet_size_ms,
initial_packet_size_ms: config.initial_packet_size_ms,
min_packet_size_ms: config.min_packet_size_ms,
max_packet_size_ms: config.max_packet_size_ms,
initial_bitrate_bps: config.initial_bitrate_bps,
min_bitrate_bps: config.min_bitrate_bps,
max_bitrate_bps: config.max_bitrate_bps,
bandwidth: config.bandwidth as i32,
start_bitrate_bps: config.start_bitrate_bps as i32,
min_bitrate_bps: config.min_bitrate_bps as i32,
max_bitrate_bps: config.max_bitrate_bps as i32,
complexity: config.complexity as i32,
enable_vbr: i32::from(!config.enable_cbr),
enable_dtx: i32::from(config.enable_dtx),
enable_fec: i32::from(config.enable_fec),
complexity: config.complexity,
adaptation: config.adaptation,
enable_cbr: config.enable_cbr,
enable_dtx: config.enable_dtx,
enable_fec: config.enable_fec,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "call_sim", derive(clap::ValueEnum))]
#[repr(i32)]
pub enum AudioBandwidth {
// Constants in libopus

View file

@ -290,6 +290,7 @@ impl PeerConnectionFactory {
pc_observer: PeerConnectionObserver<T>,
kind: RffiPeerConnectionKind,
audio_jitter_buffer_max_packets: isize,
audio_rtcp_report_interval_ms: isize,
ice_servers: &IceServer,
outgoing_audio_track: AudioTrack,
outgoing_video_track: Option<VideoTrack>,
@ -313,6 +314,7 @@ impl PeerConnectionFactory {
pc_observer_rffi.borrow(),
kind,
audio_jitter_buffer_max_packets,
audio_rtcp_report_interval_ms,
ice_servers.rffi(),
outgoing_audio_track.rffi().as_borrowed(),
outgoing_video_track

View file

@ -286,6 +286,7 @@ impl SessionDescription {
offer,
webrtc::ptr::Borrowed::from_ptr(&rffi_v4),
call_config.enable_tcc_audio,
call_config.enable_red_audio,
call_config.enable_vp9,
)
});

View file

@ -49,6 +49,7 @@ pub unsafe fn Rust_createPeerConnection(
_observer: webrtc::ptr::Borrowed<RffiPeerConnectionObserver>,
_kind: RffiPeerConnectionKind,
_audio_jitter_buffer_max_packets: isize,
_audio_rtcp_report_interval_ms: isize,
_ice_server: RffiIceServer,
_outgoing_audio_track: webrtc::ptr::BorrowedRc<RffiAudioTrack>,
_outgoing_video_track: webrtc::ptr::BorrowedRc<RffiVideoTrack>,

View file

@ -133,6 +133,7 @@ pub unsafe fn Rust_sessionDescriptionFromV4(
offer: bool,
_v4: webrtc::ptr::Borrowed<RffiConnectionParametersV4>,
_enable_tcc_audio: bool,
_enable_red_audio: bool,
_enable_vp9: bool,
) -> webrtc::ptr::Owned<RffiSessionDescription> {
info!("Rust_sessionDescriptionFromV4(): ");