Mirror of https://github.com/mollyim/webrtc.git, synced 2025-05-12 21:30:45 +01:00

Move C++/Rust FFI support into WebRTC repo

The gn files are simplified from what's currently in the RingRTC repo, looking ahead to when the RingRTC Rust and Java builds for Android aren't included in the WebRTC build.

parent b43b921418
commit fe9009be92

35 changed files with 4041 additions and 1 deletion

ringrtc (deleted)
@@ -1 +0,0 @@
../../../src

ringrtc/BUILD.gn (Normal file)
@@ -0,0 +1,31 @@
#
# Copyright 2019-2021 Signal Messenger, LLC
# SPDX-License-Identifier: AGPL-3.0-only
#

if (is_android) {
  import("//webrtc.gni")

  group("ringrtc") {
    public_deps = [
      "//sdk/android:libwebrtc",
      "rffi:libringrtc_rffi",
    ]
  }
}

if (is_ios) {
  group("ringrtc") {
    # @note The RingRTC build is done outside of gn/ninja...
  }
}

if (is_linux || is_mac || is_win) {
  import("//webrtc.gni")

  rtc_library("ringrtc") {
    public_deps = [
      "rffi:libringrtc_rffi",
    ]
  }
}

ringrtc/rffi/BUILD.gn (Normal file)
@@ -0,0 +1,100 @@
#
# Copyright 2019-2021 Signal Messenger, LLC
# SPDX-License-Identifier: AGPL-3.0-only
#

import("//webrtc.gni")

# C++ source files common to all platforms
common_sources = [
  "src/field_trial.cc",
  "src/injectable_network.cc",
  "src/logging.cc",
  "src/media.cc",
  "src/network.cc",
  "src/peer_connection_factory.cc",
  "src/peer_connection.cc",
  "src/peer_connection_observer.cc",
  "src/ref_count.cc",
  "src/sdp_observer.cc",
  "src/stats_observer.cc",
]

config("ringrtc_rffi_config") {
  include_dirs = [ ".." ]
  defines = [ "ENABLE_RINGRTC" ]
}

if (is_android) {
  import("//build/config/android/config.gni")
  import("//build/config/android/rules.gni")

  rtc_shared_library("libringrtc_rffi") {

    visibility = [ "//ringrtc:*" ]

    android_sdk = "//sdk/android"
    # jni_onload.cc -- taken from webrtc/sdk/android/BUILD.gn.
    sources = [
      "${android_sdk}/src/jni/jni_onload.cc",
      "src/android/java_media_stream.cc",
      "src/android/jni_peer_connection.cc",
    ] + common_sources

    # suppressed_config and hide_all_but_jni taken from
    # webrtc/sdk/android/BUILD.gn.
    suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
    configs += [ "//build/config/android:hide_all_but_jni" ]

    configs += [ ":ringrtc_rffi_config" ]

    ldflags = [ "-Wl,--version-script=" +
                rebase_path("config/rffi_include.lst",
                            root_build_dir) ]

    # deps -- taken from webrtc/sdk/android/BUILD.gn.
    deps = [
      "${android_sdk}:libjingle_peerconnection_jni",
      "${android_sdk}:libjingle_peerconnection_metrics_default_jni",
      "//api/video_codecs:builtin_video_encoder_factory",
      "//api/video_codecs:builtin_video_decoder_factory",
      "//pc:libjingle_peerconnection",
      "//rtc_base:rtc_base",
    ]
    output_extension = "so"
  }
}

if (is_ios) {
  import("//build/config/ios/ios_sdk.gni")
  import("//build/config/ios/rules.gni")

  rtc_static_library("libringrtc_rffi") {

    visibility = [ "//sdk/*:*" ]

    sources = common_sources

    configs += [ ":ringrtc_rffi_config" ]

    deps = [
      "//api/video_codecs:builtin_video_encoder_factory",
      "//api/video_codecs:builtin_video_decoder_factory",
      "//third_party/libyuv",
    ]
  }
}

if (is_linux || is_mac || is_win) {
  rtc_library("libringrtc_rffi") {
    visibility = [ "*" ]

    sources = common_sources

    configs += [ ":ringrtc_rffi_config" ]

    deps = [
      "//sdk:media_constraints",
    ]
  }
}

ringrtc/rffi/api/android/media_stream_intf.h (Normal file)
@@ -0,0 +1,31 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

/**
 * Rust friendly wrapper around webrtc::jni::JavaMediaStream object
 */

#ifndef ANDROID_MEDIA_STREAM_INTF_H__
#define ANDROID_MEDIA_STREAM_INTF_H__

#include "rffi/api/rffi_defs.h"
#include "sdk/android/src/jni/pc/media_stream.h"

// Create a JavaMediaStream C++ object from a
// webrtc::MediaStreamInterface* object.
// Returns an owned pointer.
RUSTEXPORT webrtc::jni::JavaMediaStream*
Rust_createJavaMediaStream(webrtc::MediaStreamInterface* media_stream_borrowed_rc);

// Delete a JavaMediaStream C++ object.
RUSTEXPORT void
Rust_deleteJavaMediaStream(webrtc::jni::JavaMediaStream* java_media_stream_owned);

// Return the Java JNI object contained within the JavaMediaStream C++
// object.
RUSTEXPORT jobject
Rust_getJavaMediaStreamObject(webrtc::jni::JavaMediaStream* java_media_stream_borrowed);

#endif /* ANDROID_MEDIA_STREAM_INTF_H__ */
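
A minimal C++ sketch of the ownership contract these declarations describe (the ExampleUse function is hypothetical; in RingRTC proper the caller is the Rust side):

#include "rffi/api/android/media_stream_intf.h"

void ExampleUse(webrtc::MediaStreamInterface* stream_borrowed_rc) {
  // The create call returns an owned pointer that must eventually be deleted.
  webrtc::jni::JavaMediaStream* wrapper = Rust_createJavaMediaStream(stream_borrowed_rc);

  // The jobject is only borrowed from the wrapper and stays valid while the
  // wrapper is alive.
  jobject java_stream = Rust_getJavaMediaStreamObject(wrapper);
  (void)java_stream;

  Rust_deleteJavaMediaStream(wrapper);
}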

ringrtc/rffi/api/android/peer_connection_intf.h (Normal file)
@@ -0,0 +1,16 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef ANDROID_PEER_CONNECTION_H__
#define ANDROID_PEER_CONNECTION_H__

#include "rffi/api/rffi_defs.h"
#include <jni.h>

// Return a borrowed RC to the native PeerConnection inside of the Java wrapper.
RUSTEXPORT webrtc::PeerConnectionInterface*
Rust_borrowPeerConnectionFromJniOwnedPeerConnection(jlong owned_peer_connection);

#endif /* ANDROID_PEER_CONNECTION_H__ */

ringrtc/rffi/api/field_trial.h (Normal file)
@@ -0,0 +1,14 @@
/*
 * Copyright 2022 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_FIELD_TRIAL_H__
#define RFFI_API_FIELD_TRIAL_H__

#include "rffi/api/rffi_defs.h"

RUSTEXPORT void
Rust_setFieldTrials(const char* field_trials_string);

#endif /* RFFI_API_FIELD_TRIAL_H__ */

ringrtc/rffi/api/injectable_network.h (Normal file)
@@ -0,0 +1,82 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_INJECTABLE_NETWORK_H__
#define RFFI_API_INJECTABLE_NETWORK_H__

#include "p2p/base/port_allocator.h"
#include "rtc_base/thread.h"
#include "rffi/api/network.h"
#include "rffi/api/rffi_defs.h"

namespace webrtc {

namespace rffi {

typedef struct {
  void* object_owned;
  int (*SendUdp)(void* object_borrowed, IpPort source, IpPort dest, const uint8_t* data_borrowed, size_t);
  int (*Delete)(void* object_owned);
} InjectableNetworkSender;

// This is a class that acts like a PortAllocator + PacketSocketFactory + NetworkManager
// to the network stack and allows simulated or injected networks to control the flow
// of packets and which network interfaces come up and down.
class InjectableNetwork {
 public:
  virtual ~InjectableNetwork() = default;

  // This is what the network stack sees.
  // The PacketSocketFactory and NetworkManager are referenced by the PortAllocator.
  virtual std::unique_ptr<cricket::PortAllocator> CreatePortAllocator() = 0;

  // This is what the "driver" of the network sees: control of packets,
  // network interfaces, etc.
  virtual void SetSender(const InjectableNetworkSender* sender) = 0;
  virtual void AddInterface(
      const char* name, rtc::AdapterType type, Ip ip, int preference) = 0;
  virtual void RemoveInterface(const char* name) = 0;
  virtual void ReceiveUdp(
      IpPort source, IpPort dest, const uint8_t* data, size_t size) = 0;

  // These are more for internal use, not external, which is why the types
  // aren't the external types.
  virtual int SendUdp(
      const rtc::SocketAddress& local_address,
      const rtc::SocketAddress& remote_address,
      const uint8_t* data,
      size_t size) = 0;
  virtual void ForgetUdp(const rtc::SocketAddress& local_address) = 0;
};

std::unique_ptr<InjectableNetwork> CreateInjectableNetwork(rtc::Thread* network_thread);

RUSTEXPORT void Rust_InjectableNetwork_SetSender(
    InjectableNetwork* network_borrowed,
    const InjectableNetworkSender* sender_borrowed);

RUSTEXPORT void Rust_InjectableNetwork_AddInterface(
    InjectableNetwork* network_borrowed,
    const char* name_borrowed,
    rtc::AdapterType type,
    Ip ip,
    int preference);

RUSTEXPORT void Rust_InjectableNetwork_RemoveInterface(
    InjectableNetwork* network_borrowed,
    const char* name_borrowed);

RUSTEXPORT void Rust_InjectableNetwork_ReceiveUdp(
    InjectableNetwork* network_borrowed,
    IpPort source,
    IpPort dest,
    const uint8_t* data_borrowed,
    size_t size);

} // namespace rffi

} // namespace webrtc

#endif /* RFFI_API_INJECTABLE_NETWORK_H__ */
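
A minimal C++ sketch of wiring up an InjectableNetworkSender; CountingSender and the function names are hypothetical, and a real test harness would forward packets to its simulated transport instead of just counting them:

#include <cstddef>
#include <cstdint>

#include "rffi/api/injectable_network.h"

// Hypothetical sender state owned by the network once installed.
struct CountingSender {
  size_t packets_sent = 0;
};

static int CountingSendUdp(void* object_borrowed, webrtc::rffi::IpPort /*source*/,
                           webrtc::rffi::IpPort /*dest*/,
                           const uint8_t* /*data_borrowed*/, size_t size) {
  static_cast<CountingSender*>(object_borrowed)->packets_sent++;
  return static_cast<int>(size);
}

static int CountingDelete(void* object_owned) {
  delete static_cast<CountingSender*>(object_owned);
  return 0;
}

void InstallCountingSender(webrtc::rffi::InjectableNetwork* network_borrowed) {
  // The struct itself is copied by SetSender; object_owned is kept by the
  // network and handed to Delete when the network is destroyed.
  webrtc::rffi::InjectableNetworkSender sender = {new CountingSender(),
                                                  &CountingSendUdp,
                                                  &CountingDelete};
  Rust_InjectableNetwork_SetSender(network_borrowed, &sender);
}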

ringrtc/rffi/api/logging.h (Normal file)
@@ -0,0 +1,43 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_LOGGING_H__
#define RFFI_LOGGING_H__

#include "rffi/api/rffi_defs.h"
#include "rtc_base/logging.h"

typedef struct {
  void (*onLogMessage)(rtc::LoggingSeverity severity, const char* message_borrowed);
} LoggerCallbacks;

namespace webrtc {
namespace rffi {

// A simple implementation of rtc::LogSink that just passes the message
// to Rust.
class Logger : public rtc::LogSink {
 public:
  Logger(LoggerCallbacks* cbs) : cbs_(*cbs) {}

  void OnLogMessage(const std::string& message) override {
    OnLogMessage(message, rtc::LS_NONE);
  }

  void OnLogMessage(const std::string& message, rtc::LoggingSeverity severity) override {
    cbs_.onLogMessage(severity, message.c_str());
  }

 private:
  LoggerCallbacks cbs_;
};

// Should only be called once.
RUSTEXPORT void Rust_setLogger(LoggerCallbacks* cbs_borrowed, rtc::LoggingSeverity min_sev);

} // namespace rffi
} // namespace webrtc

#endif /* RFFI_LOGGING_H__ */
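
A minimal C++ sketch of installing the callback (InstallLogger and PrintLogMessage are hypothetical; in RingRTC the callback forwards into Rust's logger instead of stderr):

#include <cstdio>

#include "rffi/api/logging.h"

static void PrintLogMessage(rtc::LoggingSeverity severity, const char* message_borrowed) {
  // The message is only borrowed for the duration of the call.
  std::fprintf(stderr, "[webrtc %d] %s", static_cast<int>(severity), message_borrowed);
}

void InstallLogger() {
  static LoggerCallbacks cbs = {&PrintLogMessage};
  // Per the header comment, this should only be called once per process.
  Rust_setLogger(&cbs, rtc::LS_INFO);
}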

ringrtc/rffi/api/media.h (Normal file)
@@ -0,0 +1,88 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_MEDIA_H__
#define RFFI_API_MEDIA_H__

#include "api/media_stream_interface.h"
#include "media/base/video_broadcaster.h"
#include "pc/video_track_source.h"
#include "rffi/api/rffi_defs.h"

typedef struct {
  uint32_t width;
  uint32_t height;
  webrtc::VideoRotation rotation;
} RffiVideoFrameMetadata;

namespace webrtc {
namespace rffi {

// A simple implementation of a VideoTrackSource which can be used for pushing frames into
// an outgoing video track for encoding by calling Rust_pushVideoFrame.
class VideoSource : public VideoTrackSource {
 public:
  VideoSource();
  ~VideoSource() override;

  void PushVideoFrame(const webrtc::VideoFrame& frame);

 protected:
  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override {
    return &broadcaster_;
  }

 private:
  rtc::VideoBroadcaster broadcaster_;
};

} // namespace rffi
} // namespace webrtc

// Parses track->id()
// Returns 0 upon failure
RUSTEXPORT uint32_t Rust_getTrackIdAsUint32(webrtc::MediaStreamTrackInterface* track_borrowed_rc);

// Same as AudioTrack::set_enabled
RUSTEXPORT void Rust_setAudioTrackEnabled(webrtc::AudioTrackInterface* track_borrowed_rc, bool);

// Same as VideoTrack::set_enabled
RUSTEXPORT void Rust_setVideoTrackEnabled(webrtc::VideoTrackInterface* track_borrowed_rc, bool);

// Same as VideoTrack::set_content_hint with true == kText and false == kNone
RUSTEXPORT void Rust_setVideoTrackContentHint(webrtc::VideoTrackInterface* track_borrowed_rc, bool);

// Gets the first video track from the stream, or nullptr if there is none.
RUSTEXPORT webrtc::VideoTrackInterface* Rust_getFistVideoTrack(
    webrtc::MediaStreamInterface* track_borrowed_rc);

// Same as VideoSource::PushVideoFrame, to get frames from Rust to C++.
RUSTEXPORT void Rust_pushVideoFrame(webrtc::rffi::VideoSource* source_borrowed_rc, webrtc::VideoFrameBuffer* buffer_borrowed_rc);

// I420 => I420
// Returns an owned RC.
RUSTEXPORT webrtc::VideoFrameBuffer* Rust_copyVideoFrameBufferFromI420(
    uint32_t width, uint32_t height, uint8_t* src_borrowed);

// NV12 => I420
// Returns an owned RC.
RUSTEXPORT webrtc::VideoFrameBuffer* Rust_copyVideoFrameBufferFromNv12(
    uint32_t width, uint32_t height, uint8_t* src_borrowed);

// RGBA => I420
// Returns an owned RC.
RUSTEXPORT webrtc::VideoFrameBuffer* Rust_copyVideoFrameBufferFromRgba(
    uint32_t width, uint32_t height, uint8_t* src_borrowed);

// I420 => RGBA
RUSTEXPORT void Rust_convertVideoFrameBufferToRgba(
    const webrtc::VideoFrameBuffer* buffer, uint8_t* rgba_out);

// Copies and rotates the frame buffer.
// Returns an owned RC.
RUSTEXPORT webrtc::VideoFrameBuffer* Rust_copyAndRotateVideoFrameBuffer(
    const webrtc::VideoFrameBuffer* buffer_borrowed_rc, webrtc::VideoRotation rotation);


#endif /* RFFI_API_MEDIA_H__ */
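
A minimal C++ sketch of feeding one frame into a VideoSource (PushOneI420Frame is hypothetical; Rust_decRc comes from rffi/api/ref_count.h, added later in this change). It assumes `i420` points at width * height * 3 / 2 bytes of I420 data:

#include <cstdint>

#include "rffi/api/media.h"
#include "rffi/api/ref_count.h"

void PushOneI420Frame(webrtc::rffi::VideoSource* source_borrowed_rc,
                      uint32_t width, uint32_t height, uint8_t* i420) {
  // The copy returns an owned RC that must be released once we are done with it.
  webrtc::VideoFrameBuffer* buffer =
      Rust_copyVideoFrameBufferFromI420(width, height, i420);
  // Rust_pushVideoFrame only borrows the source and the buffer.
  Rust_pushVideoFrame(source_borrowed_rc, buffer);
  Rust_decRc(buffer);
}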

ringrtc/rffi/api/network.h (Normal file)
@@ -0,0 +1,38 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_NETWORK_H__
#define RFFI_API_NETWORK_H__

#include "rtc_base/ip_address.h"
#include "rtc_base/socket_address.h"

namespace webrtc {

namespace rffi {

// A simplified version of rtc::IpAddress
typedef struct {
  // If v6 == false, only use the first 4 bytes.
  bool v6;
  uint8_t address[16];
} Ip;

// A simplified version of rtc::SocketAddress
typedef struct {
  Ip ip;
  uint16_t port;
} IpPort;

rtc::IPAddress IpToRtcIp(Ip ip);
rtc::SocketAddress IpPortToRtcSocketAddress(IpPort ip_port);
Ip RtcIpToIp(rtc::IPAddress address);
IpPort RtcSocketAddressToIpPort(const rtc::SocketAddress& address);

} // namespace rffi

} // namespace webrtc

#endif /* RFFI_API_NETWORK_H__ */

ringrtc/rffi/api/peer_connection_factory.h (Normal file)
@@ -0,0 +1,131 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_PEER_CONNECTION_FACTORY_H__
#define RFFI_API_PEER_CONNECTION_FACTORY_H__

#include "rffi/api/peer_connection_intf.h"

#include "rffi/api/injectable_network.h"
#include "rtc_base/ref_count.h"

namespace rtc {
class RTCCertificate;
}

namespace webrtc {
class PeerConnectionInterface;
class PeerConnectionFactoryInterface;
class AudioSourceInterface;
class AudioTrackInterface;
class AudioDeviceModule;

// This little indirection is needed so that we can have something
// that owns the signaling thread (and other threads).
// We could make our owner implement the PeerConnectionFactoryInterface,
// but it's not worth the trouble. This is easier.
class PeerConnectionFactoryOwner : public rtc::RefCountInterface {
 public:
  virtual ~PeerConnectionFactoryOwner() {}
  virtual PeerConnectionFactoryInterface* peer_connection_factory() = 0;
  // If we are using an injectable network, this is it.
  virtual rffi::InjectableNetwork* injectable_network() {
    return nullptr;
  }
  virtual int16_t AudioPlayoutDevices() {
    return 0;
  }
  virtual int32_t AudioPlayoutDeviceName(uint16_t index, char* name_out, char* uuid_out) {
    return -1;
  }
  virtual bool SetAudioPlayoutDevice(uint16_t index) {
    return false;
  }
  virtual int16_t AudioRecordingDevices() {
    return 0;
  }
  virtual int32_t AudioRecordingDeviceName(uint16_t index, char* name_out, char* uuid_out) {
    return -1;
  }
  virtual bool SetAudioRecordingDevice(uint16_t index) {
    return false;
  }
};

namespace rffi {
class PeerConnectionObserverRffi;
}
}

typedef struct {
  const char* username_borrowed;
  const char* password_borrowed;
  const char** urls_borrowed;
  size_t urls_size;
} RffiIceServer;

// Returns an owned RC.
// You can create more than one, but you should probably only have one unless
// you want to test separate endpoints that are as independent as possible.
RUSTEXPORT webrtc::PeerConnectionFactoryOwner* Rust_createPeerConnectionFactory(
    bool use_new_audio_device_module,
    bool use_injectable_network);

// Returns an owned RC.
RUSTEXPORT webrtc::PeerConnectionFactoryOwner* Rust_createPeerConnectionFactoryWrapper(
    webrtc::PeerConnectionFactoryInterface* factory_borrowed_rc);

// Returns a borrowed pointer.
RUSTEXPORT webrtc::rffi::InjectableNetwork* Rust_getInjectableNetwork(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc);

// Returns an owned RC.
RUSTEXPORT webrtc::PeerConnectionInterface* Rust_createPeerConnection(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    webrtc::rffi::PeerConnectionObserverRffi* observer_borrowed,
    bool hide_ip,
    RffiIceServer ice_server,
    webrtc::AudioTrackInterface* outgoing_audio_track_borrowed_rc,
    webrtc::VideoTrackInterface* outgoing_video_track_borrowed_rc);

// Returns an owned RC.
RUSTEXPORT webrtc::AudioTrackInterface* Rust_createAudioTrack(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc);

// Returns an owned RC.
RUSTEXPORT webrtc::VideoTrackSourceInterface* Rust_createVideoSource();

// Returns an owned RC.
RUSTEXPORT webrtc::VideoTrackInterface* Rust_createVideoTrack(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    webrtc::VideoTrackSourceInterface* source_borrowed_rc);

RUSTEXPORT int16_t Rust_getAudioPlayoutDevices(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc);

RUSTEXPORT int32_t Rust_getAudioPlayoutDeviceName(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    uint16_t index,
    char* name_out,
    char* uuid_out);

RUSTEXPORT bool Rust_setAudioPlayoutDevice(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    uint16_t index);

RUSTEXPORT int16_t Rust_getAudioRecordingDevices(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc);

RUSTEXPORT int32_t Rust_getAudioRecordingDeviceName(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    uint16_t index,
    char* name_out,
    char* uuid_out);

RUSTEXPORT bool Rust_setAudioRecordingDevice(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    uint16_t index);

#endif /* RFFI_API_PEER_CONNECTION_FACTORY_H__ */
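
A rough C++ sketch of the setup and teardown ordering this header implies (SketchSetup is hypothetical, the callbacks struct is assumed to be filled in elsewhere, no ICE servers are configured, and error handling and null checks are omitted; RingRTC drives this sequence from Rust):

#include "rffi/api/peer_connection_factory.h"
#include "rffi/api/peer_connection_observer_intf.h"
#include "rffi/api/ref_count.h"

void SketchSetup(const PeerConnectionObserverCallbacks* callbacks) {
  webrtc::PeerConnectionFactoryOwner* factory =
      Rust_createPeerConnectionFactory(/* use_new_audio_device_module */ false,
                                       /* use_injectable_network */ false);

  webrtc::AudioTrackInterface* audio = Rust_createAudioTrack(factory);
  webrtc::VideoTrackSourceInterface* source = Rust_createVideoSource();
  webrtc::VideoTrackInterface* video = Rust_createVideoTrack(factory, source);

  // The first argument is the opaque "observer" handed back to every callback;
  // a real caller passes its own state here instead of nullptr.
  webrtc::rffi::PeerConnectionObserverRffi* observer =
      Rust_createPeerConnectionObserver(nullptr, callbacks,
                                        /* enable_frame_encryption */ false,
                                        /* enable_video_frame_event */ false,
                                        /* enable_video_frame_content */ false);

  RffiIceServer ice_server = {};  // empty: no STUN/TURN servers in this sketch
  webrtc::PeerConnectionInterface* pc =
      Rust_createPeerConnection(factory, observer, /* hide_ip */ false,
                                ice_server, audio, video);

  // Everything documented as an "owned RC" must eventually be released; the
  // observer must outlive the PeerConnection, so release the PC first.
  Rust_decRc(pc);
  Rust_deletePeerConnectionObserver(observer);
  Rust_decRc(video);
  Rust_decRc(source);
  Rust_decRc(audio);
  Rust_decRc(factory);
}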

ringrtc/rffi/api/peer_connection_intf.h (Normal file)
@@ -0,0 +1,183 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_PEER_CONNECTION_INTF_H__
#define RFFI_API_PEER_CONNECTION_INTF_H__

#include "api/peer_connection_interface.h"
#include "rffi/api/network.h"
#include "rffi/api/sdp_observer_intf.h"
#include "rffi/api/stats_observer_intf.h"

// TODO: Consider removing all these duplicative declarations.
// It compiles without them.

/**
 * Rust friendly wrapper around some webrtc::PeerConnectionInterface
 * methods
 */

// Borrows the observer until the result is given to the observer,
// so the observer must stay alive until it's given a result.
RUSTEXPORT void
Rust_createOffer(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                 webrtc::rffi::CreateSessionDescriptionObserverRffi* csd_observer_borrowed_rc);

// Borrows the observer until the result is given to the observer,
// so the observer must stay alive until it's given a result.
RUSTEXPORT void
Rust_setLocalDescription(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                         webrtc::rffi::SetSessionDescriptionObserverRffi* ssd_observer_borrowed_rc,
                         webrtc::SessionDescriptionInterface* local_description_owned);

// Returns an owned pointer.
RUSTEXPORT const char*
Rust_toSdp(webrtc::SessionDescriptionInterface* session_description_borrowed);

// Returns an owned pointer.
RUSTEXPORT webrtc::SessionDescriptionInterface*
Rust_answerFromSdp(const char* sdp_borrowed);

// Returns an owned pointer.
RUSTEXPORT webrtc::SessionDescriptionInterface*
Rust_offerFromSdp(const char* sdp_borrowed);

RUSTEXPORT bool
Rust_disableDtlsAndSetSrtpKey(webrtc::SessionDescriptionInterface* session_description_borrowed,
                              int crypto_suite,
                              const char* key_borrowed,
                              size_t key_len,
                              const char* salt_borrowed,
                              size_t salt_len);

enum RffiVideoCodecType {
  kRffiVideoCodecVp8 = 8,
  kRffiVideoCodecVp9 = 9,
  kRffiVideoCodecH264ConstrainedHigh = 46,
  kRffiVideoCodecH264ConstrainedBaseline = 40,
};

typedef struct {
  RffiVideoCodecType type;
  uint32_t level;
} RffiVideoCodec;

class ConnectionParametersV4 {
 public:
  std::string ice_ufrag;
  std::string ice_pwd;
  std::vector<RffiVideoCodec> receive_video_codecs;
};

typedef struct {
  // These all just refer to the storage
  const char* ice_ufrag_borrowed;
  const char* ice_pwd_borrowed;
  RffiVideoCodec* receive_video_codecs_borrowed;
  size_t receive_video_codecs_size;

  // When this is released, we must release the storage
  ConnectionParametersV4* backing_owned;
} RffiConnectionParametersV4;

typedef struct {
  int suite;
  const char* key_borrowed;
  size_t key_len;
  const char* salt_borrowed;
  size_t salt_len;
} RffiSrtpKey;

// Returns an owned pointer.
RUSTEXPORT RffiConnectionParametersV4*
Rust_sessionDescriptionToV4(const webrtc::SessionDescriptionInterface* session_description_borrowed);

RUSTEXPORT void
Rust_deleteV4(RffiConnectionParametersV4* v4_owned);

RUSTEXPORT webrtc::SessionDescriptionInterface*
Rust_sessionDescriptionFromV4(bool offer, const RffiConnectionParametersV4* v4_borrowed);

RUSTEXPORT void
Rust_createAnswer(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                  webrtc::rffi::CreateSessionDescriptionObserverRffi* csd_observer_borrowed_rc);

RUSTEXPORT void
Rust_setRemoteDescription(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                          webrtc::rffi::SetSessionDescriptionObserverRffi* ssd_observer_borrowed_rc,
                          webrtc::SessionDescriptionInterface* remote_description_owned);

RUSTEXPORT void
Rust_setOutgoingMediaEnabled(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                             bool enabled);

RUSTEXPORT bool
Rust_setIncomingMediaEnabled(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                             bool enabled);

RUSTEXPORT void
Rust_setAudioPlayoutEnabled(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                            bool enabled);

RUSTEXPORT void
Rust_setAudioRecordingEnabled(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                              bool enabled);

RUSTEXPORT bool
Rust_addIceCandidateFromSdp(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                            const char* sdp);

RUSTEXPORT bool
Rust_addIceCandidateFromServer(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                               webrtc::rffi::Ip,
                               uint16_t port,
                               bool tcp);

RUSTEXPORT bool
Rust_removeIceCandidates(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                         webrtc::rffi::IpPort* removed_addresses_borrowed,
                         size_t length);

RUSTEXPORT webrtc::IceGathererInterface*
Rust_createSharedIceGatherer(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc);

RUSTEXPORT bool
Rust_useSharedIceGatherer(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                          webrtc::IceGathererInterface* ice_gatherer_borrowed_rc);

RUSTEXPORT void
Rust_getStats(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
              webrtc::rffi::StatsObserverRffi* stats_observer_borrowed_rc);

RUSTEXPORT void
Rust_setSendBitrates(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                     int32_t min_bitrate_bps,
                     int32_t start_bitrate_bps,
                     int32_t max_bitrate_bps);

RUSTEXPORT bool
Rust_sendRtp(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
             uint8_t pt,
             uint16_t seqnum,
             uint32_t timestamp,
             uint32_t ssrc,
             const uint8_t* payload_data_borrowed,
             size_t payload_size);

RUSTEXPORT bool
Rust_receiveRtp(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc, uint8_t pt);

RUSTEXPORT void
Rust_configureAudioEncoders(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc, const webrtc::AudioEncoder::Config* config_borrowed);

RUSTEXPORT void
Rust_getAudioLevels(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
                    cricket::AudioLevel* captured_out,
                    cricket::ReceivedAudioLevel* received_out,
                    size_t received_out_size,
                    size_t* received_size_out);

#endif /* RFFI_API_PEER_CONNECTION_INTF_H__ */
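
A small C++ sketch of the call ordering the observer comments imply (OfferFlowSketch is hypothetical, and it assumes the offer has already been produced via the csd_observer callback; in RingRTC all of this is driven from Rust):

#include "rffi/api/peer_connection_intf.h"

void OfferFlowSketch(webrtc::PeerConnectionInterface* pc_borrowed_rc,
                     webrtc::rffi::CreateSessionDescriptionObserverRffi* csd_observer,
                     webrtc::rffi::SetSessionDescriptionObserverRffi* ssd_observer,
                     webrtc::SessionDescriptionInterface* offer_owned) {
  // Asks the PeerConnection for an offer; the result arrives through the
  // csd_observer callbacks, so that observer must outlive the request.
  Rust_createOffer(pc_borrowed_rc, csd_observer);

  // Once an offer exists (here passed in as offer_owned), setLocalDescription
  // takes ownership of it and reports back through the ssd_observer callbacks.
  Rust_setLocalDescription(pc_borrowed_rc, ssd_observer, offer_owned);
}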

ringrtc/rffi/api/peer_connection_observer_intf.h (Normal file)
@@ -0,0 +1,74 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_PEER_CONNECTION_OBSERVER_INTF_H__
#define RFFI_API_PEER_CONNECTION_OBSERVER_INTF_H__

#include "api/peer_connection_interface.h"
#include "rffi/api/rffi_defs.h"
#include "rffi/api/media.h"
#include "rffi/api/network.h"
#include "rtc_base/network_constants.h"

/**
 * Rust friendly wrapper around a custom class that implements the
 * webrtc::PeerConnectionObserver interface.
 *
 */

namespace webrtc {
namespace rffi {
class PeerConnectionObserverRffi;

/* NetworkRoute structure passed between Rust and C++ */
typedef struct {
  rtc::AdapterType local_adapter_type;
  rtc::AdapterType local_adapter_type_under_vpn;
  bool local_relayed;
  TransportProtocol local_relay_protocol;
  bool remote_relayed;
} NetworkRoute;
} // namespace rffi
} // namespace webrtc

/* Peer Connection Observer callback function pointers */
typedef struct {
  // ICE events
  void (*onIceCandidate)(void* observer_borrowed, const RustIceCandidate* candidate_borrowed);
  void (*onIceCandidatesRemoved)(void* observer_borrowed, const webrtc::rffi::IpPort* addresses_borrowed, size_t);
  void (*onIceConnectionChange)(void* observer_borrowed, webrtc::PeerConnectionInterface::IceConnectionState);
  void (*onIceNetworkRouteChange)(void* observer_borrowed, webrtc::rffi::NetworkRoute);

  // Media events
  void (*onAddStream)(void* observer_borrowed, webrtc::MediaStreamInterface* stream_owned_rc);
  void (*onAddAudioRtpReceiver)(void* observer_borrowed, webrtc::MediaStreamTrackInterface* track_owned_rc);
  void (*onAddVideoRtpReceiver)(void* observer_borrowed, webrtc::MediaStreamTrackInterface* track_owned_rc);
  void (*onVideoFrame)(void* observer_borrowed, uint32_t track_id, RffiVideoFrameMetadata metadata, webrtc::VideoFrameBuffer* frame_buffer_borrowed);

  // RTP data events
  // Warning: this runs on the WebRTC network thread, so doing anything that
  // would block is dangerous, especially taking a lock that is also taken
  // while calling something that blocks on the network thread.
  void (*onRtpReceived)(void* observer_borrowed, uint8_t, uint16_t, uint32_t, uint32_t, const uint8_t* payload_borrowed, size_t);

  // Frame encryption
  size_t (*getMediaCiphertextBufferSize)(void* observer_borrowed, bool, size_t);
  bool (*encryptMedia)(void* observer_borrowed, bool, const uint8_t* plaintext_borrowed, size_t, uint8_t* ciphertext_out, size_t, size_t* ciphertext_size_out);
  size_t (*getMediaPlaintextBufferSize)(void* observer_borrowed, uint32_t, bool, size_t);
  bool (*decryptMedia)(void* observer_borrowed, uint32_t, bool, const uint8_t* ciphertext_borrowed, size_t, uint8_t* plaintext_out, size_t, size_t* plaintext_size_out);
} PeerConnectionObserverCallbacks;

// Passed-in observer must live at least as long as the PeerConnectionObserverRffi,
// which is at least as long as the PeerConnection.
RUSTEXPORT webrtc::rffi::PeerConnectionObserverRffi*
Rust_createPeerConnectionObserver(void* observer_borrowed,
                                  const PeerConnectionObserverCallbacks* callbacks_borrowed,
                                  bool enable_frame_encryption,
                                  bool enable_video_frame_event,
                                  bool enable_video_frame_content);

RUSTEXPORT void
Rust_deletePeerConnectionObserver(webrtc::rffi::PeerConnectionObserverRffi* observer_owned);

#endif /* RFFI_API_PEER_CONNECTION_OBSERVER_INTF_H__ */

ringrtc/rffi/api/ref_count.h (Normal file)
@@ -0,0 +1,29 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

/*
 * Rust friendly wrappers for:
 *
 * rtc::RefCountInterface::Release();
 * rtc::RefCountInterface::AddRef();
 */

#ifndef RFFI_API_SCOPED_REFPTR_H__
#define RFFI_API_SCOPED_REFPTR_H__

#include "rffi/api/rffi_defs.h"
#include "rtc_base/ref_count.h"

// Decrements the ref count of a ref-counted object.
// If the ref count goes to zero, the object is deleted.
RUSTEXPORT void
Rust_decRc(rtc::RefCountInterface* owned_rc);

// Increments the ref count of a ref-counted object.
// The borrowed RC becomes an owned RC.
RUSTEXPORT void
Rust_incRc(rtc::RefCountInterface* borrowed_rc);

#endif /* RFFI_API_SCOPED_REFPTR_H__ */
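
A minimal C++ sketch of the borrow/own convention these two calls support (KeepForLater and ReleaseLater are hypothetical names):

#include "rffi/api/ref_count.h"

void KeepForLater(rtc::RefCountInterface* borrowed_rc,
                  rtc::RefCountInterface** kept_out) {
  Rust_incRc(borrowed_rc);  // the borrowed RC becomes an owned RC
  *kept_out = borrowed_rc;  // ...which can be stashed and released later
}

void ReleaseLater(rtc::RefCountInterface* owned_rc) {
  Rust_decRc(owned_rc);  // may delete the object if this was the last reference
}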

ringrtc/rffi/api/rffi_defs.h (Normal file)
@@ -0,0 +1,31 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_DEFS_H__
#define RFFI_API_DEFS_H__

/**
 * Common definitions used throughout the Rust RFFI API.
 *
 */

// Public interfaces exported to Rust as "extern C".
#define RUSTEXPORT extern "C" __attribute__((visibility("default")))

enum class TransportProtocol {
  kUdp,
  kTcp,
  kTls,
  kUnknown,
};

/* Ice Update Message structure passed between Rust and C++ */
typedef struct {
  const char* sdp_borrowed;
  bool is_relayed;
  TransportProtocol relay_protocol;
} RustIceCandidate;

#endif /* RFFI_API_DEFS_H__ */
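
A tiny C++ sketch of what the RUSTEXPORT macro buys: C linkage (so Rust can bind the symbol by its unmangled name) plus default visibility (so the Android version script can re-export just Rust_*). Rust_exampleAdd is made up purely for illustration:

#include <cstdint>

#include "rffi/api/rffi_defs.h"

RUSTEXPORT int32_t Rust_exampleAdd(int32_t a, int32_t b) {
  return a + b;
}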

ringrtc/rffi/api/sdp_observer_intf.h (Normal file)
@@ -0,0 +1,46 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_SDP_OBSERVER_INTF_H__
#define RFFI_API_SDP_OBSERVER_INTF_H__

#include "api/peer_connection_interface.h"
#include "rffi/api/rffi_defs.h"

/**
 * Rust friendly wrapper for creating objects that implement the
 * webrtc::CreateSessionDescriptionObserver and
 * webrtc::SetSessionDescriptionObserver interfaces.
 *
 */

namespace webrtc {
namespace rffi {
class CreateSessionDescriptionObserverRffi;
class SetSessionDescriptionObserverRffi;
} // namespace rffi
} // namespace webrtc

/* Create Session Description Observer callback function pointers */
typedef struct {
  void (*onSuccess)(void* csd_observer_borrowed, webrtc::SessionDescriptionInterface* session_description_owned_rc);
  void (*onFailure)(void* csd_observer_borrowed, const char* err_message_borrowed, int32_t err_type);
} CreateSessionDescriptionObserverCallbacks;

RUSTEXPORT webrtc::rffi::CreateSessionDescriptionObserverRffi*
Rust_createCreateSessionDescriptionObserver(void* csd_observer_borrowed,
                                            const CreateSessionDescriptionObserverCallbacks* csd_observer_cbs_borrowed);

/* Set Session Description Observer callback function pointers */
typedef struct {
  void (*onSuccess)(void* ssd_observer_borrowed);
  void (*onFailure)(void* ssd_observer_borrowed, const char* err_message_borrowed, int32_t err_type);
} SetSessionDescriptionObserverCallbacks;

RUSTEXPORT webrtc::rffi::SetSessionDescriptionObserverRffi*
Rust_createSetSessionDescriptionObserver(void* ssd_observer_borrowed,
                                         const SetSessionDescriptionObserverCallbacks* ssd_observer_cbs_borrowed);

#endif /* RFFI_API_SDP_OBSERVER_INTF_H__ */
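
A minimal C++ sketch of building a create-session-description observer (the handler functions are hypothetical; in RingRTC the callbacks jump back into Rust):

#include <cstdio>

#include "rffi/api/sdp_observer_intf.h"

static void OnCreateSuccess(void* /*csd_observer_borrowed*/,
                            webrtc::SessionDescriptionInterface* session_description_owned_rc) {
  // The callback receives ownership of the session description.
  std::puts("createOffer/createAnswer succeeded");
  (void)session_description_owned_rc;
}

static void OnCreateFailure(void* /*csd_observer_borrowed*/,
                            const char* err_message_borrowed, int32_t err_type) {
  std::fprintf(stderr, "create failed (%d): %s\n", err_type, err_message_borrowed);
}

webrtc::rffi::CreateSessionDescriptionObserverRffi* MakeCsdObserver() {
  static const CreateSessionDescriptionObserverCallbacks kCallbacks = {
      &OnCreateSuccess, &OnCreateFailure};
  // The first argument is an opaque pointer handed back to the callbacks.
  return Rust_createCreateSessionDescriptionObserver(nullptr, &kCallbacks);
}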

ringrtc/rffi/api/stats_observer_intf.h (Normal file)
@@ -0,0 +1,104 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_API_STATS_OBSERVER_INTF_H__
#define RFFI_API_STATS_OBSERVER_INTF_H__

#include "api/peer_connection_interface.h"
#include "rffi/api/rffi_defs.h"

/**
 * Rust friendly wrapper for creating objects that implement the
 * webrtc::StatsCollector interface.
 *
 */

namespace webrtc {
namespace rffi {
class StatsObserverRffi;
} // namespace rffi
} // namespace webrtc

typedef struct {
  uint32_t ssrc;
  uint32_t packets_sent;
  uint64_t bytes_sent;
  int32_t remote_packets_lost;
  double remote_jitter;
  double remote_round_trip_time;
  double total_audio_energy;
  double echo_likelihood;
} AudioSenderStatistics;

typedef struct {
  uint32_t ssrc;
  uint32_t packets_sent;
  uint64_t bytes_sent;
  uint32_t frames_encoded;
  uint32_t key_frames_encoded;
  double total_encode_time;
  uint32_t frame_width;
  uint32_t frame_height;
  uint64_t retransmitted_packets_sent;
  uint64_t retransmitted_bytes_sent;
  double total_packet_send_delay;
  uint32_t nack_count;
  uint32_t pli_count;
  uint32_t quality_limitation_reason; // 0 - kNone, 1 - kCpu, 2 - kBandwidth, 3 - kOther
  uint32_t quality_limitation_resolution_changes;
  int32_t remote_packets_lost;
  double remote_jitter;
  double remote_round_trip_time;
} VideoSenderStatistics;

typedef struct {
  uint32_t ssrc;
  uint32_t packets_received;
  int32_t packets_lost;
  uint64_t bytes_received;
  double jitter;
  double total_audio_energy;
} AudioReceiverStatistics;

typedef struct {
  uint32_t ssrc;
  uint32_t packets_received;
  int32_t packets_lost;
  uint64_t bytes_received;
  uint32_t frames_decoded;
  uint32_t key_frames_decoded;
  double total_decode_time;
  uint32_t frame_width;
  uint32_t frame_height;
} VideoReceiverStatistics;

typedef struct {
  double current_round_trip_time;
  double available_outgoing_bitrate;
} ConnectionStatistics;

typedef struct {
  int64_t timestamp_us;
  uint32_t audio_sender_statistics_size;
  const AudioSenderStatistics *audio_sender_statistics;
  uint32_t video_sender_statistics_size;
  const VideoSenderStatistics *video_sender_statistics;
  uint32_t audio_receiver_statistics_size;
  const AudioReceiverStatistics *audio_receiver_statistics;
  uint32_t video_receiver_statistics_count;
  const VideoReceiverStatistics *video_receiver_statistics;
  ConnectionStatistics connection_statistics;
} MediaStatistics;

/* Stats Observer callback function pointers */
typedef struct {
  void (*OnStatsComplete)(void* stats_observer_borrowed, const MediaStatistics* media_statistics_borrowed);
} StatsObserverCallbacks;

RUSTEXPORT webrtc::rffi::StatsObserverRffi*
Rust_createStatsObserver(void* stats_observer_borrowed,
                         const StatsObserverCallbacks* stats_observer_cbs_borrowed);

#endif /* RFFI_API_STATS_OBSERVER_INTF_H__ */
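
A minimal C++ sketch of a stats sink (HandleStats and MakeStatsObserver are hypothetical; RingRTC's real handler lives on the Rust side, and the MediaStatistics pointer is only borrowed for the duration of the callback):

#include <cstdio>

#include "rffi/api/stats_observer_intf.h"

static void HandleStats(void* /*stats_observer_borrowed*/,
                        const MediaStatistics* stats_borrowed) {
  std::printf("rtt=%.3fs outgoing=%.0fbps audio_senders=%u\n",
              stats_borrowed->connection_statistics.current_round_trip_time,
              stats_borrowed->connection_statistics.available_outgoing_bitrate,
              static_cast<unsigned>(stats_borrowed->audio_sender_statistics_size));
}

webrtc::rffi::StatsObserverRffi* MakeStatsObserver() {
  static const StatsObserverCallbacks kCallbacks = {&HandleStats};
  // The first argument is an opaque pointer handed back to the callback.
  return Rust_createStatsObserver(nullptr, &kCallbacks);
}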

ringrtc/rffi/config/rffi_include.lst (Normal file)
@@ -0,0 +1,11 @@
# Copyright 2019-2021 Signal Messenger, LLC
# SPDX-License-Identifier: AGPL-3.0-only

# Linker script that exports the Rust FFI symbols.

{
  global:
    Rust_*;
  local:
    *;
};

ringrtc/rffi/src/android/java_media_stream.cc (Normal file)
@@ -0,0 +1,38 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

/*
 * Rust friendly wrapper around JavaMediaStream object
 */

#include "sdk/android/src/jni/pc/peer_connection.h"
#include "rffi/api/android/media_stream_intf.h"
#include "rffi/src/ptr.h"

#include <string>

namespace webrtc {
namespace rffi {

// Returns an owned pointer.
RUSTEXPORT webrtc::jni::JavaMediaStream*
Rust_createJavaMediaStream(MediaStreamInterface* stream_borrowed_rc) {
  JNIEnv* env = AttachCurrentThreadIfNeeded();
  // jni::JavaMediaStream takes an owned RC.
  return new jni::JavaMediaStream(env, inc_rc(stream_borrowed_rc));
}

RUSTEXPORT void
Rust_deleteJavaMediaStream(webrtc::jni::JavaMediaStream* stream_owned) {
  delete stream_owned;
}

RUSTEXPORT jobject
Rust_getJavaMediaStreamObject(webrtc::jni::JavaMediaStream* stream_borrowed) {
  return stream_borrowed->j_media_stream().obj();
}

} // namespace rffi
} // namespace webrtc

ringrtc/rffi/src/android/jni_peer_connection.cc (Normal file)
@@ -0,0 +1,21 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "sdk/android/src/jni/pc/peer_connection.h"
#include "rffi/api/android/peer_connection_intf.h"

#include <string>

namespace webrtc {
namespace rffi {

// Returns a borrowed RC.
RUSTEXPORT PeerConnectionInterface*
Rust_borrowPeerConnectionFromJniOwnedPeerConnection(jlong owned_peer_connection) {
  return reinterpret_cast<jni::OwnedPeerConnection*>(owned_peer_connection)->pc();
}

} // namespace rffi
} // namespace webrtc

ringrtc/rffi/src/field_trial.cc (Normal file)
@@ -0,0 +1,22 @@
/*
 * Copyright 2022 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "rffi/api/field_trial.h"
#include "system_wrappers/include/field_trial.h"

namespace webrtc {
namespace rffi {

// Initialize field trials from a string.
// This method can be called at most once before any other call into WebRTC.
// E.g. before the peer connection factory is constructed.
// Note: field_trials_string must never be destroyed.
RUSTEXPORT void
Rust_setFieldTrials(const char* field_trials_string) {
  webrtc::field_trial::InitFieldTrialsFromString(field_trials_string);
}

} // namespace rffi
} // namespace webrtc
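
A minimal C++ sketch of a caller honoring the lifetime note above; the trial name is made up, and a string literal (static storage duration) trivially satisfies the "must never be destroyed" requirement:

#include "rffi/api/field_trial.h"

void EnableHypotheticalTrial() {
  Rust_setFieldTrials("WebRTC-SomeTrial/Enabled/");
}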
385
ringrtc/rffi/src/injectable_network.cc
Normal file
385
ringrtc/rffi/src/injectable_network.cc
Normal file
|
@ -0,0 +1,385 @@
|
|||
/*
|
||||
* Copyright 2019-2021 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
#include "rffi/api/injectable_network.h"
|
||||
|
||||
#include "api/packet_socket_factory.h"
|
||||
#include "p2p/client/basic_port_allocator.h"
|
||||
#include "rffi/api/network.h"
|
||||
#include "rtc_base/ip_address.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace rffi {
|
||||
|
||||
class InjectableUdpSocket : public rtc::AsyncPacketSocket {
|
||||
public:
|
||||
InjectableUdpSocket(InjectableNetwork* network, const rtc::SocketAddress& local_address)
|
||||
: network_(network), local_address_(local_address) {
|
||||
}
|
||||
~InjectableUdpSocket() override {
|
||||
network_->ForgetUdp(local_address_);
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
rtc::SocketAddress GetLocalAddress() const override {
|
||||
return local_address_;
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
rtc::SocketAddress GetRemoteAddress() const override {
|
||||
// Only used for TCP.
|
||||
return rtc::SocketAddress();
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
int Send(const void* data,
|
||||
size_t data_size,
|
||||
const rtc::PacketOptions& options) override {
|
||||
// Only used for TCP
|
||||
return -1;
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
int SendTo(const void* data,
|
||||
size_t data_size,
|
||||
const rtc::SocketAddress& remote_address,
|
||||
const rtc::PacketOptions& options) override {
|
||||
// RTC_LOG(LS_VERBOSE) << "InjectableUdpSocket::SendTo()"
|
||||
// << " from " << local_address_.ToString()
|
||||
// << " to " << remote_address.ToString();
|
||||
int result = network_->SendUdp(local_address_, remote_address, static_cast<const uint8_t*>(data), data_size);
|
||||
if (result < 0) {
|
||||
last_error_ = result;
|
||||
return result;
|
||||
}
|
||||
|
||||
// Ends up going to Call::OnSentPacket for congestion control purposes.
|
||||
SignalSentPacket(this, rtc::SentPacket(options.packet_id, rtc::TimeMillis()));
|
||||
return result;
|
||||
}
|
||||
|
||||
void ReceiveFrom(const uint8_t* data,
|
||||
size_t data_size,
|
||||
const rtc::SocketAddress& remote_address) {
|
||||
RTC_LOG(LS_VERBOSE) << "InjectableUdpSocket::ReceiveFrom()"
|
||||
<< " from " << remote_address.ToString()
|
||||
<< " to " << local_address_.ToString();
|
||||
auto now = rtc::TimeMicros();
|
||||
SignalReadPacket(this, reinterpret_cast<const char*>(data), data_size, remote_address, now);
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
int Close() override {
|
||||
// This appears to never be called.
|
||||
// And the real "close" is the destructor.
|
||||
return -1;
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
State GetState() const override {
|
||||
// UDPPort waits until it's bound to generate a candidate and send binding requests.
|
||||
// If it's not currently bound, it will listen for SignalAddressReady.
|
||||
// TODO: Simulate slow binds?
|
||||
return rtc::AsyncPacketSocket::STATE_BOUND;
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
int GetOption(rtc::Socket::Option option, int* value) override {
|
||||
// This appears to never be called.
|
||||
return -1;
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
int SetOption(rtc::Socket::Option option, int value) override {
|
||||
// This is used to:
|
||||
// Set OPT_NODELAY on TCP connections (we can ignore that)
|
||||
// Set OPT_DSCP when DSCP is enabled (we can ignore that)
|
||||
// Set OPT_SNDBUF to 65536 (when video is used)
|
||||
// Set OPT_RCVBUF to 262144 (when video is used)
|
||||
// TODO: Simulate changes to OPT_SNDBUF and OPT_RCVBUF
|
||||
|
||||
// Pretend it worked.
|
||||
return 1;
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
int GetError() const override {
|
||||
// UDPPort and TurnPort will call this if SendTo fails (returns < 0).
|
||||
// And that gets bubbled all the way up to RtpTransport::SendPacket
|
||||
// which will check to see if it's ENOTCONN, at which point it will
|
||||
// stop sending RTP/RTCP until SignalReadyToSend fires (weird, right?).
|
||||
// TODO: Simulate "ready" or "not ready to send" by returning ENOTCONN
|
||||
// and firing SignalReadyToSend at the appropriate times.
|
||||
return last_error_;
|
||||
}
|
||||
|
||||
// As rtc::AsyncPacketSocket
|
||||
void SetError(int error) override {
|
||||
// This appears to never be called.
|
||||
}
|
||||
|
||||
private:
|
||||
InjectableNetwork* network_;
|
||||
rtc::SocketAddress local_address_;
|
||||
int last_error_ = 0;
|
||||
};
|
||||
|
||||
class InjectableNetworkImpl : public InjectableNetwork, public rtc::NetworkManager, public rtc::PacketSocketFactory {
|
||||
public:
|
||||
InjectableNetworkImpl(rtc::Thread* network_thread) : network_thread_(network_thread) {
|
||||
}
|
||||
|
||||
~InjectableNetworkImpl() override {
|
||||
if (sender_.object_owned) {
|
||||
sender_.Delete(sender_.object_owned);
|
||||
}
|
||||
}
|
||||
|
||||
// As InjectableNetwork
|
||||
std::unique_ptr<cricket::PortAllocator> CreatePortAllocator() override {
|
||||
RTC_LOG(LS_INFO) << "InjectableNetworkImpl::CreatePortAllocator()";
|
||||
return network_thread_->BlockingCall([this] {
|
||||
return std::make_unique<cricket::BasicPortAllocator>(this, this);
|
||||
});
|
||||
}
|
||||
|
||||
void SetSender(const InjectableNetworkSender* sender) override {
|
||||
RTC_LOG(LS_INFO) << "InjectableNetworkImpl::SetSender()";
|
||||
sender_ = *sender;
|
||||
}
|
||||
|
||||
// name used for debugging a lot, but also as an ID for the network for TURN pruning.
|
||||
// type Affects Candidate network cost and other ICE behavior
|
||||
// preference affects ICE candidate priorities higher is more preferred
|
||||
void AddInterface(
|
||||
const char* name, rtc::AdapterType type, Ip ip, int preference) override {
|
||||
RTC_LOG(LS_INFO) << "InjectableNetworkImpl::AddInterface() name: " << name;
|
||||
// We need to access interface_by_name_ and SignalNetworksChanged on the network_thread_.
|
||||
// Make sure to copy the name first!
|
||||
network_thread_->PostTask(
|
||||
[this, name{std::string(name)}, type, ip, preference] {
|
||||
// TODO: Support different IP prefixes.
|
||||
auto interface = std::make_unique<rtc::Network>(
|
||||
name, name /* description */, IpToRtcIp(ip) /* prefix */, 0 /* prefix_length */, type);
|
||||
// TODO: Add more than one IP per network interface
|
||||
interface->AddIP(IpToRtcIp(ip));
|
||||
interface->set_preference(preference);
|
||||
interface_by_name_.insert({std::move(name), std::move(interface)});
|
||||
SignalNetworksChanged();
|
||||
});
|
||||
}
|
||||
|
||||
void RemoveInterface(const char* name) override {
|
||||
RTC_LOG(LS_INFO) << "InjectableNetworkImpl::RemoveInterface() name: " << name;
|
||||
// We need to access interface_by_name_ on the network_thread_.
|
||||
// Make sure to copy the name first!
|
||||
network_thread_->PostTask([this, name{std::string(name)}] {
|
||||
interface_by_name_.erase(name);
|
||||
});
|
||||
}
|
||||
|
||||
void ReceiveUdp(IpPort source,
|
||||
IpPort dest,
|
||||
const uint8_t* data,
|
||||
size_t size) override {
|
||||
// The network stack expects everything to happen on the network thread.
|
||||
// Make sure to copy the data!
|
||||
network_thread_->PostTask(
|
||||
[this, source, dest, data{std::vector<uint8_t>(data, data+size)}, size] {
|
||||
auto local_address = IpPortToRtcSocketAddress(dest);
|
||||
auto remote_address = IpPortToRtcSocketAddress(source);
|
||||
RTC_LOG(LS_VERBOSE) << "InjectableNetworkImpl::ReceiveUdp()"
|
||||
<< " from " << remote_address.ToString()
|
||||
<< " to " << local_address.ToString()
|
||||
<< " size: " << size;
|
||||
auto udp_socket = udp_socket_by_local_address_.find(local_address);
|
||||
if (udp_socket == udp_socket_by_local_address_.end()) {
|
||||
RTC_LOG(LS_WARNING) << "Received packet for unknown local address.";
|
||||
return;
|
||||
}
|
||||
udp_socket->second->ReceiveFrom(data.data(), data.size(), remote_address);
|
||||
});
|
||||
}
|
||||
|
||||
int SendUdp(const rtc::SocketAddress& local_address,
|
||||
const rtc::SocketAddress& remote_address,
|
||||
const uint8_t* data,
|
||||
size_t size) override {
|
||||
if (!sender_.object_owned) {
|
||||
RTC_LOG(LS_WARNING) << "Dropping packet because no sender set.";
|
||||
return -1;
|
||||
}
|
||||
IpPort local = RtcSocketAddressToIpPort(local_address);
|
||||
IpPort remote = RtcSocketAddressToIpPort(remote_address);
|
||||
// RTC_LOG(LS_VERBOSE) << "InjectableNetworkImpl::SendUdp()"
|
||||
// << " from " << local_address.ToString()
|
||||
// << " to " << remote_address.ToString()
|
||||
// << " size: " << size;
|
||||
sender_.SendUdp(sender_.object_owned, local, remote, data, size);
|
||||
return size;
|
||||
}
|
||||
|
||||
void ForgetUdp(const rtc::SocketAddress& local_address) override {
|
||||
// We need to access udp_socket_by_local_address_ on the network_thread_.
|
||||
network_thread_->PostTask([this, local_address] {
|
||||
udp_socket_by_local_address_.erase(local_address);
|
||||
});
|
||||
}
|
||||
|
||||
// As NetworkManager
|
||||
void StartUpdating() override {
|
||||
RTC_DCHECK(network_thread_->IsCurrent());
|
||||
RTC_LOG(LS_INFO) << "InjectableNetworkImpl::StartUpdating()";
|
||||
// TODO: Add support for changing networks dynamically.
|
||||
// BasicPortAllocatorSession listens to it do detect when networks have failed (gone away)
|
||||
// Documentation says this must be called by StartUpdating() once the network list is available.
|
||||
SignalNetworksChanged();
|
||||
}
|
||||
|
||||
// As NetworkManager
|
||||
void StopUpdating() override {
|
||||
}
|
||||
|
||||
// As NetworkManager
|
||||
std::vector<const rtc::Network*> GetNetworks() const override {
|
||||
RTC_LOG(LS_INFO) << "InjectableNetworkImpl::GetNetworks()";
|
||||
RTC_DCHECK(network_thread_->IsCurrent());
|
||||
|
||||
std::vector<const rtc::Network*> networks;
|
||||
for (const auto& kv : interface_by_name_) {
|
||||
networks.push_back(kv.second.get());
|
||||
}
|
||||
|
||||
return networks;
|
||||
}
|
||||
|
||||
// As NetworkManager
|
||||
webrtc::MdnsResponderInterface* GetMdnsResponder() const override {
|
||||
// We'll probably never use mDNS
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// As NetworkManager
|
||||
std::vector<const rtc::Network*> GetAnyAddressNetworks() override {
|
||||
// TODO: Add support for using a default route instead of choosing a particular network.
|
||||
// (such as when we can't enumerate networks or IPs)
|
||||
std::vector<const rtc::Network*> networks;
|
||||
|
||||
return networks;
|
||||
}
|
||||
|
||||
// As NetworkManager
|
||||
EnumerationPermission enumeration_permission() const override {
|
||||
// This is only really needed for web security things we don't need to worry about.
|
||||
// So, always allow.
|
||||
return ENUMERATION_ALLOWED;
|
||||
}
|
||||
|
||||
// As NetworkManager
|
||||
bool GetDefaultLocalAddress(int family, rtc::IPAddress* ipaddr) const override {
|
||||
// TODO: Add support for using a default route instead of choosing a particular network.
|
||||
// (such as when we can't enumerate networks or IPs)
|
||||
return false;
|
||||
}
|
||||
|
||||
// As PacketSocketFactory
|
||||
rtc::AsyncPacketSocket* CreateUdpSocket(const rtc::SocketAddress& local_address_without_port,
|
||||
uint16_t min_port,
|
||||
uint16_t max_port) override {
|
||||
RTC_DCHECK(network_thread_->IsCurrent());
|
||||
RTC_LOG(LS_INFO) << "InjectableNetworkImpl::CreateUdpSocket() ip: " << local_address_without_port.ip();
|
||||
const rtc::IPAddress& local_ip = local_address_without_port.ipaddr();
|
||||
// The min_port and max_port are ultimately controlled by the PortAllocator,
|
||||
// which we create, so we can ignore those.
|
||||
// And the local_address is supposed to have a port of 0.
|
||||
uint16_t local_port = next_udp_port_++;
|
||||
rtc::SocketAddress local_address(local_ip, local_port);
|
||||
auto udp_socket = new InjectableUdpSocket(this, local_address);
|
||||
udp_socket_by_local_address_.insert({local_address, udp_socket});
|
||||
// This really should return a std::unique_ptr because callers all take ownership.
|
||||
return udp_socket;
|
||||
}
|
||||
|
||||
// As PacketSocketFactory
|
||||
rtc::AsyncListenSocket* CreateServerTcpSocket(const rtc::SocketAddress& local_address,
|
||||
uint16_t min_port,
|
||||
uint16_t max_port,
|
||||
int opts) override {
|
||||
// We never plan to support TCP ICE (other than through TURN),
|
||||
// So we'll never implement this.
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// As PacketSocketFactory
|
||||
rtc::AsyncPacketSocket* CreateClientTcpSocket(
|
||||
const rtc::SocketAddress& local_address,
|
||||
const rtc::SocketAddress& remote_address,
|
||||
const rtc::ProxyInfo& proxy_info,
|
||||
const std::string& user_agent,
|
||||
const rtc::PacketSocketTcpOptions& tcp_options) override {
|
||||
// TODO: Support TCP for TURN
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// As PacketSocketFactory
|
||||
rtc::AsyncResolverInterface* CreateAsyncResolver() override {
|
||||
// TODO: Add support for DNS-based STUN/TURN servers.
|
||||
// For now, just use IP addresses
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
private:
|
||||
rtc::Thread* network_thread_;
|
||||
std::map<std::string, std::unique_ptr<rtc::Network>> interface_by_name_;
|
||||
std::map<rtc::SocketAddress, InjectableUdpSocket*> udp_socket_by_local_address_;
|
||||
// The ICE stack does not like ports below 1024.
|
||||
// Give it a nice even number to count up from.
|
||||
uint16_t next_udp_port_ = 2001;
|
||||
InjectableNetworkSender sender_ = {};
|
||||
};
|
||||
|
||||
std::unique_ptr<InjectableNetwork> CreateInjectableNetwork(rtc::Thread* network_thread) {
|
||||
return std::make_unique<InjectableNetworkImpl>(network_thread);
|
||||
}
|
||||
|
||||
// The passed-in sender must live as long as the InjectableNetwork,
|
||||
// which likely means it must live as long as the PeerConnection.
|
||||
RUSTEXPORT void Rust_InjectableNetwork_SetSender(
|
||||
InjectableNetwork* network_borrowed,
|
||||
const InjectableNetworkSender* sender_borrowed) {
|
||||
network_borrowed->SetSender(sender_borrowed);
|
||||
}
|
||||
|
||||
RUSTEXPORT void Rust_InjectableNetwork_AddInterface(
|
||||
InjectableNetwork* network_borrowed,
|
||||
const char* name_borrowed,
|
||||
rtc::AdapterType type,
|
||||
Ip ip,
|
||||
int preference) {
|
||||
network_borrowed->AddInterface(name_borrowed, type, ip, preference);
|
||||
}
|
||||
|
||||
RUSTEXPORT void Rust_InjectableNetwork_RemoveInterface(
|
||||
InjectableNetwork* network_borrowed,
|
||||
const char* name_borrowed) {
|
||||
network_borrowed->RemoveInterface(name_borrowed);
|
||||
}
|
||||
|
||||
RUSTEXPORT void Rust_InjectableNetwork_ReceiveUdp(
|
||||
InjectableNetwork* network_borrowed,
|
||||
IpPort local,
|
||||
IpPort remote,
|
||||
const uint8_t* data_borrowed,
|
||||
size_t size) {
|
||||
network_borrowed->ReceiveUdp(local, remote, data_borrowed, size);
|
||||
}
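
// A minimal sketch of how these entry points fit together from the caller's side.
// The interface name, adapter type, and address bytes are illustrative, and it
// assumes Ip is a plain zero-initializable struct whose `address` field is a byte
// buffer read the same way IpToRtcIp reads it, and that <cstring> is available.
static void ExampleWireUpInjectableNetwork(InjectableNetwork* network,
                                           const InjectableNetworkSender* sender) {
  // Outgoing packets will be handed to `sender` instead of a real socket.
  Rust_InjectableNetwork_SetSender(network, sender);

  // Pretend a Wi-Fi interface with address 10.0.0.1 came up.
  Ip ip = {};
  const uint8_t ipv4_bytes[4] = {10, 0, 0, 1};
  ::memcpy(&ip.address, ipv4_bytes, sizeof(ipv4_bytes));
  Rust_InjectableNetwork_AddInterface(network, "wifi", rtc::ADAPTER_TYPE_WIFI, ip, 0);

  // Incoming packets are later pushed in with Rust_InjectableNetwork_ReceiveUdp(),
  // and the interface is torn down with Rust_InjectableNetwork_RemoveInterface().
}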
|
||||
|
||||
} // namespace rffi
|
||||
|
||||
} // namespace webrtc
20
ringrtc/rffi/src/logging.cc
Normal file
@@ -0,0 +1,20 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "rffi/api/logging.h"

namespace webrtc {
namespace rffi {

RUSTEXPORT void Rust_setLogger(LoggerCallbacks* cbs_borrowed, rtc::LoggingSeverity min_sev) {
  Logger* logger_owned = new Logger(cbs_borrowed);
  // LEAK: it's only called once, so it shouldn't matter.
  Logger* logger_borrowed = logger_owned;
  // Stores the sink, but does not delete it.
  rtc::LogMessage::AddLogToStream(logger_borrowed, min_sev);
}
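
// A minimal usage sketch (the contents of LoggerCallbacks are defined in
// rffi/api/logging.h and not shown here); this is typically called once at
// startup, after which WebRTC log messages at or above the given severity are
// forwarded through the callbacks:
//
//   Rust_setLogger(&callbacks, rtc::LS_INFO);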

} // namespace rffi
} // namespace webrtc
143
ringrtc/rffi/src/media.cc
Normal file
@@ -0,0 +1,143 @@
/*
|
||||
* Copyright 2019-2021 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "rtc_base/ref_counted_object.h"
|
||||
#include "rffi/api/media.h"
|
||||
#include "rffi/src/ptr.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert_argb.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert_from.h"
|
||||
namespace webrtc {
|
||||
namespace rffi {
|
||||
|
||||
VideoSource::VideoSource() : VideoTrackSource(false /* remote */) {
|
||||
SetState(kLive);
|
||||
}
|
||||
|
||||
VideoSource::~VideoSource() {
|
||||
}
|
||||
|
||||
void VideoSource::PushVideoFrame(const webrtc::VideoFrame& frame) {
|
||||
broadcaster_.OnFrame(frame);
|
||||
}
|
||||
|
||||
// Returns 0 upon failure
|
||||
RUSTEXPORT uint32_t Rust_getTrackIdAsUint32(webrtc::MediaStreamTrackInterface* track_borrowed_rc) {
|
||||
uint32_t id = 0;
|
||||
rtc::FromString(track_borrowed_rc->id(), &id);
|
||||
return id;
|
||||
}
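
// For example (illustrative values): a remote group-call track whose id is the
// demux-id string "4096" parses to 4096 here, while a non-numeric id such as
// "audio1" fails rtc::FromString and leaves the 0 failure value in place.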
|
||||
|
||||
RUSTEXPORT void Rust_setAudioTrackEnabled(
|
||||
webrtc::AudioTrackInterface* track_borrowed_rc, bool enabled) {
|
||||
track_borrowed_rc->set_enabled(enabled);
|
||||
}
|
||||
|
||||
RUSTEXPORT void Rust_setVideoTrackEnabled(
|
||||
webrtc::VideoTrackInterface* track_borrowed_rc, bool enabled) {
|
||||
track_borrowed_rc->set_enabled(enabled);
|
||||
}
|
||||
|
||||
RUSTEXPORT void Rust_setVideoTrackContentHint(
|
||||
webrtc::VideoTrackInterface* track_borrowed_rc, bool is_screenshare) {
|
||||
track_borrowed_rc->set_content_hint(is_screenshare ? VideoTrackInterface::ContentHint::kText : VideoTrackInterface::ContentHint::kNone);
|
||||
}
|
||||
|
||||
RUSTEXPORT void Rust_pushVideoFrame(
|
||||
webrtc::rffi::VideoSource* source_borrowed_rc,
|
||||
VideoFrameBuffer* buffer_borrowed_rc) {
|
||||
auto timestamp_us = rtc::TimeMicros();
|
||||
auto frame = webrtc::VideoFrame::Builder()
|
||||
.set_video_frame_buffer(inc_rc(buffer_borrowed_rc))
|
||||
.set_timestamp_us(timestamp_us)
|
||||
.build();
|
||||
source_borrowed_rc->PushVideoFrame(std::move(frame));
|
||||
}
|
||||
|
||||
// Returns an owned RC.
|
||||
RUSTEXPORT VideoFrameBuffer* Rust_copyVideoFrameBufferFromI420(
|
||||
uint32_t width, uint32_t height, uint8_t* src_borrowed) {
|
||||
int width_y = static_cast<int>(width);
|
||||
int height_y = static_cast<int>(height);
|
||||
int width_u = (width_y + 1) / 2;
|
||||
int height_u = (height_y + 1) / 2;
|
||||
|
||||
int stride_y = width_y;
|
||||
int stride_u = width_u;
|
||||
int stride_v = width_u;
|
||||
|
||||
int size_y = width_y * height_y;
|
||||
int size_u = width_u * height_u;
|
||||
|
||||
uint8_t* src_y = src_borrowed;
|
||||
uint8_t* src_u = src_y + size_y;
|
||||
uint8_t* src_v = src_u + size_u;
|
||||
|
||||
return take_rc(I420Buffer::Copy(width, height, src_y, stride_y, src_u, stride_u, src_v, stride_v));
|
||||
}
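
// Worked example of the layout above (illustrative dimensions): for a 640x480
// frame, size_y = 640 * 480 = 307200 and size_u = 320 * 240 = 76800, so
// src_borrowed must hold 307200 + 2 * 76800 = 460800 bytes, i.e.
// width * height * 3 / 2 for even dimensions, laid out as Y, then U, then V.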
|
||||
|
||||
// Returns an owned RC.
|
||||
RUSTEXPORT VideoFrameBuffer* Rust_copyVideoFrameBufferFromNv12(
|
||||
uint32_t width, uint32_t height, uint8_t* src_borrowed) {
|
||||
int width_y = static_cast<int>(width);
|
||||
int height_y = static_cast<int>(height);
|
||||
int width_u = (width_y + 1) / 2;
|
||||
int width_v = width_u;
|
||||
|
||||
int stride_y = width_y;
|
||||
int stride_uv = width_u + width_v;
|
||||
|
||||
int size_y = width_y * height_y;
|
||||
|
||||
uint8_t* src_y = src_borrowed;
|
||||
uint8_t* src_uv = src_y + size_y;
|
||||
|
||||
auto dest = I420Buffer::Create(width, height);
|
||||
libyuv::NV12ToI420(
|
||||
src_y, stride_y,
|
||||
src_uv, stride_uv,
|
||||
dest->MutableDataY(), dest->StrideY(),
|
||||
dest->MutableDataU(), dest->StrideU(),
|
||||
dest->MutableDataV(), dest->StrideV(),
|
||||
width_y, height_y);
|
||||
return take_rc(dest);
|
||||
}
|
||||
|
||||
// Returns an owned RC.
|
||||
RUSTEXPORT VideoFrameBuffer* Rust_copyVideoFrameBufferFromRgba(
|
||||
uint32_t width, uint32_t height, uint8_t* rgba_borrowed) {
|
||||
auto i420 = I420Buffer::Create(width, height);
|
||||
int rgba_stride = 4 * width;
|
||||
libyuv::ABGRToI420(
|
||||
rgba_borrowed, rgba_stride,
|
||||
i420->MutableDataY(), i420->StrideY(),
|
||||
i420->MutableDataU(), i420->StrideU(),
|
||||
i420->MutableDataV(), i420->StrideV(),
|
||||
width, height);
|
||||
return take_rc(i420);
|
||||
}
|
||||
|
||||
RUSTEXPORT void Rust_convertVideoFrameBufferToRgba(const VideoFrameBuffer* buffer_borrowed_rc, uint8_t* rgba_out) {
|
||||
const I420BufferInterface* i420 = buffer_borrowed_rc->GetI420();
|
||||
uint32_t rgba_stride = 4 * i420->width();
|
||||
libyuv::I420ToABGR(
|
||||
i420->DataY(), i420->StrideY(),
|
||||
i420->DataU(), i420->StrideU(),
|
||||
i420->DataV(), i420->StrideV(),
|
||||
rgba_out, rgba_stride,
|
||||
i420->width(), i420->height());
|
||||
}
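
// A minimal usage sketch for the conversion above; the caller is responsible
// for sizing rgba_out at 4 * width * height bytes (this assumes <vector> is
// available and that the buffer is backed by I420, as GetI420() requires):
//
//   std::vector<uint8_t> rgba(4 * buffer->width() * buffer->height());
//   Rust_convertVideoFrameBufferToRgba(buffer, rgba.data());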
|
||||
|
||||
// Returns an owned RC.
|
||||
RUSTEXPORT VideoFrameBuffer* Rust_copyAndRotateVideoFrameBuffer(
|
||||
const VideoFrameBuffer* buffer_borrowed_rc, VideoRotation rotation) {
|
||||
return take_rc(webrtc::I420Buffer::Rotate(*buffer_borrowed_rc->GetI420(), rotation));
|
||||
}
|
||||
|
||||
} // namespace rffi
|
||||
} // namespace webrtc
52
ringrtc/rffi/src/network.cc
Normal file
@@ -0,0 +1,52 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "rffi/api/network.h"

namespace webrtc {

namespace rffi {

rtc::IPAddress IpToRtcIp(Ip ip) {
  if (ip.v6) {
    in6_addr ipv6;
    ::memcpy(&ipv6.s6_addr, &ip.address, 16);
    return rtc::IPAddress(ipv6);
  } else {
    in_addr ipv4;
    ::memcpy(&ipv4.s_addr, &ip.address, 4);
    return rtc::IPAddress(ipv4);
  }
}

rtc::SocketAddress IpPortToRtcSocketAddress(IpPort ip_port) {
  return rtc::SocketAddress(IpToRtcIp(ip_port.ip), ip_port.port);
}

Ip RtcIpToIp(rtc::IPAddress address) {
  Ip ip;
  memset(&ip.address, 0, sizeof(ip.address));
  if (address.family() == AF_INET6) {
    in6_addr ipv6 = address.ipv6_address();
    ip.v6 = true;
    ::memcpy(&ip.address, &ipv6.s6_addr, 16);
  } else {
    in_addr ipv4 = address.ipv4_address();
    ip.v6 = false;
    ::memcpy(&ip.address, &ipv4.s_addr, 4);
  }
  return ip;
}

IpPort RtcSocketAddressToIpPort(const rtc::SocketAddress& address) {
  IpPort ip_port;
  ip_port.ip = RtcIpToIp(address.ipaddr());
  ip_port.port = address.port();
  return ip_port;
}
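
// A minimal round-trip sketch for the helpers above (the address and port are
// illustrative); converting to the FFI representation and back preserves both
// the IP and the port:
//
//   rtc::SocketAddress original("192.168.1.2", 5000);
//   IpPort ip_port = RtcSocketAddressToIpPort(original);
//   rtc::SocketAddress restored = IpPortToRtcSocketAddress(ip_port);
//   // restored.ipaddr() == original.ipaddr() && restored.port() == original.port()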

} // namespace rffi

} // namespace webrtc
958
ringrtc/rffi/src/peer_connection.cc
Normal file
@@ -0,0 +1,958 @@
/*
|
||||
* Copyright 2019-2021 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
#include "api/ice_gatherer_interface.h"
|
||||
#include "api/ice_transport_interface.h"
|
||||
#include "api/jsep_session_description.h"
|
||||
#include "api/peer_connection_interface.h"
|
||||
#include "api/video_codecs/h264_profile_level_id.h"
|
||||
#include "api/video_codecs/vp9_profile.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
|
||||
#include "p2p/base/port.h"
|
||||
#include "pc/media_session.h"
|
||||
#include "pc/sdp_utils.h"
|
||||
#include "pc/session_description.h"
|
||||
#include "sdk/media_constraints.h"
|
||||
#include "rffi/api/peer_connection_intf.h"
|
||||
#include "rffi/src/ptr.h"
|
||||
#include "rffi/src/sdp_observer.h"
|
||||
#include "rffi/src/stats_observer.h"
|
||||
#include "rtc_base/message_digest.h"
|
||||
#include "rtc_base/string_encode.h"
|
||||
#include "rtc_base/third_party/base64/base64.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <string>
|
||||
|
||||
namespace webrtc {
|
||||
namespace rffi {
|
||||
|
||||
int TRANSPORT_CC1_EXT_ID = 1;
|
||||
int VIDEO_ORIENTATION_EXT_ID = 4;
|
||||
int AUDIO_LEVEL_EXT_ID = 5;
|
||||
int ABS_SEND_TIME_EXT_ID = 12;
|
||||
// Old clients used this value, so don't use it until they are all gone.
|
||||
int TX_TIME_OFFSET_EXT_ID = 13;
|
||||
|
||||
// Payload types must be over 96 and less than 128.
|
||||
// 101 used by connection.rs
|
||||
int DATA_PT = 101;
|
||||
int OPUS_PT = 102;
|
||||
int VP8_PT = 108;
|
||||
int VP8_RTX_PT = 118;
|
||||
int VP9_PT = 109;
|
||||
int VP9_RTX_PT = 119;
|
||||
int H264_CHP_PT = 104;
|
||||
int H264_CHP_RTX_PT = 114;
|
||||
int H264_CBP_PT = 103;
|
||||
int H264_CBP_RTX_PT = 113;
|
||||
int RED_PT = 120;
|
||||
int RED_RTX_PT = 121;
|
||||
int ULPFEC_PT = 122;
|
||||
|
||||
// Borrows the observer until the result is given to the observer,
|
||||
// so the observer must stay alive until it's given a result.
|
||||
RUSTEXPORT void
|
||||
Rust_createOffer(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
CreateSessionDescriptionObserverRffi* csd_observer_borrowed_rc) {
|
||||
|
||||
// No constraints are set
|
||||
MediaConstraints constraints = MediaConstraints();
|
||||
PeerConnectionInterface::RTCOfferAnswerOptions options;
|
||||
|
||||
CopyConstraintsIntoOfferAnswerOptions(&constraints, &options);
|
||||
peer_connection_borrowed_rc->CreateOffer(csd_observer_borrowed_rc, options);
|
||||
}
|
||||
|
||||
// Borrows the observer until the result is given to the observer,
|
||||
// so the observer must stay alive until it's given a result.
|
||||
RUSTEXPORT void
|
||||
Rust_setLocalDescription(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
SetSessionDescriptionObserverRffi* ssd_observer_borrowed_rc,
|
||||
SessionDescriptionInterface* local_description_owned) {
|
||||
peer_connection_borrowed_rc->SetLocalDescription(ssd_observer_borrowed_rc, local_description_owned);
|
||||
}
|
||||
|
||||
// Returns an owned pointer.
|
||||
RUSTEXPORT const char*
|
||||
Rust_toSdp(SessionDescriptionInterface* session_description_borrowed) {
|
||||
|
||||
std::string sdp;
|
||||
if (session_description_borrowed->ToString(&sdp)) {
|
||||
return strdup(&sdp[0u]);
|
||||
}
|
||||
|
||||
RTC_LOG(LS_ERROR) << "Unable to convert SessionDescription to SDP";
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Returns an owned pointer.
|
||||
static SessionDescriptionInterface*
|
||||
createSessionDescriptionInterface(SdpType type, const char* sdp_borrowed) {
|
||||
|
||||
if (sdp_borrowed != nullptr) {
|
||||
return CreateSessionDescription(type, std::string(sdp_borrowed)).release();
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
// Returns an owned pointer.
|
||||
RUSTEXPORT SessionDescriptionInterface*
|
||||
Rust_answerFromSdp(const char* sdp_borrowed) {
|
||||
return createSessionDescriptionInterface(SdpType::kAnswer, sdp_borrowed);
|
||||
}
|
||||
|
||||
RUSTEXPORT SessionDescriptionInterface*
|
||||
Rust_offerFromSdp(const char* sdp_borrowed) {
|
||||
return createSessionDescriptionInterface(SdpType::kOffer, sdp_borrowed);
|
||||
}
|
||||
|
||||
RUSTEXPORT bool
|
||||
Rust_disableDtlsAndSetSrtpKey(webrtc::SessionDescriptionInterface* session_description_borrowed,
|
||||
int crypto_suite,
|
||||
const char* key_borrowed,
|
||||
size_t key_len,
|
||||
const char* salt_borrowed,
|
||||
size_t salt_len) {
|
||||
if (!session_description_borrowed) {
|
||||
return false;
|
||||
}
|
||||
|
||||
cricket::SessionDescription* session = session_description_borrowed->description();
|
||||
if (!session) {
|
||||
return false;
|
||||
}
|
||||
|
||||
cricket::CryptoParams crypto_params;
|
||||
crypto_params.cipher_suite = rtc::SrtpCryptoSuiteToName(crypto_suite);
|
||||
|
||||
std::string key(key_borrowed, key_len);
|
||||
std::string salt(salt_borrowed, salt_len);
|
||||
crypto_params.key_params = "inline:" + rtc::Base64::Encode(key + salt);
|
||||
|
||||
// Disable DTLS
|
||||
for (cricket::TransportInfo& transport : session->transport_infos()) {
|
||||
transport.description.connection_role = cricket::CONNECTIONROLE_NONE;
|
||||
transport.description.identity_fingerprint = nullptr;
|
||||
}
|
||||
|
||||
// Set SRTP key
|
||||
for (cricket::ContentInfo& content : session->contents()) {
|
||||
cricket::MediaContentDescription* media = content.media_description();
|
||||
if (media) {
|
||||
media->set_protocol(cricket::kMediaProtocolSavpf);
|
||||
std::vector<cricket::CryptoParams> cryptos;
|
||||
cryptos.push_back(crypto_params);
|
||||
media->set_cryptos(cryptos);
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
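
// Worked example of the key_params format built above (sizes are for the
// common AES_CM_128_HMAC_SHA1_80 suite; other suites differ): a 16-byte master
// key concatenated with a 14-byte salt is 30 bytes, which Base64-encodes to 40
// characters, so the SDP ends up carrying something like
//   a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:<40 base64 characters>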
|
||||
|
||||
static int
|
||||
codecPriority(const RffiVideoCodec c) {
|
||||
// Lower values are given higher priority
|
||||
switch (c.type) {
|
||||
case kRffiVideoCodecVp9: return 0;
|
||||
case kRffiVideoCodecH264ConstrainedHigh: return 1;
|
||||
case kRffiVideoCodecH264ConstrainedBaseline: return 2;
|
||||
case kRffiVideoCodecVp8: return 3;
|
||||
default: return 100;
|
||||
}
|
||||
}
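
// For example, a received codec list of {VP8, H264 constrained baseline, VP9}
// sorts to {VP9, H264 constrained baseline, VP8} under these priorities, and
// std::stable_sort preserves the relative order of entries that share a value.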
|
||||
|
||||
RUSTEXPORT RffiConnectionParametersV4*
|
||||
Rust_sessionDescriptionToV4(const webrtc::SessionDescriptionInterface* session_description_borrowed) {
|
||||
if (!session_description_borrowed) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
const cricket::SessionDescription* session = session_description_borrowed->description();
|
||||
if (!session) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Get ICE ufrag + pwd
|
||||
if (session->transport_infos().empty()) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
auto v4 = std::make_unique<ConnectionParametersV4>();
|
||||
|
||||
auto* transport = &session->transport_infos()[0].description;
|
||||
v4->ice_ufrag = transport->ice_ufrag;
|
||||
v4->ice_pwd = transport->ice_pwd;
|
||||
|
||||
// Get video codecs
|
||||
auto* video = cricket::GetFirstVideoContentDescription(session);
|
||||
if (video) {
|
||||
// We only support 1 CBP and 1 CHP codec.
|
||||
// So only include the first of each.
|
||||
// This should be OK because Android and iOS and native only
|
||||
// add one level per profile.
|
||||
bool has_h264_cbp = false;
|
||||
bool has_h264_chp = false;
|
||||
for (const auto& codec : video->codecs()) {
|
||||
auto codec_type = webrtc::PayloadStringToCodecType(codec.name);
|
||||
|
||||
if (codec_type == webrtc::kVideoCodecVP9) {
|
||||
auto profile = ParseSdpForVP9Profile(codec.params);
|
||||
if (!profile) {
|
||||
std::string profile_id_string;
|
||||
codec.GetParam("profile-id", &profile_id_string);
|
||||
RTC_LOG(LS_WARNING) << "Ignoring VP9 codec because profile-id = " << profile_id_string;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (profile != VP9Profile::kProfile0) {
|
||||
RTC_LOG(LS_WARNING) << "Ignoring VP9 codec with profile-id != 0";
|
||||
continue;
|
||||
}
|
||||
|
||||
RffiVideoCodec vp9;
|
||||
vp9.type = kRffiVideoCodecVp9;
|
||||
vp9.level = 0;
|
||||
v4->receive_video_codecs.push_back(vp9);
|
||||
} else if (codec_type == webrtc::kVideoCodecVP8) {
|
||||
RffiVideoCodec vp8;
|
||||
vp8.type = kRffiVideoCodecVp8;
|
||||
vp8.level = 0;
|
||||
v4->receive_video_codecs.push_back(vp8);
|
||||
} else if (codec_type == webrtc::kVideoCodecH264) {
|
||||
std::string level_asymmetry_allowed;
|
||||
if (codec.GetParam(cricket::kH264FmtpLevelAsymmetryAllowed, &level_asymmetry_allowed) && level_asymmetry_allowed != "1") {
|
||||
RTC_LOG(LS_WARNING) << "Ignoring H264 codec because level-asymmetry-allowed = " << level_asymmetry_allowed;
|
||||
continue;
|
||||
}
|
||||
|
||||
std::string packetization_mode;
|
||||
if (codec.GetParam(cricket::kH264FmtpPacketizationMode, &packetization_mode) && packetization_mode != "1") {
|
||||
// Not a warning because WebRTC software H264 encoders say they support mode 0 (even though it's useless).
|
||||
RTC_LOG(LS_INFO) << "Ignoring H264 codec because packetization_mode = " << packetization_mode;
|
||||
continue;
|
||||
}
|
||||
|
||||
auto profile_level_id = ParseSdpForH264ProfileLevelId(codec.params);
|
||||
if (!profile_level_id) {
|
||||
std::string profile_level_id_string;
|
||||
codec.GetParam("profile-level-id", &profile_level_id_string);
|
||||
RTC_LOG(LS_WARNING) << "Ignoring H264 codec because profile-level-id = " << profile_level_id_string;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (profile_level_id->profile == H264Profile::kProfileConstrainedHigh && !has_h264_chp) {
|
||||
RffiVideoCodec h264_chp;
|
||||
h264_chp.type = kRffiVideoCodecH264ConstrainedHigh;
|
||||
h264_chp.level = static_cast<uint32_t>(profile_level_id->level);
|
||||
v4->receive_video_codecs.push_back(h264_chp);
|
||||
has_h264_chp = true;
|
||||
} else if (profile_level_id->profile != H264Profile::kProfileConstrainedBaseline) {
|
||||
// Not a warning because WebRTC software H264 encoders say they support baseline, even though it's useless.
|
||||
RTC_LOG(LS_INFO) << "Ignoring H264 codec profile = " << profile_level_id->profile;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!has_h264_cbp) {
|
||||
// Any time we support anything, we assume we also support CBP
|
||||
// (but don't add it more than once)
|
||||
RffiVideoCodec h264_cbp;
|
||||
h264_cbp.type = kRffiVideoCodecH264ConstrainedBaseline;
|
||||
h264_cbp.level = static_cast<uint32_t>(profile_level_id->level);
|
||||
v4->receive_video_codecs.push_back(h264_cbp);
|
||||
has_h264_cbp = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
std::stable_sort(v4->receive_video_codecs.begin(), v4->receive_video_codecs.end(), [](const RffiVideoCodec lhs, const RffiVideoCodec rhs) {
|
||||
return codecPriority(lhs) < codecPriority(rhs);
|
||||
});
|
||||
|
||||
auto* rffi_v4 = new RffiConnectionParametersV4();
|
||||
rffi_v4->ice_ufrag_borrowed = v4->ice_ufrag.c_str();
|
||||
rffi_v4->ice_pwd_borrowed = v4->ice_pwd.c_str();
|
||||
rffi_v4->receive_video_codecs_borrowed = v4->receive_video_codecs.data();
|
||||
rffi_v4->receive_video_codecs_size = v4->receive_video_codecs.size();
|
||||
rffi_v4->backing_owned = v4.release();
|
||||
return rffi_v4;
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_deleteV4(RffiConnectionParametersV4* v4_owned) {
|
||||
if (!v4_owned) {
|
||||
return;
|
||||
}
|
||||
|
||||
delete v4_owned->backing_owned;
|
||||
delete v4_owned;
|
||||
}
|
||||
|
||||
// Returns an owned pointer.
|
||||
RUSTEXPORT webrtc::SessionDescriptionInterface*
|
||||
Rust_sessionDescriptionFromV4(bool offer, const RffiConnectionParametersV4* v4_borrowed) {
|
||||
// Major changes from the default WebRTC behavior:
|
||||
// 1. We remove all codecs except Opus, VP8, VP9, and H264
|
||||
// 2. We remove all header extensions except for transport-cc, video orientation,
|
||||
// and abs send time.
|
||||
// 3. Opus CBR and DTX are enabled.
|
||||
|
||||
// For some reason, WebRTC insists that the video SSRCs for one side don't
|
||||
// overlap with SSRCs from the other side. To avoid potential problems, we'll give the
|
||||
// caller side 1XXX and the callee side 2XXX.
|
||||
uint32_t BASE_SSRC = offer ? 1000 : 2000;
|
||||
// 1001 and 2001 used by connection.rs
|
||||
uint32_t AUDIO_SSRC = BASE_SSRC + 2;
|
||||
uint32_t VIDEO_SSRC = BASE_SSRC + 3;
|
||||
uint32_t VIDEO_RTX_SSRC = BASE_SSRC + 13;
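// Worked example: an offer uses BASE_SSRC = 1000, giving audio SSRC 1002,
// video SSRC 1003, and video RTX SSRC 1013; the answer uses 2002, 2003, and
// 2013, keeping the two sides' SSRC ranges disjoint.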
|
||||
|
||||
// This should stay in sync with PeerConnectionFactory.createAudioTrack
|
||||
std::string AUDIO_TRACK_ID = "audio1";
|
||||
// This must stay in sync with PeerConnectionFactory.createVideoTrack
|
||||
std::string VIDEO_TRACK_ID = "video1";
|
||||
|
||||
auto transport = cricket::TransportDescription();
|
||||
transport.ice_mode = cricket::ICEMODE_FULL;
|
||||
transport.ice_ufrag = std::string(v4_borrowed->ice_ufrag_borrowed);
|
||||
transport.ice_pwd = std::string(v4_borrowed->ice_pwd_borrowed);
|
||||
transport.AddOption(cricket::ICE_OPTION_TRICKLE);
|
||||
transport.AddOption(cricket::ICE_OPTION_RENOMINATION);
|
||||
|
||||
// DTLS is disabled
|
||||
transport.connection_role = cricket::CONNECTIONROLE_NONE;
|
||||
transport.identity_fingerprint = nullptr;
|
||||
|
||||
auto set_rtp_params = [] (cricket::MediaContentDescription* media) {
|
||||
media->set_protocol(cricket::kMediaProtocolSavpf);
|
||||
media->set_rtcp_mux(true);
|
||||
media->set_direction(webrtc::RtpTransceiverDirection::kSendRecv);
|
||||
};
|
||||
|
||||
auto audio = std::make_unique<cricket::AudioContentDescription>();
|
||||
set_rtp_params(audio.get());
|
||||
auto video = std::make_unique<cricket::VideoContentDescription>();
|
||||
set_rtp_params(video.get());
|
||||
|
||||
auto opus = cricket::AudioCodec(OPUS_PT, cricket::kOpusCodecName, 48000, 0, 2);
|
||||
// These are the current defaults for WebRTC
|
||||
// We set them explicitly to avoid having the defaults change on us.
|
||||
opus.SetParam("stereo", "0"); // "1" would cause non-VOIP mode to be used
|
||||
opus.SetParam("ptime", "20");
|
||||
opus.SetParam("minptime", "10");
|
||||
opus.SetParam("maxptime", "120");
|
||||
opus.SetParam("useinbandfec", "1");
|
||||
// This is not a default. We enable this to help reduce bandwidth because we
|
||||
// are using CBR.
|
||||
opus.SetParam("usedtx", "1");
|
||||
opus.SetParam("maxaveragebitrate", "32000");
|
||||
// This is not a default. We enable this for privacy.
|
||||
opus.SetParam("cbr", "1");
|
||||
opus.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
|
||||
audio->AddCodec(opus);
|
||||
|
||||
auto add_video_feedback_params = [] (cricket::VideoCodec* video_codec) {
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty));
|
||||
};
|
||||
|
||||
auto add_h264_params = [] (cricket::VideoCodec* h264_codec, H264Profile profile, uint32_t level) {
|
||||
// All of the codec implementations (iOS hardware, Android hardware) are only used by WebRTC
|
||||
// with packetization mode 1. Software codecs also support mode 0, but who cares. It's useless.
|
||||
// They also all allow for level asymmetry.
|
||||
h264_codec->SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1");
|
||||
h264_codec->SetParam(cricket::kH264FmtpPacketizationMode, "1");
|
||||
// On Android and with software, the level is always 31. But it could be anything with iOS.
|
||||
auto profile_level_id_string = H264ProfileLevelIdToString(H264ProfileLevelId(profile, H264Level(level)));
|
||||
if (profile_level_id_string) {
|
||||
h264_codec->SetParam("profile-level-id", *profile_level_id_string);
|
||||
}
|
||||
};
|
||||
|
||||
std::stable_sort(v4_borrowed->receive_video_codecs_borrowed, v4_borrowed->receive_video_codecs_borrowed + v4_borrowed->receive_video_codecs_size, [](const RffiVideoCodec lhs, const RffiVideoCodec rhs) {
|
||||
return codecPriority(lhs) < codecPriority(rhs);
|
||||
});
|
||||
|
||||
for (size_t i = 0; i < v4_borrowed->receive_video_codecs_size; i++) {
|
||||
RffiVideoCodec rffi_codec = v4_borrowed->receive_video_codecs_borrowed[i];
|
||||
cricket::VideoCodec codec;
|
||||
if (rffi_codec.type == kRffiVideoCodecVp9) {
|
||||
auto vp9 = cricket::VideoCodec(VP9_PT, cricket::kVp9CodecName);
|
||||
auto vp9_rtx = cricket::VideoCodec::CreateRtxCodec(VP9_RTX_PT, VP9_PT);
|
||||
add_video_feedback_params(&vp9);
|
||||
|
||||
video->AddCodec(vp9);
|
||||
video->AddCodec(vp9_rtx);
|
||||
} else if (rffi_codec.type == kRffiVideoCodecVp8) {
|
||||
auto vp8 = cricket::VideoCodec(VP8_PT, cricket::kVp8CodecName);
|
||||
auto vp8_rtx = cricket::VideoCodec::CreateRtxCodec(VP8_RTX_PT, VP8_PT);
|
||||
add_video_feedback_params(&vp8);
|
||||
|
||||
video->AddCodec(vp8);
|
||||
video->AddCodec(vp8_rtx);
|
||||
} else if (rffi_codec.type == kRffiVideoCodecH264ConstrainedHigh) {
|
||||
auto h264_chp = cricket::VideoCodec(H264_CHP_PT, cricket::kH264CodecName);
|
||||
auto h264_chp_rtx = cricket::VideoCodec::CreateRtxCodec(H264_CHP_RTX_PT, H264_CHP_PT);
|
||||
add_h264_params(&h264_chp, H264Profile::kProfileConstrainedHigh, rffi_codec.level);
|
||||
add_video_feedback_params(&h264_chp);
|
||||
|
||||
video->AddCodec(h264_chp);
|
||||
video->AddCodec(h264_chp_rtx);
|
||||
} else if (rffi_codec.type == kRffiVideoCodecH264ConstrainedBaseline) {
|
||||
auto h264_cbp = cricket::VideoCodec(H264_CBP_PT, cricket::kH264CodecName);
|
||||
auto h264_cbp_rtx = cricket::VideoCodec::CreateRtxCodec(H264_CBP_RTX_PT, H264_CBP_PT);
|
||||
add_h264_params(&h264_cbp, H264Profile::kProfileConstrainedBaseline, rffi_codec.level);
|
||||
add_video_feedback_params(&h264_cbp);
|
||||
|
||||
video->AddCodec(h264_cbp);
|
||||
video->AddCodec(h264_cbp_rtx);
|
||||
}
|
||||
}
|
||||
|
||||
// These are "meta codecs" for redundancy and FEC.
|
||||
// They are enabled by default currently with WebRTC.
|
||||
auto red = cricket::VideoCodec(RED_PT, cricket::kRedCodecName);
|
||||
auto red_rtx = cricket::VideoCodec::CreateRtxCodec(RED_RTX_PT, RED_PT);
|
||||
auto ulpfec = cricket::VideoCodec(ULPFEC_PT, cricket::kUlpfecCodecName);
|
||||
|
||||
video->AddCodec(red);
|
||||
video->AddCodec(red_rtx);
|
||||
video->AddCodec(ulpfec);
|
||||
|
||||
auto transport_cc1 = webrtc::RtpExtension(webrtc::TransportSequenceNumber::Uri(), TRANSPORT_CC1_EXT_ID);
|
||||
// TransportCC V2 is now enabled by default, but the difference is that V2 doesn't send periodic updates
|
||||
// and instead waits for feedback requests. Since the existing clients don't send feedback
|
||||
// requests, we can't enable V2. We'd have to add it to signaling to move from V1 to V2.
|
||||
// auto transport_cc2 = webrtc::RtpExtension(webrtc::TransportSequenceNumberV2::Uri(), TRANSPORT_CC2_EXT_ID);
|
||||
auto video_orientation = webrtc::RtpExtension(webrtc::VideoOrientation::Uri(), VIDEO_ORIENTATION_EXT_ID);
|
||||
// abs_send_time and tx_time_offset are used for more accurate REMB messages from the receiver,
|
||||
// which are used by googcc in some small ways. So, keep it enabled.
|
||||
// But it doesn't make sense to enable both abs_send_time and tx_time_offset, so only use abs_send_time.
|
||||
auto abs_send_time = webrtc::RtpExtension(webrtc::AbsoluteSendTime::Uri(), ABS_SEND_TIME_EXT_ID);
|
||||
// auto tx_time_offset = webrtc::RtpExtension(webrtc::TransmissionOffset::Uri(), TX_TIME_OFFSET_EXT_ID);
|
||||
|
||||
// Note: Do not add transport-cc for audio. Using transport-cc with audio is still experimental in WebRTC.
|
||||
// And don't add abs_send_time because it's only used for video.
|
||||
video->AddRtpHeaderExtension(transport_cc1);
|
||||
video->AddRtpHeaderExtension(video_orientation);
|
||||
video->AddRtpHeaderExtension(abs_send_time);
|
||||
|
||||
auto audio_stream = cricket::StreamParams();
|
||||
audio_stream.id = AUDIO_TRACK_ID;
|
||||
audio_stream.add_ssrc(AUDIO_SSRC);
|
||||
|
||||
auto video_stream = cricket::StreamParams();
|
||||
video_stream.id = VIDEO_TRACK_ID;
|
||||
video_stream.add_ssrc(VIDEO_SSRC);
|
||||
video_stream.AddFidSsrc(VIDEO_SSRC, VIDEO_RTX_SSRC); // AKA RTX
|
||||
|
||||
// Things that are the same for all of them
|
||||
for (auto* stream : {&audio_stream, &video_stream}) {
|
||||
// WebRTC just generates a random 16-byte string for the entire PeerConnection.
|
||||
// It's used to send an SDES RTCP message.
|
||||
// The value doesn't seem to be used for anything else.
|
||||
// We'll set it anyway, just in case.
|
||||
// But everything seems to work fine without it.
|
||||
stream->cname = "CNAMECNAMECNAME!";
|
||||
}
|
||||
|
||||
audio->AddStream(audio_stream);
|
||||
video->AddStream(video_stream);
|
||||
|
||||
// TODO: Why is this only for video by default in WebRTC? Should we enable it for all of them?
|
||||
video->set_rtcp_reduced_size(true);
|
||||
|
||||
// Keep the order as the WebRTC default: (audio, video, data).
|
||||
auto audio_content_name = "audio";
|
||||
auto video_content_name = "video";
|
||||
|
||||
auto session = std::make_unique<cricket::SessionDescription>();
|
||||
session->AddTransportInfo(cricket::TransportInfo(audio_content_name, transport));
|
||||
session->AddTransportInfo(cricket::TransportInfo(video_content_name, transport));
|
||||
|
||||
bool stopped = false;
|
||||
session->AddContent(audio_content_name, cricket::MediaProtocolType::kRtp, stopped, std::move(audio));
|
||||
session->AddContent(video_content_name, cricket::MediaProtocolType::kRtp, stopped, std::move(video));
|
||||
|
||||
auto bundle = cricket::ContentGroup(cricket::GROUP_TYPE_BUNDLE);
|
||||
bundle.AddContentName(audio_content_name);
|
||||
bundle.AddContentName(video_content_name);
|
||||
session->AddGroup(bundle);
|
||||
|
||||
// This is the default and used for "Plan B" SDP, which is what we use in V1, V2, and V3.
|
||||
session->set_msid_signaling(cricket::kMsidSignalingSsrcAttribute);
|
||||
|
||||
auto typ = offer ? SdpType::kOffer : SdpType::kAnswer;
|
||||
return new webrtc::JsepSessionDescription(typ, std::move(session), "1", "1");
|
||||
}
|
||||
|
||||
const uint32_t INVALID_DEMUX_ID = 0;
|
||||
|
||||
webrtc::JsepSessionDescription*
|
||||
CreateSessionDescriptionForGroupCall(bool local,
|
||||
const std::string& ice_ufrag,
|
||||
const std::string& ice_pwd,
|
||||
RffiSrtpKey srtp_key,
|
||||
std::vector<uint32_t> rtp_demux_ids) {
|
||||
// Major changes from the default WebRTC behavior:
|
||||
// 1. We remove all codecs except Opus and VP8.
|
||||
// 2. We remove all header extensions except for transport-cc, video orientation,
|
||||
// abs send time, and audio level.
|
||||
// 3. Opus CBR and DTX are enabled.
|
||||
|
||||
// This must stay in sync with PeerConnectionFactory.createAudioTrack
|
||||
std::string LOCAL_AUDIO_TRACK_ID = "audio1";
|
||||
// This must stay in sync with PeerConnectionFactory.createVideoTrack
|
||||
std::string LOCAL_VIDEO_TRACK_ID = "video1";
|
||||
|
||||
auto transport = cricket::TransportDescription();
|
||||
transport.ice_mode = cricket::ICEMODE_FULL;
|
||||
transport.ice_ufrag = ice_ufrag;
|
||||
transport.ice_pwd = ice_pwd;
|
||||
transport.AddOption(cricket::ICE_OPTION_TRICKLE);
|
||||
|
||||
// DTLS is disabled
|
||||
transport.connection_role = cricket::CONNECTIONROLE_NONE;
|
||||
transport.identity_fingerprint = nullptr;
|
||||
|
||||
// Use SRTP master key material instead
|
||||
cricket::CryptoParams crypto_params;
|
||||
crypto_params.cipher_suite = rtc::SrtpCryptoSuiteToName(srtp_key.suite);
|
||||
std::string key(srtp_key.key_borrowed, srtp_key.key_len);
|
||||
std::string salt(srtp_key.salt_borrowed, srtp_key.salt_len);
|
||||
crypto_params.key_params = "inline:" + rtc::Base64::Encode(key + salt);
|
||||
|
||||
auto set_rtp_params = [crypto_params] (cricket::MediaContentDescription* media) {
|
||||
media->set_protocol(cricket::kMediaProtocolSavpf);
|
||||
media->set_rtcp_mux(true);
|
||||
media->set_direction(webrtc::RtpTransceiverDirection::kSendRecv);
|
||||
|
||||
std::vector<cricket::CryptoParams> cryptos;
|
||||
cryptos.push_back(crypto_params);
|
||||
media->set_cryptos(cryptos);
|
||||
};
|
||||
|
||||
auto audio = std::make_unique<cricket::AudioContentDescription>();
|
||||
set_rtp_params(audio.get());
|
||||
auto video = std::make_unique<cricket::VideoContentDescription>();
|
||||
set_rtp_params(video.get());
|
||||
|
||||
auto opus = cricket::AudioCodec(OPUS_PT, cricket::kOpusCodecName, 48000, 0, 2);
|
||||
// These are the current defaults for WebRTC
|
||||
// We set them explicitly to avoid having the defaults change on us.
|
||||
opus.SetParam("stereo", "0"); // "1" would cause non-VOIP mode to be used
|
||||
opus.SetParam("ptime", "20");
|
||||
opus.SetParam("minptime", "10");
|
||||
opus.SetParam("maxptime", "120");
|
||||
opus.SetParam("useinbandfec", "1");
|
||||
// This is not a default. We enable this to help reduce bandwidth because we
|
||||
// are using CBR.
|
||||
opus.SetParam("usedtx", "1");
|
||||
opus.SetParam("maxaveragebitrate", "32000");
|
||||
// This is not a default. We enable this for privacy.
|
||||
opus.SetParam("cbr", "1");
|
||||
opus.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
|
||||
audio->AddCodec(opus);
|
||||
|
||||
auto add_video_feedback_params = [] (cricket::VideoCodec* video_codec) {
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli));
|
||||
video_codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty));
|
||||
};
|
||||
|
||||
auto vp8 = cricket::VideoCodec(VP8_PT, cricket::kVp8CodecName);
|
||||
auto vp8_rtx = cricket::VideoCodec::CreateRtxCodec(VP8_RTX_PT, VP8_PT);
|
||||
add_video_feedback_params(&vp8);
|
||||
|
||||
video->AddCodec(vp8);
|
||||
video->AddCodec(vp8_rtx);
|
||||
|
||||
// These are "meta codecs" for redundancy and FEC.
|
||||
// They are enabled by default currently with WebRTC.
|
||||
auto red = cricket::VideoCodec(RED_PT, cricket::kRedCodecName);
|
||||
auto red_rtx = cricket::VideoCodec::CreateRtxCodec(RED_RTX_PT, RED_PT);
|
||||
|
||||
video->AddCodec(red);
|
||||
video->AddCodec(red_rtx);
|
||||
|
||||
auto transport_cc1 = webrtc::RtpExtension(webrtc::TransportSequenceNumber::Uri(), TRANSPORT_CC1_EXT_ID);
|
||||
// TransportCC V2 is now enabled by default, but the difference is that V2 doesn't send periodic updates
|
||||
// and instead waits for feedback requests. Since the SFU doesn't currently send feedback requests,
|
||||
// we can't enable V2. We'd have to add it to the SFU to move from V1 to V2.
|
||||
// auto transport_cc2 = webrtc::RtpExtension(webrtc::TransportSequenceNumberV2::Uri(), TRANSPORT_CC2_EXT_ID);
|
||||
auto video_orientation = webrtc::RtpExtension(webrtc::VideoOrientation::Uri(), VIDEO_ORIENTATION_EXT_ID);
|
||||
auto audio_level = webrtc::RtpExtension(webrtc::AudioLevel::Uri(), AUDIO_LEVEL_EXT_ID);
|
||||
// abs_send_time and tx_time_offset are used for more accurate REMB messages from the receiver,
|
||||
// but the SFU doesn't process REMB messages anyway, nor does it send or receive these header extensions.
|
||||
// So, don't waste bytes on them.
|
||||
// auto abs_send_time = webrtc::RtpExtension(webrtc::AbsoluteSendTime::Uri(), ABS_SEND_TIME_EXT_ID);
|
||||
// auto tx_time_offset = webrtc::RtpExtension(webrtc::TransmissionOffset::Uri(), TX_TIME_OFFSET_EXT_ID);
|
||||
|
||||
// Note: Do not add transport-cc for audio. Using transport-cc with audio is still experimental in WebRTC.
|
||||
// And don't add abs_send_time because it's only used for video.
|
||||
audio->AddRtpHeaderExtension(audio_level);
|
||||
video->AddRtpHeaderExtension(transport_cc1);
|
||||
video->AddRtpHeaderExtension(video_orientation);
|
||||
|
||||
for (uint32_t rtp_demux_id : rtp_demux_ids) {
|
||||
if (rtp_demux_id == INVALID_DEMUX_ID) {
|
||||
RTC_LOG(LS_WARNING) << "Ignoring demux ID of 0";
|
||||
continue;
|
||||
}
|
||||
|
||||
uint32_t audio_ssrc = rtp_demux_id + 0;
|
||||
// Leave room for audio RTX
|
||||
uint32_t video1_ssrc = rtp_demux_id + 2;
|
||||
uint32_t video1_rtx_ssrc = rtp_demux_id + 3;
|
||||
uint32_t video2_ssrc = rtp_demux_id + 4;
|
||||
uint32_t video2_rtx_ssrc = rtp_demux_id + 5;
|
||||
uint32_t video3_ssrc = rtp_demux_id + 6;
|
||||
uint32_t video3_rtx_ssrc = rtp_demux_id + 7;
|
||||
// Leave room for some more video layers or FEC
|
||||
// uint32_t data_ssrc = rtp_demux_id + 0xD; Used by group_call.rs
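// Worked example (illustrative demux ID): for rtp_demux_id = 0x20, the SSRCs
// above are audio 0x20, video layers 0x22/0x24/0x26 with RTX pairs
// 0x23/0x25/0x27, and group_call.rs reserves the data SSRC at 0x2D.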
|
||||
|
||||
auto audio_stream = cricket::StreamParams();
|
||||
|
||||
// We will use the string version of the demux ID to know which
|
||||
// track is for which remote device.
|
||||
std::string rtp_demux_id_str = rtc::ToString(rtp_demux_id);
|
||||
|
||||
// For local, this should stay in sync with PeerConnectionFactory.createAudioTrack
|
||||
// For remote, this will result in the remote audio track/receiver's ID,
|
||||
audio_stream.id = local ? LOCAL_AUDIO_TRACK_ID : rtp_demux_id_str;
|
||||
audio_stream.add_ssrc(audio_ssrc);
|
||||
|
||||
auto video_stream = cricket::StreamParams();
|
||||
// For local, this should stay in sync with PeerConnectionFactory.createVideoSource
|
||||
// For remote, this will result in the remote video track/receiver's ID,
|
||||
video_stream.id = local ? LOCAL_VIDEO_TRACK_ID : rtp_demux_id_str;
|
||||
video_stream.add_ssrc(video1_ssrc);
|
||||
if (local) {
|
||||
// Don't add simulcast for remote descriptions
|
||||
video_stream.add_ssrc(video2_ssrc);
|
||||
video_stream.add_ssrc(video3_ssrc);
|
||||
video_stream.ssrc_groups.push_back(cricket::SsrcGroup(cricket::kSimSsrcGroupSemantics, video_stream.ssrcs));
|
||||
}
|
||||
video_stream.AddFidSsrc(video1_ssrc, video1_rtx_ssrc); // AKA RTX
|
||||
if (local) {
|
||||
// Don't add simulcast for remote descriptions
|
||||
video_stream.AddFidSsrc(video2_ssrc, video2_rtx_ssrc); // AKA RTX
|
||||
video_stream.AddFidSsrc(video3_ssrc, video3_rtx_ssrc); // AKA RTX
|
||||
}
|
||||
// This makes screen share use 2 layers of the highest resolution
|
||||
// (but different quality/framerate) rather than 3 layers of
|
||||
// differing resolution.
|
||||
video->set_conference_mode(true);
|
||||
|
||||
// Things that are the same for all of them
|
||||
for (auto* stream : {&audio_stream, &video_stream}) {
|
||||
// WebRTC just generates a random 16-byte string for the entire PeerConnection.
|
||||
// It's used to send an SDES RTCP message.
|
||||
// The value doesn't seem to be used for anything else.
|
||||
// We'll set it anyway, just in case.
|
||||
// But everything seems to work fine without it.
|
||||
stream->cname = rtp_demux_id_str;
|
||||
}
|
||||
|
||||
audio->AddStream(audio_stream);
|
||||
video->AddStream(video_stream);
|
||||
}
|
||||
|
||||
// TODO: Why is this only for video by default in WebRTC? Should we enable it for all of them?
|
||||
video->set_rtcp_reduced_size(true);
|
||||
|
||||
// We don't set the crypto keys here.
|
||||
// We expect that will be done later by Rust_disableDtlsAndSetSrtpKey.
|
||||
|
||||
// Keep the order as the WebRTC default: (audio, video).
|
||||
auto audio_content_name = "audio";
|
||||
auto video_content_name = "video";
|
||||
|
||||
auto session = std::make_unique<cricket::SessionDescription>();
|
||||
session->AddTransportInfo(cricket::TransportInfo(audio_content_name, transport));
|
||||
session->AddTransportInfo(cricket::TransportInfo(video_content_name, transport));
|
||||
|
||||
bool stopped = false;
|
||||
session->AddContent(audio_content_name, cricket::MediaProtocolType::kRtp, stopped, std::move(audio));
|
||||
session->AddContent(video_content_name, cricket::MediaProtocolType::kRtp, stopped, std::move(video));
|
||||
|
||||
auto bundle = cricket::ContentGroup(cricket::GROUP_TYPE_BUNDLE);
|
||||
bundle.AddContentName(audio_content_name);
|
||||
bundle.AddContentName(video_content_name);
|
||||
session->AddGroup(bundle);
|
||||
|
||||
// This is the default and used for "Plan B" SDP, which is what we use in V1, V2, and V3.
|
||||
session->set_msid_signaling(cricket::kMsidSignalingSsrcAttribute);
|
||||
|
||||
auto typ = local ? SdpType::kOffer : SdpType::kAnswer;
|
||||
// The session ID and session version (both "1" here) go into SDP, but are not used at all.
|
||||
return new webrtc::JsepSessionDescription(typ, std::move(session), "1", "1");
|
||||
}
|
||||
|
||||
// Returns an owned pointer.
|
||||
RUSTEXPORT webrtc::SessionDescriptionInterface*
|
||||
Rust_localDescriptionForGroupCall(const char* ice_ufrag_borrowed,
|
||||
const char* ice_pwd_borrowed,
|
||||
RffiSrtpKey client_srtp_key,
|
||||
uint32_t rtp_demux_id) {
|
||||
std::vector<uint32_t> rtp_demux_ids;
|
||||
// A 0 demux_id means we don't know the demux ID yet and shouldn't include one.
|
||||
if (rtp_demux_id > 0) {
|
||||
rtp_demux_ids.push_back(rtp_demux_id);
|
||||
}
|
||||
return CreateSessionDescriptionForGroupCall(
|
||||
true /* local */, std::string(ice_ufrag_borrowed), std::string(ice_pwd_borrowed), client_srtp_key, rtp_demux_ids);
|
||||
}
|
||||
|
||||
// Returns an owned pointer.
|
||||
RUSTEXPORT webrtc::SessionDescriptionInterface*
|
||||
Rust_remoteDescriptionForGroupCall(const char* ice_ufrag_borrowed,
|
||||
const char* ice_pwd_borrowed,
|
||||
RffiSrtpKey server_srtp_key,
|
||||
uint32_t* rtp_demux_ids_borrowed,
|
||||
size_t rtp_demux_ids_len) {
|
||||
std::vector<uint32_t> rtp_demux_ids;
|
||||
rtp_demux_ids.assign(rtp_demux_ids_borrowed, rtp_demux_ids_borrowed + rtp_demux_ids_len);
|
||||
return CreateSessionDescriptionForGroupCall(
|
||||
false /* local */, std::string(ice_ufrag_borrowed), std::string(ice_pwd_borrowed), server_srtp_key, rtp_demux_ids);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_createAnswer(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
CreateSessionDescriptionObserverRffi* csd_observer_borrowed_rc) {
|
||||
|
||||
// No constraints are set
|
||||
MediaConstraints constraints = MediaConstraints();
|
||||
PeerConnectionInterface::RTCOfferAnswerOptions options;
|
||||
|
||||
CopyConstraintsIntoOfferAnswerOptions(&constraints, &options);
|
||||
peer_connection_borrowed_rc->CreateAnswer(csd_observer_borrowed_rc, options);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_setRemoteDescription(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
SetSessionDescriptionObserverRffi* ssd_observer_borrowed_rc,
|
||||
SessionDescriptionInterface* description_owned) {
|
||||
peer_connection_borrowed_rc->SetRemoteDescription(ssd_observer_borrowed_rc, description_owned);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_deleteSessionDescription(webrtc::SessionDescriptionInterface* description_owned) {
|
||||
delete description_owned;
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_setOutgoingMediaEnabled(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
bool enabled) {
|
||||
// Note: calling SetAudioRecording(enabled) is deprecated and it's not clear
|
||||
// that it even does anything any more.
|
||||
int encodings_changed = 0;
|
||||
for (auto& sender : peer_connection_borrowed_rc->GetSenders()) {
|
||||
RtpParameters parameters = sender->GetParameters();
|
||||
for (auto& encoding: parameters.encodings) {
|
||||
encoding.active = enabled;
|
||||
encodings_changed++;
|
||||
}
|
||||
sender->SetParameters(parameters);
|
||||
}
|
||||
RTC_LOG(LS_INFO) << "Rust_setOutgoingMediaEnabled(" << enabled << ") for " << encodings_changed << " encodings.";
|
||||
}
|
||||
|
||||
RUSTEXPORT bool
|
||||
Rust_setIncomingMediaEnabled(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
bool enabled) {
|
||||
RTC_LOG(LS_INFO) << "Rust_setIncomingMediaEnabled(" << enabled << ")";
|
||||
return peer_connection_borrowed_rc->SetIncomingRtpEnabled(enabled);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_setAudioPlayoutEnabled(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
bool enabled) {
|
||||
RTC_LOG(LS_INFO) << "Rust_setAudioPlayoutEnabled(" << enabled << ")";
|
||||
peer_connection_borrowed_rc->SetAudioPlayout(enabled);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_setAudioRecordingEnabled(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
bool enabled) {
|
||||
RTC_LOG(LS_INFO) << "Rust_setAudioRecordingEnabled(" << enabled << ")";
|
||||
peer_connection_borrowed_rc->SetAudioRecording(enabled);
|
||||
}
|
||||
|
||||
RUSTEXPORT bool
|
||||
Rust_addIceCandidateFromSdp(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
const char* sdp_borrowed) {
|
||||
// Since we always use bundle, we can always use index 0 and ignore the mid
|
||||
std::unique_ptr<IceCandidateInterface> ice_candidate(
|
||||
CreateIceCandidate("", 0, std::string(sdp_borrowed), nullptr));
|
||||
|
||||
return peer_connection_borrowed_rc->AddIceCandidate(ice_candidate.get());
|
||||
}
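
// A minimal usage sketch (the candidate string is illustrative); because
// everything is bundled, the empty mid and index 0 used above cover all media:
//
//   Rust_addIceCandidateFromSdp(pc,
//       "candidate:1 1 udp 2122260223 192.0.2.5 50000 typ host generation 0");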
|
||||
|
||||
RUSTEXPORT bool
|
||||
Rust_removeIceCandidates(PeerConnectionInterface* pc_borrowed_rc,
|
||||
IpPort* removed_addresses_data_borrowed,
|
||||
size_t removed_addresses_len) {
|
||||
std::vector<IpPort> removed_addresses;
|
||||
removed_addresses.assign(removed_addresses_data_borrowed, removed_addresses_data_borrowed + removed_addresses_len);
|
||||
|
||||
std::vector<cricket::Candidate> candidates_removed;
|
||||
for (const auto& address_removed : removed_addresses) {
|
||||
// This only needs to contain the correct transport_name, component, protocol, and address.
|
||||
// See Candidate::MatchesForRemoval and JsepTransportController::RemoveRemoteCandidates.
|
||||
// But we know (because we bundle/rtcp-mux everything) that the transport name is "audio",
|
||||
// and the component is 1. We also know (because we don't use TCP candidates) that the
|
||||
// protocol is UDP. So we only need to know the address.
|
||||
cricket::Candidate candidate_removed;
|
||||
candidate_removed.set_transport_name("audio");
|
||||
candidate_removed.set_component(cricket::ICE_CANDIDATE_COMPONENT_RTP);
|
||||
candidate_removed.set_protocol(cricket::UDP_PROTOCOL_NAME);
|
||||
candidate_removed.set_address(IpPortToRtcSocketAddress(address_removed));
|
||||
|
||||
candidates_removed.push_back(candidate_removed);
|
||||
}
|
||||
|
||||
return pc_borrowed_rc->RemoveIceCandidates(candidates_removed);
|
||||
}
|
||||
|
||||
|
||||
RUSTEXPORT bool
|
||||
Rust_addIceCandidateFromServer(PeerConnectionInterface* pc_borrowed_rc,
|
||||
Ip ip,
|
||||
uint16_t port,
|
||||
bool tcp) {
|
||||
cricket::Candidate candidate;
|
||||
// The default foundation is "", which is fine because we bundle.
|
||||
// The default generation is 0, which is fine because we don't do ICE restarts.
|
||||
// The default username and password are "", which is fine because
|
||||
// P2PTransportChannel::AddRemoteCandidate looks up the ICE ufrag and pwd
|
||||
// from the remote description when the candidate's copy is empty.
|
||||
// Unset network ID, network cost, and network type are fine because they are for p2p use.
|
||||
// An unset relay protocol is fine because we aren't doing relay.
|
||||
// An unset related address is fine because we aren't doing relay or STUN.
|
||||
//
|
||||
// The critical values are component, type, protocol, and address, so we set those.
|
||||
//
|
||||
// The component doesn't really matter because we use RTCP-mux, so there is only one component.
|
||||
// However, WebRTC expects it to be set to ICE_CANDIDATE_COMPONENT_RTP(1), so we do that.
|
||||
//
|
||||
// The priority is also important for controlling whether we prefer IPv4 or IPv6 when both are available.
|
||||
// WebRTC generally prefers IPv6 over IPv4 for local candidates (see rtc_base::IPAddressPrecedence).
|
||||
// So we leave the priority unset to allow the local candidate preference to break the tie.
|
||||
candidate.set_component(cricket::ICE_CANDIDATE_COMPONENT_RTP);
|
||||
candidate.set_type(cricket::LOCAL_PORT_TYPE); // AKA "host"
|
||||
candidate.set_address(rtc::SocketAddress(IpToRtcIp(ip), port));
|
||||
candidate.set_protocol(tcp ? cricket::TCP_PROTOCOL_NAME : cricket::UDP_PROTOCOL_NAME);
|
||||
|
||||
// Since we always use bundle, we can always use index 0 and ignore the mid
|
||||
std::unique_ptr<IceCandidateInterface> ice_candidate(
|
||||
CreateIceCandidate("", 0, candidate));
|
||||
|
||||
return pc_borrowed_rc->AddIceCandidate(ice_candidate.get());
|
||||
}
|
||||
|
||||
RUSTEXPORT IceGathererInterface*
|
||||
Rust_createSharedIceGatherer(PeerConnectionInterface* peer_connection_borrowed_rc) {
|
||||
return take_rc(peer_connection_borrowed_rc->CreateSharedIceGatherer());
|
||||
}
|
||||
|
||||
RUSTEXPORT bool
|
||||
Rust_useSharedIceGatherer(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
IceGathererInterface* ice_gatherer_borrowed_rc) {
|
||||
return peer_connection_borrowed_rc->UseSharedIceGatherer(inc_rc(ice_gatherer_borrowed_rc));
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_getStats(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
StatsObserverRffi* stats_observer_borrowed_rc) {
|
||||
peer_connection_borrowed_rc->GetStats(stats_observer_borrowed_rc);
|
||||
}
|
||||
|
||||
// This is fairly complex in WebRTC, but I think it's something like this:
|
||||
// Must be that 0 <= min <= start <= max.
|
||||
// But any value can be unset (-1). If so, here is what happens:
|
||||
// If min isn't set, either use 30kbps (from PeerConnectionFactory::CreateCall_w) or no min (0 from WebRtcVideoChannel::ApplyChangedParams)
|
||||
// If start isn't set, use the previous start; initially 100kbps (from PeerConnectionFactory::CreateCall_w)
|
||||
// If max isn't set, either use 2mbps (from PeerConnectionFactory::CreateCall_w) or no max (-1 from WebRtcVideoChannel::ApplyChangedParams).
|
||||
// If min and max are set but haven't changed since the last unset value, nothing happens.
|
||||
// There is only an action if either min or max has changed or start is set.
|
||||
RUSTEXPORT void
|
||||
Rust_setSendBitrates(PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
int32_t min_bitrate_bps,
|
||||
int32_t start_bitrate_bps,
|
||||
int32_t max_bitrate_bps) {
|
||||
struct BitrateSettings bitrate_settings;
|
||||
if (min_bitrate_bps >= 0) {
|
||||
bitrate_settings.min_bitrate_bps = min_bitrate_bps;
|
||||
}
|
||||
if (start_bitrate_bps >= 0) {
|
||||
bitrate_settings.start_bitrate_bps = start_bitrate_bps;
|
||||
}
|
||||
if (max_bitrate_bps >= 0) {
|
||||
bitrate_settings.max_bitrate_bps = max_bitrate_bps;
|
||||
}
|
||||
peer_connection_borrowed_rc->SetBitrate(bitrate_settings);
|
||||
}
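
// A minimal usage sketch of the semantics described above (the 1 Mbps cap is
// illustrative): passing -1 leaves a field unset, so this call changes only
// the maximum and keeps WebRTC's defaults for min and start.
static void ExampleCapSendBitrate(PeerConnectionInterface* pc_borrowed_rc) {
  Rust_setSendBitrates(pc_borrowed_rc, -1, -1, 1000000);
}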
|
||||
|
||||
// Warning: this blocks on the WebRTC network thread, so avoid calling it
|
||||
// while holding a lock, especially a lock also taken in a callback
|
||||
// from the network thread.
|
||||
RUSTEXPORT bool
|
||||
Rust_sendRtp(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
uint8_t pt,
|
||||
uint16_t seqnum,
|
||||
uint32_t timestamp,
|
||||
uint32_t ssrc,
|
||||
const uint8_t* payload_data_borrowed,
|
||||
size_t payload_size) {
|
||||
size_t packet_size = 12 /* RTP header */ + payload_size + 16 /* SRTP footer */;
|
||||
std::unique_ptr<RtpPacket> packet(
|
||||
new RtpPacket(nullptr /* header extension map */, packet_size));
|
||||
packet->SetPayloadType(pt);
|
||||
packet->SetSequenceNumber(seqnum);
|
||||
packet->SetTimestamp(timestamp);
|
||||
packet->SetSsrc(ssrc);
|
||||
memcpy(packet->AllocatePayload(payload_size), payload_data_borrowed, payload_size);
|
||||
return peer_connection_borrowed_rc->SendRtp(std::move(packet));
|
||||
}
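
// Worked example of the sizing above (illustrative payload size): a 100-byte
// payload reserves 12 (RTP header) + 100 + 16 (SRTP footer) = 128 bytes of
// packet capacity, leaving room for encryption to be applied in place later.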
|
||||
|
||||
// Warning: this blocks on the WebRTC network thread, so avoid calling it
|
||||
// while holding a lock, especially a lock also taken in a callback
|
||||
// from the network thread.
|
||||
RUSTEXPORT bool
|
||||
Rust_receiveRtp(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc, uint8_t pt) {
|
||||
return peer_connection_borrowed_rc->ReceiveRtp(pt);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_configureAudioEncoders(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc, const webrtc::AudioEncoder::Config* config_borrowed) {
|
||||
RTC_LOG(LS_INFO) << "Rust_configureAudioEncoders(...)";
|
||||
peer_connection_borrowed_rc->ConfigureAudioEncoders(*config_borrowed);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_getAudioLevels(webrtc::PeerConnectionInterface* peer_connection_borrowed_rc,
|
||||
cricket::AudioLevel* captured_out,
|
||||
cricket::ReceivedAudioLevel* received_out,
|
||||
size_t received_out_size,
|
||||
size_t* received_size_out) {
|
||||
RTC_LOG(LS_VERBOSE) << "Rust_getAudioLevels(...)";
|
||||
peer_connection_borrowed_rc->GetAudioLevels(captured_out, received_out, received_out_size, received_size_out);
|
||||
}
|
||||
|
||||
RUSTEXPORT void
|
||||
Rust_closePeerConnection(PeerConnectionInterface* peer_connection_borrowed_rc) {
|
||||
peer_connection_borrowed_rc->Close();
|
||||
}
|
||||
|
||||
} // namespace rffi
|
||||
} // namespace webrtc
|
437
ringrtc/rffi/src/peer_connection_factory.cc
Normal file
@@ -0,0 +1,437 @@
/*
|
||||
* Copyright 2019-2021 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
#include "api/create_peerconnection_factory.h"
|
||||
#include "api/call/call_factory_interface.h"
|
||||
#include "api/task_queue/default_task_queue_factory.h"
|
||||
#include "api/rtc_event_log/rtc_event_log_factory.h"
|
||||
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
|
||||
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
|
||||
#include "api/video_codecs/builtin_video_decoder_factory.h"
|
||||
#include "api/video_codecs/builtin_video_encoder_factory.h"
|
||||
#include "media/engine/webrtc_media_engine.h"
|
||||
#include "modules/audio_mixer/audio_mixer_impl.h"
|
||||
#include "modules/audio_processing/include/audio_processing.h"
|
||||
#include "pc/peer_connection_factory.h"
|
||||
#include "rffi/api/media.h"
|
||||
#include "rffi/api/peer_connection_factory.h"
|
||||
#include "rffi/api/peer_connection_observer_intf.h"
|
||||
#include "rffi/api/injectable_network.h"
|
||||
#include "rffi/src/peer_connection_observer.h"
|
||||
#include "rffi/src/ptr.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/log_sinks.h"
|
||||
#include "rtc_base/message_digest.h"
|
||||
|
||||
#if defined(WEBRTC_ANDROID)
|
||||
#include "sdk/android/src/jni/pc/android_network_monitor.h"
|
||||
#endif
|
||||
|
||||
#if defined(WEBRTC_WIN)
|
||||
#include "modules/audio_device/win/core_audio_utility_win.h"
|
||||
#include "modules/audio_device/include/audio_device_factory.h"
|
||||
#include "rtc_base/win/scoped_com_initializer.h"
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
namespace rffi {
|
||||
|
||||
class PeerConnectionFactoryWithOwnedThreads
|
||||
: public PeerConnectionFactoryOwner {
|
||||
public:
|
||||
static rtc::scoped_refptr<PeerConnectionFactoryWithOwnedThreads> Create(
|
||||
bool use_new_audio_device_module,
|
||||
bool use_injectable_network) {
|
||||
// Creating a PeerConnectionFactory is a little complex. To make sure we're doing it right, we read several examples:
|
||||
// Android SDK:
|
||||
// https://cs.chromium.org/chromium/src/third_party/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
|
||||
// iOS SDK:
|
||||
// https://cs.chromium.org/chromium/src/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
|
||||
// Chromium:
|
||||
// https://cs.chromium.org/chromium/src/third_party/blink/renderer/modules/peerconnection/peer_connection_dependency_factory.cc
|
||||
// Default:
|
||||
// https://cs.chromium.org/chromium/src/third_party/webrtc/api/create_peerconnection_factory.cc?q=CreateModularPeerConnectionFactory%5C(&dr=C&l=40
|
||||
// Others:
|
||||
// https://cs.chromium.org/chromium/src/remoting/protocol/webrtc_transport.cc?l=246
|
||||
// https://cs.chromium.org/chromium/src/third_party/webrtc/examples/peerconnection/client/conductor.cc?q=CreatePeerConnectionFactory%5C(&l=133&dr=C
|
||||
// https://cs.chromium.org/chromium/src/third_party/webrtc/examples/unityplugin/simple_peer_connection.cc?q=CreatePeerConnectionFactory%5C(&dr=C&l=131
|
||||
// https://cs.chromium.org/chromium/src/third_party/webrtc/examples/objcnativeapi/objc/objc_call_client.mm?q=CreateModularPeerConnectionFactory%5C(&dr=C&l=104
|
||||
// https://cs.chromium.org/chromium/src/third_party/webrtc/examples/androidnativeapi/jni/android_call_client.cc?q=CreatePeerConnectionFactory%5C(&dr=C&l=141
|
||||
|
||||
auto network_thread = CreateAndStartNetworkThread("Network-Thread");
|
||||
auto worker_thread = CreateAndStartNonNetworkThread("Worker-Thread");
|
||||
auto signaling_thread = CreateAndStartNonNetworkThread("Signaling-Thread");
|
||||
std::unique_ptr<InjectableNetwork> injectable_network;
|
||||
if (use_injectable_network) {
|
||||
injectable_network = CreateInjectableNetwork(network_thread.get());
|
||||
}
|
||||
|
||||
PeerConnectionFactoryDependencies dependencies;
|
||||
dependencies.network_thread = network_thread.get();
|
||||
dependencies.worker_thread = worker_thread.get();
|
||||
dependencies.signaling_thread = signaling_thread.get();
|
||||
dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
|
||||
dependencies.call_factory = CreateCallFactory();
|
||||
dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>(dependencies.task_queue_factory.get());
|
||||
#if defined(WEBRTC_ANDROID)
|
||||
dependencies.network_monitor_factory = std::make_unique<jni::AndroidNetworkMonitorFactory>();
|
||||
#endif
|
||||
cricket::MediaEngineDependencies media_dependencies;
|
||||
media_dependencies.task_queue_factory = dependencies.task_queue_factory.get();
|
||||
|
||||
#if defined(WEBRTC_WIN)
|
||||
std::unique_ptr<ScopedCOMInitializer> com_initializer;
|
||||
#endif
|
||||
|
||||
// The audio device module must be created (and destroyed) on the _worker_ thread.
|
||||
// It is safe to release the reference on this thread, however, because the PeerConnectionFactory keeps its own reference.
|
||||
auto adm = worker_thread->BlockingCall([&]() {
|
||||
if (use_new_audio_device_module) {
|
||||
#if defined(WEBRTC_WIN)
|
||||
com_initializer = std::make_unique<ScopedCOMInitializer>(ScopedCOMInitializer::kMTA);
|
||||
if (com_initializer->Succeeded()) {
|
||||
return CreateWindowsCoreAudioAudioDeviceModule(dependencies.task_queue_factory.get());
|
||||
} else {
|
||||
RTC_LOG(LS_WARNING) << "Failed to initialize ScopedCOMInitializer. Will use the default.";
|
||||
}
|
||||
#endif
|
||||
}
|
||||
return AudioDeviceModule::Create(
|
||||
AudioDeviceModule::kPlatformDefaultAudio, dependencies.task_queue_factory.get());
|
||||
});
|
||||
media_dependencies.adm = adm;
|
||||
media_dependencies.audio_encoder_factory = CreateBuiltinAudioEncoderFactory();
|
||||
media_dependencies.audio_decoder_factory = CreateBuiltinAudioDecoderFactory();
|
||||
media_dependencies.audio_processing = AudioProcessingBuilder().Create();
|
||||
media_dependencies.audio_mixer = AudioMixerImpl::Create();
|
||||
media_dependencies.video_encoder_factory = CreateBuiltinVideoEncoderFactory();
|
||||
media_dependencies.video_decoder_factory = CreateBuiltinVideoDecoderFactory();
|
||||
dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_dependencies));
|
||||
|
||||
auto factory = CreateModularPeerConnectionFactory(std::move(dependencies));
|
||||
return rtc::make_ref_counted<PeerConnectionFactoryWithOwnedThreads>(
|
||||
std::move(factory),
|
||||
std::move(network_thread),
|
||||
std::move(worker_thread),
|
||||
std::move(signaling_thread),
|
||||
std::move(injectable_network),
|
||||
#if defined(WEBRTC_WIN)
|
||||
std::move(com_initializer),
|
||||
#endif
|
||||
adm.get());
|
||||
}
|
||||
|
||||
~PeerConnectionFactoryWithOwnedThreads() override {
|
||||
RTC_LOG(LS_INFO) << "~PeerConnectionFactoryWithOwnedThreads()";
|
||||
}
|
||||
|
||||
PeerConnectionFactoryInterface* peer_connection_factory() override {
|
||||
return factory_.get();
|
||||
}
|
||||
|
||||
rffi::InjectableNetwork* injectable_network() override {
|
||||
return injectable_network_.get();
|
||||
}
|
||||
|
||||
int16_t AudioPlayoutDevices() override {
|
||||
return owned_worker_thread_->BlockingCall([&]() {
|
||||
return audio_device_module_->PlayoutDevices();
|
||||
});
|
||||
}
|
||||
|
||||
int32_t AudioPlayoutDeviceName(uint16_t index, char* name_out, char* uuid_out) override {
|
||||
return owned_worker_thread_->BlockingCall([&]() {
|
||||
return audio_device_module_->PlayoutDeviceName(index, name_out, uuid_out);
|
||||
});
|
||||
}
|
||||
|
||||
bool SetAudioPlayoutDevice(uint16_t index) override {
|
||||
return owned_worker_thread_->BlockingCall([&]() {
|
||||
// We need to stop and restart playout if it's already in progress.
|
||||
bool was_initialized = audio_device_module_->PlayoutIsInitialized();
|
||||
bool was_playing = audio_device_module_->Playing();
|
||||
if (was_initialized) {
|
||||
if (audio_device_module_->StopPlayout() != 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (audio_device_module_->SetPlayoutDevice(index) != 0) {
|
||||
return false;
|
||||
}
|
||||
if (was_initialized) {
|
||||
if (audio_device_module_->InitPlayout() != 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (was_playing) {
|
||||
if (audio_device_module_->StartPlayout() != 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
int16_t AudioRecordingDevices() override {
|
||||
return owned_worker_thread_->BlockingCall([&]() {
|
||||
return audio_device_module_->RecordingDevices();
|
||||
});
|
||||
}
|
||||
|
||||
int32_t AudioRecordingDeviceName(uint16_t index, char* name_out, char* uuid_out) override {
|
||||
return owned_worker_thread_->BlockingCall([&]() {
|
||||
return audio_device_module_->RecordingDeviceName(index, name_out, uuid_out);
|
||||
});
|
||||
}
|
||||
|
||||
bool SetAudioRecordingDevice(uint16_t index) override {
|
||||
return owned_worker_thread_->BlockingCall([&]() {
|
||||
// We need to stop and restart recording if it is already in progress.
|
||||
bool was_initialized = audio_device_module_->RecordingIsInitialized();
|
||||
bool was_recording = audio_device_module_->Recording();
|
||||
if (was_initialized) {
|
||||
if (audio_device_module_->StopRecording() != 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (audio_device_module_->SetRecordingDevice(index) != 0) {
|
||||
return false;
|
||||
}
|
||||
if (was_initialized) {
|
||||
if (audio_device_module_->InitRecording() != 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (was_recording) {
|
||||
if (audio_device_module_->StartRecording() != 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
protected:
|
||||
PeerConnectionFactoryWithOwnedThreads(
|
||||
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory,
|
||||
std::unique_ptr<rtc::Thread> owned_network_thread,
|
||||
std::unique_ptr<rtc::Thread> owned_worker_thread,
|
||||
std::unique_ptr<rtc::Thread> owned_signaling_thread,
|
||||
std::unique_ptr<rffi::InjectableNetwork> injectable_network,
|
||||
#if defined(WEBRTC_WIN)
|
||||
std::unique_ptr<ScopedCOMInitializer> com_initializer,
|
||||
#endif
|
||||
AudioDeviceModule* audio_device_module) :
|
||||
owned_network_thread_(std::move(owned_network_thread)),
|
||||
owned_worker_thread_(std::move(owned_worker_thread)),
|
||||
owned_signaling_thread_(std::move(owned_signaling_thread)),
|
||||
injectable_network_(std::move(injectable_network)),
|
||||
#if defined(WEBRTC_WIN)
|
||||
com_initializer_(std::move(com_initializer)),
|
||||
#endif
|
||||
audio_device_module_(audio_device_module),
|
||||
factory_(std::move(factory)) {
|
||||
}
|
||||
|
||||
private:
|
||||
static std::unique_ptr<rtc::Thread> CreateAndStartNetworkThread(std::string name) {
|
||||
std::unique_ptr<rtc::Thread> thread = rtc::Thread::CreateWithSocketServer();
|
||||
thread->SetName(name, nullptr);
|
||||
thread->Start();
|
||||
return thread;
|
||||
}
|
||||
|
||||
static std::unique_ptr<rtc::Thread> CreateAndStartNonNetworkThread(std::string name) {
|
||||
std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
|
||||
thread->SetName(name, nullptr);
|
||||
thread->Start();
|
||||
return thread;
|
||||
}
|
||||
|
||||
const std::unique_ptr<rtc::Thread> owned_network_thread_;
|
||||
const std::unique_ptr<rtc::Thread> owned_worker_thread_;
|
||||
const std::unique_ptr<rtc::Thread> owned_signaling_thread_;
|
||||
std::unique_ptr<rffi::InjectableNetwork> injectable_network_;
|
||||
#if defined(WEBRTC_WIN)
|
||||
std::unique_ptr<ScopedCOMInitializer> com_initializer_;
|
||||
#endif
|
||||
webrtc::AudioDeviceModule* audio_device_module_;
|
||||
const rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
|
||||
};
|
||||
|
||||
// Returns an owned RC.
|
||||
RUSTEXPORT PeerConnectionFactoryOwner* Rust_createPeerConnectionFactory(
|
||||
bool use_new_audio_device_module,
|
||||
bool use_injectable_network) {
|
||||
auto factory_owner = PeerConnectionFactoryWithOwnedThreads::Create(
|
||||
use_new_audio_device_module,
|
||||
use_injectable_network);
|
||||
return take_rc(std::move(factory_owner));
|
||||
}
|
||||
|
||||
// Returns an owned RC.
|
||||
RUSTEXPORT PeerConnectionFactoryOwner* Rust_createPeerConnectionFactoryWrapper(
|
||||
PeerConnectionFactoryInterface* pcf_borrowed_rc) {
|
||||
class PeerConnectionFactoryWrapper : public PeerConnectionFactoryOwner {
|
||||
public:
|
||||
PeerConnectionFactoryInterface* peer_connection_factory() override {
|
||||
return factory_.get();
|
||||
}
|
||||
|
||||
PeerConnectionFactoryWrapper(
|
||||
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory) :
|
||||
factory_(std::move(factory)) {
|
||||
}
|
||||
|
||||
private:
|
||||
const rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
|
||||
};
|
||||
|
||||
return take_rc(rtc::make_ref_counted<PeerConnectionFactoryWrapper>(inc_rc(pcf_borrowed_rc)));
|
||||
}
|
||||
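// Hypothetical caller-side sketch of the owned-RC contract above: the pointer returned by
// Rust_createPeerConnectionFactory is an owned reference that the caller eventually balances
// with Rust_decRc (declared in rffi/api/ref_count.h, assuming PeerConnectionFactoryOwner is
// ref-counted via rtc::RefCountInterface, as its use with take_rc above suggests).
//
//   PeerConnectionFactoryOwner* factory_owner_owned_rc =
//       Rust_createPeerConnectionFactory(/*use_new_audio_device_module=*/false,
//                                        /*use_injectable_network=*/false);
//   // ... create tracks and peer connections through the factory owner ...
//   Rust_decRc(factory_owner_owned_rc);  // drop the owned reference when done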

// Returns an owned RC.
RUSTEXPORT PeerConnectionInterface* Rust_createPeerConnection(
    PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    PeerConnectionObserverRffi* observer_borrowed,
    bool hide_ip,
    RffiIceServer ice_server,
    webrtc::AudioTrackInterface* outgoing_audio_track_borrowed_rc,
    webrtc::VideoTrackInterface* outgoing_video_track_borrowed_rc) {
  auto factory = factory_owner_borrowed_rc->peer_connection_factory();

  PeerConnectionInterface::RTCConfiguration config;
  config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
  config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
  config.tcp_candidate_policy = PeerConnectionInterface::kTcpCandidatePolicyDisabled;
  if (hide_ip) {
    config.type = PeerConnectionInterface::kRelay;
  }
  config.sdp_semantics = SdpSemantics::kPlanB_DEPRECATED;
  if (ice_server.urls_size > 0) {
    webrtc::PeerConnectionInterface::IceServer rtc_ice_server;
    rtc_ice_server.username = std::string(ice_server.username_borrowed);
    rtc_ice_server.password = std::string(ice_server.password_borrowed);
    for (size_t i = 0; i < ice_server.urls_size; i++) {
      rtc_ice_server.urls.push_back(std::string(ice_server.urls_borrowed[i]));
    }
    config.servers.push_back(rtc_ice_server);
  }

  config.crypto_options = webrtc::CryptoOptions{};
  if (observer_borrowed->enable_frame_encryption()) {
    config.crypto_options->sframe.require_frame_encryption = true;
  }
  config.crypto_options->srtp.enable_gcm_crypto_suites = true;
  config.continual_gathering_policy = PeerConnectionInterface::ContinualGatheringPolicy::GATHER_CONTINUALLY;

  // PeerConnectionDependencies.observer is copied to PeerConnection.observer_.
  // It must live as long as the PeerConnection.
  PeerConnectionDependencies deps(observer_borrowed);
  if (factory_owner_borrowed_rc->injectable_network()) {
    deps.allocator = factory_owner_borrowed_rc->injectable_network()->CreatePortAllocator();
  }
  auto result = factory->CreatePeerConnectionOrError(config, std::move(deps));
  if (!result.ok()) {
    RTC_LOG(LS_INFO) << "Failed to CreatePeerConnection: " << result.error().message();
    return nullptr;
  }
  rtc::scoped_refptr<PeerConnectionInterface> pc = result.MoveValue();

  // We use an arbitrary stream_id because existing apps want a MediaStream to pop out.
  auto stream_id = "s";
  std::vector<std::string> stream_ids;
  stream_ids.push_back(stream_id);

  if (outgoing_audio_track_borrowed_rc) {
    auto result = pc->AddTrack(inc_rc(outgoing_audio_track_borrowed_rc), stream_ids);
    if (result.ok()) {
      if (observer_borrowed->enable_frame_encryption()) {
        auto rtp_sender = result.MoveValue();
        rtp_sender->SetFrameEncryptor(observer_borrowed->CreateEncryptor());
      }
    } else {
      RTC_LOG(LS_ERROR) << "Failed to PeerConnection::AddTrack(audio)";
    }
  }

  if (outgoing_video_track_borrowed_rc) {
    auto result = pc->AddTrack(inc_rc(outgoing_video_track_borrowed_rc), stream_ids);
    if (result.ok()) {
      if (observer_borrowed->enable_frame_encryption()) {
        auto rtp_sender = result.MoveValue();
        rtp_sender->SetFrameEncryptor(observer_borrowed->CreateEncryptor());
      }
    } else {
      RTC_LOG(LS_ERROR) << "Failed to PeerConnection::AddTrack(video)";
    }
  }

  return take_rc(pc);
}
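// Hypothetical sketch of filling in the RffiIceServer argument, based only on the fields
// read above; the full struct definition lives in the rffi/api headers and is not shown
// here, and the URL and credentials below are placeholders. Illustration only; nothing in
// this file references this function.
static PeerConnectionInterface* ExampleCreateRelayedPeerConnection(
    PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    PeerConnectionObserverRffi* observer_borrowed,
    AudioTrackInterface* audio_track_borrowed_rc,
    VideoTrackInterface* video_track_borrowed_rc) {
  static const char* kUrls[] = {"turn:turn.example.org"};  // placeholder URL
  RffiIceServer ice_server = {};
  ice_server.username_borrowed = "user";  // placeholder credential
  ice_server.password_borrowed = "pass";  // placeholder credential
  ice_server.urls_borrowed = kUrls;
  ice_server.urls_size = 1;
  return Rust_createPeerConnection(factory_owner_borrowed_rc, observer_borrowed,
                                   /*hide_ip=*/true, ice_server,
                                   audio_track_borrowed_rc, video_track_borrowed_rc);
}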

// Returns a borrowed pointer.
RUSTEXPORT webrtc::rffi::InjectableNetwork* Rust_getInjectableNetwork(
    PeerConnectionFactoryOwner* factory_owner_borrowed_rc) {
  return factory_owner_borrowed_rc->injectable_network();
}

// Returns an owned RC.
RUSTEXPORT AudioTrackInterface* Rust_createAudioTrack(
    PeerConnectionFactoryOwner* factory_owner_borrowed_rc) {
  auto factory = factory_owner_borrowed_rc->peer_connection_factory();

  cricket::AudioOptions options;
  auto source = factory->CreateAudioSource(options);
  // Note: This must stay "audio1" to stay in sync with V4 signaling.
  return take_rc(factory->CreateAudioTrack("audio1", source.get()));
}

// Returns an owned RC.
RUSTEXPORT VideoTrackSourceInterface* Rust_createVideoSource() {
  return take_rc(rtc::make_ref_counted<webrtc::rffi::VideoSource>());
}

// Returns an owned RC.
RUSTEXPORT VideoTrackInterface* Rust_createVideoTrack(
    PeerConnectionFactoryOwner* factory_owner_borrowed_rc,
    VideoTrackSourceInterface* source_borrowed_rc) {
  auto factory = factory_owner_borrowed_rc->peer_connection_factory();

  // PeerConnectionFactory::CreateVideoTrack increments the refcount on source.
  // Note: This must stay "video1" to stay in sync with V4 signaling.
  return take_rc(factory->CreateVideoTrack("video1", source_borrowed_rc));
}

RUSTEXPORT int16_t Rust_getAudioPlayoutDevices(
    PeerConnectionFactoryOwner* factory_owner_borrowed_rc) {
  return factory_owner_borrowed_rc->AudioPlayoutDevices();
}

RUSTEXPORT int32_t Rust_getAudioPlayoutDeviceName(webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc, uint16_t index, char* name_out, char* uuid_out) {
  return factory_owner_borrowed_rc->AudioPlayoutDeviceName(index, name_out, uuid_out);
}

RUSTEXPORT bool Rust_setAudioPlayoutDevice(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc, uint16_t index) {
  return factory_owner_borrowed_rc->SetAudioPlayoutDevice(index);
}

RUSTEXPORT int16_t Rust_getAudioRecordingDevices(
    PeerConnectionFactoryOwner* factory_owner_borrowed_rc) {
  return factory_owner_borrowed_rc->AudioRecordingDevices();
}

RUSTEXPORT int32_t Rust_getAudioRecordingDeviceName(webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc, uint16_t index, char* name_out, char* uuid_out) {
  return factory_owner_borrowed_rc->AudioRecordingDeviceName(index, name_out, uuid_out);
}

RUSTEXPORT bool Rust_setAudioRecordingDevice(
    webrtc::PeerConnectionFactoryOwner* factory_owner_borrowed_rc, uint16_t index) {
  return factory_owner_borrowed_rc->SetAudioRecordingDevice(index);
}
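// Hypothetical sketch of enumerating playout devices through the wrappers above; the
// 128-byte buffers mirror the usual ADM name/GUID limits, which is an assumption here,
// and a return value of 0 is assumed to mean success, as for the underlying ADM call.
// Illustration only; nothing in this file references this function.
static void ExampleListPlayoutDevices(PeerConnectionFactoryOwner* factory_owner_borrowed_rc) {
  int16_t count = Rust_getAudioPlayoutDevices(factory_owner_borrowed_rc);
  for (int16_t i = 0; i < count; i++) {
    char name[128] = {0};
    char uuid[128] = {0};
    if (Rust_getAudioPlayoutDeviceName(factory_owner_borrowed_rc, static_cast<uint16_t>(i),
                                       name, uuid) == 0) {
      RTC_LOG(LS_INFO) << "playout device " << i << ": " << name;
    }
  }
}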

} // namespace rffi
} // namespace webrtc
357
ringrtc/rffi/src/peer_connection_observer.cc
Normal file
@@ -0,0 +1,357 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "rffi/api/peer_connection_observer_intf.h"

#include "rffi/api/media.h"
#include "rffi/src/peer_connection_observer.h"
#include "rffi/src/ptr.h"

namespace webrtc {
namespace rffi {

PeerConnectionObserverRffi::PeerConnectionObserverRffi(void* observer,
    const PeerConnectionObserverCallbacks* callbacks,
    bool enable_frame_encryption,
    bool enable_video_frame_event,
    bool enable_video_frame_content)
  : observer_(observer), callbacks_(*callbacks), enable_frame_encryption_(enable_frame_encryption), enable_video_frame_event_(enable_video_frame_event), enable_video_frame_content_(enable_video_frame_content)
{
  RTC_LOG(LS_INFO) << "PeerConnectionObserverRffi:ctor(): " << this->observer_;
}

PeerConnectionObserverRffi::~PeerConnectionObserverRffi() {
  RTC_LOG(LS_INFO) << "PeerConnectionObserverRffi:dtor(): " << this->observer_;
}

void PeerConnectionObserverRffi::OnIceCandidate(const IceCandidateInterface* candidate) {
  RustIceCandidate rust_candidate;

  std::string sdp;
  candidate->ToString(&sdp);
  rust_candidate.sdp_borrowed = sdp.c_str();

  rust_candidate.is_relayed = (candidate->candidate().type() == cricket::RELAY_PORT_TYPE);
  rust_candidate.relay_protocol = TransportProtocol::kUnknown;
  if (candidate->candidate().relay_protocol() == cricket::UDP_PROTOCOL_NAME) {
    rust_candidate.relay_protocol = TransportProtocol::kUdp;
  } else if (candidate->candidate().relay_protocol() == cricket::TCP_PROTOCOL_NAME) {
    rust_candidate.relay_protocol = TransportProtocol::kTcp;
  } else if (candidate->candidate().relay_protocol() == cricket::TLS_PROTOCOL_NAME) {
    rust_candidate.relay_protocol = TransportProtocol::kTls;
  }

  callbacks_.onIceCandidate(observer_, &rust_candidate);
}

void PeerConnectionObserverRffi::OnIceCandidatesRemoved(
    const std::vector<cricket::Candidate>& candidates) {

  std::vector<IpPort> removed_addresses;
  for (const auto& candidate: candidates) {
    removed_addresses.push_back(RtcSocketAddressToIpPort(candidate.address()));
  }

  callbacks_.onIceCandidatesRemoved(observer_, removed_addresses.data(), removed_addresses.size());
}

void PeerConnectionObserverRffi::OnIceCandidateError(
    const std::string& address,
    int port,
    const std::string& url,
    int error_code,
    const std::string& error_text) {
  // Error code 701 is when we have an IPv4 local port trying to reach an IPv6 server or vice versa.
  // That's expected to not work, so we don't want to log that all the time.
  if (error_code != 701) {
    RTC_LOG(LS_WARNING) << "Failed to gather local ICE candidate from " << address << ":" << port << " to " << url << "; error " << error_code << ": " << error_text;
  }
}

void PeerConnectionObserverRffi::OnSignalingChange(
    PeerConnectionInterface::SignalingState new_state) {
}

void PeerConnectionObserverRffi::OnIceConnectionChange(
    PeerConnectionInterface::IceConnectionState new_state) {
  callbacks_.onIceConnectionChange(observer_, new_state);
}

void PeerConnectionObserverRffi::OnConnectionChange(
    PeerConnectionInterface::PeerConnectionState new_state) {
}

void PeerConnectionObserverRffi::OnIceConnectionReceivingChange(bool receiving) {
  RTC_LOG(LS_INFO) << "OnIceConnectionReceivingChange()";
}

void PeerConnectionObserverRffi::OnIceSelectedCandidatePairChanged(
    const cricket::CandidatePairChangeEvent& event) {
  auto& local = event.selected_candidate_pair.local_candidate();
  auto& remote = event.selected_candidate_pair.remote_candidate();
  auto local_adapter_type = local.network_type();
  auto local_adapter_type_under_vpn = local.underlying_type_for_vpn();
  bool local_relayed = (local.type() == cricket::RELAY_PORT_TYPE) || !local.relay_protocol().empty();
  TransportProtocol local_relay_protocol = TransportProtocol::kUnknown;
  if (local.relay_protocol() == cricket::UDP_PROTOCOL_NAME) {
    local_relay_protocol = TransportProtocol::kUdp;
  } else if (local.relay_protocol() == cricket::TCP_PROTOCOL_NAME) {
    local_relay_protocol = TransportProtocol::kTcp;
  } else if (local.relay_protocol() == cricket::TLS_PROTOCOL_NAME) {
    local_relay_protocol = TransportProtocol::kTls;
  }
  bool remote_relayed = (remote.type() == cricket::RELAY_PORT_TYPE);
  auto network_route = webrtc::rffi::NetworkRoute{ local_adapter_type, local_adapter_type_under_vpn, local_relayed, local_relay_protocol, remote_relayed};
  callbacks_.onIceNetworkRouteChange(observer_, network_route);
}

void PeerConnectionObserverRffi::OnIceGatheringChange(
    PeerConnectionInterface::IceGatheringState new_state) {
  RTC_LOG(LS_INFO) << "OnIceGatheringChange()";
}

void PeerConnectionObserverRffi::OnAddStream(
    rtc::scoped_refptr<MediaStreamInterface> stream) {
  RTC_LOG(LS_INFO) << "OnAddStream()";

  auto video_tracks = stream->GetVideoTracks();
  if (!video_tracks.empty()) {
    AddVideoSink(video_tracks[0].get());
  }
  callbacks_.onAddStream(observer_, take_rc(stream));
}

void PeerConnectionObserverRffi::OnRemoveStream(
    rtc::scoped_refptr<MediaStreamInterface> stream) {
  RTC_LOG(LS_INFO) << "OnRemoveStream()";
}

void PeerConnectionObserverRffi::OnRtpPacket(const RtpPacketReceived& rtp_packet) {
  uint8_t pt = rtp_packet.PayloadType();
  uint16_t seqnum = rtp_packet.SequenceNumber();
  uint32_t timestamp = rtp_packet.Timestamp();
  uint32_t ssrc = rtp_packet.Ssrc();
  const uint8_t* payload_data = rtp_packet.payload().data();
  size_t payload_size = rtp_packet.payload().size();
  RTC_LOG(LS_VERBOSE) << "OnRtpReceived() pt: " << pt << " seqnum: " << seqnum << " timestamp: " << timestamp << " ssrc: " << ssrc << " size: " << payload_size;
  callbacks_.onRtpReceived(observer_, pt, seqnum, timestamp, ssrc, payload_data, payload_size);
}

void PeerConnectionObserverRffi::OnRenegotiationNeeded() {
  RTC_LOG(LS_INFO) << "OnRenegotiationNeeded()";
}

void PeerConnectionObserverRffi::OnAddTrack(
    rtc::scoped_refptr<RtpReceiverInterface> receiver,
    const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
  // TODO: Define FFI for an RtpReceiver and pass that here instead.
  // Ownership is transferred to the rust call back
  // handler. Someone must call RefCountInterface::Release()
  // eventually.
  if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) {
    if (enable_frame_encryption_) {
      uint32_t id = Rust_getTrackIdAsUint32(receiver->track().get());
      if (id != 0) {
        receiver->SetFrameDecryptor(CreateDecryptor(id));
        callbacks_.onAddAudioRtpReceiver(observer_, take_rc(receiver->track()));
      } else {
        RTC_LOG(LS_WARNING) << "Not sending decryptor for RtpReceiver with strange ID: " << receiver->track()->id();
      }
    } else {
      callbacks_.onAddAudioRtpReceiver(observer_, take_rc(receiver->track()));
    }
  } else if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
    if (enable_frame_encryption_) {
      uint32_t id = Rust_getTrackIdAsUint32(receiver->track().get());
      if (id != 0) {
        receiver->SetFrameDecryptor(CreateDecryptor(id));
        AddVideoSink(static_cast<webrtc::VideoTrackInterface*>(receiver->track().get()));
        callbacks_.onAddVideoRtpReceiver(observer_, take_rc(receiver->track()));
      } else {
        RTC_LOG(LS_WARNING) << "Not sending decryptor for RtpReceiver with strange ID: " << receiver->track()->id();
      }
    } else {
      AddVideoSink(static_cast<webrtc::VideoTrackInterface*>(receiver->track().get()));
      callbacks_.onAddVideoRtpReceiver(observer_, take_rc(receiver->track()));
    }
  }
}

void PeerConnectionObserverRffi::OnTrack(
    rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
  RTC_LOG(LS_INFO) << "OnTrack()";
}

class Encryptor : public webrtc::FrameEncryptorInterface {
 public:
  // Passed-in observer must live at least as long as the Encryptor,
  // which likely means as long as the PeerConnection.
  Encryptor(void* observer, PeerConnectionObserverCallbacks* callbacks) : observer_(observer), callbacks_(callbacks) {}

  // This is called just before Encrypt to get the size of the ciphertext
  // buffer that will be given to Encrypt.
  size_t GetMaxCiphertextByteSize(cricket::MediaType media_type,
                                  size_t plaintext_size) override {
    bool is_audio = (media_type == cricket::MEDIA_TYPE_AUDIO);
    bool is_video = (media_type == cricket::MEDIA_TYPE_VIDEO);
    if (!is_audio && !is_video) {
      RTC_LOG(LS_WARNING) << "GetMaxCiphertextByteSize called with weird media type: " << media_type;
      return 0;
    }
    return callbacks_->getMediaCiphertextBufferSize(observer_, is_audio, plaintext_size);
  }

  int Encrypt(cricket::MediaType media_type,
              // Our encryption mechanism is the same regardless of SSRC
              uint32_t _ssrc,
              // This is not supported by our SFU currently, so don't bother trying to use it.
              rtc::ArrayView<const uint8_t> _generic_video_header,
              rtc::ArrayView<const uint8_t> plaintext,
              rtc::ArrayView<uint8_t> ciphertext_buffer,
              size_t* ciphertext_size) override {
    bool is_audio = (media_type == cricket::MEDIA_TYPE_AUDIO);
    bool is_video = (media_type == cricket::MEDIA_TYPE_VIDEO);
    if (!is_audio && !is_video) {
      RTC_LOG(LS_WARNING) << "Encrypt called with weird media type: " << media_type;
      return -1;  // Error
    }
    if (!callbacks_->encryptMedia(observer_, is_audio, plaintext.data(), plaintext.size(), ciphertext_buffer.data(), ciphertext_buffer.size(), ciphertext_size)) {
      return -2;  // Error
    }
    return 0;  // No error
  }

 private:
  void* observer_;
  PeerConnectionObserverCallbacks* callbacks_;
};

rtc::scoped_refptr<FrameEncryptorInterface> PeerConnectionObserverRffi::CreateEncryptor() {
  // The PeerConnectionObserverRffi outlives the Encryptor because it outlives the PeerConnection,
  // which outlives the RtpSender, which owns the Encryptor.
  // So we know the PeerConnectionObserverRffi outlives the Encryptor.
  return rtc::make_ref_counted<Encryptor>(observer_, &callbacks_);
}
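// The Encryptor above only forwards to PeerConnectionObserverCallbacks, so the buffer-size
// contract lives in those callbacks: getMediaCiphertextBufferSize must return at least as
// many bytes as encryptMedia will later write for the same frame. A hypothetical
// pass-through pair, with signatures inferred from the call sites above (the real callback
// types are defined in rffi/api/peer_connection_observer_intf.h and may differ):
//
//   size_t getMediaCiphertextBufferSize(void* observer, bool is_audio, size_t plaintext_size) {
//     return plaintext_size;  // toy scheme: no expansion
//   }
//
//   bool encryptMedia(void* observer, bool is_audio,
//                     const uint8_t* plaintext, size_t plaintext_size,
//                     uint8_t* ciphertext_buffer, size_t ciphertext_buffer_size,
//                     size_t* ciphertext_size) {
//     if (ciphertext_buffer_size < plaintext_size) return false;
//     memcpy(ciphertext_buffer, plaintext, plaintext_size);  // identity "encryption"
//     *ciphertext_size = plaintext_size;
//     return true;
//   }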

void PeerConnectionObserverRffi::AddVideoSink(VideoTrackInterface* track) {
  if (!enable_video_frame_event_ || !track) {
    return;
  }

  uint32_t track_id = Rust_getTrackIdAsUint32(track);
  auto sink = std::make_unique<VideoSink>(track_id, this);

  rtc::VideoSinkWants wants;
  // Note: this causes frames to be dropped, not rotated.
  // So don't set it to true, even if it seems to make sense!
  wants.rotation_applied = false;

  // The sink gets stored in the track, but never destroys it.
  // The sink must live as long as the track, which is why we
  // stored it in the PeerConnectionObserverRffi.
  track->AddOrUpdateSink(sink.get(), wants);
  video_sinks_.push_back(std::move(sink));
}

VideoSink::VideoSink(uint32_t track_id, PeerConnectionObserverRffi* pc_observer)
  : track_id_(track_id), pc_observer_(pc_observer) {
}

void VideoSink::OnFrame(const webrtc::VideoFrame& frame) {
  pc_observer_->OnVideoFrame(track_id_, frame);
}

void PeerConnectionObserverRffi::OnVideoFrame(uint32_t track_id, const webrtc::VideoFrame& frame) {
  RffiVideoFrameMetadata metadata = {};
  metadata.width = frame.width();
  metadata.height = frame.height();
  metadata.rotation = frame.rotation();
  // We can't keep a reference to the buffer around or it will slow down the video decoder.
  // This introduces a copy, but only in the case where we aren't rotated,
  // and it's a copy of i420 and not RGBA (i420 is smaller than RGBA).
  // TODO: Figure out if we can make the decoder have a larger frame output pool
  // so that we don't need to do this.
  auto* buffer_owned_rc = enable_video_frame_content_ ? Rust_copyAndRotateVideoFrameBuffer(frame.video_frame_buffer().get(), frame.rotation()) : nullptr;
  // If we rotated the frame, we need to update metadata as well
  if ((metadata.rotation == kVideoRotation_90) || (metadata.rotation == kVideoRotation_270)) {
    metadata.width = frame.height();
    metadata.height = frame.width();
  }
  metadata.rotation = kVideoRotation_0;

  callbacks_.onVideoFrame(observer_, track_id, metadata, buffer_owned_rc);
}

class Decryptor : public webrtc::FrameDecryptorInterface {
 public:
  // Passed-in observer must live at least as long as the Decryptor,
  // which likely means as long as the PeerConnection.
  Decryptor(uint32_t track_id, void* observer, PeerConnectionObserverCallbacks* callbacks) : track_id_(track_id), observer_(observer), callbacks_(callbacks) {}

  // This is called just before Decrypt to get the size of the plaintext
  // buffer that will be given to Decrypt.
  size_t GetMaxPlaintextByteSize(cricket::MediaType media_type,
                                 size_t ciphertext_size) override {
    bool is_audio = (media_type == cricket::MEDIA_TYPE_AUDIO);
    bool is_video = (media_type == cricket::MEDIA_TYPE_VIDEO);
    if (!is_audio && !is_video) {
      RTC_LOG(LS_WARNING) << "GetMaxPlaintextByteSize called with weird media type: " << media_type;
      return 0;
    }
    return callbacks_->getMediaPlaintextBufferSize(observer_, track_id_, is_audio, ciphertext_size);
  }

  FrameDecryptorInterface::Result Decrypt(cricket::MediaType media_type,
                                          // Our encryption mechanism is the same regardless of CSRCs
                                          const std::vector<uint32_t>& _csrcs,
                                          // This is not supported by our SFU currently, so don't bother trying to use it.
                                          rtc::ArrayView<const uint8_t> _generic_video_header,
                                          rtc::ArrayView<const uint8_t> ciphertext,
                                          rtc::ArrayView<uint8_t> plaintext_buffer) override {
    bool is_audio = (media_type == cricket::MEDIA_TYPE_AUDIO);
    bool is_video = (media_type == cricket::MEDIA_TYPE_VIDEO);
    if (!is_audio && !is_video) {
      RTC_LOG(LS_WARNING) << "Decrypt called with weird media type: " << media_type;
      return FrameDecryptorInterface::Result(FrameDecryptorInterface::Status::kUnknown, 0);
    }
    size_t plaintext_size = 0;
    if (!callbacks_->decryptMedia(observer_, track_id_, is_audio, ciphertext.data(), ciphertext.size(), plaintext_buffer.data(), plaintext_buffer.size(), &plaintext_size)) {
      return FrameDecryptorInterface::Result(FrameDecryptorInterface::Status::kFailedToDecrypt, 0);
    }
    return FrameDecryptorInterface::Result(FrameDecryptorInterface::Status::kOk, plaintext_size);
  }

 private:
  uint32_t track_id_;
  void* observer_;
  PeerConnectionObserverCallbacks* callbacks_;
};

rtc::scoped_refptr<FrameDecryptorInterface> PeerConnectionObserverRffi::CreateDecryptor(uint32_t track_id) {
  // The PeerConnectionObserverRffi outlives the Decryptor because it outlives the PeerConnection,
  // which outlives the RtpReceiver, which owns the Decryptor.
  // So we know the PeerConnectionObserverRffi outlives the Decryptor.
  return rtc::make_ref_counted<Decryptor>(track_id, observer_, &callbacks_);
}

// Returns an owned pointer.
// Passed-in observer must live at least as long as the returned value,
// which in turn must live at least as long as the PeerConnection.
RUSTEXPORT PeerConnectionObserverRffi*
Rust_createPeerConnectionObserver(void* observer_borrowed,
                                  const PeerConnectionObserverCallbacks* callbacks_borrowed,
                                  bool enable_frame_encryption,
                                  bool enable_video_frame_event,
                                  bool enable_video_frame_content) {
  return new PeerConnectionObserverRffi(observer_borrowed, callbacks_borrowed, enable_frame_encryption, enable_video_frame_event, enable_video_frame_content);
}

RUSTEXPORT void
Rust_deletePeerConnectionObserver(PeerConnectionObserverRffi* observer_owned) {
  delete observer_owned;
}

} // namespace rffi
} // namespace webrtc
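// Hypothetical caller-side sketch of the lifetime ordering implied above: create the
// observer, create the PeerConnection with it, close and release the PeerConnection, and
// only then delete the observer. Everything except the RUSTEXPORT functions is assumed.
//
//   auto* observer = webrtc::rffi::Rust_createPeerConnectionObserver(
//       rust_observer_ptr, &callbacks, /*enable_frame_encryption=*/true,
//       /*enable_video_frame_event=*/true, /*enable_video_frame_content=*/true);
//   auto* pc_owned_rc = webrtc::rffi::Rust_createPeerConnection(
//       factory_owner_borrowed_rc, observer, /*hide_ip=*/false, ice_server,
//       audio_track_borrowed_rc, video_track_borrowed_rc);
//   // ... use the connection ...
//   webrtc::rffi::Rust_closePeerConnection(pc_owned_rc);
//   webrtc::rffi::Rust_decRc(pc_owned_rc);                      // release the owned RC
//   webrtc::rffi::Rust_deletePeerConnectionObserver(observer);  // only after the PC is gone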
112
ringrtc/rffi/src/peer_connection_observer.h
Normal file
@@ -0,0 +1,112 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_PEER_CONNECTION_OBSERVER_H__
#define RFFI_PEER_CONNECTION_OBSERVER_H__

#include "api/crypto/frame_encryptor_interface.h"
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"

/**
 * Adapter between the C++ PeerConnectionObserver interface and the
 * Rust PeerConnection.Observer interface. Wraps an instance of the
 * Rust interface and dispatches C++ callbacks to Rust.
 */

namespace webrtc {
namespace rffi {

class VideoSink;

class PeerConnectionObserverRffi : public PeerConnectionObserver {
 public:
  // Passed-in observer must live at least as long as the PeerConnectionObserverRffi.
  PeerConnectionObserverRffi(void* observer,
                             const PeerConnectionObserverCallbacks* callbacks,
                             bool enable_frame_encryption,
                             bool enable_video_frame_event,
                             bool enable_video_frame_content);
  ~PeerConnectionObserverRffi() override;

  // If enabled, the PeerConnection will be configured to encrypt and decrypt
  // media frames using PeerConnectionObserverCallbacks.
  bool enable_frame_encryption() { return enable_frame_encryption_; }
  // These will be passed into RtpSenders and will be implemented
  // with callbacks to PeerConnectionObserverCallbacks.
  rtc::scoped_refptr<FrameEncryptorInterface> CreateEncryptor();
  // These will be passed into RtpReceivers and will be implemented
  // with callbacks to PeerConnectionObserverCallbacks.
  rtc::scoped_refptr<FrameDecryptorInterface> CreateDecryptor(uint32_t track_id);

  // Implementation of PeerConnectionObserver interface, which propagates
  // the callbacks to the Rust observer.
  void OnIceCandidate(const IceCandidateInterface* candidate) override;
  void OnIceCandidatesRemoved(
      const std::vector<cricket::Candidate>& candidates) override;
  void OnIceCandidateError(
      const std::string& address,
      int port,
      const std::string& url,
      int error_code,
      const std::string& error_text) override;
  void OnSignalingChange(
      PeerConnectionInterface::SignalingState new_state) override;
  void OnIceConnectionChange(
      PeerConnectionInterface::IceConnectionState new_state) override;
  void OnConnectionChange(
      PeerConnectionInterface::PeerConnectionState new_state) override;
  void OnIceConnectionReceivingChange(bool receiving) override;
  void OnIceGatheringChange(
      PeerConnectionInterface::IceGatheringState new_state) override;
  void OnIceSelectedCandidatePairChanged(
      const cricket::CandidatePairChangeEvent& event) override;
  void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
  void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
  void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> channel) override {}
  void OnRtpPacket(const RtpPacketReceived& rtp_packet) override;
  void OnRenegotiationNeeded() override;
  void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
                  const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
                      streams) override;
  void OnTrack(
      rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;

  // Called by the VideoSinks in video_sinks_.
  void OnVideoFrame(uint32_t track_id, const webrtc::VideoFrame& frame);

 private:
  // Add a VideoSink to the video_sinks_ for ownership and pass
  // a borrowed pointer to the track.
  void AddVideoSink(VideoTrackInterface* track);

  void* observer_;
  PeerConnectionObserverCallbacks callbacks_;
  bool enable_frame_encryption_ = false;
  bool enable_video_frame_event_ = false;
  bool enable_video_frame_content_ = false;
  std::vector<std::unique_ptr<VideoSink>> video_sinks_;
};

// A simple implementation of a VideoSinkInterface which passes video frames
// back to the PeerConnectionObserver with a track_id.
class VideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  VideoSink(uint32_t track_id, PeerConnectionObserverRffi*);
  ~VideoSink() override = default;

  void OnFrame(const webrtc::VideoFrame& frame) override;

 private:
  uint32_t track_id_;
  PeerConnectionObserverRffi* pc_observer_;
};

} // namespace rffi
} // namespace webrtc

#endif /* RFFI_PEER_CONNECTION_OBSERVER_H__ */
34
ringrtc/rffi/src/ptr.h
Normal file
@@ -0,0 +1,34 @@
/*
 * Copyright 2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_POINTERS_H__
#define RFFI_POINTERS_H__

#include "api/scoped_refptr.h"

namespace webrtc {
namespace rffi {

// This just makes it easier to read.
// Calling the rtc::scoped_refptr constructor doesn't make it very clear that
// it increments the ref count.
template <typename T>
rtc::scoped_refptr<T> inc_rc(T* borrowed_rc) {
  return rtc::scoped_refptr<T>(borrowed_rc);
}

// This just makes it easier to read.
// Calling rtc::scoped_refptr::release() doesn't make it very clear that
// it prevents decrementing the RC.
// The caller now owns an RC.
template <typename T>
T* take_rc(rtc::scoped_refptr<T> scoped) {
  return scoped.release();
}

} // namespace rffi
} // namespace webrtc

#endif /* RFFI_POINTERS_H__ */
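// The two helpers are mirror images: inc_rc adopts a borrowed pointer into a scoped_refptr
// by adding a reference, while take_rc hands the scoped_refptr's reference to the caller
// without dropping it. A small illustration (track_borrowed_rc is a hypothetical name):
//
//   rtc::scoped_refptr<webrtc::AudioTrackInterface> local =
//       webrtc::rffi::inc_rc(track_borrowed_rc);   // ref count +1, owned by "local"
//   webrtc::AudioTrackInterface* track_owned_rc =
//       webrtc::rffi::take_rc(std::move(local));   // caller now owns one reference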
36
ringrtc/rffi/src/ref_count.cc
Normal file
@@ -0,0 +1,36 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "rffi/api/ref_count.h"
#include "rtc_base/logging.h"

namespace webrtc {
namespace rffi {

// Decrements the ref count of a ref-counted object.
// If the ref count goes to zero, the object is deleted.
RUSTEXPORT void
Rust_decRc(rtc::RefCountInterface* owned_rc) {
  if (!owned_rc) {
    return;
  }

  auto result = owned_rc->Release();
  RTC_LOG(LS_VERBOSE) << "Did it get deleted? " << (result == rtc::RefCountReleaseStatus::kDroppedLastRef);
}

// Increments the ref count of a ref-counted object.
// The borrowed RC becomes an owned RC.
RUSTEXPORT void
Rust_incRc(rtc::RefCountInterface* borrowed_rc) {
  if (!borrowed_rc) {
    return;
  }

  borrowed_rc->AddRef();
}

} // namespace rffi
} // namespace webrtc
84
ringrtc/rffi/src/sdp_observer.cc
Normal file
@@ -0,0 +1,84 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "rffi/api/sdp_observer_intf.h"
#include "rffi/src/ptr.h"
#include "rffi/src/sdp_observer.h"
#include <regex>

namespace webrtc {
namespace rffi {

CreateSessionDescriptionObserverRffi::CreateSessionDescriptionObserverRffi(void* csd_observer,
    const CreateSessionDescriptionObserverCallbacks* csd_observer_cbs)
  : csd_observer_(csd_observer), csd_observer_cbs_(*csd_observer_cbs)
{
  RTC_LOG(LS_INFO) << "CreateSessionDescriptionObserverRffi:ctor(): " << this->csd_observer_;
}

CreateSessionDescriptionObserverRffi::~CreateSessionDescriptionObserverRffi() {
  RTC_LOG(LS_INFO) << "CreateSessionDescriptionObserverRffi:dtor(): " << this->csd_observer_;
}

void CreateSessionDescriptionObserverRffi::OnSuccess(SessionDescriptionInterface* session_description) {
  // OnSuccess transfers ownership of the description
  RTC_LOG(LS_INFO) << "CreateSessionDescriptionObserverRffi:OnSuccess(): ";

  // TODO tweak the response a little
  std::string sdp;
  if (session_description->ToString(&sdp)) {
    sdp = std::regex_replace(sdp, std::regex("(a=fmtp:111 ((?!cbr=).)*)\r?\n"), "$1;cbr=1\r\n");
    sdp = std::regex_replace(sdp, std::regex(".+urn:ietf:params:rtp-hdrext:ssrc-audio-level.*\r?\n"), "");

    std::unique_ptr<SessionDescriptionInterface> session_description2 = CreateSessionDescription(session_description->GetType(), sdp);
    delete session_description;
    this->csd_observer_cbs_.onSuccess(this->csd_observer_, session_description2.release());
  } else {
    RTC_LOG(LS_ERROR) << "Unable to convert SessionDescriptionInterface to std::string";
  }
}
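// Concretely, the two regex passes above (a) force constant-bitrate Opus by appending
// ";cbr=1" to any "a=fmtp:111" line that does not already carry a cbr= parameter, and
// (b) drop lines advertising the ssrc-audio-level header extension. For example
// (illustrative SDP fragment, not taken from a real offer):
//
//   a=fmtp:111 minptime=10;useinbandfec=1
//     -> a=fmtp:111 minptime=10;useinbandfec=1;cbr=1
//   a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level
//     -> (line removed)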

void CreateSessionDescriptionObserverRffi::OnFailure(RTCError error) {
  RTC_LOG(LS_INFO) << "CreateSessionDescriptionObserverRffi:OnFailure(): ";
  this->csd_observer_cbs_.onFailure(this->csd_observer_, error.message(), static_cast<int32_t>(error.type()));
}

RUSTEXPORT CreateSessionDescriptionObserverRffi*
Rust_createCreateSessionDescriptionObserver(void* csd_observer_borrowed,
                                            const CreateSessionDescriptionObserverCallbacks* csd_observer_cbs_borrowed) {
  return take_rc(rtc::make_ref_counted<CreateSessionDescriptionObserverRffi>(csd_observer_borrowed, csd_observer_cbs_borrowed));
}

SetSessionDescriptionObserverRffi::SetSessionDescriptionObserverRffi(void* ssd_observer_borrowed,
    const SetSessionDescriptionObserverCallbacks* ssd_observer_cbs_borrowed)
  : ssd_observer_(ssd_observer_borrowed), ssd_observer_cbs_(*ssd_observer_cbs_borrowed)
{
  RTC_LOG(LS_INFO) << "SetSessionDescriptionObserverRffi:ctor(): " << this->ssd_observer_;
}

SetSessionDescriptionObserverRffi::~SetSessionDescriptionObserverRffi() {
  RTC_LOG(LS_INFO) << "SetSessionDescriptionObserverRffi:dtor(): " << this->ssd_observer_;
}

void SetSessionDescriptionObserverRffi::OnSuccess() {
  RTC_LOG(LS_INFO) << "SetSessionDescriptionObserverRffi:OnSuccess(): ";
  this->ssd_observer_cbs_.onSuccess(this->ssd_observer_);
}

void SetSessionDescriptionObserverRffi::OnFailure(RTCError error) {
  RTC_LOG(LS_INFO) << "SetSessionDescriptionObserverRffi:OnFailure(): ";
  this->ssd_observer_cbs_.onFailure(this->ssd_observer_, error.message(), static_cast<int32_t>(error.type()));
}

// Returns an owned RC.
// The passed-in values must outlive the returned value.
RUSTEXPORT SetSessionDescriptionObserverRffi*
Rust_createSetSessionDescriptionObserver(void* ssd_observer_borrowed,
                                         const SetSessionDescriptionObserverCallbacks* ssd_observer_cbs_borrowed) {
  return take_rc(rtc::make_ref_counted<SetSessionDescriptionObserverRffi>(ssd_observer_borrowed, ssd_observer_cbs_borrowed));
}

} // namespace rffi
} // namespace webrtc
63
ringrtc/rffi/src/sdp_observer.h
Normal file
@@ -0,0 +1,63 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_SDP_OBSERVER_H__
#define RFFI_SDP_OBSERVER_H__

#include "api/peer_connection_interface.h"

namespace webrtc {
namespace rffi {

/**
 * Adapter between the C++ CreateSessionDescriptionObserver interface
 * and Rust. Wraps an instance of the Rust interface and dispatches
 * C++ callbacks to Rust.
 */

class CreateSessionDescriptionObserverRffi : public CreateSessionDescriptionObserver {
 public:
  // Passed-in observer must live as long as the CreateSessionDescriptionObserverRffi.
  CreateSessionDescriptionObserverRffi(void* csd_observer,
                                       const CreateSessionDescriptionObserverCallbacks* csd_observer_cbs);
  ~CreateSessionDescriptionObserverRffi() override;

  // MediaConstraintsInterface* constraints() { return constraints_.get(); }

  void OnSuccess(SessionDescriptionInterface* session_description) override;
  void OnFailure(RTCError error) override;

 private:
  void* csd_observer_;
  CreateSessionDescriptionObserverCallbacks csd_observer_cbs_;

};

/**
 * Adapter between the C++ SetSessionDescriptionObserver interface and
 * Rust. Wraps an instance of the Rust interface and dispatches C++
 * callbacks to Rust.
 */

class SetSessionDescriptionObserverRffi : public SetSessionDescriptionObserver {
 public:
  // Passed-in observer must live as long as the SetSessionDescriptionObserverRffi.
  SetSessionDescriptionObserverRffi(void* ssd_observer,
                                    const SetSessionDescriptionObserverCallbacks* ssd_observer_cbs);
  ~SetSessionDescriptionObserverRffi() override;

  void OnSuccess() override;
  void OnFailure(RTCError error) override;

 private:
  void* ssd_observer_;
  SetSessionDescriptionObserverCallbacks ssd_observer_cbs_;

};

} // namespace rffi
} // namespace webrtc

#endif /* RFFI_SDP_OBSERVER_H__ */
184
ringrtc/rffi/src/stats_observer.cc
Normal file
@@ -0,0 +1,184 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#include "rffi/api/stats_observer_intf.h"
#include "rffi/src/ptr.h"
#include "rffi/src/stats_observer.h"
#include "api/stats/rtcstats_objects.h"

namespace webrtc {
namespace rffi {

StatsObserverRffi::StatsObserverRffi(void* stats_observer,
                                     const StatsObserverCallbacks* stats_observer_cbs)
  : stats_observer_(stats_observer), stats_observer_cbs_(*stats_observer_cbs)
{
  RTC_LOG(LS_INFO) << "StatsObserverRffi:ctor(): " << this->stats_observer_;
}

StatsObserverRffi::~StatsObserverRffi() {
  RTC_LOG(LS_INFO) << "StatsObserverRffi:dtor(): " << this->stats_observer_;
}

void StatsObserverRffi::OnStatsDelivered(const rtc::scoped_refptr<const RTCStatsReport>& report) {
  // RTC_LOG(LS_INFO) << report->ToJson();

  this->audio_sender_statistics_.clear();
  this->video_sender_statistics_.clear();
  this->audio_receiver_statistics_.clear();
  this->video_receiver_statistics_.clear();

  auto outbound_stream_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
  auto inbound_stream_stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
  auto candidate_pair_stats = report->GetStatsOfType<RTCIceCandidatePairStats>();

  for (const auto& stat : outbound_stream_stats) {
    if (*stat->kind == "audio") {
      AudioSenderStatistics audio_sender = {0};

      audio_sender.ssrc = stat->ssrc.ValueOrDefault(0);
      audio_sender.packets_sent = stat->packets_sent.ValueOrDefault(0);
      audio_sender.bytes_sent = stat->bytes_sent.ValueOrDefault(0);

      if (stat->remote_id.is_defined()) {
        auto remote_stat = report->GetAs<RTCRemoteInboundRtpStreamStats>(*stat->remote_id);
        if (remote_stat) {
          audio_sender.remote_packets_lost = remote_stat->packets_lost.ValueOrDefault(0);
          audio_sender.remote_jitter = remote_stat->jitter.ValueOrDefault(0.0);
          audio_sender.remote_round_trip_time = remote_stat->round_trip_time.ValueOrDefault(0.0);
        }
      }

      if (stat->media_source_id.is_defined()) {
        auto audio_source_stat = report->GetAs<RTCAudioSourceStats>(*stat->media_source_id);
        if (audio_source_stat) {
          audio_sender.total_audio_energy = audio_source_stat->total_audio_energy.ValueOrDefault(0.0);
          audio_sender.echo_likelihood = audio_source_stat->echo_likelihood.ValueOrDefault(0.0);
        }
      }

      this->audio_sender_statistics_.push_back(audio_sender);
    } else if (*stat->kind == "video") {
      VideoSenderStatistics video_sender = {0};

      video_sender.ssrc = stat->ssrc.ValueOrDefault(0);
      video_sender.packets_sent = stat->packets_sent.ValueOrDefault(0);
      video_sender.bytes_sent = stat->bytes_sent.ValueOrDefault(0);
      video_sender.frames_encoded = stat->frames_encoded.ValueOrDefault(0);
      video_sender.key_frames_encoded = stat->key_frames_encoded.ValueOrDefault(0);
      video_sender.total_encode_time = stat->total_encode_time.ValueOrDefault(0.0);
      video_sender.frame_width = stat->frame_width.ValueOrDefault(0);
      video_sender.frame_height = stat->frame_height.ValueOrDefault(0);
      video_sender.retransmitted_packets_sent = stat->retransmitted_packets_sent.ValueOrDefault(0);
      video_sender.retransmitted_bytes_sent = stat->retransmitted_bytes_sent.ValueOrDefault(0);
      video_sender.total_packet_send_delay = stat->total_packet_send_delay.ValueOrDefault(0.0);
      video_sender.nack_count = stat->nack_count.ValueOrDefault(0);
      video_sender.pli_count = stat->pli_count.ValueOrDefault(0);
      if (stat->quality_limitation_reason.is_defined()) {
        // "none" = 0 (the default)
        if (*stat->quality_limitation_reason == "cpu") {
          video_sender.quality_limitation_reason = 1;
        } else if (*stat->quality_limitation_reason == "bandwidth") {
          video_sender.quality_limitation_reason = 2;
        } else {
          video_sender.quality_limitation_reason = 3;
        }
      }
      video_sender.quality_limitation_resolution_changes = stat->quality_limitation_resolution_changes.ValueOrDefault(0);

      if (stat->remote_id.is_defined()) {
        auto remote_stat = report->GetAs<RTCRemoteInboundRtpStreamStats>(*stat->remote_id);
        if (remote_stat) {
          video_sender.remote_packets_lost = remote_stat->packets_lost.ValueOrDefault(0);
          video_sender.remote_jitter = remote_stat->jitter.ValueOrDefault(0.0);
          video_sender.remote_round_trip_time = remote_stat->round_trip_time.ValueOrDefault(0.0);
        }
      }

      this->video_sender_statistics_.push_back(video_sender);
    }
  }

  for (const auto& stat : inbound_stream_stats) {
    if (*stat->kind == "audio") {
      AudioReceiverStatistics audio_receiver = {0};

      audio_receiver.ssrc = stat->ssrc.ValueOrDefault(0);
      audio_receiver.packets_received = stat->packets_received.ValueOrDefault(0);
      audio_receiver.packets_lost = stat->packets_lost.ValueOrDefault(0);
      audio_receiver.bytes_received = stat->bytes_received.ValueOrDefault(0);
      audio_receiver.jitter = stat->jitter.ValueOrDefault(0.0);

      if (stat->track_id.is_defined()) {
        auto track_stat = report->GetAs<RTCMediaStreamTrackStats>(*stat->track_id);
        if (track_stat) {
          audio_receiver.total_audio_energy = track_stat->total_audio_energy.ValueOrDefault(0.0);
        }
      }

      this->audio_receiver_statistics_.push_back(audio_receiver);
    } else if (*stat->kind == "video") {
      VideoReceiverStatistics video_receiver = {0};

      video_receiver.ssrc = stat->ssrc.ValueOrDefault(0);
      video_receiver.packets_received = stat->packets_received.ValueOrDefault(0);
      video_receiver.packets_lost = stat->packets_lost.ValueOrDefault(0);
      video_receiver.bytes_received = stat->bytes_received.ValueOrDefault(0);
      video_receiver.frames_decoded = stat->frames_decoded.ValueOrDefault(0);
      video_receiver.key_frames_decoded = stat->key_frames_decoded.ValueOrDefault(0);
      video_receiver.total_decode_time = stat->total_decode_time.ValueOrDefault(0.0);

      if (stat->track_id.is_defined()) {
        auto track_stat = report->GetAs<RTCMediaStreamTrackStats>(*stat->track_id);
        if (track_stat) {
          video_receiver.frame_width = track_stat->frame_width.ValueOrDefault(0);
          video_receiver.frame_height = track_stat->frame_height.ValueOrDefault(0);
        }
      }

      this->video_receiver_statistics_.push_back(video_receiver);
    }
  }

  ConnectionStatistics connection_statistics = {0};
  uint64_t highest_priority = 0;

  for (const auto& stat : candidate_pair_stats) {
    // We'll only look at the pair that is nominated with the highest priority, usually
    // that has useful values (there does not seem to be a 'in_use' type of flag).
    uint64_t current_priority = stat->priority.ValueOrDefault(0);
    if (*stat->nominated && current_priority > highest_priority) {
      highest_priority = current_priority;
      connection_statistics.current_round_trip_time = stat->current_round_trip_time.ValueOrDefault(0.0);
      connection_statistics.available_outgoing_bitrate = stat->available_outgoing_bitrate.ValueOrDefault(0.0);
    }
  }

  MediaStatistics media_statistics;
  media_statistics.timestamp_us = report->timestamp_us();
  media_statistics.audio_sender_statistics_size = this->audio_sender_statistics_.size();
  media_statistics.audio_sender_statistics = this->audio_sender_statistics_.data();
  media_statistics.video_sender_statistics_size = this->video_sender_statistics_.size();
  media_statistics.video_sender_statistics = this->video_sender_statistics_.data();
  media_statistics.audio_receiver_statistics_size = this->audio_receiver_statistics_.size();
  media_statistics.audio_receiver_statistics = this->audio_receiver_statistics_.data();
  media_statistics.video_receiver_statistics_count = this->video_receiver_statistics_.size();
  media_statistics.video_receiver_statistics = this->video_receiver_statistics_.data();
  media_statistics.connection_statistics = connection_statistics;

  // Pass media_statistics up to Rust, which will consume the data before returning.
  this->stats_observer_cbs_.OnStatsComplete(this->stats_observer_, &media_statistics);
}

// Returns an owned RC.
// Passed-in values must outlive the returned value.
RUSTEXPORT StatsObserverRffi*
Rust_createStatsObserver(void* stats_observer_borrowed,
                         const StatsObserverCallbacks* stats_observer_cbs_borrowed) {
  return take_rc(rtc::make_ref_counted<StatsObserverRffi>(stats_observer_borrowed, stats_observer_cbs_borrowed));
}

} // namespace rffi
} // namespace webrtc
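// Since StatsObserverRffi is an RTCStatsCollectorCallback, the owned RC returned by
// Rust_createStatsObserver can be handed to PeerConnectionInterface::GetStats; the actual
// call site lives elsewhere in the rffi sources, so the sketch below is hypothetical.
//
//   auto* stats_observer_owned_rc =
//       webrtc::rffi::Rust_createStatsObserver(rust_observer_ptr, &stats_callbacks);
//   pc_borrowed_rc->GetStats(stats_observer_owned_rc);  // OnStatsDelivered fires later
//   webrtc::rffi::Rust_decRc(stats_observer_owned_rc);  // release when no longer needed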
43
ringrtc/rffi/src/stats_observer.h
Normal file
@@ -0,0 +1,43 @@
/*
 * Copyright 2019-2021 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

#ifndef RFFI_STATS_OBSERVER_H__
#define RFFI_STATS_OBSERVER_H__

#include "api/peer_connection_interface.h"

namespace webrtc {
namespace rffi {

/**
 * Adapter between the C++ RTCStatsCollectorCallback interface
 * and Rust. Wraps an instance of the Rust interface and dispatches
 * C++ callbacks to Rust.
 */

class StatsObserverRffi : public RTCStatsCollectorCallback {
 public:
  // Passed-in observer must live as long as the StatsObserverRffi.
  StatsObserverRffi(void* stats_observer_borrowed,
                    const StatsObserverCallbacks* stats_observer_cbs_borrowed);
  ~StatsObserverRffi() override;

 protected:
  void OnStatsDelivered(const rtc::scoped_refptr<const RTCStatsReport>& report) override;

 private:
  void* stats_observer_;
  StatsObserverCallbacks stats_observer_cbs_;

  std::vector<AudioSenderStatistics> audio_sender_statistics_;
  std::vector<VideoSenderStatistics> video_sender_statistics_;
  std::vector<AudioReceiverStatistics> audio_receiver_statistics_;
  std::vector<VideoReceiverStatistics> video_receiver_statistics_;
};

} // namespace rffi
} // namespace webrtc

#endif /* RFFI_STATS_OBSERVER_H__ */