Add support for simulcast with VP8 from the caller in PC-level quality tests.

Add support for negotiating simulcast offers/answers. Also fix some minor
issues along the way to make it all work.

Bug: webrtc:10138
Change-Id: I382f5df04ca6ac04d8ed1e030e7b2ae5706dd10c
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/137425
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Florent Castelli <orphis@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Amit Hilbuch <amithi@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#28274}
Artem Titov 2019-06-13 16:36:52 +02:00 committed by Commit Bot
parent 6751260241
commit ef3fd9c8ad
16 changed files with 902 additions and 165 deletions

View file

@ -113,6 +113,31 @@ class PeerConnectionE2EQualityTestFixture {
enum VideoGeneratorType { kDefault, kI420A, kI010 };
struct VideoSimulcastConfig {
VideoSimulcastConfig(int simulcast_streams_count, int target_spatial_index)
: simulcast_streams_count(simulcast_streams_count),
target_spatial_index(target_spatial_index) {
RTC_CHECK_GT(simulcast_streams_count, 1);
RTC_CHECK_GE(target_spatial_index, 0);
RTC_CHECK_LT(target_spatial_index, simulcast_streams_count);
}
// Specifies the number of simulcast streams or SVC layers, depending on
// which encoder is used.
int simulcast_streams_count;
// Specifies the spatial index of the video stream to analyze.
// There are 2 cases:
// 1. A simulcast encoder is used:
// in this case |target_spatial_index| specifies the index of the
// simulcast stream that should be analyzed. Other streams will be
// dropped.
// 2. An SVC encoder is used:
// in this case |target_spatial_index| specifies the top spatial layer of
// interest; that layer and all layers below it will be processed. All
// layers above it will be dropped.
int target_spatial_index;
};
// Contains properties of a single video stream.
struct VideoConfig {
VideoConfig(size_t width, size_t height, int32_t fps)
@ -136,19 +161,13 @@ class PeerConnectionE2EQualityTestFixture {
absl::optional<std::string> input_file_name;
// If specified, a screen share video stream will be created as input.
absl::optional<ScreenShareConfig> screen_share_config;
// Specifies spatial index of the video stream to analyze.
// There are 3 cases:
// 1. |target_spatial_index| omitted: in this case it will be assumed that
// the video stream has no spatial layers or simulcast streams.
// 2. |target_spatial_index| presented and a simulcast encoder is used:
// in this case |target_spatial_index| will specify the index of the
// simulcast stream that should be analyzed. Other streams will be
// dropped.
// 3. |target_spatial_index| presented and an SVC encoder is used:
// in this case |target_spatial_index| will specify the top interesting
// spatial layer and all layers below, including the target one, will be
// processed. All layers above the target one will be dropped.
absl::optional<int> target_spatial_index;
// If presented, video will be transferred in simulcast/SVC mode, depending
// on which encoder is used.
//
// Simulcast is supported only from the first added peer and, for now, only
// for the VP8 encoder. Also, RTX is not supported with simulcast and will
// be automatically disabled for tracks with simulcast.
absl::optional<VideoSimulcastConfig> simulcast_config;
// If specified, the input stream will also be copied to the specified file.
// It is effectively one of the test's output files: it contains a copy of
// what was captured during the test for this video stream on the sender
// side.
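
For illustration, a minimal sketch of how a test can request simulcast through this config (mirroring the smoke test below; |alice| is the PeerConfigurer passed to AddPeer(), and the label is arbitrary):

// Send 3 simulcast streams and analyze the lowest one (spatial index 0).
PeerConnectionE2EQualityTestFixture::VideoConfig video(1280, 720, 30);
video.stream_label = "alice-simulcast";
video.simulcast_config =
    PeerConnectionE2EQualityTestFixture::VideoSimulcastConfig(3, 0);
alice->AddVideoConfig(std::move(video));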

View file

@ -57,5 +57,9 @@ specific_include_rules = {
],
".*peer_connection_quality_test\.(h|cc)": [
"+pc",
],
".*sdp_changer\.(h|cc)": [
"+pc",
"+p2p",
]
}

View file

@ -118,6 +118,19 @@ rtc_source_set("id_generator") {
deps = []
}
rtc_source_set("simulcast_dummy_buffer_helper") {
visibility = [ "*" ]
testonly = true
sources = [
"analyzer/video/simulcast_dummy_buffer_helper.cc",
"analyzer/video/simulcast_dummy_buffer_helper.h",
]
deps = [
"../../../api/video:video_frame",
"../../../api/video:video_frame_i420",
]
}
rtc_source_set("quality_analyzing_video_decoder") {
visibility = [ "*" ]
testonly = true
@ -128,6 +141,7 @@ rtc_source_set("quality_analyzing_video_decoder") {
deps = [
":encoded_image_data_injector_api",
":id_generator",
":simulcast_dummy_buffer_helper",
"../../../api:video_quality_analyzer_api",
"../../../api/video:encoded_image",
"../../../api/video:video_frame",
@ -177,6 +191,7 @@ if (rtc_include_tests) {
":id_generator",
":quality_analyzing_video_decoder",
":quality_analyzing_video_encoder",
":simulcast_dummy_buffer_helper",
"../../../api:stats_observer_interface",
"../../../api:video_quality_analyzer_api",
"../../../api/video:video_frame",
@ -198,7 +213,6 @@ if (rtc_include_tests) {
deps = [
":peer_connection_quality_test_params",
":video_quality_analyzer_injection_helper",
"../../../api:array_view",
"../../../api:peer_connection_quality_test_fixture_api",
"../../../api:scoped_refptr",
"../../../api/rtc_event_log:rtc_event_log_factory",
@ -472,9 +486,15 @@ rtc_source_set("sdp_changer") {
"sdp/sdp_changer.h",
]
deps = [
"../../../api:array_view",
"../../../api:libjingle_peerconnection_api",
"../../../media:rtc_media_base",
"../../../p2p:rtc_p2p",
"../../../pc:peerconnection",
"../../../pc:rtc_pc_base",
"../../../rtc_base:stringutils",
"//third_party/abseil-cpp/absl/memory:memory",
"//third_party/abseil-cpp/absl/strings:strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}

View file

@ -19,15 +19,10 @@
#include "api/video/i420_buffer.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "rtc_base/logging.h"
#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
constexpr size_t kIrrelatedSimulcastStreamFrameWidth = 320;
constexpr size_t kIrrelatedSimulcastStreamFrameHeight = 480;
} // namespace
QualityAnalyzingVideoDecoder::QualityAnalyzingVideoDecoder(
int id,
@ -178,33 +173,26 @@ void QualityAnalyzingVideoDecoder::DecoderCallback::Decoded(
int32_t
QualityAnalyzingVideoDecoder::DecoderCallback::IrrelevantSimulcastStreamDecoded(
uint16_t frame_id,
int64_t timestamp_ms) {
webrtc::VideoFrame black_frame =
uint32_t timestamp_ms) {
webrtc::VideoFrame dummy_frame =
webrtc::VideoFrame::Builder()
.set_video_frame_buffer(
GetBlackFrameBuffer(kIrrelatedSimulcastStreamFrameWidth,
kIrrelatedSimulcastStreamFrameHeight))
.set_timestamp_ms(timestamp_ms)
.set_video_frame_buffer(GetDummyFrameBuffer())
.set_timestamp_rtp(timestamp_ms)
.set_id(frame_id)
.build();
rtc::CritScope crit(&callback_lock_);
RTC_DCHECK(delegate_callback_);
return delegate_callback_->Decoded(black_frame);
delegate_callback_->Decoded(dummy_frame, absl::nullopt, absl::nullopt);
return WEBRTC_VIDEO_CODEC_OK;
}
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
QualityAnalyzingVideoDecoder::DecoderCallback::GetBlackFrameBuffer(int width,
int height) {
if (!black_frame_buffer_ || black_frame_buffer_->width() != width ||
black_frame_buffer_->height() != height) {
// Use i420 buffer here as default one and supported by all codecs.
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
webrtc::I420Buffer::Create(width, height);
webrtc::I420Buffer::SetBlack(buffer.get());
black_frame_buffer_ = buffer;
QualityAnalyzingVideoDecoder::DecoderCallback::GetDummyFrameBuffer() {
if (!dummy_frame_buffer_) {
dummy_frame_buffer_ = CreateDummyFrameBuffer();
}
return black_frame_buffer_;
return dummy_frame_buffer_;
}
void QualityAnalyzingVideoDecoder::OnFrameDecoded(

View file

@ -86,16 +86,14 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder {
absl::optional<uint8_t> qp) override;
int32_t IrrelevantSimulcastStreamDecoded(uint16_t frame_id,
int64_t timestamp_ms);
uint32_t timestamp_ms);
private:
rtc::scoped_refptr<webrtc::VideoFrameBuffer> GetBlackFrameBuffer(
int width,
int height);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> GetDummyFrameBuffer();
QualityAnalyzingVideoDecoder* const decoder_;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> black_frame_buffer_;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> dummy_frame_buffer_;
rtc::CriticalSection callback_lock_;
DecodedImageCallback* delegate_callback_ RTC_GUARDED_BY(callback_lock_);

View file

@ -0,0 +1,57 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
constexpr char kIrrelatedSimulcastStreamFrameData[] = "Dummy!";
} // namespace
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateDummyFrameBuffer() {
// Use an I420 buffer here: it is the default and is supported by all codecs.
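// A 2x2 I420 buffer holds exactly 6 bytes (4 Y + 1 U + 1 V), which are
// filled below with the 6 marker bytes of "Dummy!" so that
// IsDummyFrameBuffer() can recognize such a frame later.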
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
webrtc::I420Buffer::Create(2, 2);
memcpy(buffer->MutableDataY(), kIrrelatedSimulcastStreamFrameData, 2);
memcpy(buffer->MutableDataY() + buffer->StrideY(),
kIrrelatedSimulcastStreamFrameData + 2, 2);
memcpy(buffer->MutableDataU(), kIrrelatedSimulcastStreamFrameData + 4, 1);
memcpy(buffer->MutableDataV(), kIrrelatedSimulcastStreamFrameData + 5, 1);
return buffer;
}
bool IsDummyFrameBuffer(
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer) {
rtc::scoped_refptr<webrtc::I420BufferInterface> buffer =
video_frame_buffer->ToI420();
if (buffer->width() != 2 || buffer->height() != 2) {
return false;
}
if (memcmp(buffer->DataY(), kIrrelatedSimulcastStreamFrameData, 2) != 0) {
return false;
}
if (memcmp(buffer->DataY() + buffer->StrideY(),
kIrrelatedSimulcastStreamFrameData + 2, 2) != 0) {
return false;
}
if (memcmp(buffer->DataU(), kIrrelatedSimulcastStreamFrameData + 4, 1) != 0) {
return false;
}
if (memcmp(buffer->DataV(), kIrrelatedSimulcastStreamFrameData + 5, 1) != 0) {
return false;
}
return true;
}
} // namespace webrtc_pc_e2e
} // namespace webrtc
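
As a quick sanity sketch (hypothetical test code, not part of this change), a marker buffer is recognized while an ordinary frame is not:

rtc::scoped_refptr<webrtc::VideoFrameBuffer> dummy = CreateDummyFrameBuffer();
RTC_CHECK(IsDummyFrameBuffer(dummy));
rtc::scoped_refptr<webrtc::I420Buffer> black = webrtc::I420Buffer::Create(2, 2);
webrtc::I420Buffer::SetBlack(black.get());
RTC_CHECK(!IsDummyFrameBuffer(black));  // A black 2x2 frame doesn't match.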

View file

@ -0,0 +1,28 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TEST_PC_E2E_ANALYZER_VIDEO_SIMULCAST_DUMMY_BUFFER_HELPER_H_
#define TEST_PC_E2E_ANALYZER_VIDEO_SIMULCAST_DUMMY_BUFFER_HELPER_H_
#include "api/video/i420_buffer.h"
#include "api/video/video_frame_buffer.h"
namespace webrtc {
namespace webrtc_pc_e2e {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateDummyFrameBuffer();
bool IsDummyFrameBuffer(
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer);
} // namespace webrtc_pc_e2e
} // namespace webrtc
#endif // TEST_PC_E2E_ANALYZER_VIDEO_SIMULCAST_DUMMY_BUFFER_HELPER_H_

View file

@ -15,6 +15,7 @@
#include "absl/memory/memory.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@ -71,6 +72,10 @@ class AnalyzingVideoSink : public rtc::VideoSinkInterface<VideoFrame> {
~AnalyzingVideoSink() override = default;
void OnFrame(const VideoFrame& frame) override {
if (IsDummyFrameBuffer(frame.video_frame_buffer()->ToI420())) {
// This is a dummy frame injected by QualityAnalyzingVideoDecoder for an
// irrelevant simulcast stream, so we don't need to process it further.
return;
}
analyzer_->OnFrameRendered(frame);
if (video_writer_) {
bool result = video_writer_->WriteFrame(frame);

View file

@ -62,8 +62,10 @@ void NetworkQualityMetricsReporter::ReportStats(
ReportResult("bytes_sent", network_label, stats.bytes_sent.bytes(),
"sizeInBytes");
ReportResult("packets_sent", network_label, stats.packets_sent, "unitless");
ReportResult("average_send_rate", network_label,
stats.AverageSendRate().bytes_per_sec(), "bytesPerSecond");
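// AverageSendRate() is only well-defined once at least two packets have
// been sent (computing a rate needs an interval between packets), so
// report 0 otherwise.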
ReportResult(
"average_send_rate", network_label,
stats.packets_sent >= 2 ? stats.AverageSendRate().bytes_per_sec() : 0,
"bytesPerSecond");
ReportResult("bytes_dropped", network_label, stats.bytes_dropped.bytes(),
"sizeInBytes");
ReportResult("packets_dropped", network_label, stats.packets_dropped,
@ -73,7 +75,10 @@ void NetworkQualityMetricsReporter::ReportStats(
ReportResult("packets_received", network_label, stats.packets_received,
"unitless");
ReportResult("average_receive_rate", network_label,
stats.AverageReceiveRate().bytes_per_sec(), "bytesPerSecond");
stats.packets_received >= 2
? stats.AverageReceiveRate().bytes_per_sec()
: 0,
"bytesPerSecond");
ReportResult("sent_packets_loss", network_label, packet_loss, "unitless");
}

View file

@ -25,14 +25,10 @@
namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
#define MAYBE_RunWithEmulatedNetwork DISABLED_RunWithEmulatedNetwork
#else
#define MAYBE_RunWithEmulatedNetwork RunWithEmulatedNetwork
#endif
TEST(PeerConnectionE2EQualityTestSmokeTest, MAYBE_RunWithEmulatedNetwork) {
class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test {
public:
using PeerConfigurer = PeerConnectionE2EQualityTestFixture::PeerConfigurer;
using RunParams = PeerConnectionE2EQualityTestFixture::RunParams;
using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig;
@ -40,73 +36,120 @@ TEST(PeerConnectionE2EQualityTestSmokeTest, MAYBE_RunWithEmulatedNetwork) {
using ScreenShareConfig =
PeerConnectionE2EQualityTestFixture::ScreenShareConfig;
using ScrollingParams = PeerConnectionE2EQualityTestFixture::ScrollingParams;
using VideoSimulcastConfig =
PeerConnectionE2EQualityTestFixture::VideoSimulcastConfig;
// Setup emulated network
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
void RunTest(const std::string& test_case_name,
const RunParams& run_params,
rtc::FunctionView<void(PeerConfigurer*)> alice_configurer,
rtc::FunctionView<void(PeerConfigurer*)> bob_configurer) {
// Setup emulated network
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto alice_network_behavior =
absl::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig());
SimulatedNetwork* alice_network_behavior_ptr = alice_network_behavior.get();
EmulatedNetworkNode* alice_node =
network_emulation_manager->CreateEmulatedNode(
std::move(alice_network_behavior));
EmulatedNetworkNode* bob_node = network_emulation_manager->CreateEmulatedNode(
absl::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
EmulatedEndpoint* alice_endpoint =
network_emulation_manager->CreateEndpoint(EmulatedEndpointConfig());
EmulatedEndpoint* bob_endpoint =
network_emulation_manager->CreateEndpoint(EmulatedEndpointConfig());
network_emulation_manager->CreateRoute(alice_endpoint, {alice_node},
bob_endpoint);
network_emulation_manager->CreateRoute(bob_endpoint, {bob_node},
alice_endpoint);
auto alice_network_behavior =
absl::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig());
SimulatedNetwork* alice_network_behavior_ptr = alice_network_behavior.get();
EmulatedNetworkNode* alice_node =
network_emulation_manager->CreateEmulatedNode(
std::move(alice_network_behavior));
EmulatedNetworkNode* bob_node =
network_emulation_manager->CreateEmulatedNode(
absl::make_unique<SimulatedNetwork>(
BuiltInNetworkBehaviorConfig()));
auto* alice_endpoint =
network_emulation_manager->CreateEndpoint(EmulatedEndpointConfig());
EmulatedEndpoint* bob_endpoint =
network_emulation_manager->CreateEndpoint(EmulatedEndpointConfig());
network_emulation_manager->CreateRoute(alice_endpoint, {alice_node},
bob_endpoint);
network_emulation_manager->CreateRoute(bob_endpoint, {bob_node},
alice_endpoint);
// Create analyzers.
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer =
absl::make_unique<DefaultVideoQualityAnalyzer>();
// This is only done for the sake of smoke testing. In general there should
// be no need to explicitly pull data from analyzers after the run.
auto* video_analyzer_ptr =
static_cast<DefaultVideoQualityAnalyzer*>(video_quality_analyzer.get());
// Create analyzers.
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer =
absl::make_unique<DefaultVideoQualityAnalyzer>();
// This is only done for the sake of smoke testing. In general there should
// be no need to explicitly pull data from analyzers after the run.
auto* video_analyzer_ptr =
static_cast<DefaultVideoQualityAnalyzer*>(video_quality_analyzer.get());
std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer =
absl::make_unique<DefaultAudioQualityAnalyzer>();
auto fixture = CreatePeerConnectionE2EQualityTestFixture(
test_case_name, /*audio_quality_analyzer=*/nullptr,
std::move(video_quality_analyzer));
fixture->ExecuteAt(TimeDelta::seconds(2),
[alice_network_behavior_ptr](TimeDelta) {
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
alice_network_behavior_ptr->SetConfig(config);
});
auto fixture = CreatePeerConnectionE2EQualityTestFixture(
"smoke_test", std::move(audio_quality_analyzer),
std::move(video_quality_analyzer));
fixture->ExecuteAt(TimeDelta::seconds(2),
[alice_network_behavior_ptr](TimeDelta) {
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
alice_network_behavior_ptr->SetConfig(config);
});
// Setup components. We need to provide rtc::NetworkManager compatible with
// emulated network layer.
EmulatedNetworkManagerInterface* alice_network =
network_emulation_manager->CreateEmulatedNetworkManagerInterface(
{alice_endpoint});
EmulatedNetworkManagerInterface* bob_network =
network_emulation_manager->CreateEmulatedNetworkManagerInterface(
{bob_endpoint});
// Setup components. We need to provide rtc::NetworkManager compatible with
// emulated network layer.
EmulatedNetworkManagerInterface* alice_network =
network_emulation_manager->CreateEmulatedNetworkManagerInterface(
{alice_endpoint});
fixture->AddPeer(alice_network->network_thread(),
alice_network->network_manager(), [](PeerConfigurer* alice) {
VideoConfig video(640, 360, 30);
video.stream_label = "alice-video";
alice->AddVideoConfig(std::move(video));
fixture->AddPeer(alice_network->network_thread(),
alice_network->network_manager(), alice_configurer);
fixture->AddPeer(bob_network->network_thread(),
bob_network->network_manager(), bob_configurer);
fixture->AddQualityMetricsReporter(
absl::make_unique<NetworkQualityMetricsReporter>(alice_network,
bob_network));
AudioConfig audio;
audio.stream_label = "alice-audio";
audio.mode = AudioConfig::Mode::kFile;
audio.input_file_name = test::ResourcePath(
"pc_quality_smoke_test_alice_source", "wav");
alice->SetAudioConfig(std::move(audio));
});
fixture->Run(run_params);
EmulatedNetworkManagerInterface* bob_network =
network_emulation_manager->CreateEmulatedNetworkManagerInterface(
{bob_endpoint});
fixture->AddPeer(
bob_network->network_thread(), bob_network->network_manager(),
EXPECT_GE(fixture->GetRealTestDuration(), run_params.run_duration);
for (auto stream_label : video_analyzer_ptr->GetKnownVideoStreams()) {
FrameCounters stream_counters =
video_analyzer_ptr->GetPerStreamCounters().at(stream_label);
// 150 = 30 fps * 5 s. On some devices the pipeline can be too slow, so
// frames can get stuck in the middle of it. Because of that we can't
// enforce strict constraints here, so just check that at least one
// frame passed through the whole pipeline.
EXPECT_GE(stream_counters.captured, 150);
EXPECT_GE(stream_counters.pre_encoded, 1);
EXPECT_GE(stream_counters.encoded, 1);
EXPECT_GE(stream_counters.received, 1);
EXPECT_GE(stream_counters.decoded, 1);
EXPECT_GE(stream_counters.rendered, 1);
}
}
};
} // namespace
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
#define MAYBE_Smoke DISABLED_Smoke
#else
#define MAYBE_Smoke Smoke
#endif
TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
RunParams run_params(TimeDelta::seconds(7));
run_params.video_codec_name = cricket::kVp9CodecName;
run_params.video_codec_required_params = {{"profile-id", "0"}};
run_params.use_flex_fec = true;
run_params.use_ulp_fec = true;
run_params.video_encoder_bitrate_multiplier = 1.1;
RunTest(
"smoke", run_params,
[](PeerConfigurer* alice) {
VideoConfig video(640, 360, 30);
video.stream_label = "alice-video";
alice->AddVideoConfig(std::move(video));
AudioConfig audio;
audio.stream_label = "alice-audio";
audio.mode = AudioConfig::Mode::kFile;
audio.input_file_name =
test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
alice->SetAudioConfig(std::move(audio));
},
[](PeerConfigurer* bob) {
VideoConfig video(640, 360, 30);
video.stream_label = "bob-video";
@ -127,34 +170,44 @@ TEST(PeerConnectionE2EQualityTestSmokeTest, MAYBE_RunWithEmulatedNetwork) {
test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
bob->SetAudioConfig(std::move(audio));
});
}
fixture->AddQualityMetricsReporter(
absl::make_unique<NetworkQualityMetricsReporter>(alice_network,
bob_network));
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
#define MAYBE_Simulcast DISABLED_Simulcast
#else
#define MAYBE_Simulcast Simulcast
#endif
TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) {
RunParams run_params(TimeDelta::seconds(7));
run_params.video_codec_name = cricket::kVp9CodecName;
run_params.video_codec_required_params = {{"profile-id", "0"}};
run_params.use_flex_fec = true;
run_params.use_ulp_fec = true;
run_params.video_encoder_bitrate_multiplier = 1.1;
fixture->Run(run_params);
run_params.video_codec_name = cricket::kVp8CodecName;
RunTest(
"simulcast", run_params,
[](PeerConfigurer* alice) {
VideoConfig simulcast(1280, 720, 30);
simulcast.stream_label = "alice-simulcast";
simulcast.simulcast_config = VideoSimulcastConfig(3, 0);
alice->AddVideoConfig(std::move(simulcast));
EXPECT_GE(fixture->GetRealTestDuration(), run_params.run_duration);
for (auto stream_label : video_analyzer_ptr->GetKnownVideoStreams()) {
FrameCounters stream_counters =
video_analyzer_ptr->GetPerStreamCounters().at(stream_label);
// 150 = 30 fps * 5 s. On some devices the pipeline can be too slow, so
// frames can get stuck in the middle of it. Because of that we can't
// enforce strict constraints here, so just check that at least one frame
// passed through the whole pipeline.
EXPECT_GE(stream_counters.captured, 150);
EXPECT_GE(stream_counters.pre_encoded, 1);
EXPECT_GE(stream_counters.encoded, 1);
EXPECT_GE(stream_counters.received, 1);
EXPECT_GE(stream_counters.decoded, 1);
EXPECT_GE(stream_counters.rendered, 1);
}
AudioConfig audio;
audio.stream_label = "alice-audio";
audio.mode = AudioConfig::Mode::kFile;
audio.input_file_name =
test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
alice->SetAudioConfig(std::move(audio));
},
[](PeerConfigurer* bob) {
VideoConfig video(640, 360, 30);
video.stream_label = "bob-video";
bob->AddVideoConfig(std::move(video));
AudioConfig audio;
audio.stream_label = "bob-audio";
audio.mode = AudioConfig::Mode::kFile;
audio.input_file_name =
test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
bob->SetAudioConfig(std::move(audio));
});
}
} // namespace webrtc_pc_e2e

View file

@ -14,6 +14,7 @@
#include <utility>
#include "absl/memory/memory.h"
#include "api/jsep.h"
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log_output_file.h"
@ -31,7 +32,6 @@
#include "system_wrappers/include/field_trial.h"
#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
#include "test/pc/e2e/sdp/sdp_changer.h"
#include "test/pc/e2e/stats_poller.h"
#include "test/testsupport/file_utils.h"
@ -460,7 +460,8 @@ void PeerConnectionE2EQualityTest::ValidateParams(const RunParams& run_params,
std::set<std::string> audio_labels;
int media_streams_count = 0;
for (Params* p : params) {
for (size_t i = 0; i < params.size(); ++i) {
Params* p = params[i];
if (p->audio_config) {
media_streams_count++;
}
@ -516,6 +517,13 @@ void PeerConnectionE2EQualityTest::ValidateParams(const RunParams& run_params,
video_config.height);
}
}
if (video_config.simulcast_config) {
// We support simulcast only for VP8 for now.
// RTC_CHECK_EQ(run_params.video_codec_name, cricket::kVp8CodecName);
// Also, we support simulcast only from the caller.
RTC_CHECK_EQ(i, 0)
<< "Only simulcast stream from first peer is supported";
}
}
if (p->audio_config) {
bool inserted =
@ -589,16 +597,37 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread(
// forbidden to add new media sections in answer in Unified Plan.
RtpTransceiverInit receive_only_transceiver_init;
receive_only_transceiver_init.direction = RtpTransceiverDirection::kRecvOnly;
int alice_transceivers_counter = 0;
if (bob_->params()->audio_config) {
// Set up a receive audio transceiver if Bob has audio to send. If we need
// multiple audio streams, we will need a transceiver for each of Bob's
// audio streams.
alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_AUDIO,
receive_only_transceiver_init);
alice_transceivers_counter++;
}
for (auto& video_config : alice_->params()->video_configs) {
if (video_config.simulcast_config) {
RtpTransceiverInit transceiver_params;
transceiver_params.direction = RtpTransceiverDirection::kSendOnly;
for (int i = 0;
i < video_config.simulcast_config->simulcast_streams_count; ++i) {
RtpEncodingParameters enc_params;
// We need to be sure that all rids are unique across all mids.
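// E.g. with transceiver counter 1 and stream index 2 the rid is "10002".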
enc_params.rid = std::to_string(alice_transceivers_counter) + "000" +
std::to_string(i);
transceiver_params.send_encodings.push_back(enc_params);
}
alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_VIDEO,
transceiver_params);
alice_transceivers_counter++;
}
}
for (size_t i = 0; i < bob_->params()->video_configs.size(); ++i) {
alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_VIDEO,
receive_only_transceiver_init);
alice_transceivers_counter++;
}
// Then add media for Alice and Bob
alice_video_sources_ = MaybeAddMedia(alice_.get());
@ -751,38 +780,115 @@ void PeerConnectionE2EQualityTest::MaybeAddAudio(TestPeer* peer) {
void PeerConnectionE2EQualityTest::SetPeerCodecPreferences(
TestPeer* peer,
const RunParams& run_params) {
std::vector<RtpCodecCapability> video_capabilities = FilterCodecCapabilities(
run_params.video_codec_name, run_params.video_codec_required_params,
run_params.use_ulp_fec, run_params.use_flex_fec,
peer->pc_factory()
->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
.codecs);
std::vector<RtpCodecCapability> with_rtx_video_capabilities =
FilterCodecCapabilities(
run_params.video_codec_name, run_params.video_codec_required_params,
true, run_params.use_ulp_fec, run_params.use_flex_fec,
peer->pc_factory()
->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
.codecs);
std::vector<RtpCodecCapability> without_rtx_video_capabilities =
FilterCodecCapabilities(
run_params.video_codec_name, run_params.video_codec_required_params,
false, run_params.use_ulp_fec, run_params.use_flex_fec,
peer->pc_factory()
->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
.codecs);
// Set codecs for transceivers
for (auto transceiver : peer->pc()->GetTransceivers()) {
if (transceiver->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) {
transceiver->SetCodecPreferences(video_capabilities);
if (transceiver->sender()->init_send_encodings().size() > 1) {
// If the transceiver's sender has more than one send encoding, it has
// multiple simulcast streams, so we need to disable RTX on it.
transceiver->SetCodecPreferences(without_rtx_video_capabilities);
} else {
transceiver->SetCodecPreferences(with_rtx_video_capabilities);
}
}
}
}
void PeerConnectionE2EQualityTest::SetupCall() {
SignalingInterceptor signaling_interceptor;
// Connect peers.
ASSERT_TRUE(alice_->ExchangeOfferAnswerWith(bob_.get()));
ExchangeOfferAnswer(&signaling_interceptor);
// Do the SDP negotiation, and also exchange ice candidates.
ASSERT_EQ_WAIT(alice_->signaling_state(), PeerConnectionInterface::kStable,
kDefaultTimeoutMs);
ASSERT_TRUE_WAIT(alice_->IsIceGatheringDone(), kDefaultTimeoutMs);
ASSERT_TRUE_WAIT(bob_->IsIceGatheringDone(), kDefaultTimeoutMs);
// Connect ICE candidate pairs.
ASSERT_TRUE(bob_->AddIceCandidates(alice_->observer()->GetAllCandidates()));
ASSERT_TRUE(alice_->AddIceCandidates(bob_->observer()->GetAllCandidates()));
ExchangeIceCandidates(&signaling_interceptor);
// This means that ICE and DTLS are connected.
ASSERT_TRUE_WAIT(bob_->IsIceConnected(), kDefaultTimeoutMs);
ASSERT_TRUE_WAIT(alice_->IsIceConnected(), kDefaultTimeoutMs);
}
void PeerConnectionE2EQualityTest::ExchangeOfferAnswer(
SignalingInterceptor* signaling_interceptor) {
std::string log_output;
auto offer = alice_->CreateOffer();
RTC_CHECK(offer);
offer->ToString(&log_output);
RTC_LOG(INFO) << "Original offer: " << log_output;
LocalAndRemoteSdp patch_result =
signaling_interceptor->PatchOffer(std::move(offer));
patch_result.local_sdp->ToString(&log_output);
RTC_LOG(INFO) << "Offer to set as local description: " << log_output;
patch_result.remote_sdp->ToString(&log_output);
RTC_LOG(INFO) << "Offer to set as remote description: " << log_output;
bool set_local_offer =
alice_->SetLocalDescription(std::move(patch_result.local_sdp));
RTC_CHECK(set_local_offer);
bool set_remote_offer =
bob_->SetRemoteDescription(std::move(patch_result.remote_sdp));
RTC_CHECK(set_remote_offer);
auto answer = bob_->CreateAnswer();
RTC_CHECK(answer);
answer->ToString(&log_output);
RTC_LOG(INFO) << "Original answer: " << log_output;
patch_result = signaling_interceptor->PatchAnswer(std::move(answer));
patch_result.local_sdp->ToString(&log_output);
RTC_LOG(INFO) << "Answer to set as local description: " << log_output;
patch_result.remote_sdp->ToString(&log_output);
RTC_LOG(INFO) << "Answer to set as remote description: " << log_output;
bool set_local_answer =
bob_->SetLocalDescription(std::move(patch_result.local_sdp));
RTC_CHECK(set_local_answer);
bool set_remote_answer =
alice_->SetRemoteDescription(std::move(patch_result.remote_sdp));
RTC_CHECK(set_remote_answer);
}
void PeerConnectionE2EQualityTest::ExchangeIceCandidates(
SignalingInterceptor* signaling_interceptor) {
// Connect ICE candidate pairs.
std::vector<std::unique_ptr<IceCandidateInterface>> alice_candidates =
signaling_interceptor->PatchOffererIceCandidates(
alice_->observer()->GetAllCandidates());
for (auto& candidate : alice_candidates) {
std::string candidate_str;
RTC_CHECK(candidate->ToString(&candidate_str));
RTC_LOG(INFO) << "Alice ICE candidate(mid= " << candidate->sdp_mid()
<< "): " << candidate_str;
}
ASSERT_TRUE(bob_->AddIceCandidates(std::move(alice_candidates)));
std::vector<std::unique_ptr<IceCandidateInterface>> bob_candidates =
signaling_interceptor->PatchAnswererIceCandidates(
bob_->observer()->GetAllCandidates());
for (auto& candidate : bob_candidates) {
std::string candidate_str;
RTC_CHECK(candidate->ToString(&candidate_str));
RTC_LOG(INFO) << "Bob ICE candidate(mid= " << candidate->sdp_mid()
<< "): " << candidate_str;
}
ASSERT_TRUE(alice_->AddIceCandidates(std::move(bob_candidates)));
}
void PeerConnectionE2EQualityTest::StartVideo(
const std::vector<
rtc::scoped_refptr<FrameGeneratorCapturerVideoTrackSource>>& sources) {
@ -821,6 +927,7 @@ test::VideoFrameWriter* PeerConnectionE2EQualityTest::MaybeCreateVideoWriter(
if (!file_name) {
return nullptr;
}
// TODO(titovartem) create only one file writer for simulcast video track.
auto video_writer = absl::make_unique<test::VideoFrameWriter>(
file_name.value(), config.width, config.height, config.fps);
test::VideoFrameWriter* out = video_writer.get();

View file

@ -32,6 +32,7 @@
#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h"
#include "test/pc/e2e/analyzer_helper.h"
#include "test/pc/e2e/peer_connection_quality_test_params.h"
#include "test/pc/e2e/sdp/sdp_changer.h"
#include "test/pc/e2e/test_peer.h"
#include "test/testsupport/video_frame_writer.h"
@ -239,6 +240,8 @@ class PeerConnectionE2EQualityTest
void MaybeAddAudio(TestPeer* peer);
void SetPeerCodecPreferences(TestPeer* peer, const RunParams& run_params);
void SetupCall();
void ExchangeOfferAnswer(SignalingInterceptor* signaling_interceptor);
void ExchangeIceCandidates(SignalingInterceptor* signaling_interceptor);
void StartVideo(
const std::vector<
rtc::scoped_refptr<FrameGeneratorCapturerVideoTrackSource>>& sources);

View file

@ -12,7 +12,11 @@
#include <utility>
#include "absl/memory/memory.h"
#include "api/jsep_session_description.h"
#include "media/base/media_constants.h"
#include "p2p/base/p2p_constants.h"
#include "pc/sdp_utils.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
@ -33,8 +37,9 @@ std::string CodecRequiredParamsToString(
std::vector<RtpCodecCapability> FilterCodecCapabilities(
absl::string_view codec_name,
const std::map<std::string, std::string>& codec_required_params,
bool ulpfec,
bool flexfec,
bool use_rtx,
bool use_ulpfec,
bool use_flexfec,
std::vector<RtpCodecCapability> supported_codecs) {
std::vector<RtpCodecCapability> output_codecs;
// Find main requested codecs among supported and add them to output.
@ -66,13 +71,13 @@ std::vector<RtpCodecCapability> FilterCodecCapabilities(
// Add required FEC and RTX codecs to output.
for (auto& codec : supported_codecs) {
if (codec.name == cricket::kRtxCodecName) {
if (codec.name == cricket::kRtxCodecName && use_rtx) {
output_codecs.push_back(codec);
} else if (codec.name == cricket::kFlexfecCodecName && flexfec) {
} else if (codec.name == cricket::kFlexfecCodecName && use_flexfec) {
output_codecs.push_back(codec);
} else if ((codec.name == cricket::kRedCodecName ||
codec.name == cricket::kUlpfecCodecName) &&
ulpfec) {
use_ulpfec) {
// Red and ulpfec should be enabled or disabled together.
output_codecs.push_back(codec);
}
@ -80,5 +85,367 @@ std::vector<RtpCodecCapability> FilterCodecCapabilities(
return output_codecs;
}
// If the offer has no simulcast video sections, this does nothing.
//
// If the offer has simulcast video sections, this creates a
// SimulcastSectionInfo for each such section and puts it into |context_|.
void SignalingInterceptor::FillContext(SessionDescriptionInterface* offer) {
for (auto& content : offer->description()->contents()) {
context_.mids_order.push_back(content.mid());
cricket::MediaContentDescription* media_desc = content.media_description();
if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) {
continue;
}
if (media_desc->HasSimulcast()) {
// We support only simulcast sections that contain a single stream with rids.
RTC_CHECK_EQ(media_desc->mutable_streams().size(), 1);
RTC_CHECK(media_desc->mutable_streams()[0].has_rids());
// Create SimulcastSectionInfo for this video section.
SimulcastSectionInfo info(content.mid(), content.type,
media_desc->mutable_streams()[0].rids());
// Set new rids based on the created SimulcastSectionInfo.
std::vector<cricket::RidDescription> rids;
cricket::SimulcastDescription simulcast_description;
for (std::string& rid : info.rids) {
rids.emplace_back(rid, cricket::RidDirection::kSend);
simulcast_description.send_layers().AddLayer(
cricket::SimulcastLayer(rid, false));
}
media_desc->mutable_streams()[0].set_rids(rids);
media_desc->set_simulcast_description(simulcast_description);
info.simulcast_description = media_desc->simulcast_description();
for (auto extension : media_desc->rtp_header_extensions()) {
if (extension.uri == RtpExtension::kMidUri) {
info.mid_extension = extension;
} else if (extension.uri == RtpExtension::kRidUri) {
info.rid_extension = extension;
} else if (extension.uri == RtpExtension::kRepairedRidUri) {
info.rrid_extension = extension;
}
}
RTC_CHECK_NE(info.rid_extension.id, 0);
RTC_CHECK_NE(info.mid_extension.id, 0);
bool transport_description_found = false;
for (auto& transport_info : offer->description()->transport_infos()) {
if (transport_info.content_name == info.mid) {
info.transport_description = transport_info.description;
transport_description_found = true;
break;
}
}
RTC_CHECK(transport_description_found);
context_.AddSimulcastInfo(info);
}
}
}
LocalAndRemoteSdp SignalingInterceptor::PatchOffer(
std::unique_ptr<SessionDescriptionInterface> offer) {
FillContext(offer.get());
if (!context_.HasSimulcast()) {
auto offer_for_remote = CloneSessionDescription(offer.get());
return LocalAndRemoteSdp(std::move(offer), std::move(offer_for_remote));
}
// Clone the original offer description. We mustn't access the original
// description after this point.
std::unique_ptr<cricket::SessionDescription> desc =
offer->description()->Clone();
for (auto& info : context_.simulcast_infos) {
// For each simulcast section we have to:
// 1. Swap the MID and RID header extensions.
// 2. Remove RIDs from the streams and remove the SimulcastDescription.
// 3. Duplicate the media section for each RID.
cricket::ContentInfo* simulcast_content = desc->GetContentByName(info.mid);
// Now we need to prepare a common prototype for the "m=video" sections
// into which the single simulcast section will be converted. Do it before
// removing the content, because otherwise the description will be deleted.
std::unique_ptr<cricket::MediaContentDescription> prototype_media_desc =
absl::WrapUnique(simulcast_content->media_description()->Copy());
// Remove simulcast video section from offer.
RTC_CHECK(desc->RemoveContentByName(simulcast_content->mid()));
// Clear |simulcast_content|, because it now points to a removed object.
simulcast_content = nullptr;
// Swap the mid and rid extensions, so the remote peer will interpret rid
// as mid. Also remove the rid extension.
std::vector<webrtc::RtpExtension> extensions =
prototype_media_desc->rtp_header_extensions();
for (auto ext_it = extensions.begin(); ext_it != extensions.end();) {
if (ext_it->uri == RtpExtension::kRidUri) {
// We don't need the rid extension for the remote peer.
ext_it = extensions.erase(ext_it);
continue;
}
if (ext_it->uri == RtpExtension::kRepairedRidUri) {
// We don't support RTX in simulcast.
ext_it = extensions.erase(ext_it);
continue;
}
if (ext_it->uri == RtpExtension::kMidUri) {
ext_it->id = info.rid_extension.id;
}
++ext_it;
}
prototype_media_desc->ClearRtpHeaderExtensions();
prototype_media_desc->set_rtp_header_extensions(extensions);
// We support only a single stream inside a video section with simulcast.
RTC_CHECK_EQ(prototype_media_desc->mutable_streams().size(), 1);
// This stream must have rids.
RTC_CHECK(prototype_media_desc->mutable_streams()[0].has_rids());
// Remove rids and simulcast description from media description.
prototype_media_desc->mutable_streams()[0].set_rids({});
prototype_media_desc->set_simulcast_description(
cricket::SimulcastDescription());
// For each rid add separate video section.
for (std::string& rid : info.rids) {
desc->AddContent(rid, info.media_protocol_type,
prototype_media_desc->Clone());
}
}
// Now we need to add the BUNDLE group to have all media bundled together.
cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
for (auto& content : desc->contents()) {
bundle_group.AddContentName(content.mid());
}
if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) {
desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
}
desc->AddGroup(bundle_group);
// Update transport_infos to add TransportInfo for each new media section.
std::vector<cricket::TransportInfo> transport_infos = desc->transport_infos();
for (auto info_it = transport_infos.begin();
info_it != transport_infos.end();) {
if (context_.simulcast_infos_by_mid.find(info_it->content_name) !=
context_.simulcast_infos_by_mid.end()) {
// Remove transport infos that correspond to simulcast video sections.
info_it = transport_infos.erase(info_it);
} else {
++info_it;
}
}
for (auto& info : context_.simulcast_infos) {
for (auto& rid : info.rids) {
transport_infos.emplace_back(rid, info.transport_description);
}
}
desc->set_transport_infos(transport_infos);
// Create patched offer.
auto patched_offer =
absl::make_unique<JsepSessionDescription>(SdpType::kOffer);
patched_offer->Initialize(std::move(desc), offer->session_id(),
offer->session_version());
return LocalAndRemoteSdp(std::move(offer), std::move(patched_offer));
}
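
For illustration, a rough sketch of this transformation (abbreviated SDP; the rids "00000".."00002" are hypothetical values of the form generated in SetupCallOnSignalingThread()). The local offer keeps a single simulcast section:

m=video ...
a=mid:0
a=rid:00000 send
a=rid:00001 send
a=rid:00002 send
a=simulcast:send 00000;00001;00002

while the remote copy replaces it with one independent "m=video" section per rid (a=mid:00000, a=mid:00001, a=mid:00002), each carrying the MID header extension under the RID extension's id, so the remote peer demuxes every simulcast stream as a separate transceiver. PatchAnswer() below folds the corresponding answer sections back into a single simulcast section with receive layers.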
std::unique_ptr<cricket::SessionDescription>
SignalingInterceptor::RestoreMediaSectionsOrder(
std::unique_ptr<cricket::SessionDescription> source) {
std::unique_ptr<cricket::SessionDescription> out = source->Clone();
for (auto& mid : context_.mids_order) {
RTC_CHECK(out->RemoveContentByName(mid));
}
RTC_CHECK_EQ(out->contents().size(), 0);
for (auto& mid : context_.mids_order) {
cricket::ContentInfo* content = source->GetContentByName(mid);
RTC_CHECK(content);
out->AddContent(mid, content->type, content->media_description()->Clone());
}
return out;
}
LocalAndRemoteSdp SignalingInterceptor::PatchAnswer(
std::unique_ptr<SessionDescriptionInterface> answer) {
if (!context_.HasSimulcast()) {
auto answer_for_remote = CloneSessionDescription(answer.get());
return LocalAndRemoteSdp(std::move(answer), std::move(answer_for_remote));
}
std::unique_ptr<cricket::SessionDescription> desc =
answer->description()->Clone();
for (auto& info : context_.simulcast_infos) {
cricket::ContentInfo* simulcast_content =
desc->GetContentByName(info.rids[0]);
RTC_CHECK(simulcast_content);
// Get media description, which will be converted to simulcast answer.
std::unique_ptr<cricket::MediaContentDescription> media_desc =
simulcast_content->media_description()->Clone();
// Set |simulcast_content| to nullptr, because the content will be removed
// below, so the pointer would point to a deleted object.
simulcast_content = nullptr;
// Remove separate media sections for simulcast streams.
for (auto& rid : info.rids) {
RTC_CHECK(desc->RemoveContentByName(rid));
}
// Patch |media_desc| to make it a simulcast answer description.
// Restore the mid/rid RTP header extensions:
std::vector<webrtc::RtpExtension> extensions =
media_desc->rtp_header_extensions();
// First remove existing rid/mid header extensions.
for (auto ext_it = extensions.begin(); ext_it != extensions.end();) {
if (ext_it->uri == RtpExtension::kMidUri ||
ext_it->uri == RtpExtension::kRidUri ||
ext_it->uri == RtpExtension::kRepairedRidUri) {
ext_it = extensions.erase(ext_it);
continue;
}
++ext_it;
}
// Then add the right ones.
extensions.push_back(info.mid_extension);
extensions.push_back(info.rid_extension);
// extensions.push_back(info.rrid_extension);
media_desc->ClearRtpHeaderExtensions();
media_desc->set_rtp_header_extensions(extensions);
// Add StreamParams with rids for receive.
RTC_CHECK_EQ(media_desc->mutable_streams().size(), 0);
std::vector<cricket::RidDescription> rids;
for (auto& rid : info.rids) {
rids.emplace_back(rid, cricket::RidDirection::kReceive);
}
cricket::StreamParams stream_params;
stream_params.set_rids(rids);
media_desc->mutable_streams().push_back(stream_params);
// Restore the SimulcastDescription. It should correspond to the one from
// the offer, but it has to have receive layers instead of send layers, so
// we need to put the send layers from the offer into the receive layers of
// the answer.
cricket::SimulcastDescription simulcast_description;
for (auto layer : info.simulcast_description.send_layers()) {
simulcast_description.receive_layers().AddLayerWithAlternatives(layer);
}
media_desc->set_simulcast_description(simulcast_description);
// Add simulcast media section.
desc->AddContent(info.mid, info.media_protocol_type, std::move(media_desc));
}
desc = RestoreMediaSectionsOrder(std::move(desc));
// Now we need to add the BUNDLE group to have all media bundled together.
cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
for (auto& content : desc->contents()) {
bundle_group.AddContentName(content.mid());
}
if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) {
desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
}
desc->AddGroup(bundle_group);
// Fix transport_infos: it has to have a single info for the simulcast section.
std::vector<cricket::TransportInfo> transport_infos = desc->transport_infos();
std::map<std::string, cricket::TransportDescription>
mid_to_transport_description;
for (auto info_it = transport_infos.begin();
info_it != transport_infos.end();) {
auto it = context_.simulcast_infos_by_rid.find(info_it->content_name);
if (it != context_.simulcast_infos_by_rid.end()) {
// This transport info corresponds to an extra added media section.
mid_to_transport_description.insert(
{it->second->mid, info_it->description});
info_it = transport_infos.erase(info_it);
} else {
++info_it;
}
}
for (auto& info : context_.simulcast_infos) {
transport_infos.emplace_back(info.mid,
mid_to_transport_description.at(info.mid));
}
desc->set_transport_infos(transport_infos);
auto patched_answer =
absl::make_unique<JsepSessionDescription>(SdpType::kAnswer);
patched_answer->Initialize(std::move(desc), answer->session_id(),
answer->session_version());
return LocalAndRemoteSdp(std::move(answer), std::move(patched_answer));
}
std::vector<std::unique_ptr<IceCandidateInterface>>
SignalingInterceptor::PatchOffererIceCandidates(
rtc::ArrayView<const IceCandidateInterface* const> candidates) {
std::vector<std::unique_ptr<IceCandidateInterface>> out;
for (auto* candidate : candidates) {
auto simulcast_info_it =
context_.simulcast_infos_by_mid.find(candidate->sdp_mid());
if (simulcast_info_it != context_.simulcast_infos_by_mid.end()) {
// This is a candidate for the simulcast section, so it should be
// transformed into candidates for the replicated sections.
out.push_back(CreateIceCandidate(simulcast_info_it->second->rids[0], 0,
candidate->candidate()));
} else {
out.push_back(CreateIceCandidate(candidate->sdp_mid(),
candidate->sdp_mline_index(),
candidate->candidate()));
}
}
RTC_CHECK_GT(out.size(), 0);
return out;
}
std::vector<std::unique_ptr<IceCandidateInterface>>
SignalingInterceptor::PatchAnswererIceCandidates(
rtc::ArrayView<const IceCandidateInterface* const> candidates) {
std::vector<std::unique_ptr<IceCandidateInterface>> out;
for (auto* candidate : candidates) {
auto simulcast_info_it =
context_.simulcast_infos_by_rid.find(candidate->sdp_mid());
if (simulcast_info_it != context_.simulcast_infos_by_rid.end()) {
// This is a candidate for a replicated section created from the single
// simulcast section, so it should be transformed into a candidate for
// the simulcast section.
out.push_back(CreateIceCandidate(simulcast_info_it->second->mid, 0,
candidate->candidate()));
} else {
out.push_back(CreateIceCandidate(candidate->sdp_mid(),
candidate->sdp_mline_index(),
candidate->candidate()));
}
}
RTC_CHECK_GT(out.size(), 0);
return out;
}
SignalingInterceptor::SimulcastSectionInfo::SimulcastSectionInfo(
const std::string& mid,
cricket::MediaProtocolType media_protocol_type,
const std::vector<cricket::RidDescription>& rids_desc)
: mid(mid), media_protocol_type(media_protocol_type) {
for (auto& rid : rids_desc) {
rids.push_back(rid.rid);
}
}
void SignalingInterceptor::SignalingContext::AddSimulcastInfo(
const SimulcastSectionInfo& info) {
simulcast_infos.push_back(info);
bool inserted =
simulcast_infos_by_mid.insert({info.mid, &simulcast_infos.back()}).second;
RTC_CHECK(inserted);
for (auto& rid : info.rids) {
inserted =
simulcast_infos_by_rid.insert({rid, &simulcast_infos.back()}).second;
RTC_CHECK(inserted);
}
}
} // namespace webrtc_pc_e2e
} // namespace webrtc

View file

@ -16,7 +16,13 @@
#include <vector>
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/jsep.h"
#include "api/rtp_parameters.h"
#include "media/base/rid_description.h"
#include "pc/session_description.h"
#include "pc/simulcast_description.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@ -36,10 +42,79 @@ namespace webrtc_pc_e2e {
std::vector<RtpCodecCapability> FilterCodecCapabilities(
absl::string_view codec_name,
const std::map<std::string, std::string>& codec_required_params,
bool ulpfec,
bool flexfec,
bool use_rtx,
bool use_ulpfec,
bool use_flexfec,
std::vector<RtpCodecCapability> supported_codecs);
struct LocalAndRemoteSdp {
LocalAndRemoteSdp(std::unique_ptr<SessionDescriptionInterface> local_sdp,
std::unique_ptr<SessionDescriptionInterface> remote_sdp)
: local_sdp(std::move(local_sdp)), remote_sdp(std::move(remote_sdp)) {}
// The SDP that should be set as the local description on the peer that
// created it.
std::unique_ptr<SessionDescriptionInterface> local_sdp;
// The SDP that should be set as the remote description on the peer
// opposite to the one that created it.
std::unique_ptr<SessionDescriptionInterface> remote_sdp;
};
class SignalingInterceptor {
public:
LocalAndRemoteSdp PatchOffer(
std::unique_ptr<SessionDescriptionInterface> offer);
LocalAndRemoteSdp PatchAnswer(
std::unique_ptr<SessionDescriptionInterface> answer);
std::vector<std::unique_ptr<IceCandidateInterface>> PatchOffererIceCandidates(
rtc::ArrayView<const IceCandidateInterface* const> candidates);
std::vector<std::unique_ptr<IceCandidateInterface>>
PatchAnswererIceCandidates(
rtc::ArrayView<const IceCandidateInterface* const> candidates);
private:
// Contains the information about a simulcast section that is required to
// perform the modified offer/answer and ICE candidate exchange.
struct SimulcastSectionInfo {
SimulcastSectionInfo(const std::string& mid,
cricket::MediaProtocolType media_protocol_type,
const std::vector<cricket::RidDescription>& rids_desc);
const std::string mid;
const cricket::MediaProtocolType media_protocol_type;
std::vector<std::string> rids;
cricket::SimulcastDescription simulcast_description;
webrtc::RtpExtension mid_extension;
webrtc::RtpExtension rid_extension;
webrtc::RtpExtension rrid_extension;
cricket::TransportDescription transport_description;
};
struct SignalingContext {
SignalingContext() = default;
// SignalingContext is neither copyable nor movable.
SignalingContext(SignalingContext&) = delete;
SignalingContext& operator=(SignalingContext&) = delete;
SignalingContext(SignalingContext&&) = delete;
SignalingContext& operator=(SignalingContext&&) = delete;
void AddSimulcastInfo(const SimulcastSectionInfo& info);
bool HasSimulcast() const { return !simulcast_infos.empty(); }
std::vector<SimulcastSectionInfo> simulcast_infos;
std::map<std::string, SimulcastSectionInfo*> simulcast_infos_by_mid;
std::map<std::string, SimulcastSectionInfo*> simulcast_infos_by_rid;
std::vector<std::string> mids_order;
};
void FillContext(SessionDescriptionInterface* offer);
std::unique_ptr<cricket::SessionDescription> RestoreMediaSectionsOrder(
std::unique_ptr<cricket::SessionDescription> source);
SignalingContext context_;
};
} // namespace webrtc_pc_e2e
} // namespace webrtc
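
A condensed sketch of the intended call pattern (cf. ExchangeOfferAnswer() in peer_connection_quality_test.cc; |alice| and |bob| stand for TestPeer instances):

SignalingInterceptor interceptor;
LocalAndRemoteSdp offer = interceptor.PatchOffer(alice->CreateOffer());
RTC_CHECK(alice->SetLocalDescription(std::move(offer.local_sdp)));
RTC_CHECK(bob->SetRemoteDescription(std::move(offer.remote_sdp)));
LocalAndRemoteSdp answer = interceptor.PatchAnswer(bob->CreateAnswer());
RTC_CHECK(bob->SetLocalDescription(std::move(answer.local_sdp)));
RTC_CHECK(alice->SetRemoteDescription(std::move(answer.remote_sdp)));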

View file

@ -77,10 +77,14 @@ struct TestPeerComponents {
for (auto& video_config : params.video_configs) {
// Stream label should be set by fixture implementation here.
RTC_DCHECK(video_config.stream_label);
bool res = stream_required_spatial_index
.insert({*video_config.stream_label,
video_config.target_spatial_index})
.second;
bool res =
stream_required_spatial_index
.insert({*video_config.stream_label,
video_config.simulcast_config
? absl::optional<int>(video_config.simulcast_config
->target_spatial_index)
: absl::nullopt})
.second;
RTC_DCHECK(res) << "Duplicate video_config.stream_label="
<< *video_config.stream_label;
}
@ -314,16 +318,18 @@ std::unique_ptr<TestPeer> TestPeer::CreateTestPeer(
}
bool TestPeer::AddIceCandidates(
rtc::ArrayView<const IceCandidateInterface* const> candidates) {
std::vector<std::unique_ptr<IceCandidateInterface>> candidates) {
bool success = true;
for (const auto* candidate : candidates) {
if (!pc()->AddIceCandidate(candidate)) {
for (auto& candidate : candidates) {
if (!pc()->AddIceCandidate(candidate.get())) {
std::string candidate_str;
bool res = candidate->ToString(&candidate_str);
RTC_CHECK(res);
RTC_LOG(LS_ERROR) << "Failed to add ICE candidate, candidate_str="
<< candidate_str;
success = false;
} else {
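// Keep the successfully added candidate alive: AddIceCandidate() takes a
// raw pointer and does not assume ownership of it.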
remote_ice_candidates_.push_back(std::move(candidate));
}
}
return success;

View file

@ -13,9 +13,9 @@
#include <memory>
#include <string>
#include <vector>
#include "absl/memory/memory.h"
#include "api/array_view.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "media/base/media_engine.h"
#include "modules/audio_device/include/test_audio_device.h"
@ -63,7 +63,7 @@ class TestPeer final : public PeerConnectionWrapper {
// Adds provided |candidates| to the owned peer connection.
bool AddIceCandidates(
rtc::ArrayView<const IceCandidateInterface* const> candidates);
std::vector<std::unique_ptr<IceCandidateInterface>> candidates);
private:
TestPeer(rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory,
@ -74,6 +74,8 @@ class TestPeer final : public PeerConnectionWrapper {
std::unique_ptr<Params> params_;
rtc::scoped_refptr<AudioProcessing> audio_processing_;
std::vector<std::unique_ptr<IceCandidateInterface>> remote_ice_candidates_;
};
} // namespace webrtc_pc_e2e