Use independent frame IDs between simulcast streams when WebRTC-GenericDescriptorAuth is disabled.

Implemented behind `WebRTC-Video-SimulcastIndependentFrameIds`.
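
In rough outline (a condensed, illustrative sketch of the change below; the
identifiers come from the diff, but this is not the literal code):

  // Decided once in the RtpVideoSender constructor: frame id spaces may only
  // diverge per simulcast stream when the generic descriptor is not
  // authenticated, i.e. when a forwarding server could still rewrite frame
  // ids downstream.
  const bool independent_frame_ids =
      field_trials.IsEnabled("WebRTC-Video-SimulcastIndependentFrameIds") &&
      field_trials.IsDisabled("WebRTC-GenericDescriptorAuth");

  // Per encoded image: pass no shared id when ids are independent, so each
  // RtpPayloadParams allocates from its own RtpPayloadState::frame_id, which
  // is seeded with a random 16-bit value (or with the provided payload state).
  absl::optional<int64_t> frame_id;
  if (!independent_frame_ids) {
    frame_id = shared_frame_id_;
  }
  RTPVideoHeader header =
      params.GetRtpVideoHeader(encoded_image, codec_specific_info, frame_id);

The unit tests enable the combination as
"WebRTC-Video-SimulcastIndependentFrameIds/Enabled/WebRTC-GenericDescriptorAuth/Disabled/".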

Bug: b/329063481
Change-Id: I683e567bb5b449f998be57ec3a11bb3b95e3ace4
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/343382
Commit-Queue: Philip Eliasson <philipel@webrtc.org>
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#41927}
philipel 2024-03-19 09:38:01 +01:00 committed by WebRTC LUCI CQ
parent 5f86437dbc
commit 626edea852
8 changed files with 234 additions and 69 deletions


@@ -27,6 +27,7 @@ struct RtpPayloadState {
int16_t picture_id = -1;
uint8_t tl0_pic_idx = 0;
int64_t shared_frame_id = 0;
int64_t frame_id = 0;
};
// Settings for LNTF (LossNotification). Still highly experimental.


@@ -178,7 +178,7 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc,
simulate_generic_structure_(absl::StartsWith(
trials.Lookup("WebRTC-GenericCodecDependencyDescriptor"),
"Enabled")) {
for (auto& spatial_layer : last_shared_frame_id_)
for (auto& spatial_layer : last_frame_id_)
spatial_layer.fill(-1);
chain_last_frame_id_.fill(-1);
@@ -188,6 +188,7 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc,
state_.picture_id =
state ? state->picture_id : (random.Rand<int16_t>() & 0x7FFF);
state_.tl0_pic_idx = state ? state->tl0_pic_idx : (random.Rand<uint8_t>());
state_.frame_id = state ? state->frame_id : random.Rand<uint16_t>();
}
RtpPayloadParams::RtpPayloadParams(const RtpPayloadParams& other) = default;
@@ -197,7 +198,14 @@ RtpPayloadParams::~RtpPayloadParams() {}
RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader(
const EncodedImage& image,
const CodecSpecificInfo* codec_specific_info,
int64_t shared_frame_id) {
absl::optional<int64_t> shared_frame_id) {
int64_t frame_id;
if (shared_frame_id) {
frame_id = *shared_frame_id;
} else {
frame_id = state_.frame_id++;
}
RTPVideoHeader rtp_video_header;
if (codec_specific_info) {
PopulateRtpWithCodecSpecifics(*codec_specific_info, image.SpatialIndex(),
@@ -224,8 +232,7 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader(
SetCodecSpecific(&rtp_video_header, first_frame_in_picture);
SetGeneric(codec_specific_info, shared_frame_id, is_keyframe,
&rtp_video_header);
SetGeneric(codec_specific_info, frame_id, is_keyframe, &rtp_video_header);
return rtp_video_header;
}
@@ -409,31 +416,31 @@ absl::optional<FrameDependencyStructure> RtpPayloadParams::GenericStructure(
RTC_DCHECK_NOTREACHED() << "Unsupported codec.";
}
void RtpPayloadParams::GenericToGeneric(int64_t shared_frame_id,
void RtpPayloadParams::GenericToGeneric(int64_t frame_id,
bool is_keyframe,
RTPVideoHeader* rtp_video_header) {
RTPVideoHeader::GenericDescriptorInfo& generic =
rtp_video_header->generic.emplace();
generic.frame_id = shared_frame_id;
generic.frame_id = frame_id;
generic.decode_target_indications.push_back(DecodeTargetIndication::kSwitch);
if (is_keyframe) {
generic.chain_diffs.push_back(0);
last_shared_frame_id_[0].fill(-1);
last_frame_id_[0].fill(-1);
} else {
int64_t frame_id = last_shared_frame_id_[0][0];
RTC_DCHECK_NE(frame_id, -1);
RTC_DCHECK_LT(frame_id, shared_frame_id);
generic.chain_diffs.push_back(shared_frame_id - frame_id);
generic.dependencies.push_back(frame_id);
int64_t last_frame_id = last_frame_id_[0][0];
RTC_DCHECK_NE(last_frame_id, -1);
RTC_DCHECK_LT(last_frame_id, frame_id);
generic.chain_diffs.push_back(frame_id - last_frame_id);
generic.dependencies.push_back(last_frame_id);
}
last_shared_frame_id_[0][0] = shared_frame_id;
last_frame_id_[0][0] = frame_id;
}
void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
RTPVideoHeader* rtp_video_header) {
const int temporal_index =
@@ -448,45 +455,44 @@ void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info,
RTPVideoHeader::GenericDescriptorInfo& generic =
rtp_video_header->generic.emplace();
generic.frame_id = shared_frame_id;
generic.frame_id = frame_id;
generic.temporal_index = temporal_index;
if (is_keyframe) {
RTC_DCHECK_EQ(temporal_index, 0);
last_shared_frame_id_[/*spatial index*/ 0].fill(-1);
last_shared_frame_id_[/*spatial index*/ 0][temporal_index] =
shared_frame_id;
last_frame_id_[/*spatial index*/ 0].fill(-1);
last_frame_id_[/*spatial index*/ 0][temporal_index] = frame_id;
return;
}
if (h264_info.base_layer_sync) {
int64_t tl0_frame_id = last_shared_frame_id_[/*spatial index*/ 0][0];
int64_t tl0_frame_id = last_frame_id_[/*spatial index*/ 0][0];
for (int i = 1; i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) {
if (last_shared_frame_id_[/*spatial index*/ 0][i] < tl0_frame_id) {
last_shared_frame_id_[/*spatial index*/ 0][i] = -1;
if (last_frame_id_[/*spatial index*/ 0][i] < tl0_frame_id) {
last_frame_id_[/*spatial index*/ 0][i] = -1;
}
}
RTC_DCHECK_GE(tl0_frame_id, 0);
RTC_DCHECK_LT(tl0_frame_id, shared_frame_id);
RTC_DCHECK_LT(tl0_frame_id, frame_id);
generic.dependencies.push_back(tl0_frame_id);
} else {
for (int i = 0; i <= temporal_index; ++i) {
int64_t frame_id = last_shared_frame_id_[/*spatial index*/ 0][i];
int64_t last_frame_id = last_frame_id_[/*spatial index*/ 0][i];
if (frame_id != -1) {
RTC_DCHECK_LT(frame_id, shared_frame_id);
generic.dependencies.push_back(frame_id);
if (last_frame_id != -1) {
RTC_DCHECK_LT(last_frame_id, frame_id);
generic.dependencies.push_back(last_frame_id);
}
}
}
last_shared_frame_id_[/*spatial_index*/ 0][temporal_index] = shared_frame_id;
last_frame_id_[/*spatial_index*/ 0][temporal_index] = frame_id;
}
void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
RTPVideoHeader* rtp_video_header) {
const auto& vp8_header =
@@ -505,7 +511,7 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info,
RTPVideoHeader::GenericDescriptorInfo& generic =
rtp_video_header->generic.emplace();
generic.frame_id = shared_frame_id;
generic.frame_id = frame_id;
generic.spatial_index = spatial_index;
generic.temporal_index = temporal_index;
@@ -519,26 +525,26 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info,
// Frame dependencies.
if (vp8_info.useExplicitDependencies) {
SetDependenciesVp8New(vp8_info, shared_frame_id, is_keyframe,
vp8_header.layerSync, &generic);
SetDependenciesVp8New(vp8_info, frame_id, is_keyframe, vp8_header.layerSync,
&generic);
} else {
SetDependenciesVp8Deprecated(vp8_info, shared_frame_id, is_keyframe,
spatial_index, temporal_index,
vp8_header.layerSync, &generic);
SetDependenciesVp8Deprecated(vp8_info, frame_id, is_keyframe, spatial_index,
temporal_index, vp8_header.layerSync,
&generic);
}
// Calculate chains.
generic.chain_diffs = {
(is_keyframe || chain_last_frame_id_[0] < 0)
? 0
: static_cast<int>(shared_frame_id - chain_last_frame_id_[0])};
: static_cast<int>(frame_id - chain_last_frame_id_[0])};
if (temporal_index == 0) {
chain_last_frame_id_[0] = shared_frame_id;
chain_last_frame_id_[0] = frame_id;
}
}
void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
int64_t shared_frame_id,
int64_t frame_id,
RTPVideoHeader& rtp_video_header) {
const auto& vp9_header =
absl::get<RTPVideoHeaderVP9>(rtp_video_header.video_type_header);
@@ -574,7 +580,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
RTPVideoHeader::GenericDescriptorInfo& result =
rtp_video_header.generic.emplace();
result.frame_id = shared_frame_id;
result.frame_id = frame_id;
result.spatial_index = spatial_index;
result.temporal_index = temporal_index;
@@ -630,7 +636,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
}
}
last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit]
[spatial_index] = shared_frame_id;
[spatial_index] = frame_id;
} else {
// Implementing general conversion logic for non-flexible mode requires some
// work and we will almost certainly never need it, so for now support only
@@ -646,7 +652,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
// last frame id.
result.dependencies.push_back(last_vp9_frame_id_[0][0]);
}
last_vp9_frame_id_[0][0] = shared_frame_id;
last_vp9_frame_id_[0][0] = frame_id;
}
result.active_decode_targets =
@@ -668,11 +674,11 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
result.chain_diffs[sid] = 0;
continue;
}
int64_t chain_diff = shared_frame_id - chain_last_frame_id_[sid];
int64_t chain_diff = frame_id - chain_last_frame_id_[sid];
if (chain_diff >= 256) {
RTC_LOG(LS_ERROR)
<< "Too many frames since last VP9 T0 frame for spatial layer #"
<< sid << " at frame#" << shared_frame_id;
<< sid << " at frame#" << frame_id;
chain_last_frame_id_[sid] = -1;
chain_diff = 0;
}
@@ -680,10 +686,10 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
}
if (temporal_index == 0) {
chain_last_frame_id_[spatial_index] = shared_frame_id;
chain_last_frame_id_[spatial_index] = frame_id;
if (!vp9_header.non_ref_for_inter_layer_pred) {
for (int sid = spatial_index + 1; sid <= last_active_spatial_id; ++sid) {
chain_last_frame_id_[sid] = shared_frame_id;
chain_last_frame_id_[sid] = frame_id;
}
}
}
@@ -691,7 +697,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
void RtpPayloadParams::SetDependenciesVp8Deprecated(
const CodecSpecificInfoVP8& vp8_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
int spatial_index,
int temporal_index,
@@ -703,40 +709,40 @@ void RtpPayloadParams::SetDependenciesVp8Deprecated(
if (is_keyframe) {
RTC_DCHECK_EQ(temporal_index, 0);
last_shared_frame_id_[spatial_index].fill(-1);
last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id;
last_frame_id_[spatial_index].fill(-1);
last_frame_id_[spatial_index][temporal_index] = frame_id;
return;
}
if (layer_sync) {
int64_t tl0_frame_id = last_shared_frame_id_[spatial_index][0];
int64_t tl0_frame_id = last_frame_id_[spatial_index][0];
for (int i = 1; i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) {
if (last_shared_frame_id_[spatial_index][i] < tl0_frame_id) {
last_shared_frame_id_[spatial_index][i] = -1;
if (last_frame_id_[spatial_index][i] < tl0_frame_id) {
last_frame_id_[spatial_index][i] = -1;
}
}
RTC_DCHECK_GE(tl0_frame_id, 0);
RTC_DCHECK_LT(tl0_frame_id, shared_frame_id);
RTC_DCHECK_LT(tl0_frame_id, frame_id);
generic->dependencies.push_back(tl0_frame_id);
} else {
for (int i = 0; i <= temporal_index; ++i) {
int64_t frame_id = last_shared_frame_id_[spatial_index][i];
int64_t last_frame_id = last_frame_id_[spatial_index][i];
if (frame_id != -1) {
RTC_DCHECK_LT(frame_id, shared_frame_id);
generic->dependencies.push_back(frame_id);
if (last_frame_id != -1) {
RTC_DCHECK_LT(last_frame_id, frame_id);
generic->dependencies.push_back(last_frame_id);
}
}
}
last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id;
last_frame_id_[spatial_index][temporal_index] = frame_id;
}
void RtpPayloadParams::SetDependenciesVp8New(
const CodecSpecificInfoVP8& vp8_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
bool layer_sync,
RTPVideoHeader::GenericDescriptorInfo* generic) {
@@ -746,7 +752,7 @@ void RtpPayloadParams::SetDependenciesVp8New(
if (is_keyframe) {
RTC_DCHECK_EQ(vp8_info.referencedBuffersCount, 0u);
buffer_id_to_frame_id_.fill(shared_frame_id);
buffer_id_to_frame_id_.fill(frame_id);
return;
}
@@ -764,7 +770,7 @@ void RtpPayloadParams::SetDependenciesVp8New(
const int64_t dependency_frame_id =
buffer_id_to_frame_id_[referenced_buffer];
RTC_DCHECK_GE(dependency_frame_id, 0);
RTC_DCHECK_LT(dependency_frame_id, shared_frame_id);
RTC_DCHECK_LT(dependency_frame_id, frame_id);
const bool is_new_dependency =
std::find(generic->dependencies.begin(), generic->dependencies.end(),
@@ -777,7 +783,7 @@ void RtpPayloadParams::SetDependenciesVp8New(
RTC_DCHECK_LE(vp8_info.updatedBuffersCount, kBuffersCountVp8);
for (size_t i = 0; i < vp8_info.updatedBuffersCount; ++i) {
const size_t updated_id = vp8_info.updatedBuffers[i];
buffer_id_to_frame_id_[updated_id] = shared_frame_id;
buffer_id_to_frame_id_[updated_id] = frame_id;
}
RTC_DCHECK_LE(buffer_id_to_frame_id_.size(), kBuffersCountVp8);
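
As a worked illustration of the per-stream path (hypothetical values that
match the new unit test below, where state.frame_id starts at 123):

  // GenericToGeneric without an external shared_frame_id:
  //   key frame:   frame_id = 123, dependencies = {},    chain_diffs = {0}
  //   delta frame: frame_id = 124, dependencies = {123}, chain_diffs = {1}
  // A second simulcast stream seeded at e.g. 200 continues 200, 201, ... on
  // its own, instead of both streams drawing from one shared counter.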


@@ -38,7 +38,7 @@ class RtpPayloadParams final {
RTPVideoHeader GetRtpVideoHeader(const EncodedImage& image,
const CodecSpecificInfo* codec_specific_info,
int64_t shared_frame_id);
absl::optional<int64_t> shared_frame_id);
// Returns structure that aligns with simulated generic info generated by
// `GetRtpVideoHeader` for the `codec_specific_info`
@@ -61,20 +61,20 @@ class RtpPayloadParams final {
RTPVideoHeader* rtp_video_header);
void Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
RTPVideoHeader* rtp_video_header);
void Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
int64_t shared_frame_id,
int64_t frame_id,
RTPVideoHeader& rtp_video_header);
void H264ToGeneric(const CodecSpecificInfoH264& h264_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
RTPVideoHeader* rtp_video_header);
void GenericToGeneric(int64_t shared_frame_id,
void GenericToGeneric(int64_t frame_id,
bool is_keyframe,
RTPVideoHeader* rtp_video_header);
@@ -83,14 +83,14 @@ class RtpPayloadParams final {
// wrappers have been updated.
void SetDependenciesVp8Deprecated(
const CodecSpecificInfoVP8& vp8_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
int spatial_index,
int temporal_index,
bool layer_sync,
RTPVideoHeader::GenericDescriptorInfo* generic);
void SetDependenciesVp8New(const CodecSpecificInfoVP8& vp8_info,
int64_t shared_frame_id,
int64_t frame_id,
bool is_keyframe,
bool layer_sync,
RTPVideoHeader::GenericDescriptorInfo* generic);
@@ -101,7 +101,7 @@ class RtpPayloadParams final {
// Holds the last shared frame id for a given (spatial, temporal) layer.
std::array<std::array<int64_t, RtpGenericFrameDescriptor::kMaxTemporalLayers>,
RtpGenericFrameDescriptor::kMaxSpatialLayers>
last_shared_frame_id_;
last_frame_id_;
// circular buffer of frame ids for the last 128 vp9 pictures.
// ids for the `picture_id` are stored at the index `picture_id % 128`.
std::vector<std::array<int64_t, RtpGenericFrameDescriptor::kMaxSpatialLayers>>


@@ -360,6 +360,30 @@ TEST(RtpPayloadParamsTest, Tl0PicIdxUpdatedForVp9) {
EXPECT_EQ(kInitialTl0PicIdx1 + 1, params.state().tl0_pic_idx);
}
TEST(RtpPayloadParamsTest, GenerateFrameIdWhenExternalFrameIdsAreNotProvided) {
RtpPayloadState state;
state.frame_id = 123;
EncodedImage encoded_image;
encoded_image._frameType = VideoFrameType::kVideoFrameKey;
CodecSpecificInfo codec_info;
codec_info.codecType = kVideoCodecGeneric;
RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig());
RTPVideoHeader header =
params.GetRtpVideoHeader(encoded_image, &codec_info, absl::nullopt);
EXPECT_THAT(header.codec, Eq(kVideoCodecGeneric));
ASSERT_TRUE(header.generic);
EXPECT_THAT(header.generic->frame_id, Eq(123));
encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
header = params.GetRtpVideoHeader(encoded_image, &codec_info, absl::nullopt);
ASSERT_TRUE(header.generic);
EXPECT_THAT(header.generic->frame_id, Eq(124));
}
TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) {
test::ScopedKeyValueConfig field_trials("WebRTC-GenericPictureId/Enabled/");
RtpPayloadState state{};


@@ -402,6 +402,10 @@ RtpVideoSender::RtpVideoSender(
rtp_config_(rtp_config),
codec_type_(GetVideoCodecType(rtp_config)),
transport_(transport),
independent_frame_ids_(
field_trials_.IsEnabled(
"WebRTC-Video-SimulcastIndependentFrameIds") &&
field_trials_.IsDisabled("WebRTC-GenericDescriptorAuth")),
transport_overhead_bytes_per_packet_(0),
encoder_target_rate_bps_(0),
frame_counts_(rtp_config.ssrcs.size()),
@@ -574,11 +578,16 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
}
}
absl::optional<int64_t> frame_id;
if (!independent_frame_ids_) {
frame_id = shared_frame_id_;
}
bool send_result =
rtp_streams_[simulcast_index].sender_video->SendEncodedImage(
rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image,
params_[simulcast_index].GetRtpVideoHeader(
encoded_image, codec_specific_info, shared_frame_id_),
encoded_image, codec_specific_info, frame_id),
expected_retransmission_time);
if (frame_count_observer_) {
FrameCounts& counts = frame_counts_[simulcast_index];


@@ -196,6 +196,7 @@ class RtpVideoSender : public RtpVideoSenderInterface,
// rewrite the frame id), therefore `shared_frame_id` has to live in a place
// where we are aware of all the different streams.
int64_t shared_frame_id_ = 0;
const bool independent_frame_ids_;
std::vector<RtpPayloadParams> params_ RTC_GUARDED_BY(mutex_);
size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(mutex_);


@@ -28,6 +28,7 @@
#include "modules/video_coding/fec_controller_default.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/rate_limiter.h"
#include "test/explicit_key_value_config.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_transport.h"
@@ -736,6 +737,126 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
sent_packets.back().HasExtension<RtpDependencyDescriptorExtension>());
}
TEST(RtpVideoSenderTest, SimulcastIndependentFrameIds) {
test::ExplicitKeyValueConfig field_trials(
"WebRTC-Video-SimulcastIndependentFrameIds/Enabled/"
"WebRTC-GenericDescriptorAuth/Disabled/");
const std::map<uint32_t, RtpPayloadState> kPayloadStates = {
{kSsrc1, {.frame_id = 100}}, {kSsrc2, {.frame_id = 200}}};
RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {}, kPayloadType,
kPayloadStates, &field_trials);
test.SetSending(true);
RtpHeaderExtensionMap extensions;
extensions.Register<RtpDependencyDescriptorExtension>(
kDependencyDescriptorExtensionId);
std::vector<RtpPacket> sent_packets;
ON_CALL(test.transport(), SendRtp)
.WillByDefault([&](rtc::ArrayView<const uint8_t> packet,
const PacketOptions& options) {
sent_packets.emplace_back(&extensions);
EXPECT_TRUE(sent_packets.back().Parse(packet));
return true;
});
const uint8_t kPayload[1] = {'a'};
EncodedImage encoded_image;
encoded_image.SetEncodedData(
EncodedImageBuffer::Create(kPayload, sizeof(kPayload)));
CodecSpecificInfo codec_specific;
codec_specific.codecType = VideoCodecType::kVideoCodecGeneric;
codec_specific.template_structure.emplace();
codec_specific.template_structure->num_decode_targets = 1;
codec_specific.template_structure->templates = {
FrameDependencyTemplate().T(0).Dtis("S"),
FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({1}),
};
codec_specific.generic_frame_info =
GenericFrameInfo::Builder().T(0).Dtis("S").Build();
encoded_image._frameType = VideoFrameType::kVideoFrameKey;
codec_specific.generic_frame_info->encoder_buffers = {{0, false, true}};
encoded_image.SetSimulcastIndex(0);
EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error,
EncodedImageCallback::Result::OK);
encoded_image.SetSimulcastIndex(1);
EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error,
EncodedImageCallback::Result::OK);
test.AdvanceTime(TimeDelta::Millis(33));
ASSERT_THAT(sent_packets, SizeIs(2));
DependencyDescriptorMandatory dd_s0;
DependencyDescriptorMandatory dd_s1;
ASSERT_TRUE(
sent_packets[0].GetExtension<RtpDependencyDescriptorExtension>(&dd_s0));
ASSERT_TRUE(
sent_packets[1].GetExtension<RtpDependencyDescriptorExtension>(&dd_s1));
EXPECT_EQ(dd_s0.frame_number(), 100);
EXPECT_EQ(dd_s1.frame_number(), 200);
}
TEST(RtpVideoSenderTest,
SimulcastNoIndependentFrameIdsIfGenericDescriptorAuthIsEnabled) {
test::ExplicitKeyValueConfig field_trials(
"WebRTC-Video-SimulcastIndependentFrameIds/Enabled/"
"WebRTC-GenericDescriptorAuth/Enabled/");
const std::map<uint32_t, RtpPayloadState> kPayloadStates = {
{kSsrc1, {.shared_frame_id = 1000, .frame_id = 100}},
{kSsrc2, {.shared_frame_id = 1000, .frame_id = 200}}};
RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {}, kPayloadType,
kPayloadStates, &field_trials);
test.SetSending(true);
RtpHeaderExtensionMap extensions;
extensions.Register<RtpDependencyDescriptorExtension>(
kDependencyDescriptorExtensionId);
std::vector<RtpPacket> sent_packets;
ON_CALL(test.transport(), SendRtp)
.WillByDefault([&](rtc::ArrayView<const uint8_t> packet,
const PacketOptions& options) {
sent_packets.emplace_back(&extensions);
EXPECT_TRUE(sent_packets.back().Parse(packet));
return true;
});
const uint8_t kPayload[1] = {'a'};
EncodedImage encoded_image;
encoded_image.SetEncodedData(
EncodedImageBuffer::Create(kPayload, sizeof(kPayload)));
CodecSpecificInfo codec_specific;
codec_specific.codecType = VideoCodecType::kVideoCodecGeneric;
codec_specific.template_structure.emplace();
codec_specific.template_structure->num_decode_targets = 1;
codec_specific.template_structure->templates = {
FrameDependencyTemplate().T(0).Dtis("S"),
FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({1}),
};
codec_specific.generic_frame_info =
GenericFrameInfo::Builder().T(0).Dtis("S").Build();
encoded_image._frameType = VideoFrameType::kVideoFrameKey;
codec_specific.generic_frame_info->encoder_buffers = {{0, false, true}};
encoded_image.SetSimulcastIndex(0);
EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error,
EncodedImageCallback::Result::OK);
encoded_image.SetSimulcastIndex(1);
EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error,
EncodedImageCallback::Result::OK);
test.AdvanceTime(TimeDelta::Millis(33));
ASSERT_THAT(sent_packets, SizeIs(2));
DependencyDescriptorMandatory dd_s0;
DependencyDescriptorMandatory dd_s1;
ASSERT_TRUE(
sent_packets[0].GetExtension<RtpDependencyDescriptorExtension>(&dd_s0));
ASSERT_TRUE(
sent_packets[1].GetExtension<RtpDependencyDescriptorExtension>(&dd_s1));
EXPECT_EQ(dd_s0.frame_number(), 1001);
EXPECT_EQ(dd_s1.frame_number(), 1002);
}
TEST(RtpVideoSenderTest,
SupportsDependencyDescriptorForVp8NotProvidedByEncoder) {
constexpr uint8_t kPayload[1] = {'a'};


@@ -152,6 +152,9 @@ ACTIVE_FIELD_TRIALS: FrozenSet[FieldTrial] = frozenset([
FieldTrial('WebRTC-Video-RequestedResolutionOverrideOutputFormatRequest',
'webrtc:14451',
date(2024, 4, 1)),
FieldTrial('WebRTC-Video-SimulcastIndependentFrameIds',
'webrtc:15875',
date(2024, 12, 1)),
FieldTrial('WebRTC-VideoEncoderSettings',
'chromium:1406331',
date(2024, 4, 1)),