Remove extra usage of video-content-type header extension

This extension is documented to carry a single bit: Screenshare.
In practice it has also been used to carry simulcast stream ids and
experiment group ids. This CL removes that usage.

Bug: webrtc:15383
Change-Id: I048b283cde59bf1f607d8abdd53ced07a7add6f8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/312420
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#40457}
Harald Alvestrand 2023-07-21 07:23:01 +00:00, committed by WebRTC LUCI CQ
parent f58c818148
commit 00f11224fd
9 changed files with 56 additions and 289 deletions
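
In short: after this CL only the least-significant bit of the one-byte
extension payload means anything. A minimal standalone sketch of those
semantics (FromWireByte is an illustrative name, not a WebRTC function; the
enum and the mask are taken from the diffs below):

// Sketch of the post-CL meaning of the one-byte extension payload.
#include <cstdint>

enum class VideoContentType : uint8_t {
  UNSPECIFIED = 0,
  SCREENSHARE = 1,
};

// Only bit 0 is defined; receivers mask everything else away (see the
// VideoContentTypeExtension::Parse hunk below).
inline VideoContentType FromWireByte(uint8_t wire_byte) {
  return static_cast<VideoContentType>(wire_byte & 0x1);
}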


@@ -28,6 +28,7 @@ rtc_library("video_rtp_headers") {
   deps = [
     "..:array_view",
     "../../rtc_base:checks",
+    "../../rtc_base:logging",
     "../../rtc_base:safe_conversions",
     "../../rtc_base:stringutils",


@@ -10,21 +10,7 @@
 #include "api/video/video_content_type.h"

-// VideoContentType stored as a single byte, which is sent over the network.
-// Structure:
-//
-//  0 1 2 3 4 5 6 7
-// +---------------+
-// |r r e e e s s c|
-//
-// where:
-// r - reserved bits.
-// e - 3-bit number of an experiment group counted from 1. 0 means there's no
-//     experiment ongoing.
-// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that
-//     no simulcast information is set.
-// c - content type. 0 means real-time video, 1 means screenshare.
-//
+#include "rtc_base/checks.h"

 namespace webrtc {
 namespace videocontenttypehelpers {
@@ -33,57 +19,21 @@ namespace {
 static constexpr uint8_t kScreenshareBitsSize = 1;
 static constexpr uint8_t kScreenshareBitsMask =
     (1u << kScreenshareBitsSize) - 1;
-static constexpr uint8_t kSimulcastShift = 1;
-static constexpr uint8_t kSimulcastBitsSize = 2;
-static constexpr uint8_t kSimulcastBitsMask = ((1u << kSimulcastBitsSize) - 1)
-                                              << kSimulcastShift;  // 0b00000110
-static constexpr uint8_t kExperimentShift = 3;
-static constexpr uint8_t kExperimentBitsSize = 3;
-static constexpr uint8_t kExperimentBitsMask =
-    ((1u << kExperimentBitsSize) - 1) << kExperimentShift;  // 0b00111000
-static constexpr uint8_t kTotalBitsSize =
-    kScreenshareBitsSize + kSimulcastBitsSize + kExperimentBitsSize;
 }  // namespace

-bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id) {
-  // Store in bits 2-4.
-  if (experiment_id >= (1 << kExperimentBitsSize))
-    return false;
-  *content_type = static_cast<VideoContentType>(
-      (static_cast<uint8_t>(*content_type) & ~kExperimentBitsMask) |
-      ((experiment_id << kExperimentShift) & kExperimentBitsMask));
-  return true;
-}
-
-bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) {
-  // Store in bits 5-6.
-  if (simulcast_id >= (1 << kSimulcastBitsSize))
-    return false;
-  *content_type = static_cast<VideoContentType>(
-      (static_cast<uint8_t>(*content_type) & ~kSimulcastBitsMask) |
-      ((simulcast_id << kSimulcastShift) & kSimulcastBitsMask));
-  return true;
-}
-
-uint8_t GetExperimentId(const VideoContentType& content_type) {
-  return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >>
-         kExperimentShift;
-}
-
-uint8_t GetSimulcastId(const VideoContentType& content_type) {
-  return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >>
-         kSimulcastShift;
-}
-
 bool IsScreenshare(const VideoContentType& content_type) {
+  // Ensure that no bits apart from the screenshare bit are set.
+  // This CHECK is a temporary measure to detect code that introduces
+  // values according to old versions.
+  RTC_CHECK((static_cast<uint8_t>(content_type) & ~kScreenshareBitsMask) == 0);
   return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
 }

 bool IsValidContentType(uint8_t value) {
-  // Any 6-bit value is allowed.
-  return value < (1 << kTotalBitsSize);
+  // Only the screenshare bit is allowed.
+  // However, due to previous usage of the next 5 bits, we allow
+  // the lower 6 bits to be set.
+  return value < (1 << 6);
 }

 const char* ToString(const VideoContentType& content_type) {

@@ -15,21 +15,15 @@
 namespace webrtc {

-// VideoContentType can take on more values than the two below, therefore care
-// should be taken to avoid using the equality operator to check for screenshare
-// usage. See https://bugs.chromium.org/p/webrtc/issues/detail?id=15381.
+// VideoContentType stored as a single byte, which is sent over the network
+// in the rtp-hdrext/video-content-type extension.
+// Only the lowest bit is used, per the enum.
 enum class VideoContentType : uint8_t {
   UNSPECIFIED = 0,
   SCREENSHARE = 1,
 };

 namespace videocontenttypehelpers {
-bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id);
-bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id);
-
-uint8_t GetExperimentId(const VideoContentType& content_type);
-uint8_t GetSimulcastId(const VideoContentType& content_type);
-
 bool IsScreenshare(const VideoContentType& content_type);

 bool IsValidContentType(uint8_t value);
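
A short usage sketch of the surviving helper API (Example is an illustrative
function, not part of WebRTC; the declarations are the ones kept above):

// Usage sketch for the slimmed-down helpers declared above.
#include "api/video/video_content_type.h"

void Example() {
  VideoContentType type = VideoContentType::SCREENSHARE;
  bool is_screenshare = videocontenttypehelpers::IsScreenshare(type);  // true
  // Legacy wire values with the next 5 bits set are still accepted...
  bool ok = videocontenttypehelpers::IsValidContentType(0x0d);  // true: < 64
  // ...but anything at or above (1 << 6) is rejected.
  bool bad = videocontenttypehelpers::IsValidContentType(0x40);  // false
  (void)is_screenshare;
  (void)ok;
  (void)bad;
}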


@@ -462,7 +462,11 @@ bool VideoContentTypeExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                       VideoContentType* content_type) {
   if (data.size() == 1 &&
       videocontenttypehelpers::IsValidContentType(data[0])) {
-    *content_type = static_cast<VideoContentType>(data[0]);
+    // Only the lowest bit of ContentType has a defined meaning.
+    // Due to previous, now removed, usage of 5 more bits, values with
+    // those bits set are accepted as valid, but we mask them out before
+    // converting to a VideoContentType.
+    *content_type = static_cast<VideoContentType>(data[0] & 0x1);
     return true;
   }
   return false;
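
To make the accept-then-mask behavior concrete, a small sketch of a legacy
byte going through Parse (values invented; assumes the declarations above
are in scope):

// Sketch: a legacy sender byte with simulcast/experiment bits set still
// parses, but only the screenshare bit survives.
uint8_t legacy_byte = 0x0d;  // 0b00001101: screenshare + legacy id bits
VideoContentType parsed;
bool ok = VideoContentTypeExtension::Parse(
    rtc::MakeArrayView(&legacy_byte, 1), &parsed);
// ok == true (0x0d < (1 << 6)), parsed == VideoContentType::SCREENSHARE.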


@@ -2464,18 +2464,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) {
       expected_video);

   EXPECT_TRUE(report->Get(*expected_video.transport_id));
   EXPECT_TRUE(report->Get(*expected_video.codec_id));
-
-  // Make sure content type is still reported as "screenshare" even when the
-  // VideoContentType enum is overloaded with additional information.
-  videocontenttypehelpers::SetSimulcastId(
-      &video_media_info.receivers[0].content_type, 2);
-  video_media_channels.first->SetStats(video_media_info);
-  video_media_channels.second->SetStats(video_media_info);
-  report = stats_->GetFreshStatsReport();
-  EXPECT_EQ(
-      report->Get(expected_video.id())->cast_to<RTCInboundRtpStreamStats>(),
-      expected_video);
 }

 TEST_F(RTCStatsCollectorTest, CollectRTCAudioPlayoutStats) {
@@ -2726,18 +2714,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) {
       expected_video);

   EXPECT_TRUE(report->Get(*expected_video.transport_id));
   EXPECT_TRUE(report->Get(*expected_video.codec_id));
-
-  // Make sure content type is still reported as "screenshare" even when the
-  // VideoContentType enum is overloaded with additional information.
-  videocontenttypehelpers::SetSimulcastId(
-      &video_media_info.senders[0].content_type, 2);
-  video_media_channels.first->SetStats(video_media_info);
-  video_media_channels.second->SetStats(video_media_info);
-  report = stats_->GetFreshStatsReport();
-  EXPECT_EQ(
-      report->Get(expected_video.id())->cast_to<RTCOutboundRtpStreamStats>(),
-      expected_video);
 }

 TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {


@@ -157,9 +157,6 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx,
   const std::string video_prefix =
       screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video.";
-  // The content type extension is disabled in non screenshare test,
-  // therefore no slicing on simulcast id should be present.
-  const std::string video_suffix = screenshare ? ".S0" : "";

   // Verify that stats have been updated once.
   EXPECT_METRIC_EQ(2, metrics::NumSamples("WebRTC.Call.LifetimeInSeconds"));
@@ -248,17 +245,13 @@
   EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
   EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
-  EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs" +
-                                          video_suffix));
+  EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs"));
   EXPECT_METRIC_EQ(1,
-                   metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs" +
-                                       video_suffix));
-  EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "InterframeDelayInMs" +
-                                          video_suffix));
+                   metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs"));
   EXPECT_METRIC_EQ(1,
-                   metrics::NumSamples(video_prefix + "InterframeDelayMaxInMs" +
-                                       video_suffix));
+                   metrics::NumSamples(video_prefix + "InterframeDelayInMs"));
+  EXPECT_METRIC_EQ(
+      1, metrics::NumSamples(video_prefix + "InterframeDelayMaxInMs"));
   EXPECT_METRIC_EQ(
       1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));


@@ -49,20 +49,6 @@ const char* UmaPrefixForContentType(VideoContentType content_type) {
   return "WebRTC.Video";
 }

-std::string UmaSuffixForContentType(VideoContentType content_type) {
-  char ss_buf[1024];
-  rtc::SimpleStringBuilder ss(ss_buf);
-  int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type);
-  if (simulcast_id > 0) {
-    ss << ".S" << simulcast_id - 1;
-  }
-  int experiment_id = videocontenttypehelpers::GetExperimentId(content_type);
-  if (experiment_id > 0) {
-    ss << ".ExperimentGroup" << experiment_id - 1;
-  }
-  return ss.str();
-}
-
 // TODO(https://bugs.webrtc.org/11572): Workaround for an issue with some
 // rtc::Thread instances and/or implementations that don't register as the
 // current task queue.
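
The deleted UmaSuffixForContentType() is what generated the ".S[0-3]" and
".ExperimentGroup[0-7]" histogram name variants. A standalone
re-implementation for reference (LegacyUmaSuffix is an illustrative name,
not WebRTC code):

#include <string>

// Re-implementation of the deleted suffix logic, for reference only.
std::string LegacyUmaSuffix(int simulcast_id, int experiment_id) {
  std::string suffix;
  if (simulcast_id > 0)
    suffix += ".S" + std::to_string(simulcast_id - 1);  // wire id 1 -> ".S0"
  if (experiment_id > 0)
    suffix += ".ExperimentGroup" + std::to_string(experiment_id - 1);
  return suffix;
}
// LegacyUmaSuffix(1, 0) == ".S0"; LegacyUmaSuffix(0, 0) == "". With the
// helpers gone, every histogram below effectively uses the empty suffix.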
@@ -254,22 +240,8 @@ void ReceiveStatisticsProxy::UpdateHistograms(
   for (const auto& it : content_specific_stats_) {
-    // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes).
     VideoContentType content_type = it.first;
-    if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) {
-      // Aggregate on experiment id.
-      videocontenttypehelpers::SetExperimentId(&content_type, 0);
-      aggregated_stats[content_type].Add(it.second);
-    }
-    // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes).
-    content_type = it.first;
-    if (videocontenttypehelpers::GetExperimentId(content_type) > 0) {
-      // Aggregate on simulcast id.
-      videocontenttypehelpers::SetSimulcastId(&content_type, 0);
-      aggregated_stats[content_type].Add(it.second);
-    }
-    // Calculate aggregated metrics (no suffixes. Aggregated on everything).
-    content_type = it.first;
-    videocontenttypehelpers::SetSimulcastId(&content_type, 0);
-    videocontenttypehelpers::SetExperimentId(&content_type, 0);
     aggregated_stats[content_type].Add(it.second);
   }
@@ -277,77 +249,66 @@ void ReceiveStatisticsProxy::UpdateHistograms(
     // For the metric Foo we report the following slices:
     // WebRTC.Video.Foo,
     // WebRTC.Video.Screenshare.Foo,
-    // WebRTC.Video.Foo.S[0-3],
-    // WebRTC.Video.Foo.ExperimentGroup[0-7],
-    // WebRTC.Video.Screenshare.Foo.S[0-3],
-    // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7].
     auto content_type = it.first;
     auto stats = it.second;
     std::string uma_prefix = UmaPrefixForContentType(content_type);
-    std::string uma_suffix = UmaSuffixForContentType(content_type);
-    // Metrics can be sliced on either simulcast id or experiment id but not
-    // both.
-    RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 ||
-               videocontenttypehelpers::GetSimulcastId(content_type) == 0);

     absl::optional<int> e2e_delay_ms =
         stats.e2e_delay_counter.Avg(kMinRequiredSamples);
     if (e2e_delay_ms) {
-      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
-          uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms);
-      log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
-                 << *e2e_delay_ms << '\n';
+      RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".EndToEndDelayInMs",
+                                        *e2e_delay_ms);
+      log_stream << uma_prefix << ".EndToEndDelayInMs"
+                 << " " << *e2e_delay_ms << '\n';
     }

     absl::optional<int> e2e_delay_max_ms = stats.e2e_delay_counter.Max();
     if (e2e_delay_max_ms && e2e_delay_ms) {
-      RTC_HISTOGRAM_COUNTS_SPARSE_100000(
-          uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms);
-      log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
-                 << *e2e_delay_max_ms << '\n';
+      RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".EndToEndDelayMaxInMs",
+                                         *e2e_delay_max_ms);
+      log_stream << uma_prefix << ".EndToEndDelayMaxInMs"
+                 << " " << *e2e_delay_max_ms << '\n';
     }

     absl::optional<int> interframe_delay_ms =
         stats.interframe_delay_counter.Avg(kMinRequiredSamples);
     if (interframe_delay_ms) {
-      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
-          uma_prefix + ".InterframeDelayInMs" + uma_suffix,
-          *interframe_delay_ms);
-      log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
-                 << *interframe_delay_ms << '\n';
+      RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayInMs",
+                                        *interframe_delay_ms);
+      log_stream << uma_prefix << ".InterframeDelayInMs"
+                 << " " << *interframe_delay_ms << '\n';
     }

     absl::optional<int> interframe_delay_max_ms =
         stats.interframe_delay_counter.Max();
     if (interframe_delay_max_ms && interframe_delay_ms) {
-      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
-          uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
-          *interframe_delay_max_ms);
-      log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " "
-                 << *interframe_delay_max_ms << '\n';
+      RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayMaxInMs",
+                                        *interframe_delay_max_ms);
+      log_stream << uma_prefix << ".InterframeDelayMaxInMs"
+                 << " " << *interframe_delay_max_ms << '\n';
     }

     absl::optional<uint32_t> interframe_delay_95p_ms =
         stats.interframe_delay_percentiles.GetPercentile(0.95f);
     if (interframe_delay_95p_ms && interframe_delay_ms != -1) {
       RTC_HISTOGRAM_COUNTS_SPARSE_10000(
-          uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix,
+          uma_prefix + ".InterframeDelay95PercentileInMs",
           *interframe_delay_95p_ms);
       log_stream << uma_prefix << ".InterframeDelay95PercentileInMs"
-                 << uma_suffix << " " << *interframe_delay_95p_ms << '\n';
+                 << " " << *interframe_delay_95p_ms << '\n';
     }

     absl::optional<int> width = stats.received_width.Avg(kMinRequiredSamples);
     if (width) {
-      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
-          uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width);
-      log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " "
-                 << *width << '\n';
+      RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".ReceivedWidthInPixels",
+                                        *width);
+      log_stream << uma_prefix << ".ReceivedWidthInPixels"
+                 << " " << *width << '\n';
     }

     absl::optional<int> height = stats.received_height.Avg(kMinRequiredSamples);
     if (height) {
-      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
-          uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height);
-      log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " "
-                 << *height << '\n';
+      RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".ReceivedHeightInPixels",
+                                        *height);
+      log_stream << uma_prefix << ".ReceivedHeightInPixels"
+                 << " " << *height << '\n';
     }

     if (content_type != VideoContentType::UNSPECIFIED) {
@@ -358,9 +319,8 @@ void ReceiveStatisticsProxy::UpdateHistograms(
       int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 /
                                                 flow_duration_sec / 1000);
       RTC_HISTOGRAM_COUNTS_SPARSE_10000(
-          uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix,
-          media_bitrate_kbps);
-      log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix
+          uma_prefix + ".MediaBitrateReceivedInKbps", media_bitrate_kbps);
+      log_stream << uma_prefix << ".MediaBitrateReceivedInKbps"
                  << " " << media_bitrate_kbps << '\n';
     }
@@ -371,18 +331,16 @@ void ReceiveStatisticsProxy::UpdateHistograms(
       int key_frames_permille =
           (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
       RTC_HISTOGRAM_COUNTS_SPARSE_1000(
-          uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix,
-          key_frames_permille);
-      log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix
+          uma_prefix + ".KeyFramesReceivedInPermille", key_frames_permille);
+      log_stream << uma_prefix << ".KeyFramesReceivedInPermille"
                  << " " << key_frames_permille << '\n';
     }

     absl::optional<int> qp = stats.qp_counter.Avg(kMinRequiredSamples);
     if (qp) {
-      RTC_HISTOGRAM_COUNTS_SPARSE_200(
-          uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp);
-      log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " "
-                 << *qp << '\n';
+      RTC_HISTOGRAM_COUNTS_SPARSE_200(uma_prefix + ".Decoded.Vp8.Qp", *qp);
+      log_stream << uma_prefix << ".Decoded.Vp8.Qp"
+                 << " " << *qp << '\n';
     }
   }
 }
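
With the suffix logic gone, the only per-content-type naming decision left is
the prefix. A hypothetical helper showing the resulting slice names
(SliceNameFor is not WebRTC code; UmaPrefixForContentType is the function
shown above):

// Sketch: every metric is now reported under exactly two slices.
std::string SliceNameFor(VideoContentType content_type,
                         const std::string& metric) {
  // "WebRTC.Video.Screenshare" for screenshare, "WebRTC.Video" otherwise;
  // no ".S*" or ".ExperimentGroup*" suffix is appended anymore.
  return std::string(UmaPrefixForContentType(content_type)) + "." + metric;
}
// SliceNameFor(VideoContentType::SCREENSHARE, "EndToEndDelayInMs")
//   == "WebRTC.Video.Screenshare.EndToEndDelayInMs"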


@@ -1708,98 +1708,5 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, DecodeTimeReported) {
       1, metrics::NumEvents("WebRTC.Video.DecodeTimeInMs", kDecodeTime.ms()));
 }

-TEST_P(ReceiveStatisticsProxyTestWithContent,
-       StatsAreSlicedOnSimulcastAndExperiment) {
-  const uint8_t experiment_id = 1;
-  webrtc::VideoContentType content_type = content_type_;
-  videocontenttypehelpers::SetExperimentId(&content_type, experiment_id);
-  const TimeDelta kInterFrameDelay1 = TimeDelta::Millis(30);
-  const TimeDelta kInterFrameDelay2 = TimeDelta::Millis(50);
-  webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight);
-
-  videocontenttypehelpers::SetSimulcastId(&content_type, 1);
-  for (int i = 0; i <= kMinRequiredSamples; ++i) {
-    time_controller_.AdvanceTime(kInterFrameDelay1);
-    statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(),
-                                      content_type);
-  }
-
-  videocontenttypehelpers::SetSimulcastId(&content_type, 2);
-  for (int i = 0; i <= kMinRequiredSamples; ++i) {
-    time_controller_.AdvanceTime(kInterFrameDelay2);
-    statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(),
-                                      content_type);
-  }
-  FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr);
-
-  if (videocontenttypehelpers::IsScreenshare(content_type)) {
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs"));
-    EXPECT_METRIC_EQ(1, metrics::NumSamples(
-                            "WebRTC.Video.Screenshare.InterframeDelayMaxInMs"));
-    EXPECT_METRIC_EQ(1, metrics::NumSamples(
-                            "WebRTC.Video.Screenshare.InterframeDelayInMs.S0"));
-    EXPECT_METRIC_EQ(1,
-                     metrics::NumSamples(
-                         "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S0"));
-    EXPECT_METRIC_EQ(1, metrics::NumSamples(
-                            "WebRTC.Video.Screenshare.InterframeDelayInMs.S1"));
-    EXPECT_METRIC_EQ(1,
-                     metrics::NumSamples(
-                         "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S1"));
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs"
-                               ".ExperimentGroup0"));
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayMaxInMs"
-                               ".ExperimentGroup0"));
-    EXPECT_METRIC_EQ(
-        kInterFrameDelay1.ms(),
-        metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S0"));
-    EXPECT_METRIC_EQ(
-        kInterFrameDelay2.ms(),
-        metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S1"));
-    EXPECT_METRIC_EQ(
-        ((kInterFrameDelay1 + kInterFrameDelay2) / 2).ms(),
-        metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs"));
-    EXPECT_METRIC_EQ(
-        kInterFrameDelay2.ms(),
-        metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs"));
-    EXPECT_METRIC_EQ(
-        ((kInterFrameDelay1 + kInterFrameDelay2) / 2).ms(),
-        metrics::MinSample(
-            "WebRTC.Video.Screenshare.InterframeDelayInMs.ExperimentGroup0"));
-  } else {
-    EXPECT_METRIC_EQ(1,
-                     metrics::NumSamples("WebRTC.Video.InterframeDelayInMs"));
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs"));
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S0"));
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S0"));
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S1"));
-    EXPECT_METRIC_EQ(
-        1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S1"));
-    EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs"
-                                            ".ExperimentGroup0"));
-    EXPECT_METRIC_EQ(1,
-                     metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs"
-                                         ".ExperimentGroup0"));
-    EXPECT_METRIC_EQ(kInterFrameDelay1.ms(),
-                     metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S0"));
-    EXPECT_METRIC_EQ(kInterFrameDelay2.ms(),
-                     metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S1"));
-    EXPECT_METRIC_EQ((kInterFrameDelay1 + kInterFrameDelay2).ms() / 2,
-                     metrics::MinSample("WebRTC.Video.InterframeDelayInMs"));
-    EXPECT_METRIC_EQ(kInterFrameDelay2.ms(),
-                     metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs"));
-    EXPECT_METRIC_EQ((kInterFrameDelay1 + kInterFrameDelay2).ms() / 2,
-                     metrics::MinSample(
-                         "WebRTC.Video.InterframeDelayInMs.ExperimentGroup0"));
-  }
-}
-
 }  // namespace internal
 }  // namespace webrtc


@@ -2131,22 +2131,6 @@ EncodedImage VideoStreamEncoder::AugmentEncodedImage(
   image_copy.SetAtTargetQuality(codec_type == kVideoCodecVP8 &&
                                 image_copy.qp_ <= kVp8SteadyStateQpThreshold);

-  // Piggyback ALR experiment group id and simulcast id into the content type.
-  const uint8_t experiment_id =
-      experiment_groups_[videocontenttypehelpers::IsScreenshare(
-          image_copy.content_type_)];
-  // TODO(ilnik): This will force content type extension to be present even
-  // for realtime video. At the expense of miniscule overhead we will get
-  // sliced receive statistics.
-  RTC_CHECK(videocontenttypehelpers::SetExperimentId(&image_copy.content_type_,
-                                                     experiment_id));
-  // We count simulcast streams from 1 on the wire. That's why we set simulcast
-  // id in content type to +1 of that is actual simulcast index. This is because
-  // value 0 on the wire is reserved for 'no simulcast stream specified'.
-  RTC_CHECK(videocontenttypehelpers::SetSimulcastId(
-      &image_copy.content_type_, static_cast<uint8_t>(stream_idx + 1)));
-
   return image_copy;
 }
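
For contrast, a standalone sketch of the send-side piggybacking that this
final hunk deletes; after the CL, image_copy.content_type_ leaves the encoder
carrying only the screenshare bit (LegacyAugment is an illustrative name; the
byte layout is the legacy one sketched earlier):

// Illustration of the deleted send-side behavior: stream_idx 0 went on the
// wire as simulcast id 1, because wire value 0 meant "unspecified".
#include <cstddef>
#include <cstdint>

uint8_t LegacyAugment(uint8_t content_type_byte,
                      uint8_t experiment_id,
                      size_t stream_idx) {
  content_type_byte |= static_cast<uint8_t>((experiment_id & 0x7) << 3);
  content_type_byte |=
      static_cast<uint8_t>(((stream_idx + 1) & 0x3) << 1);  // ids start at 1
  return content_type_byte;
}
// After this CL the equivalent step is a no-op: the byte stays 0x0 or 0x1.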