Fix video version of RTCInboundRtpStreamStats.jitterBufferDelay to obey spec.

Prior to this CL, the video `jitterBufferDelay` stat was the accumulated current delay, which is a smoothed version of the target delay. This is not correct according to the spec [1]. Rather, the stat should be the accumulated time spent in the jitter buffer, for all emitted frames. This CL fixes this spec compliance problem.

Expect changes to test metrics and product monitoring as this CL rolls out.

[1]: https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay

Tested:
1. Go to https://jsfiddle.net/jib1/0L6duga2/show
2. Apply 2.0 seconds of video delay.
3. Notice that "Video jitter buffer delay" is slightly less than 1990 ms
   (2000 ms playoutdelayhint - 10 ms render delay - X ms decode delay).

Bug: webrtc:15085
Change-Id: I42805faafd7dd3bcdcf3ad08e751e08d6de38906
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/304521
Reviewed-by: Åsa Persson <asapersson@webrtc.org>
Commit-Queue: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Henrik Boström <hbos@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#40138}
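To make the behavioral change described above concrete, here is a minimal sketch of the spec-compliant bookkeeping: every frame that leaves the jitter buffer adds the time it waited to a running total. The struct and function names below are illustrative only, not the actual WebRTC classes touched by this CL.

#include <cstdint>

// Illustrative only: per-emitted-frame accumulation mandated by the spec.
struct JitterBufferStats {
  double jitter_buffer_delay_seconds = 0.0;  // total wait time of emitted frames
  uint64_t jitter_buffer_emitted_count = 0;  // number of emitted frames
};

// Called once for every frame released from the jitter buffer, with the time
// that frame spent between ingest (first packet received) and emit (released
// for decoding).
void OnFrameEmitted(JitterBufferStats& stats, double wait_seconds) {
  stats.jitter_buffer_delay_seconds += wait_seconds;
  ++stats.jitter_buffer_emitted_count;
}

// A stats consumer derives the average per-frame delay as
// jitter_buffer_delay_seconds / jitter_buffer_emitted_count which, in the
// jsfiddle test above, ends up close to the applied playout delay (minus
// render and decode delay) rather than a smoothed target delay.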
parent 8ac66a2ba8
commit 621cb2943d

14 changed files with 127 additions and 48 deletions
@@ -56,11 +56,18 @@ std::string VideoReceiveStreamInterface::Stats::ToString(
ss << "VideoReceiveStreamInterface stats: " << time_ms << ", {ssrc: " << ssrc
<< ", ";
ss << "total_bps: " << total_bitrate_bps << ", ";
ss << "width: " << width << ", ";
ss << "height: " << height << ", ";
// Spec-compliant stats are camelCased to distinguish them from
// the legacy and internal stats.
ss << "frameWidth: " << width << ", ";
ss << "frameHeight: " << height << ", ";
// TODO(crbug.com/webrtc/15166): `key` and `delta` will not
// perfectly match the other frame counters.
ss << "key: " << frame_counts.key_frames << ", ";
ss << "delta: " << frame_counts.delta_frames << ", ";
ss << "frames_dropped: " << frames_dropped << ", ";
ss << "framesAssembledFromMultiplePackets: "
<< frames_assembled_from_multiple_packets << ", ";
ss << "framesDecoded: " << frames_decoded << ", ";
ss << "framesDropped: " << frames_dropped << ", ";
ss << "network_fps: " << network_frame_rate << ", ";
ss << "decode_fps: " << decode_frame_rate << ", ";
ss << "render_fps: " << render_frame_rate << ", ";
@@ -68,17 +75,21 @@ std::string VideoReceiveStreamInterface::Stats::ToString(
ss << "max_decode_ms: " << max_decode_ms << ", ";
ss << "first_frame_received_to_decoded_ms: "
<< first_frame_received_to_decoded_ms << ", ";
ss << "cur_delay_ms: " << current_delay_ms << ", ";
ss << "targ_delay_ms: " << target_delay_ms << ", ";
ss << "jb_delay_ms: " << jitter_buffer_ms << ", ";
ss << "jb_cumulative_delay_seconds: " << jitter_buffer_delay_seconds << ", ";
ss << "jb_emitted_count: " << jitter_buffer_emitted_count << ", ";
ss << "current_delay_ms: " << current_delay_ms << ", ";
ss << "target_delay_ms: " << target_delay_ms << ", ";
ss << "jitter_delay_ms: " << jitter_buffer_ms << ", ";
ss << "totalAssemblyTime: " << total_assembly_time.seconds<double>() << ", ";
ss << "jitterBufferDelay: " << jitter_buffer_delay.seconds<double>() << ", ";
ss << "jitterBufferEmittedCount: " << jitter_buffer_emitted_count << ", ";
ss << "totalDecodeTime: " << total_decode_time.seconds<double>() << ", ";
ss << "totalProcessingDelay: " << total_processing_delay.seconds<double>()
<< ", ";
ss << "min_playout_delay_ms: " << min_playout_delay_ms << ", ";
ss << "sync_offset_ms: " << sync_offset_ms << ", ";
ss << "cum_loss: " << rtp_stats.packets_lost << ", ";
ss << "nack: " << rtcp_packet_type_counts.nack_packets << ", ";
ss << "fir: " << rtcp_packet_type_counts.fir_packets << ", ";
ss << "pli: " << rtcp_packet_type_counts.pli_packets;
ss << "nackCount: " << rtcp_packet_type_counts.nack_packets << ", ";
ss << "firCount: " << rtcp_packet_type_counts.fir_packets << ", ";
ss << "pliCount: " << rtcp_packet_type_counts.pli_packets;
ss << '}';
return ss.str();
}
@@ -96,9 +96,9 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface {
int current_delay_ms = 0;
int target_delay_ms = 0;
int jitter_buffer_ms = 0;
// https://w3c.github.io/webrtc-stats/#dom-rtcvideoreceiverstats-jitterbufferdelay
double jitter_buffer_delay_seconds = 0;
// https://w3c.github.io/webrtc-stats/#dom-rtcvideoreceiverstats-jitterbufferemittedcount
// https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay
TimeDelta jitter_buffer_delay = TimeDelta::Zero();
// https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferemittedcount
uint64_t jitter_buffer_emitted_count = 0;
int min_playout_delay_ms = 0;
int render_delay_ms = 10;
@@ -3392,7 +3392,8 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo(
info.current_delay_ms = stats.current_delay_ms;
info.target_delay_ms = stats.target_delay_ms;
info.jitter_buffer_ms = stats.jitter_buffer_ms;
info.jitter_buffer_delay_seconds = stats.jitter_buffer_delay_seconds;
info.jitter_buffer_delay_seconds =
stats.jitter_buffer_delay.seconds<double>();
info.jitter_buffer_emitted_count = stats.jitter_buffer_emitted_count;
info.min_playout_delay_ms = stats.min_playout_delay_ms;
info.render_delay_ms = stats.render_delay_ms;
@@ -2088,6 +2088,8 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStats) {
EXPECT_GT(receive_info.receivers[0].framerate_received, 0);
EXPECT_GT(receive_info.receivers[0].framerate_decoded, 0);
EXPECT_GT(receive_info.receivers[0].framerate_output, 0);
EXPECT_GT(receive_info.receivers[0].jitter_buffer_delay_seconds, 0.0);
EXPECT_GT(receive_info.receivers[0].jitter_buffer_emitted_count, 0u);

EXPECT_EQ(1U, receive_info.receive_codecs.count(DefaultCodec().id));
EXPECT_EQ(DefaultCodec().ToCodecParameters(),
@@ -6557,7 +6559,7 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) {
stats.current_delay_ms = 4;
stats.target_delay_ms = 5;
stats.jitter_buffer_ms = 6;
stats.jitter_buffer_delay_seconds = 60;
stats.jitter_buffer_delay = TimeDelta::Seconds(60);
stats.jitter_buffer_emitted_count = 6;
stats.min_playout_delay_ms = 7;
stats.render_delay_ms = 8;
@@ -6590,7 +6592,7 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) {
EXPECT_EQ(stats.current_delay_ms, receive_info.receivers[0].current_delay_ms);
EXPECT_EQ(stats.target_delay_ms, receive_info.receivers[0].target_delay_ms);
EXPECT_EQ(stats.jitter_buffer_ms, receive_info.receivers[0].jitter_buffer_ms);
EXPECT_EQ(stats.jitter_buffer_delay_seconds,
EXPECT_EQ(stats.jitter_buffer_delay.seconds<double>(),
receive_info.receivers[0].jitter_buffer_delay_seconds);
EXPECT_EQ(stats.jitter_buffer_emitted_count,
receive_info.receivers[0].jitter_buffer_emitted_count);
@@ -245,6 +245,7 @@ rtc_library("video_stream_buffer_controller") {
"../api/task_queue",
"../api/units:data_size",
"../api/units:time_delta",
"../api/units:timestamp",
"../api/video:encoded_frame",
"../api/video:frame_buffer",
"../api/video:video_rtp_headers",
@@ -127,6 +127,11 @@ TEST_F(StatsEndToEndTest, GetStats) {
stats.frame_counts.key_frames != 0 ||
stats.frame_counts.delta_frames != 0;

receive_stats_filled_["JitterBufferDelay"] =
stats.jitter_buffer_delay > TimeDelta::Zero();
receive_stats_filled_["JitterBufferEmittedCount"] =
stats.jitter_buffer_emitted_count != 0;

receive_stats_filled_["CName"] |= !stats.c_name.empty();

receive_stats_filled_["RtcpPacketTypeCount"] |=
@@ -106,7 +106,7 @@ Updated when the available bitrate changes, `VideoSendStreamImpl::OnBitrateUpdat

### ReceiveStatisticsProxy
`VideoReceiveStream` owns a [ReceiveStatisticsProxy] which implements
`VCMReceiveStatisticsCallback`,
`VideoStreamBufferControllerStatsObserver`,
`RtcpCnameCallback`,
`RtcpPacketTypeCounterObserver`,
`CallStatsObserver`
@@ -123,13 +123,13 @@ Updated when a complete frame is received, `FrameBuffer::InsertFrame`.
* `network_frame_rate` - number of frames received during the last second.

Updated when a frame is ready for decoding, `FrameBuffer::GetNextFrame`. From `VCMTiming`:
* `jitter_buffer_ms` - jitter buffer delay in ms.
* `jitter_buffer_ms` - jitter delay in ms: this is the delay added to handle network jitter
* `max_decode_ms` - the 95th percentile observed decode time within a time window (10 sec).
* `render_delay_ms` - render delay in ms.
* `min_playout_delay_ms` - minimum playout delay in ms.
* `target_delay_ms` - target playout delay in ms. Max(`min_playout_delay_ms`, `jitter_delay_ms` + `max_decode_ms` + `render_delay_ms`).
* `current_delay_ms` - actual playout delay in ms.
* `jitter_buffer_delay_seconds` - total jitter buffer delay in seconds [[rtcinboundrtpstreamstats-jitterbufferdelay]].
* `jitter_buffer_delay_seconds` - total jitter buffer delay in seconds: this is the time spent waiting in the jitter buffer [[rtcinboundrtpstreamstats-jitterbufferdelay]].
* `jitter_buffer_emitted_count` - total number of frames that have come out from the jitter buffer [[rtcinboundrtpstreamstats-jitterbufferemittedcount]].

Updated (if changed) after a frame is passed to the decoder, `VCMGenericDecoder::Decode`.
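As a rough illustration of how the documented timings combine, the relation quoted above for `target_delay_ms` can be written as a small helper. The function name and the example numbers below are made up for illustration; they are not part of this CL.

#include <algorithm>

// Mirrors the documented relation:
//   target_delay_ms = Max(min_playout_delay_ms,
//                         jitter_delay_ms + max_decode_ms + render_delay_ms)
int TargetDelayMs(int min_playout_delay_ms, int jitter_delay_ms,
                  int max_decode_ms, int render_delay_ms) {
  return std::max(min_playout_delay_ms,
                  jitter_delay_ms + max_decode_ms + render_delay_ms);
}

// Example: with jitter_delay_ms = 50, max_decode_ms = 15, render_delay_ms = 10
// and min_playout_delay_ms = 0, the target playout delay is 75 ms, while the
// spec-compliant jitter_buffer_delay_seconds keeps growing with every emitted
// frame and is therefore reported separately.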
@@ -223,7 +223,7 @@ void ReceiveStatisticsProxy::UpdateHistograms(
log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n';
}
absl::optional<int> jb_delay_ms =
jitter_buffer_delay_counter_.Avg(kMinRequiredSamples);
jitter_delay_counter_.Avg(kMinRequiredSamples);
if (jb_delay_ms) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
*jb_delay_ms);
@@ -506,10 +506,6 @@ VideoReceiveStreamInterface::Stats ReceiveStatisticsProxy::GetStats() const {

stats_.content_type = last_content_type_;
stats_.timing_frame_info = timing_frame_info_counter_.Max(now_ms);
stats_.jitter_buffer_delay_seconds =
static_cast<double>(current_delay_counter_.Sum(1).value_or(0)) /
rtc::kNumMillisecsPerSec;
stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples();
stats_.estimated_playout_ntp_timestamp_ms =
GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms);
return stats_;
@@ -536,21 +532,32 @@ void ReceiveStatisticsProxy::OnDecoderInfo(
}));
}

void ReceiveStatisticsProxy::OnDecodableFrame(TimeDelta jitter_buffer_delay) {
RTC_DCHECK_RUN_ON(&main_thread_);
// Cumulative stats exposed through standardized GetStats.
// TODO(crbug.com/webrtc/14244): Implement targetDelay and minimumDelay here.
stats_.jitter_buffer_delay += jitter_buffer_delay;
++stats_.jitter_buffer_emitted_count;
}

void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
int estimated_max_decode_time_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int jitter_delay_ms,
int min_playout_delay_ms,
int render_delay_ms) {
RTC_DCHECK_RUN_ON(&main_thread_);
// Instantaneous stats exposed through legacy GetStats.
stats_.max_decode_ms = estimated_max_decode_time_ms;
stats_.current_delay_ms = current_delay_ms;
stats_.target_delay_ms = target_delay_ms;
stats_.jitter_buffer_ms = jitter_buffer_ms;
stats_.jitter_buffer_ms = jitter_delay_ms;
stats_.min_playout_delay_ms = min_playout_delay_ms;
stats_.render_delay_ms = render_delay_ms;
jitter_buffer_delay_counter_.Add(jitter_buffer_ms);

// UMA stats.
jitter_delay_counter_.Add(jitter_delay_ms);
target_delay_counter_.Add(target_delay_ms);
current_delay_counter_.Add(current_delay_ms);
// Estimated one-way delay: network delay (rtt/2) + target_delay_ms (jitter
@@ -89,13 +89,13 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver,
size_t size_bytes,
VideoContentType content_type) override;
void OnDroppedFrames(uint32_t frames_dropped) override;
void OnDecodableFrame(TimeDelta jitter_buffer_delay) override;
void OnFrameBufferTimingsUpdated(int estimated_max_decode_time_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int jitter_delay_ms,
int min_playout_delay_ms,
int render_delay_ms) override;

void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override;

// Implements RtcpCnameCallback.
@@ -161,7 +161,7 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver,
rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(main_thread_);
rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(main_thread_);
rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(main_thread_);
rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(main_thread_);
rtc::SampleCounter jitter_delay_counter_ RTC_GUARDED_BY(main_thread_);
rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(main_thread_);
rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(main_thread_);
rtc::SampleCounter oneway_delay_counter_ RTC_GUARDED_BY(main_thread_);
@@ -561,21 +561,34 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsDecodeTimingStats) {
const int kMaxDecodeMs = 2;
const int kCurrentDelayMs = 3;
const int kTargetDelayMs = 4;
const int kJitterBufferMs = 5;
const int kJitterDelayMs = 5;
const int kMinPlayoutDelayMs = 6;
const int kRenderDelayMs = 7;
const int64_t kRttMs = 8;
const int kJitterBufferDelayMs = 9;
statistics_proxy_->OnRttUpdate(kRttMs);
statistics_proxy_->OnFrameBufferTimingsUpdated(
kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs,
kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterDelayMs,
kMinPlayoutDelayMs, kRenderDelayMs);
statistics_proxy_->OnDecodableFrame(TimeDelta::Millis(kJitterBufferDelayMs));
VideoReceiveStreamInterface::Stats stats = FlushAndGetStats();
EXPECT_EQ(kMaxDecodeMs, stats.max_decode_ms);
EXPECT_EQ(kCurrentDelayMs, stats.current_delay_ms);
EXPECT_EQ(kTargetDelayMs, stats.target_delay_ms);
EXPECT_EQ(kJitterBufferMs, stats.jitter_buffer_ms);
EXPECT_EQ(kJitterDelayMs, stats.jitter_buffer_ms);
EXPECT_EQ(kMinPlayoutDelayMs, stats.min_playout_delay_ms);
EXPECT_EQ(kRenderDelayMs, stats.render_delay_ms);
EXPECT_EQ(kJitterBufferDelayMs, stats.jitter_buffer_delay.ms());
EXPECT_EQ(1u, stats.jitter_buffer_emitted_count);
}

TEST_F(ReceiveStatisticsProxyTest, CumulativeDecodeGetStatsAccumulate) {
const int kJitterBufferDelayMs = 3;
statistics_proxy_->OnDecodableFrame(TimeDelta::Millis(kJitterBufferDelayMs));
statistics_proxy_->OnDecodableFrame(TimeDelta::Millis(kJitterBufferDelayMs));
VideoReceiveStreamInterface::Stats stats = FlushAndGetStats();
EXPECT_EQ(2 * kJitterBufferDelayMs, stats.jitter_buffer_delay.ms());
EXPECT_EQ(2u, stats.jitter_buffer_emitted_count);
}

TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsRtcpPacketTypeCounts) {
@@ -882,13 +895,13 @@ TEST_F(ReceiveStatisticsProxyTest, TimingHistogramsNotUpdatedForTooFewSamples) {
const int kMaxDecodeMs = 2;
const int kCurrentDelayMs = 3;
const int kTargetDelayMs = 4;
const int kJitterBufferMs = 5;
const int kJitterDelayMs = 5;
const int kMinPlayoutDelayMs = 6;
const int kRenderDelayMs = 7;

for (int i = 0; i < kMinRequiredSamples - 1; ++i) {
statistics_proxy_->OnFrameBufferTimingsUpdated(
kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs,
kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterDelayMs,
kMinPlayoutDelayMs, kRenderDelayMs);
}
@@ -906,13 +919,13 @@ TEST_F(ReceiveStatisticsProxyTest, TimingHistogramsAreUpdated) {
const int kMaxDecodeMs = 2;
const int kCurrentDelayMs = 3;
const int kTargetDelayMs = 4;
const int kJitterBufferMs = 5;
const int kJitterDelayMs = 5;
const int kMinPlayoutDelayMs = 6;
const int kRenderDelayMs = 7;

for (int i = 0; i < kMinRequiredSamples; ++i) {
statistics_proxy_->OnFrameBufferTimingsUpdated(
kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs,
kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterDelayMs,
kMinPlayoutDelayMs, kRenderDelayMs);
}
@@ -924,7 +937,7 @@ TEST_F(ReceiveStatisticsProxyTest, TimingHistogramsAreUpdated) {
EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));

EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.JitterBufferDelayInMs",
kJitterBufferMs));
kJitterDelayMs));
EXPECT_METRIC_EQ(
1, metrics::NumEvents("WebRTC.Video.TargetDelayInMs", kTargetDelayMs));
EXPECT_METRIC_EQ(
@@ -764,6 +764,7 @@ void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr<EncodedFrame> frame) {
frame->FrameType() == VideoFrameType::kVideoFrameKey;

// Current OnPreDecode only cares about QP for VP8.
// TODO(brandtr): Move to stats_proxy_.OnDecodableFrame in VSBC, or deprecate.
int qp = -1;
if (frame->CodecSpecific()->codecType == kVideoCodecVP8) {
if (!vp8::GetQp(frame->data(), frame->size(), &qp)) {
@@ -21,6 +21,7 @@
#include "api/task_queue/task_queue_base.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "api/video/encoded_frame.h"
#include "api/video/frame_buffer.h"
#include "api/video/video_content_type.h"
@@ -67,6 +68,16 @@ struct FrameMetadata {
const absl::optional<Timestamp> receive_time;
};

Timestamp MinReceiveTime(const EncodedFrame& frame) {
Timestamp first_recv_time = Timestamp::PlusInfinity();
for (const auto& packet_info : frame.PacketInfos()) {
if (packet_info.receive_time().IsFinite()) {
first_recv_time = std::min(first_recv_time, packet_info.receive_time());
}
}
return first_recv_time;
}

Timestamp ReceiveTime(const EncodedFrame& frame) {
absl::optional<Timestamp> ts = frame.ReceivedTimestamp();
RTC_DCHECK(ts.has_value()) << "Received frame must have a timestamp set!";
@@ -202,7 +213,8 @@ void VideoStreamBufferController::OnFrameReady(
bool superframe_delayed_by_retransmission = false;
DataSize superframe_size = DataSize::Zero();
const EncodedFrame& first_frame = *frames.front();
Timestamp receive_time = ReceiveTime(first_frame);
Timestamp min_receive_time = MinReceiveTime(first_frame);
Timestamp max_receive_time = ReceiveTime(first_frame);

if (first_frame.is_keyframe())
keyframe_required_ = false;
@@ -222,13 +234,14 @@ void VideoStreamBufferController::OnFrameReady(
frame->SetRenderTime(render_time.ms());

superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
receive_time = std::max(receive_time, ReceiveTime(*frame));
min_receive_time = std::min(min_receive_time, MinReceiveTime(*frame));
max_receive_time = std::max(max_receive_time, ReceiveTime(*frame));
superframe_size += DataSize::Bytes(frame->size());
}

if (!superframe_delayed_by_retransmission) {
absl::optional<TimeDelta> inter_frame_delay_variation =
ifdv_calculator_.Calculate(first_frame.Timestamp(), receive_time);
ifdv_calculator_.Calculate(first_frame.Timestamp(), max_receive_time);
if (inter_frame_delay_variation) {
jitter_estimator_.UpdateEstimate(*inter_frame_delay_variation,
superframe_size);
@@ -250,7 +263,7 @@ void VideoStreamBufferController::OnFrameReady(

// Update stats.
UpdateDroppedFrames();
UpdateJitterDelay();
UpdateFrameBufferTimings(min_receive_time, now);
UpdateTimingFrameInfo();

std::unique_ptr<EncodedFrame> frame =
@@ -315,7 +328,10 @@ void VideoStreamBufferController::UpdateDroppedFrames()
buffer_->GetTotalNumberOfDroppedFrames();
}

void VideoStreamBufferController::UpdateJitterDelay() {
void VideoStreamBufferController::UpdateFrameBufferTimings(
Timestamp min_receive_time,
Timestamp now) {
// Update instantaneous delays.
auto timings = timing_->GetTimings();
if (timings.num_decoded_frames) {
stats_proxy_->OnFrameBufferTimingsUpdated(
@@ -323,6 +339,19 @@ void VideoStreamBufferController::UpdateJitterDelay() {
timings.target_delay.ms(), timings.jitter_delay.ms(),
timings.min_playout_delay.ms(), timings.render_delay.ms());
}

// The spec mandates that `jitterBufferDelay` is the "time the first
// packet is received by the jitter buffer (ingest timestamp) to the time it
// exits the jitter buffer (emit timestamp)". Since the "jitter buffer"
// is not a monolith in the webrtc.org implementation, we take the freedom to
// define "ingest timestamp" as "first packet received by
// RtpVideoStreamReceiver2" and "emit timestamp" as "decodable frame released
// by VideoStreamBufferController".
//
// https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay
TimeDelta jitter_buffer_delay =
std::max(TimeDelta::Zero(), now - min_receive_time);
stats_proxy_->OnDecodableFrame(jitter_buffer_delay);
}

void VideoStreamBufferController::UpdateTimingFrameInfo() {
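A worked example of the ingest/emit definition adopted in the hunk above, using hypothetical timestamps: if the earliest packet belonging to any frame of the released superframe arrived at t = 100 ms and the superframe is released for decoding at now = 160 ms, then jitter_buffer_delay = max(0 ms, 160 ms - 100 ms) = 60 ms, so OnDecodableFrame adds 60 ms to the cumulative jitterBufferDelay and increments jitterBufferEmittedCount by one.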
@@ -45,10 +45,14 @@ class VideoStreamBufferControllerStatsObserver {

virtual void OnDroppedFrames(uint32_t frames_dropped) = 0;

// Actual delay experienced by a single frame.
virtual void OnDecodableFrame(TimeDelta jitter_buffer_delay) = 0;

// Various jitter buffer delays determined by VCMTiming.
virtual void OnFrameBufferTimingsUpdated(int estimated_max_decode_time_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int jitter_delay_ms,
int min_playout_delay_ms,
int render_delay_ms) = 0;
@@ -86,7 +90,7 @@ class VideoStreamBufferController {
void OnTimeout(TimeDelta delay);
void FrameReadyForDecode(uint32_t rtp_timestamp, Timestamp render_time);
void UpdateDroppedFrames() RTC_RUN_ON(&worker_sequence_checker_);
void UpdateJitterDelay();
void UpdateFrameBufferTimings(Timestamp min_receive_time, Timestamp now);
void UpdateTimingFrameInfo();
bool IsTooManyFramesQueued() const RTC_RUN_ON(&worker_sequence_checker_);
void ForceKeyFrameReleaseImmediately() RTC_RUN_ON(&worker_sequence_checker_);
@@ -105,12 +105,16 @@ class VideoStreamBufferControllerStatsObserverMock
VideoContentType content_type),
(override));
MOCK_METHOD(void, OnDroppedFrames, (uint32_t num_dropped), (override));
MOCK_METHOD(void,
OnDecodableFrame,
(TimeDelta jitter_buffer_delay),
(override));
MOCK_METHOD(void,
OnFrameBufferTimingsUpdated,
(int max_decode_ms,
(int estimated_max_decode_time_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int jitter_delay_ms,
int min_playout_delay_ms,
int render_delay_ms),
(override));
@@ -621,6 +625,7 @@ TEST_P(VideoStreamBufferControllerTest, SameFrameNotScheduledTwice) {
TEST_P(VideoStreamBufferControllerTest, TestStatsCallback) {
EXPECT_CALL(stats_callback_,
OnCompleteFrame(true, kFrameSize, VideoContentType::UNSPECIFIED));
EXPECT_CALL(stats_callback_, OnDecodableFrame);
EXPECT_CALL(stats_callback_, OnFrameBufferTimingsUpdated);

// Fake timing having received decoded frame.