Reland "Copy video frames metadata between encoded and plain frames in one place"

Reland with fixes.

Currently, some video frame metadata, such as rotation and NTP
timestamps, is copied separately in every encoder and decoder. This CL
moves the copying to a single place on each of the send and receive
sides, which will make it easier to add new metadata in the future.

Also, added some missing tests.
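
For illustration, a minimal send-side sketch of the pattern
(hypothetical, simplified names; the real class added in this CL is
FrameEncodeMetadataWriter, and the receive side lives in
VCMGenericDecoder/VCMDecodedFrameCallback):

    // Simplified sketch only; not the actual WebRTC API.
    #include <cstdint>
    #include <deque>
    #include <optional>

    struct FrameMetadata {
      uint32_t rtp_timestamp = 0;
      int64_t ntp_time_ms = 0;
      int rotation = 0;  // stands in for webrtc::VideoRotation
    };

    class MetadataWriter {
     public:
      // Record the metadata of every raw frame handed to the encoder.
      void OnEncodeStarted(const FrameMetadata& m) { pending_.push_back(m); }

      // Match an encoded image back to its raw frame by RTP timestamp;
      // pending entries the encoder skipped count as dropped frames.
      std::optional<FrameMetadata> OnEncodedImage(uint32_t rtp_timestamp) {
        while (!pending_.empty() &&
               pending_.front().rtp_timestamp != rtp_timestamp) {
          pending_.pop_front();  // dropped inside the encoder
        }
        if (pending_.empty()) return std::nullopt;
        FrameMetadata m = pending_.front();
        pending_.pop_front();
        return m;  // caller stamps this onto the EncodedImage
      }

     private:
      std::deque<FrameMetadata> pending_;  // one list per layer in real code
    };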

Original Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/133346

Bug: webrtc:10460
Change-Id: I98629589fa55ca1d74056033cf86faccfdf848cd
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/136582
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27930}
Author: Ilya Nikolaevskiy, 2019-05-13 16:13:36 +02:00, committed by Commit Bot
Parent: dab21c6a91
Commit: 2ebf523978
19 changed files with 545 additions and 299 deletions


@@ -28,6 +28,13 @@ void FakeVideoRenderer::OnFrame(const webrtc::VideoFrame& frame) {
   height_ = frame.height();
   rotation_ = frame.rotation();
   timestamp_us_ = frame.timestamp_us();
+  ntp_timestamp_ms_ = frame.ntp_time_ms();
+  color_space_ = frame.color_space();
+  frame_rendered_event_.Set();
+}
+
+bool FakeVideoRenderer::WaitForRenderedFrame(int64_t timeout_ms) {
+  return frame_rendered_event_.Wait(timeout_ms);
 }
 
 }  // namespace cricket


@@ -19,6 +19,7 @@
 #include "api/video/video_rotation.h"
 #include "api/video/video_sink_interface.h"
 #include "rtc_base/critical_section.h"
+#include "rtc_base/event.h"
 
 namespace cricket {
 
@@ -30,6 +31,7 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
   void OnFrame(const webrtc::VideoFrame& frame) override;
   int errors() const { return errors_; }
+
   int width() const {
     rtc::CritScope cs(&crit_);
     return width_;
@@ -38,6 +40,7 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
     rtc::CritScope cs(&crit_);
     return height_;
   }
+
   webrtc::VideoRotation rotation() const {
     rtc::CritScope cs(&crit_);
     return rotation_;
@@ -47,15 +50,29 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
     rtc::CritScope cs(&crit_);
     return timestamp_us_;
   }
+
   int num_rendered_frames() const {
     rtc::CritScope cs(&crit_);
     return num_rendered_frames_;
   }
+
   bool black_frame() const {
     rtc::CritScope cs(&crit_);
     return black_frame_;
   }
+
+  int64_t ntp_time_ms() const {
+    rtc::CritScope cs(&crit_);
+    return ntp_timestamp_ms_;
+  }
+
+  absl::optional<webrtc::ColorSpace> color_space() const {
+    rtc::CritScope cs(&crit_);
+    return color_space_;
+  }
+
+  bool WaitForRenderedFrame(int64_t timeout_ms);
 
  private:
   static bool CheckFrameColorYuv(uint8_t y_min,
                                  uint8_t y_max,
@@ -116,8 +133,11 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
   webrtc::VideoRotation rotation_ = webrtc::kVideoRotation_0;
   int64_t timestamp_us_ = 0;
   int num_rendered_frames_ = 0;
+  int64_t ntp_timestamp_ms_ = 0;
   bool black_frame_ = false;
   rtc::CriticalSection crit_;
+  rtc::Event frame_rendered_event_;
+  absl::optional<webrtc::ColorSpace> color_space_;
 };
 
 }  // namespace cricket


@@ -781,6 +781,7 @@ if (rtc_include_tests) {
       "decoding_state_unittest.cc",
       "fec_controller_unittest.cc",
       "frame_buffer2_unittest.cc",
+      "generic_decoder_unittest.cc",
       "h264_sprop_parameter_sets_unittest.cc",
       "h264_sps_pps_tracker_unittest.cc",
       "histogram_unittest.cc",
@@ -840,6 +841,7 @@ if (rtc_include_tests) {
       "../../api:scoped_refptr",
       "../../api:simulcast_test_fixture_api",
       "../../api:videocodec_test_fixture_api",
+      "../../api/task_queue:default_task_queue_factory",
       "../../api/test/video:function_video_factory",
       "../../api/video:builtin_video_bitrate_allocator_factory",
       "../../api/video:video_bitrate_allocation",
@@ -850,6 +852,7 @@ if (rtc_include_tests) {
       "../../api/video_codecs:video_codecs_api",
       "../../api/video_codecs:vp8_temporal_layers_factory",
       "../../common_video",
+      "../../common_video/test:utilities",
       "../../media:rtc_media_base",
       "../../rtc_base",
       "../../rtc_base:checks",
@@ -863,6 +866,7 @@ if (rtc_include_tests) {
       "../../system_wrappers:event_wrapper",
       "../../system_wrappers:field_trial",
       "../../system_wrappers:metrics",
+      "../../test:fake_video_codecs",
       "../../test:field_trial",
       "../../test:fileutils",
       "../../test:test_common",


@@ -19,7 +19,6 @@
 #include "api/video_codecs/video_decoder.h"
 #include "api/video_codecs/video_encoder.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
-#include "common_video/test/utilities.h"
 #include "media/base/codec.h"
 #include "media/base/media_constants.h"
 #include "modules/video_coding/codecs/h264/include/h264.h"
@@ -49,17 +48,9 @@ class TestH264Impl : public VideoCodecUnitTest {
 #ifdef WEBRTC_USE_H264
 #define MAYBE_EncodeDecode EncodeDecode
 #define MAYBE_DecodedQpEqualsEncodedQp DecodedQpEqualsEncodedQp
-#define MAYBE_EncodedColorSpaceEqualsInputColorSpace \
-  EncodedColorSpaceEqualsInputColorSpace
-#define MAYBE_DecodedColorSpaceEqualsEncodedColorSpace \
-  DecodedColorSpaceEqualsEncodedColorSpace
 #else
 #define MAYBE_EncodeDecode DISABLED_EncodeDecode
 #define MAYBE_DecodedQpEqualsEncodedQp DISABLED_DecodedQpEqualsEncodedQp
-#define MAYBE_EncodedColorSpaceEqualsInputColorSpace \
-  DISABLED_EncodedColorSpaceEqualsInputColorSpace
-#define MAYBE_DecodedColorSpaceEqualsEncodedColorSpace \
-  DISABLED_DecodedColorSpaceEqualsEncodedColorSpace
 #endif
 
 TEST_F(TestH264Impl, MAYBE_EncodeDecode) {
@@ -105,45 +96,4 @@ TEST_F(TestH264Impl, MAYBE_DecodedQpEqualsEncodedQp) {
   EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
 }
 
-TEST_F(TestH264Impl, MAYBE_EncodedColorSpaceEqualsInputColorSpace) {
-  VideoFrame* input_frame = NextInputFrame();
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr));
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  EXPECT_FALSE(encoded_frame.ColorSpace());
-
-  // Video frame with explicit color space information.
-  ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/false);
-  VideoFrame input_frame_w_color_space =
-      VideoFrame::Builder()
-          .set_video_frame_buffer(input_frame->video_frame_buffer())
-          .set_color_space(color_space)
-          .build();
-
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            encoder_->Encode(input_frame_w_color_space, nullptr));
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  ASSERT_TRUE(encoded_frame.ColorSpace());
-  EXPECT_EQ(*encoded_frame.ColorSpace(), color_space);
-}
-
-TEST_F(TestH264Impl, MAYBE_DecodedColorSpaceEqualsEncodedColorSpace) {
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            encoder_->Encode(*NextInputFrame(), nullptr));
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  // Add color space to encoded frame.
-  ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/false);
-  encoded_frame.SetColorSpace(color_space);
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, 0));
-  std::unique_ptr<VideoFrame> decoded_frame;
-  absl::optional<uint8_t> decoded_qp;
-  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
-  ASSERT_TRUE(decoded_frame);
-  ASSERT_TRUE(decoded_frame->color_space());
-  EXPECT_EQ(color_space, *decoded_frame->color_space());
-}
-
 }  // namespace webrtc


@@ -227,51 +227,10 @@ TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
   EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);
 
   EXPECT_EQ(kInitialTimestampRtp, encoded_frame.Timestamp());
-  EXPECT_EQ(kInitialTimestampMs, encoded_frame.capture_time_ms_);
   EXPECT_EQ(kWidth, static_cast<int>(encoded_frame._encodedWidth));
   EXPECT_EQ(kHeight, static_cast<int>(encoded_frame._encodedHeight));
 }
 
-// We only test the encoder here, since the decoded frame rotation is set based
-// on the CVO RTP header extension in VCMDecodedFrameCallback::Decoded.
-// TODO(brandtr): Consider passing through the rotation flag through the decoder
-// in the same way as done in the encoder.
-TEST_F(TestVp8Impl, EncodedRotationEqualsInputRotation) {
-  VideoFrame* input_frame = NextInputFrame();
-  input_frame->set_rotation(kVideoRotation_0);
-
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);
-  EXPECT_EQ(kVideoRotation_0, encoded_frame.rotation_);
-
-  input_frame->set_rotation(kVideoRotation_90);
-  EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);
-  EXPECT_EQ(kVideoRotation_90, encoded_frame.rotation_);
-}
-
-TEST_F(TestVp8Impl, EncodedColorSpaceEqualsInputColorSpace) {
-  // Video frame without explicit color space information.
-  VideoFrame* input_frame = NextInputFrame();
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);
-  EXPECT_FALSE(encoded_frame.ColorSpace());
-
-  // Video frame with explicit color space information.
-  ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/false);
-  VideoFrame input_frame_w_color_space =
-      VideoFrame::Builder()
-          .set_video_frame_buffer(input_frame->video_frame_buffer())
-          .set_color_space(color_space)
-          .build();
-
-  EncodeAndWaitForFrame(input_frame_w_color_space, &encoded_frame,
-                        &codec_specific_info);
-  ASSERT_TRUE(encoded_frame.ColorSpace());
-  EXPECT_EQ(*encoded_frame.ColorSpace(), color_space);
-}
-
 TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
   VideoFrame* input_frame = NextInputFrame();
   EncodedImage encoded_frame;
@@ -290,24 +249,6 @@ TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
   EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
 }
 
-TEST_F(TestVp8Impl, DecodedColorSpaceEqualsEncodedColorSpace) {
-  VideoFrame* input_frame = NextInputFrame();
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  EncodeAndWaitForFrame(*input_frame, &encoded_frame, &codec_specific_info);
-
-  // Encoded frame with explicit color space information.
-  ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/false);
-  encoded_frame.SetColorSpace(color_space);
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, -1));
-  std::unique_ptr<VideoFrame> decoded_frame;
-  absl::optional<uint8_t> decoded_qp;
-  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
-  ASSERT_TRUE(decoded_frame);
-  ASSERT_TRUE(decoded_frame->color_space());
-  EXPECT_EQ(color_space, *decoded_frame->color_space());
-}
-
 TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
   codec_settings_.numberOfSimulcastStreams = 2;
   // Resolutions are not in ascending order, temporal layers do not match.
@@ -402,7 +343,6 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
   // Compute PSNR on all planes (faster than SSIM).
   EXPECT_GT(I420PSNR(input_frame, decoded_frame.get()), 36);
   EXPECT_EQ(kInitialTimestampRtp, decoded_frame->timestamp());
-  EXPECT_EQ(kTestNtpTimeMs, decoded_frame->ntp_time_ms());
 }
 
 #if defined(WEBRTC_ANDROID)


@@ -11,7 +11,6 @@
 #include "api/video/color_space.h"
 #include "api/video/i420_buffer.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
-#include "common_video/test/utilities.h"
 #include "media/base/vp9_profile.h"
 #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
 #include "modules/video_coding/codecs/test/video_codec_unittest.h"
@@ -146,50 +145,7 @@ TEST_F(TestVp9Impl, EncodeDecode) {
             color_space.chroma_siting_vertical());
 }
 
-// We only test the encoder here, since the decoded frame rotation is set based
-// on the CVO RTP header extension in VCMDecodedFrameCallback::Decoded.
-// TODO(brandtr): Consider passing through the rotation flag through the decoder
-// in the same way as done in the encoder.
-TEST_F(TestVp9Impl, EncodedRotationEqualsInputRotation) {
-  VideoFrame* input_frame = NextInputFrame();
-  input_frame->set_rotation(kVideoRotation_0);
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr));
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  EXPECT_EQ(kVideoRotation_0, encoded_frame.rotation_);
-
-  input_frame = NextInputFrame();
-  input_frame->set_rotation(kVideoRotation_90);
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr));
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  EXPECT_EQ(kVideoRotation_90, encoded_frame.rotation_);
-}
-
-TEST_F(TestVp9Impl, EncodedColorSpaceEqualsInputColorSpace) {
-  // Video frame without explicit color space information.
-  VideoFrame* input_frame = NextInputFrame();
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr));
-  EncodedImage encoded_frame;
-  CodecSpecificInfo codec_specific_info;
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  EXPECT_FALSE(encoded_frame.ColorSpace());
-
-  // Video frame with explicit color space information.
-  ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/true);
-  VideoFrame input_frame_w_hdr =
-      VideoFrame::Builder()
-          .set_video_frame_buffer(input_frame->video_frame_buffer())
-          .set_color_space(color_space)
-          .build();
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            encoder_->Encode(input_frame_w_hdr, nullptr));
-  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-  ASSERT_TRUE(encoded_frame.ColorSpace());
-  EXPECT_EQ(*encoded_frame.ColorSpace(), color_space);
-}
-
-TEST_F(TestVp9Impl, DecodedColorSpaceEqualsEncodedColorSpace) {
+TEST_F(TestVp9Impl, DecodedColorSpaceFromBitstream) {
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             encoder_->Encode(*NextInputFrame(), nullptr));
   EncodedImage encoded_frame;
@@ -206,15 +162,6 @@ TEST_F(TestVp9Impl, DecodedColorSpaceEqualsEncodedColorSpace) {
   ASSERT_TRUE(decoded_frame->color_space());
   // No HDR metadata present.
   EXPECT_FALSE(decoded_frame->color_space()->hdr_metadata());
-
-  // Encoded frame with explicit color space information.
-  ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/true);
-  encoded_frame.SetColorSpace(color_space);
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, 0));
-  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
-  ASSERT_TRUE(decoded_frame);
-  ASSERT_TRUE(decoded_frame->color_space());
-  EXPECT_EQ(color_space, *decoded_frame->color_space());
 }
 
 TEST_F(TestVp9Impl, DecodedQpEqualsEncodedQp) {


@@ -52,8 +52,10 @@ class VCMEncodedFrame : protected EncodedImage {
     return static_cast<const webrtc::EncodedImage&>(*this);
   }
 
+  using EncodedImage::ColorSpace;
   using EncodedImage::data;
   using EncodedImage::set_size;
+  using EncodedImage::SetColorSpace;
   using EncodedImage::SetSpatialIndex;
   using EncodedImage::SetTimestamp;
   using EncodedImage::size;


@@ -80,6 +80,12 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
     return;
   }
 
+  decodedImage.set_ntp_time_ms(frameInfo->ntp_time_ms);
+  if (frameInfo->color_space) {
+    decodedImage.set_color_space(frameInfo->color_space);
+  }
+  decodedImage.set_rotation(frameInfo->rotation);
+
   const int64_t now_ms = _clock->TimeInMilliseconds();
   if (!decode_time_ms) {
     decode_time_ms = now_ms - frameInfo->decodeStartTimeMs;
@@ -140,7 +146,6 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
 
   decodedImage.set_timestamp_us(frameInfo->renderTimeMs *
                                 rtc::kNumMicrosecsPerMillisec);
-  decodedImage.set_rotation(frameInfo->rotation);
   _receiveCallback->FrameToRender(decodedImage, qp, frameInfo->content_type);
 }
 
@@ -199,6 +204,14 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, int64_t nowMs) {
   _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
   _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation();
   _frameInfos[_nextFrameInfoIdx].timing = frame.video_timing();
+  _frameInfos[_nextFrameInfoIdx].ntp_time_ms =
+      frame.EncodedImage().ntp_time_ms_;
+  if (frame.ColorSpace()) {
+    _frameInfos[_nextFrameInfoIdx].color_space = *frame.ColorSpace();
+  } else {
+    _frameInfos[_nextFrameInfoIdx].color_space = absl::nullopt;
+  }
+
   // Set correctly only for key frames. Thus, use latest key frame
   // content type. If the corresponding key frame was lost, decode will fail
   // and content type will be ignored.


@@ -34,6 +34,8 @@ struct VCMFrameInformation {
   VideoRotation rotation;
   VideoContentType content_type;
   EncodedImage::Timing timing;
+  int64_t ntp_time_ms;
+  absl::optional<ColorSpace> color_space;
 };
 
 class VCMDecodedFrameCallback : public DecodedImageCallback {


@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/generic_decoder.h"
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "common_video/test/utilities.h"
+#include "modules/video_coding/timing.h"
+#include "rtc_base/critical_section.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/clock.h"
+#include "test/fake_decoder.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace video_coding {
+
+class ReceiveCallback : public VCMReceiveCallback {
+ public:
+  int32_t FrameToRender(VideoFrame& videoFrame,  // NOLINT
+                        absl::optional<uint8_t> qp,
+                        VideoContentType content_type) override {
+    {
+      rtc::CritScope cs(&lock_);
+      last_frame_ = videoFrame;
+    }
+    received_frame_event_.Set();
+    return 0;
+  }
+
+  absl::optional<VideoFrame> GetLastFrame() {
+    rtc::CritScope cs(&lock_);
+    return last_frame_;
+  }
+
+  absl::optional<VideoFrame> WaitForFrame(int64_t wait_ms) {
+    if (received_frame_event_.Wait(wait_ms)) {
+      rtc::CritScope cs(&lock_);
+      return last_frame_;
+    } else {
+      return absl::nullopt;
+    }
+  }
+
+ private:
+  rtc::CriticalSection lock_;
+  rtc::Event received_frame_event_;
+  absl::optional<VideoFrame> last_frame_ RTC_GUARDED_BY(lock_);
+};
+
+class GenericDecoderTest : public ::testing::Test {
+ protected:
+  GenericDecoderTest()
+      : clock_(0),
+        timing_(&clock_),
+        task_queue_factory_(CreateDefaultTaskQueueFactory()),
+        decoder_(task_queue_factory_.get()),
+        vcm_callback_(&timing_, &clock_),
+        generic_decoder_(&decoder_, /*isExternal=*/true) {}
+
+  void SetUp() override {
+    generic_decoder_.RegisterDecodeCompleteCallback(&vcm_callback_);
+    vcm_callback_.SetUserReceiveCallback(&user_callback_);
+    VideoCodec settings;
+    settings.codecType = kVideoCodecVP8;
+    settings.width = 10;
+    settings.height = 10;
+    generic_decoder_.InitDecode(&settings, /*numberOfCores=*/4);
+  }
+
+  SimulatedClock clock_;
+  VCMTiming timing_;
+  std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+  webrtc::test::FakeDecoder decoder_;
+  VCMDecodedFrameCallback vcm_callback_;
+  VCMGenericDecoder generic_decoder_;
+  ReceiveCallback user_callback_;
+};
+
+TEST_F(GenericDecoderTest, PassesColorSpace) {
+  webrtc::ColorSpace color_space =
+      CreateTestColorSpace(/*with_hdr_metadata=*/true);
+  VCMEncodedFrame encoded_frame;
+  encoded_frame.SetColorSpace(color_space);
+  generic_decoder_.Decode(encoded_frame, clock_.TimeInMilliseconds());
+  absl::optional<VideoFrame> decoded_frame = user_callback_.WaitForFrame(10);
+  ASSERT_TRUE(decoded_frame.has_value());
+  absl::optional<webrtc::ColorSpace> decoded_color_space =
+      decoded_frame->color_space();
+  ASSERT_TRUE(decoded_color_space.has_value());
+  EXPECT_EQ(*decoded_color_space, color_space);
+}
+
+TEST_F(GenericDecoderTest, PassesColorSpaceForDelayedDecoders) {
+  webrtc::ColorSpace color_space =
+      CreateTestColorSpace(/*with_hdr_metadata=*/true);
+  decoder_.SetDelayedDecoding(100);
+
+  {
+    // Ensure the original frame is destroyed before the decoding is completed.
+    VCMEncodedFrame encoded_frame;
+    encoded_frame.SetColorSpace(color_space);
+    generic_decoder_.Decode(encoded_frame, clock_.TimeInMilliseconds());
+  }
+
+  absl::optional<VideoFrame> decoded_frame = user_callback_.WaitForFrame(200);
+  ASSERT_TRUE(decoded_frame.has_value());
+  absl::optional<webrtc::ColorSpace> decoded_color_space =
+      decoded_frame->color_space();
+  ASSERT_TRUE(decoded_color_space.has_value());
+  EXPECT_EQ(*decoded_color_space, color_space);
+}
+
+}  // namespace video_coding
+}  // namespace webrtc


@@ -12,6 +12,7 @@
 
 #include <string.h>
 
+#include "absl/memory/memory.h"
 #include "api/scoped_refptr.h"
 #include "api/video/i420_buffer.h"
 #include "api/video/video_frame.h"
@@ -19,6 +20,7 @@
 #include "api/video/video_rotation.h"
 #include "modules/video_coding/include/video_error_codes.h"
 #include "rtc_base/checks.h"
+#include "rtc_base/task_queue.h"
 #include "rtc_base/time_utils.h"
 
 namespace webrtc {
@@ -29,8 +31,14 @@ const int kDefaultWidth = 320;
 const int kDefaultHeight = 180;
 }  // namespace
 
-FakeDecoder::FakeDecoder()
-    : callback_(NULL), width_(kDefaultWidth), height_(kDefaultHeight) {}
+FakeDecoder::FakeDecoder() : FakeDecoder(nullptr) {}
+
+FakeDecoder::FakeDecoder(TaskQueueFactory* task_queue_factory)
+    : callback_(nullptr),
+      width_(kDefaultWidth),
+      height_(kDefaultHeight),
+      task_queue_factory_(task_queue_factory),
+      decode_delay_ms_(0) {}
 
 int32_t FakeDecoder::InitDecode(const VideoCodec* config,
                                 int32_t number_of_cores) {
@@ -45,20 +53,40 @@ int32_t FakeDecoder::Decode(const EncodedImage& input,
     height_ = input._encodedHeight;
   }
 
-  VideoFrame frame =
-      VideoFrame::Builder()
-          .set_video_frame_buffer(I420Buffer::Create(width_, height_))
-          .set_rotation(webrtc::kVideoRotation_0)
-          .set_timestamp_ms(render_time_ms)
-          .build();
+  rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
+  I420Buffer::SetBlack(buffer);
+  VideoFrame frame = VideoFrame::Builder()
+                         .set_video_frame_buffer(buffer)
+                         .set_rotation(webrtc::kVideoRotation_0)
+                         .set_timestamp_ms(render_time_ms)
+                         .build();
   frame.set_timestamp(input.Timestamp());
   frame.set_ntp_time_ms(input.ntp_time_ms_);
 
-  callback_->Decoded(frame);
+  if (decode_delay_ms_ == 0 || !task_queue_) {
+    callback_->Decoded(frame);
+  } else {
+    task_queue_->PostDelayedTask(
+        [frame, this]() {
+          VideoFrame copy = frame;
+          callback_->Decoded(copy);
+        },
+        decode_delay_ms_);
+  }
 
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
+void FakeDecoder::SetDelayedDecoding(int decode_delay_ms) {
+  RTC_CHECK(task_queue_factory_);
+  if (!task_queue_) {
+    task_queue_ =
+        absl::make_unique<rtc::TaskQueue>(task_queue_factory_->CreateTaskQueue(
+            "fake_decoder", TaskQueueFactory::Priority::NORMAL));
+  }
+  decode_delay_ms_ = decode_delay_ms;
+}
+
 int32_t FakeDecoder::RegisterDecodeCompleteCallback(
     DecodedImageCallback* callback) {
   callback_ = callback;


@@ -13,10 +13,12 @@
 
 #include <stdint.h>
 
+#include "api/task_queue/task_queue_factory.h"
 #include "api/video/encoded_image.h"
 #include "api/video_codecs/video_codec.h"
 #include "api/video_codecs/video_decoder.h"
 #include "modules/video_coding/include/video_codec_interface.h"
+#include "rtc_base/task_queue.h"
 
 namespace webrtc {
 namespace test {
@@ -24,6 +26,7 @@ namespace test {
 class FakeDecoder : public VideoDecoder {
  public:
   FakeDecoder();
+  explicit FakeDecoder(TaskQueueFactory* task_queue_factory);
   virtual ~FakeDecoder() {}
 
   int32_t InitDecode(const VideoCodec* config,
@@ -42,10 +45,15 @@ class FakeDecoder : public VideoDecoder {
 
   static const char* kImplementationName;
 
+  void SetDelayedDecoding(int decode_delay_ms);
+
  private:
   DecodedImageCallback* callback_;
   int width_;
   int height_;
+  std::unique_ptr<rtc::TaskQueue> task_queue_;
+  TaskQueueFactory* task_queue_factory_;
+  int decode_delay_ms_;
 };
 
 class FakeH264Decoder : public FakeDecoder {


@@ -171,8 +171,8 @@ rtc_source_set("video_stream_encoder_impl") {
     "encoder_bitrate_adjuster.h",
     "encoder_overshoot_detector.cc",
    "encoder_overshoot_detector.h",
-    "frame_encode_timer.cc",
-    "frame_encode_timer.h",
+    "frame_encode_metadata_writer.cc",
+    "frame_encode_metadata_writer.h",
    "overuse_frame_detector.cc",
    "overuse_frame_detector.h",
    "video_stream_encoder.cc",
@@ -492,7 +492,7 @@ if (rtc_include_tests) {
      "end_to_end_tests/ssrc_tests.cc",
      "end_to_end_tests/stats_tests.cc",
      "end_to_end_tests/transport_feedback_tests.cc",
-      "frame_encode_timer_unittest.cc",
+      "frame_encode_metadata_writer_unittest.cc",
      "overuse_frame_detector_unittest.cc",
      "picture_id_tests.cc",
      "quality_scaling_tests.cc",


@@ -8,7 +8,7 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "video/frame_encode_timer.h"
+#include "video/frame_encode_metadata_writer.h"
 
 #include <algorithm>
@@ -23,29 +23,31 @@
 const int kThrottleRatio = 100000;
 }  // namespace
 
-FrameEncodeTimer::TimingFramesLayerInfo::TimingFramesLayerInfo() = default;
-FrameEncodeTimer::TimingFramesLayerInfo::~TimingFramesLayerInfo() = default;
+FrameEncodeMetadataWriter::TimingFramesLayerInfo::TimingFramesLayerInfo() =
+    default;
+FrameEncodeMetadataWriter::TimingFramesLayerInfo::~TimingFramesLayerInfo() =
+    default;
 
-FrameEncodeTimer::FrameEncodeTimer(EncodedImageCallback* frame_drop_callback)
+FrameEncodeMetadataWriter::FrameEncodeMetadataWriter(
+    EncodedImageCallback* frame_drop_callback)
     : frame_drop_callback_(frame_drop_callback),
       internal_source_(false),
       framerate_fps_(0),
       last_timing_frame_time_ms_(-1),
-      incorrect_capture_time_logged_messages_(0),
       reordered_frames_logged_messages_(0),
       stalled_encoder_logged_messages_(0) {
   codec_settings_.timing_frame_thresholds = {-1, 0};
 }
-FrameEncodeTimer::~FrameEncodeTimer() {}
+FrameEncodeMetadataWriter::~FrameEncodeMetadataWriter() {}
 
-void FrameEncodeTimer::OnEncoderInit(const VideoCodec& codec,
-                                     bool internal_source) {
+void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec,
+                                              bool internal_source) {
   rtc::CritScope cs(&lock_);
   codec_settings_ = codec;
   internal_source_ = internal_source;
 }
 
-void FrameEncodeTimer::OnSetRates(
+void FrameEncodeMetadataWriter::OnSetRates(
     const VideoBitrateAllocation& bitrate_allocation,
     uint32_t framerate_fps) {
   rtc::CritScope cs(&lock_);
@@ -60,8 +62,7 @@ void FrameEncodeTimer::OnSetRates(
   }
 }
 
-void FrameEncodeTimer::OnEncodeStarted(uint32_t rtp_timestamp,
-                                       int64_t capture_time_ms) {
+void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) {
   rtc::CritScope cs(&lock_);
   if (internal_source_) {
     return;
@@ -69,19 +70,24 @@ void FrameEncodeTimer::OnEncodeStarted(uint32_t rtp_timestamp,
 
   const size_t num_spatial_layers = NumSpatialLayers();
   timing_frames_info_.resize(num_spatial_layers);
+  FrameMetadata metadata;
+  metadata.rtp_timestamp = frame.timestamp();
+  metadata.encode_start_time_ms = rtc::TimeMillis();
+  metadata.ntp_time_ms = frame.ntp_time_ms();
+  metadata.timestamp_us = frame.timestamp_us();
+  metadata.rotation = frame.rotation();
+  metadata.color_space = frame.color_space();
   for (size_t si = 0; si < num_spatial_layers; ++si) {
-    RTC_DCHECK(
-        timing_frames_info_[si].encode_start_list.empty() ||
-        rtc::TimeDiff(
-            capture_time_ms,
-            timing_frames_info_[si].encode_start_list.back().capture_time_ms) >=
-            0);
+    RTC_DCHECK(timing_frames_info_[si].frames.empty() ||
+               rtc::TimeDiff(
+                   frame.render_time_ms(),
+                   timing_frames_info_[si].frames.back().timestamp_us / 1000) >=
+                   0);
     // If stream is disabled due to low bandwidth OnEncodeStarted still will be
     // called and have to be ignored.
     if (timing_frames_info_[si].target_bitrate_bytes_per_sec == 0)
       return;
 
-    if (timing_frames_info_[si].encode_start_list.size() ==
-        kMaxEncodeStartTimeListSize) {
+    if (timing_frames_info_[si].frames.size() == kMaxEncodeStartTimeListSize) {
       ++stalled_encoder_logged_messages_;
       if (stalled_encoder_logged_messages_ <= kMessagesThrottlingThreshold ||
           stalled_encoder_logged_messages_ % kThrottleRatio == 0) {
@@ -95,25 +101,26 @@ void FrameEncodeTimer::OnEncodeStarted(uint32_t rtp_timestamp,
       }
       frame_drop_callback_->OnDroppedFrame(
           EncodedImageCallback::DropReason::kDroppedByEncoder);
-      timing_frames_info_[si].encode_start_list.pop_front();
+      timing_frames_info_[si].frames.pop_front();
     }
-    timing_frames_info_[si].encode_start_list.emplace_back(
-        rtp_timestamp, capture_time_ms, rtc::TimeMillis());
+    timing_frames_info_[si].frames.emplace_back(metadata);
   }
 }
 
-void FrameEncodeTimer::FillTimingInfo(size_t simulcast_svc_idx,
-                                      EncodedImage* encoded_image,
-                                      int64_t encode_done_ms) {
+void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx,
                                                EncodedImage* encoded_image) {
   rtc::CritScope cs(&lock_);
   absl::optional<size_t> outlier_frame_size;
   absl::optional<int64_t> encode_start_ms;
   uint8_t timing_flags = VideoSendTiming::kNotTriggered;
 
+  int64_t encode_done_ms = rtc::TimeMillis();
+
   // Encoders with internal sources do not call OnEncodeStarted
   // |timing_frames_info_| may be not filled here.
   if (!internal_source_) {
-    encode_start_ms = ExtractEncodeStartTime(simulcast_svc_idx, encoded_image);
+    encode_start_ms =
+        ExtractEncodeStartTimeAndFillMetadata(simulcast_svc_idx, encoded_image);
   }
 
   if (timing_frames_info_.size() > simulcast_svc_idx) {
@@ -176,7 +183,7 @@ void FrameEncodeTimer::FillTimingInfo(size_t simulcast_svc_idx,
   }
 }
 
-void FrameEncodeTimer::Reset() {
+void FrameEncodeMetadataWriter::Reset() {
   rtc::CritScope cs(&lock_);
   timing_frames_info_.clear();
   last_timing_frame_time_ms_ = -1;
@@ -184,48 +191,40 @@ void FrameEncodeTimer::Reset() {
   stalled_encoder_logged_messages_ = 0;
 }
 
-absl::optional<int64_t> FrameEncodeTimer::ExtractEncodeStartTime(
+absl::optional<int64_t>
+FrameEncodeMetadataWriter::ExtractEncodeStartTimeAndFillMetadata(
     size_t simulcast_svc_idx,
     EncodedImage* encoded_image) {
   absl::optional<int64_t> result;
   size_t num_simulcast_svc_streams = timing_frames_info_.size();
   if (simulcast_svc_idx < num_simulcast_svc_streams) {
-    auto encode_start_list =
-        &timing_frames_info_[simulcast_svc_idx].encode_start_list;
+    auto metadata_list = &timing_frames_info_[simulcast_svc_idx].frames;
     // Skip frames for which there was OnEncodeStarted but no OnEncodedImage
     // call. These are dropped by encoder internally.
    // Because some hardware encoders don't preserve capture timestamp we
    // use RTP timestamps here.
-    while (!encode_start_list->empty() &&
+    while (!metadata_list->empty() &&
            IsNewerTimestamp(encoded_image->Timestamp(),
-                            encode_start_list->front().rtp_timestamp)) {
+                            metadata_list->front().rtp_timestamp)) {
       frame_drop_callback_->OnDroppedFrame(
           EncodedImageCallback::DropReason::kDroppedByEncoder);
-      encode_start_list->pop_front();
+      metadata_list->pop_front();
    }
-    if (!encode_start_list->empty() &&
-        encode_start_list->front().rtp_timestamp ==
-            encoded_image->Timestamp()) {
-      result.emplace(encode_start_list->front().encode_start_time_ms);
-      if (encoded_image->capture_time_ms_ !=
-          encode_start_list->front().capture_time_ms) {
-        // Force correct capture timestamp.
-        encoded_image->capture_time_ms_ =
-            encode_start_list->front().capture_time_ms;
-        ++incorrect_capture_time_logged_messages_;
-        if (incorrect_capture_time_logged_messages_ <=
-                kMessagesThrottlingThreshold ||
-            incorrect_capture_time_logged_messages_ % kThrottleRatio == 0) {
-          RTC_LOG(LS_WARNING)
-              << "Encoder is not preserving capture timestamps.";
-          if (incorrect_capture_time_logged_messages_ ==
-              kMessagesThrottlingThreshold) {
-            RTC_LOG(LS_WARNING) << "Too many log messages. Further incorrect "
-                                   "timestamps warnings will be throttled.";
-          }
-        }
-      }
-      encode_start_list->pop_front();
+    if (!metadata_list->empty() &&
+        metadata_list->front().rtp_timestamp == encoded_image->Timestamp()) {
+      result.emplace(metadata_list->front().encode_start_time_ms);
+      encoded_image->capture_time_ms_ =
+          metadata_list->front().timestamp_us / 1000;
+      encoded_image->ntp_time_ms_ = metadata_list->front().ntp_time_ms;
+      encoded_image->rotation_ = metadata_list->front().rotation;
+      encoded_image->SetColorSpace(metadata_list->front().color_space);
+      encoded_image->content_type_ =
+          (codec_settings_.mode == VideoCodecMode::kScreensharing)
+              ? VideoContentType::SCREENSHARE
+              : VideoContentType::UNSPECIFIED;
+
+      metadata_list->pop_front();
    } else {
      ++reordered_frames_logged_messages_;
      if (reordered_frames_logged_messages_ <= kMessagesThrottlingThreshold ||
@@ -243,7 +242,7 @@ absl::optional<int64_t> FrameEncodeTimer::ExtractEncodeStartTime(
   return result;
 }
 
-size_t FrameEncodeTimer::NumSpatialLayers() const {
+size_t FrameEncodeMetadataWriter::NumSpatialLayers() const {
   size_t num_spatial_layers = codec_settings_.numberOfSimulcastStreams;
   if (codec_settings_.codecType == kVideoCodecVP9) {
     num_spatial_layers = std::max(


@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef VIDEO_FRAME_ENCODE_TIMER_H_
-#define VIDEO_FRAME_ENCODE_TIMER_H_
+#ifndef VIDEO_FRAME_ENCODE_METADATA_WRITER_H_
+#define VIDEO_FRAME_ENCODE_METADATA_WRITER_H_
 
 #include <list>
 #include <vector>
@@ -22,20 +22,18 @@
 
 namespace webrtc {
 
-class FrameEncodeTimer {
+class FrameEncodeMetadataWriter {
  public:
-  explicit FrameEncodeTimer(EncodedImageCallback* frame_drop_callback);
-  ~FrameEncodeTimer();
+  explicit FrameEncodeMetadataWriter(EncodedImageCallback* frame_drop_callback);
+  ~FrameEncodeMetadataWriter();
 
   void OnEncoderInit(const VideoCodec& codec, bool internal_source);
   void OnSetRates(const VideoBitrateAllocation& bitrate_allocation,
                   uint32_t framerate_fps);
 
-  void OnEncodeStarted(uint32_t rtp_timestamp, int64_t capture_time_ms);
+  void OnEncodeStarted(const VideoFrame& frame);
 
-  void FillTimingInfo(size_t simulcast_svc_idx,
-                      EncodedImage* encoded_image,
-                      int64_t encode_done_ms);
+  void FillTimingInfo(size_t simulcast_svc_idx, EncodedImage* encoded_image);
   void Reset();
 
  private:
@@ -43,26 +41,23 @@ class FrameEncodeTimer {
   // For non-internal-source encoders, returns encode started time and fixes
   // capture timestamp for the frame, if corrupted by the encoder.
-  absl::optional<int64_t> ExtractEncodeStartTime(size_t simulcast_svc_idx,
-                                                 EncodedImage* encoded_image)
-      RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+  absl::optional<int64_t> ExtractEncodeStartTimeAndFillMetadata(
+      size_t simulcast_svc_idx,
+      EncodedImage* encoded_image) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
 
-  struct EncodeStartTimeRecord {
-    EncodeStartTimeRecord(uint32_t timestamp,
-                          int64_t capture_time,
-                          int64_t encode_start_time)
-        : rtp_timestamp(timestamp),
-          capture_time_ms(capture_time),
-          encode_start_time_ms(encode_start_time) {}
+  struct FrameMetadata {
     uint32_t rtp_timestamp;
-    int64_t capture_time_ms;
     int64_t encode_start_time_ms;
+    int64_t ntp_time_ms = 0;
+    int64_t timestamp_us = 0;
+    VideoRotation rotation = kVideoRotation_0;
+    absl::optional<ColorSpace> color_space;
   };
   struct TimingFramesLayerInfo {
     TimingFramesLayerInfo();
    ~TimingFramesLayerInfo();
    size_t target_bitrate_bytes_per_sec = 0;
-    std::list<EncodeStartTimeRecord> encode_start_list;
+    std::list<FrameMetadata> frames;
   };
 
   rtc::CriticalSection lock_;
@@ -74,11 +69,10 @@ class FrameEncodeTimer {
   // Separate instance for each simulcast stream or spatial layer.
   std::vector<TimingFramesLayerInfo> timing_frames_info_ RTC_GUARDED_BY(&lock_);
   int64_t last_timing_frame_time_ms_ RTC_GUARDED_BY(&lock_);
-  size_t incorrect_capture_time_logged_messages_ RTC_GUARDED_BY(&lock_);
   size_t reordered_frames_logged_messages_ RTC_GUARDED_BY(&lock_);
   size_t stalled_encoder_logged_messages_ RTC_GUARDED_BY(&lock_);
 };
 
 }  // namespace webrtc
 
-#endif  // VIDEO_FRAME_ENCODE_TIMER_H_
+#endif  // VIDEO_FRAME_ENCODE_METADATA_WRITER_H_


@ -8,18 +8,25 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "video/frame_encode_metadata_writer.h"
#include <cstddef> #include <cstddef>
#include <vector> #include <vector>
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "api/video/video_timing.h" #include "api/video/video_timing.h"
#include "common_video/test/utilities.h"
#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/include/video_coding_defines.h"
#include "rtc_base/time_utils.h" #include "rtc_base/time_utils.h"
#include "test/gtest.h" #include "test/gtest.h"
#include "video/frame_encode_timer.h"
namespace webrtc { namespace webrtc {
namespace test { namespace test {
namespace { namespace {
const rtc::scoped_refptr<I420Buffer> kFrameBuffer = I420Buffer::Create(4, 4);
inline size_t FrameSize(const size_t& min_frame_size, inline size_t FrameSize(const size_t& min_frame_size,
const size_t& max_frame_size, const size_t& max_frame_size,
const int& s, const int& s,
@ -65,7 +72,7 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
const int num_streams, const int num_streams,
const int num_frames) { const int num_frames) {
FakeEncodedImageCallback sink; FakeEncodedImageCallback sink;
FrameEncodeTimer encode_timer(&sink); FrameEncodeMetadataWriter encode_timer(&sink);
VideoCodec codec_settings; VideoCodec codec_settings;
codec_settings.numberOfSimulcastStreams = num_streams; codec_settings.numberOfSimulcastStreams = num_streams;
codec_settings.timing_frame_thresholds = {delay_ms, codec_settings.timing_frame_thresholds = {delay_ms,
@ -83,8 +90,12 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
int64_t current_timestamp = 0; int64_t current_timestamp = 0;
for (int i = 0; i < num_frames; ++i) { for (int i = 0; i < num_frames; ++i) {
current_timestamp += 1; current_timestamp += 1;
encode_timer.OnEncodeStarted(static_cast<uint32_t>(current_timestamp * 90), VideoFrame frame = VideoFrame::Builder()
current_timestamp); .set_timestamp_rtp(current_timestamp * 90)
.set_timestamp_ms(current_timestamp)
.set_video_frame_buffer(kFrameBuffer)
.build();
encode_timer.OnEncodeStarted(frame);
for (int si = 0; si < num_streams; ++si) { for (int si = 0; si < num_streams; ++si) {
// every (5+s)-th frame is dropped on s-th stream by design. // every (5+s)-th frame is dropped on s-th stream by design.
bool dropped = i % (5 + si) == 0; bool dropped = i % (5 + si) == 0;
@ -101,7 +112,7 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
continue; continue;
} }
encode_timer.FillTimingInfo(si, &image, current_timestamp); encode_timer.FillTimingInfo(si, &image);
if (IsTimingFrame(image)) { if (IsTimingFrame(image)) {
result[si].push_back(FrameType::kTiming); result[si].push_back(FrameType::kTiming);
@ -190,7 +201,7 @@ TEST(FrameEncodeTimerTest, NoTimingFrameIfNoEncodeStartTime) {
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90)); image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
FakeEncodedImageCallback sink; FakeEncodedImageCallback sink;
FrameEncodeTimer encode_timer(&sink); FrameEncodeMetadataWriter encode_timer(&sink);
VideoCodec codec_settings; VideoCodec codec_settings;
// Make all frames timing frames. // Make all frames timing frames.
codec_settings.timing_frame_thresholds.delay_ms = 1; codec_settings.timing_frame_thresholds.delay_ms = 1;
@ -200,16 +211,20 @@ TEST(FrameEncodeTimerTest, NoTimingFrameIfNoEncodeStartTime) {
encode_timer.OnSetRates(bitrate_allocation, 30); encode_timer.OnSetRates(bitrate_allocation, 30);
// Verify a single frame works with encode start time set. // Verify a single frame works with encode start time set.
encode_timer.OnEncodeStarted(static_cast<uint32_t>(timestamp * 90), VideoFrame frame = VideoFrame::Builder()
timestamp); .set_timestamp_ms(timestamp)
encode_timer.FillTimingInfo(0, &image, timestamp); .set_timestamp_rtp(timestamp * 90)
.set_video_frame_buffer(kFrameBuffer)
.build();
encode_timer.OnEncodeStarted(frame);
encode_timer.FillTimingInfo(0, &image);
EXPECT_TRUE(IsTimingFrame(image)); EXPECT_TRUE(IsTimingFrame(image));
// New frame, now skip OnEncodeStarted. Should not result in timing frame. // New frame, now skip OnEncodeStarted. Should not result in timing frame.
image.capture_time_ms_ = ++timestamp; image.capture_time_ms_ = ++timestamp;
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90)); image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
image.timing_ = EncodedImage::Timing(); image.timing_ = EncodedImage::Timing();
encode_timer.FillTimingInfo(0, &image, timestamp); encode_timer.FillTimingInfo(0, &image);
EXPECT_FALSE(IsTimingFrame(image)); EXPECT_FALSE(IsTimingFrame(image));
} }
@ -226,7 +241,7 @@ TEST(FrameEncodeTimerTest, AdjustsCaptureTimeForInternalSourceEncoder) {
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90)); image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
FakeEncodedImageCallback sink; FakeEncodedImageCallback sink;
FrameEncodeTimer encode_timer(&sink); FrameEncodeMetadataWriter encode_timer(&sink);
VideoCodec codec_settings; VideoCodec codec_settings;
// Make all frames timing frames. // Make all frames timing frames.
@ -238,7 +253,7 @@ TEST(FrameEncodeTimerTest, AdjustsCaptureTimeForInternalSourceEncoder) {
encode_timer.OnSetRates(bitrate_allocation, 30); encode_timer.OnSetRates(bitrate_allocation, 30);
// Verify a single frame without encode timestamps isn't a timing frame. // Verify a single frame without encode timestamps isn't a timing frame.
encode_timer.FillTimingInfo(0, &image, timestamp); encode_timer.FillTimingInfo(0, &image);
EXPECT_FALSE(IsTimingFrame(image)); EXPECT_FALSE(IsTimingFrame(image));
// New frame, but this time with encode timestamps set in timing_. // New frame, but this time with encode timestamps set in timing_.
@ -248,14 +263,14 @@ TEST(FrameEncodeTimerTest, AdjustsCaptureTimeForInternalSourceEncoder) {
image.timing_ = EncodedImage::Timing(); image.timing_ = EncodedImage::Timing();
image.timing_.encode_start_ms = timestamp + kEncodeStartDelayMs; image.timing_.encode_start_ms = timestamp + kEncodeStartDelayMs;
image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs; image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs;
const int64_t kEncodeDoneTimestamp = 1234567;
encode_timer.FillTimingInfo(0, &image, kEncodeDoneTimestamp); encode_timer.FillTimingInfo(0, &image);
EXPECT_TRUE(IsTimingFrame(image)); EXPECT_TRUE(IsTimingFrame(image));
// Frame is captured kEncodeFinishDelayMs before it's encoded, so restored // Frame is captured kEncodeFinishDelayMs before it's encoded, so restored
// capture timestamp should be kEncodeFinishDelayMs in the past. // capture timestamp should be kEncodeFinishDelayMs in the past.
EXPECT_EQ(image.capture_time_ms_, EXPECT_NEAR(image.capture_time_ms_, rtc::TimeMillis() - kEncodeFinishDelayMs,
kEncodeDoneTimestamp - kEncodeFinishDelayMs); 1);
} }
TEST(FrameEncodeTimerTest, NotifiesAboutDroppedFrames) { TEST(FrameEncodeTimerTest, NotifiesAboutDroppedFrames) {
@ -265,7 +280,7 @@ TEST(FrameEncodeTimerTest, NotifiesAboutDroppedFrames) {
const int64_t kTimestampMs4 = 47721870; const int64_t kTimestampMs4 = 47721870;
FakeEncodedImageCallback sink; FakeEncodedImageCallback sink;
FrameEncodeTimer encode_timer(&sink); FrameEncodeMetadataWriter encode_timer(&sink);
encode_timer.OnEncoderInit(VideoCodec(), false); encode_timer.OnEncoderInit(VideoCodec(), false);
// Any non-zero bitrate needed to be set before the first frame. // Any non-zero bitrate needed to be set before the first frame.
VideoBitrateAllocation bitrate_allocation; VideoBitrateAllocation bitrate_allocation;
@ -273,17 +288,27 @@ TEST(FrameEncodeTimerTest, NotifiesAboutDroppedFrames) {
encode_timer.OnSetRates(bitrate_allocation, 30); encode_timer.OnSetRates(bitrate_allocation, 30);
EncodedImage image; EncodedImage image;
VideoFrame frame = VideoFrame::Builder()
.set_timestamp_rtp(kTimestampMs1 * 90)
.set_timestamp_ms(kTimestampMs1)
.set_video_frame_buffer(kFrameBuffer)
.build();
image.capture_time_ms_ = kTimestampMs1; image.capture_time_ms_ = kTimestampMs1;
image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90)); image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
encode_timer.OnEncodeStarted(image.Timestamp(), image.capture_time_ms_); frame.set_timestamp(image.capture_time_ms_ * 90);
frame.set_timestamp_us(image.capture_time_ms_ * 1000);
encode_timer.OnEncodeStarted(frame);
EXPECT_EQ(0u, sink.GetNumFramesDropped()); EXPECT_EQ(0u, sink.GetNumFramesDropped());
encode_timer.FillTimingInfo(0, &image, kTimestampMs1); encode_timer.FillTimingInfo(0, &image);
image.capture_time_ms_ = kTimestampMs2; image.capture_time_ms_ = kTimestampMs2;
image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90)); image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
image.timing_ = EncodedImage::Timing(); image.timing_ = EncodedImage::Timing();
encode_timer.OnEncodeStarted(image.Timestamp(), image.capture_time_ms_); frame.set_timestamp(image.capture_time_ms_ * 90);
frame.set_timestamp_us(image.capture_time_ms_ * 1000);
encode_timer.OnEncodeStarted(frame);
// No OnEncodedImageCall for timestamp2. Yet, at this moment it's not known // No OnEncodedImageCall for timestamp2. Yet, at this moment it's not known
// that frame with timestamp2 was dropped. // that frame with timestamp2 was dropped.
EXPECT_EQ(0u, sink.GetNumFramesDropped()); EXPECT_EQ(0u, sink.GetNumFramesDropped());
@ -291,15 +316,19 @@ TEST(FrameEncodeTimerTest, NotifiesAboutDroppedFrames) {
image.capture_time_ms_ = kTimestampMs3; image.capture_time_ms_ = kTimestampMs3;
image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90)); image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
image.timing_ = EncodedImage::Timing(); image.timing_ = EncodedImage::Timing();
encode_timer.OnEncodeStarted(image.Timestamp(), image.capture_time_ms_); frame.set_timestamp(image.capture_time_ms_ * 90);
encode_timer.FillTimingInfo(0, &image, kTimestampMs3); frame.set_timestamp_us(image.capture_time_ms_ * 1000);
encode_timer.OnEncodeStarted(frame);
encode_timer.FillTimingInfo(0, &image);
EXPECT_EQ(1u, sink.GetNumFramesDropped()); EXPECT_EQ(1u, sink.GetNumFramesDropped());
image.capture_time_ms_ = kTimestampMs4; image.capture_time_ms_ = kTimestampMs4;
image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90)); image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
image.timing_ = EncodedImage::Timing(); image.timing_ = EncodedImage::Timing();
encode_timer.OnEncodeStarted(image.Timestamp(), image.capture_time_ms_); frame.set_timestamp(image.capture_time_ms_ * 90);
encode_timer.FillTimingInfo(0, &image, kTimestampMs4); frame.set_timestamp_us(image.capture_time_ms_ * 1000);
encode_timer.OnEncodeStarted(frame);
encode_timer.FillTimingInfo(0, &image);
EXPECT_EQ(1u, sink.GetNumFramesDropped()); EXPECT_EQ(1u, sink.GetNumFramesDropped());
} }
@@ -308,7 +337,7 @@ TEST(FrameEncodeTimerTest, RestoresCaptureTimestamps) {
   const int64_t kTimestampMs = 123456;
   FakeEncodedImageCallback sink;
-  FrameEncodeTimer encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(&sink);
   encode_timer.OnEncoderInit(VideoCodec(), false);
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
@@ -317,11 +346,93 @@ TEST(FrameEncodeTimerTest, RestoresCaptureTimestamps) {
   image.capture_time_ms_ = kTimestampMs;  // Correct timestamp.
   image.SetTimestamp(static_cast<uint32_t>(image.capture_time_ms_ * 90));
-  encode_timer.OnEncodeStarted(image.Timestamp(), image.capture_time_ms_);
+  VideoFrame frame = VideoFrame::Builder()
+                         .set_timestamp_ms(image.capture_time_ms_)
+                         .set_timestamp_rtp(image.capture_time_ms_ * 90)
+                         .set_video_frame_buffer(kFrameBuffer)
+                         .build();
+  encode_timer.OnEncodeStarted(frame);
   image.capture_time_ms_ = 0;  // Incorrect timestamp.
-  encode_timer.FillTimingInfo(0, &image, kTimestampMs);
+  encode_timer.FillTimingInfo(0, &image);
   EXPECT_EQ(kTimestampMs, image.capture_time_ms_);
 }
+TEST(FrameEncodeTimerTest, CopiesRotation) {
+  EncodedImage image;
+  const int64_t kTimestampMs = 123456;
+  FakeEncodedImageCallback sink;
+  FrameEncodeMetadataWriter encode_timer(&sink);
+  encode_timer.OnEncoderInit(VideoCodec(), false);
+  // Any non-zero bitrate needed to be set before the first frame.
+  VideoBitrateAllocation bitrate_allocation;
+  bitrate_allocation.SetBitrate(0, 0, 500000);
+  encode_timer.OnSetRates(bitrate_allocation, 30);
+
+  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
+  VideoFrame frame = VideoFrame::Builder()
+                         .set_timestamp_ms(kTimestampMs)
+                         .set_timestamp_rtp(kTimestampMs * 90)
+                         .set_rotation(kVideoRotation_180)
+                         .set_video_frame_buffer(kFrameBuffer)
+                         .build();
+  encode_timer.OnEncodeStarted(frame);
+  encode_timer.FillTimingInfo(0, &image);
+
+  EXPECT_EQ(kVideoRotation_180, image.rotation_);
+}
+
+TEST(FrameEncodeTimerTest, SetsContentType) {
+  EncodedImage image;
+  const int64_t kTimestampMs = 123456;
+  FakeEncodedImageCallback sink;
+  FrameEncodeMetadataWriter encode_timer(&sink);
+  VideoCodec codec;
+  codec.mode = VideoCodecMode::kScreensharing;
+  encode_timer.OnEncoderInit(codec, false);
+  // Any non-zero bitrate needed to be set before the first frame.
+  VideoBitrateAllocation bitrate_allocation;
+  bitrate_allocation.SetBitrate(0, 0, 500000);
+  encode_timer.OnSetRates(bitrate_allocation, 30);
+
+  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
+  VideoFrame frame = VideoFrame::Builder()
+                         .set_timestamp_ms(kTimestampMs)
+                         .set_timestamp_rtp(kTimestampMs * 90)
+                         .set_rotation(kVideoRotation_180)
+                         .set_video_frame_buffer(kFrameBuffer)
+                         .build();
+  encode_timer.OnEncodeStarted(frame);
+  encode_timer.FillTimingInfo(0, &image);
+
+  EXPECT_EQ(VideoContentType::SCREENSHARE, image.content_type_);
+}
+
+TEST(FrameEncodeTimerTest, CopiesColorSpace) {
+  EncodedImage image;
+  const int64_t kTimestampMs = 123456;
+  FakeEncodedImageCallback sink;
+  FrameEncodeMetadataWriter encode_timer(&sink);
+  encode_timer.OnEncoderInit(VideoCodec(), false);
+  // Any non-zero bitrate needed to be set before the first frame.
+  VideoBitrateAllocation bitrate_allocation;
+  bitrate_allocation.SetBitrate(0, 0, 500000);
+  encode_timer.OnSetRates(bitrate_allocation, 30);
+
+  webrtc::ColorSpace color_space =
+      CreateTestColorSpace(/*with_hdr_metadata=*/true);
+  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
+  VideoFrame frame = VideoFrame::Builder()
+                         .set_timestamp_ms(kTimestampMs)
+                         .set_timestamp_rtp(kTimestampMs * 90)
+                         .set_color_space(color_space)
+                         .set_video_frame_buffer(kFrameBuffer)
+                         .build();
+  encode_timer.OnEncodeStarted(frame);
+  encode_timer.FillTimingInfo(0, &image);
+
+  ASSERT_NE(image.ColorSpace(), nullptr);
+  EXPECT_EQ(color_space, *image.ColorSpace());
+}
 }  // namespace test
 }  // namespace webrtc
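All of the writer tests above drive the same four-step contract: init, rates, frame hand-off, write-back. Condensed into one sequence, that contract looks like the sketch below (it reuses the kFrameBuffer constant and the CreateTestColorSpace() helper the tests themselves depend on; the step comments are interpretation, not part of the tested API):

  // 1. Codec settings are latched first; e.g. kScreensharing mode later
  //    becomes VideoContentType::SCREENSHARE on the encoded image.
  FakeEncodedImageCallback sink;
  FrameEncodeMetadataWriter writer(&sink);
  writer.OnEncoderInit(VideoCodec(), /*internal_source=*/false);
  // 2. A non-zero rate must be set before the first frame is tracked.
  VideoBitrateAllocation allocation;
  allocation.SetBitrate(0, 0, 500000);
  writer.OnSetRates(allocation, 30);
  // 3. The whole VideoFrame is handed over before encoding, so rotation,
  //    color space and timestamps are captured once, in one place.
  const int64_t kTimestampMs = 123456;
  VideoFrame frame = VideoFrame::Builder()
                         .set_timestamp_ms(kTimestampMs)
                         .set_timestamp_rtp(kTimestampMs * 90)
                         .set_rotation(kVideoRotation_180)
                         .set_video_frame_buffer(kFrameBuffer)
                         .build();
  writer.OnEncodeStarted(frame);
  // 4. After encoding, the image is matched by its RTP timestamp and the
  //    stored metadata is copied onto it.
  EncodedImage image;
  image.SetTimestamp(static_cast<uint32_t>(kTimestampMs * 90));
  writer.FillTimingInfo(0, &image);  // 0 = spatial/simulcast index.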
View file
@@ -14,9 +14,12 @@
 #include "test/gmock.h"
 #include "test/gtest.h"
+#include "absl/memory/memory.h"
 #include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/video/function_video_decoder_factory.h"
 #include "api/video_codecs/video_decoder.h"
 #include "call/rtp_stream_receiver_controller.h"
+#include "common_video/test/utilities.h"
 #include "media/base/fake_video_renderer.h"
 #include "modules/pacing/packet_router.h"
 #include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
@@ -24,6 +27,7 @@
 #include "rtc_base/critical_section.h"
 #include "rtc_base/event.h"
 #include "system_wrappers/include/clock.h"
+#include "test/fake_decoder.h"
 #include "test/field_trial.h"
 #include "test/video_decoder_proxy_factory.h"
 #include "video/call_stats.h"
@@ -62,6 +66,12 @@ class MockVideoDecoder : public VideoDecoder {
 class FrameObjectFake : public video_coding::EncodedFrame {
  public:
+  void SetPayloadType(uint8_t payload_type) { _payloadType = payload_type; }
+
+  void SetRotation(const VideoRotation& rotation) { rotation_ = rotation; }
+
+  void SetNtpTime(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; }
+
   int64_t ReceivedTime() const override { return 0; }
   int64_t RenderTime() const override { return _renderTimeMs; }
@@ -100,10 +110,11 @@ class VideoReceiveStreamTest : public ::testing::Test {
     Clock* clock = Clock::GetRealTimeClock();
     timing_ = new VCMTiming(clock);
-    video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream(
-        task_queue_factory_.get(), &rtp_stream_receiver_controller_,
-        kDefaultNumCpuCores, &packet_router_, config_.Copy(),
-        process_thread_.get(), &call_stats_, clock, timing_));
+    video_receive_stream_ =
+        absl::make_unique<webrtc::internal::VideoReceiveStream>(
+            task_queue_factory_.get(), &rtp_stream_receiver_controller_,
+            kDefaultNumCpuCores, &packet_router_, config_.Copy(),
+            process_thread_.get(), &call_stats_, clock, timing_);
   }
  protected:
@@ -212,4 +223,90 @@ TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMinValue) {
   EXPECT_EQ(default_min_playout_latency, timing_->min_playout_delay());
 }
+class VideoReceiveStreamTestWithFakeDecoder : public ::testing::Test {
+ public:
+  VideoReceiveStreamTestWithFakeDecoder()
+      : fake_decoder_factory_(
+            []() { return absl::make_unique<test::FakeDecoder>(); }),
+        process_thread_(ProcessThread::Create("TestThread")),
+        task_queue_factory_(CreateDefaultTaskQueueFactory()),
+        config_(&mock_transport_),
+        call_stats_(Clock::GetRealTimeClock(), process_thread_.get()) {}
+
+  void SetUp() {
+    constexpr int kDefaultNumCpuCores = 2;
+    config_.rtp.remote_ssrc = 1111;
+    config_.rtp.local_ssrc = 2222;
+    config_.renderer = &fake_renderer_;
+    VideoReceiveStream::Decoder fake_decoder;
+    fake_decoder.payload_type = 99;
+    fake_decoder.video_format = SdpVideoFormat("VP8");
+    fake_decoder.decoder_factory = &fake_decoder_factory_;
+    config_.decoders.push_back(fake_decoder);
+    Clock* clock = Clock::GetRealTimeClock();
+    timing_ = new VCMTiming(clock);
+
+    video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream(
+        task_queue_factory_.get(), &rtp_stream_receiver_controller_,
+        kDefaultNumCpuCores, &packet_router_, config_.Copy(),
+        process_thread_.get(), &call_stats_, clock, timing_));
+  }
+
+ protected:
+  test::FunctionVideoDecoderFactory fake_decoder_factory_;
+  std::unique_ptr<ProcessThread> process_thread_;
+  const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+  VideoReceiveStream::Config config_;
+  CallStats call_stats_;
+  cricket::FakeVideoRenderer fake_renderer_;
+  MockTransport mock_transport_;
+  PacketRouter packet_router_;
+  RtpStreamReceiverController rtp_stream_receiver_controller_;
+  std::unique_ptr<webrtc::internal::VideoReceiveStream> video_receive_stream_;
+  VCMTiming* timing_;
+};
+
+TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesNtpTime) {
+  const int64_t kNtpTimestamp = 12345;
+  auto test_frame = absl::make_unique<FrameObjectFake>();
+  test_frame->SetPayloadType(99);
+  test_frame->id.picture_id = 0;
+  test_frame->SetNtpTime(kNtpTimestamp);
+
+  video_receive_stream_->Start();
+  video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+  EXPECT_EQ(kNtpTimestamp, fake_renderer_.ntp_time_ms());
+}
+
+TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesRotation) {
+  const webrtc::VideoRotation kRotation = webrtc::kVideoRotation_180;
+  auto test_frame = absl::make_unique<FrameObjectFake>();
+  test_frame->SetPayloadType(99);
+  test_frame->id.picture_id = 0;
+  test_frame->SetRotation(kRotation);
+
+  video_receive_stream_->Start();
+  video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+  EXPECT_EQ(kRotation, fake_renderer_.rotation());
+}
+
+TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesColorSpace) {
+  auto test_frame = absl::make_unique<FrameObjectFake>();
+  test_frame->SetPayloadType(99);
+  test_frame->id.picture_id = 0;
+  webrtc::ColorSpace color_space =
+      CreateTestColorSpace(/*with_hdr_metadata=*/true);
+  test_frame->SetColorSpace(color_space);
+
+  video_receive_stream_->Start();
+  video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+  ASSERT_TRUE(fake_renderer_.color_space().has_value());
+  EXPECT_EQ(color_space, *fake_renderer_.color_space());
+}
 }  // namespace webrtc
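Together, the three tests above pin down the receive-side half of this change: metadata now crosses from the encoded frame to the decoded VideoFrame at a single point before the frame reaches the renderer. Conceptually, that copy step amounts to the following sketch (the free-function shape and name are invented for illustration; the real copy lives inside the decoding pipeline, not in a helper like this):

  // Illustrative only: the single receive-side copy point exercised above.
  void CopyFrameMetadata(const video_coding::EncodedFrame& encoded,
                         webrtc::VideoFrame* decoded) {
    decoded->set_rotation(encoded.rotation_);
    decoded->set_ntp_time_ms(encoded.ntp_time_ms_);
    if (encoded.ColorSpace() != nullptr)
      decoded->set_color_space(*encoded.ColorSpace());
  }

Because decoding is asynchronous, the tests block on WaitForRenderedFrame() before inspecting what the renderer captured, rather than asserting immediately after OnCompleteFrame().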
View file
@@ -490,7 +490,7 @@ VideoStreamEncoder::VideoStreamEncoder(
       input_framerate_(kFrameRateAvergingWindowSizeMs, 1000),
       pending_frame_drops_(0),
       next_frame_types_(1, VideoFrameType::kVideoFrameDelta),
-      frame_encoder_timer_(this),
+      frame_encode_metadata_writer_(this),
       experiment_groups_(GetExperimentGroups()),
       next_frame_id_(0),
       encoder_queue_(task_queue_factory->CreateTaskQueue(
@@ -745,10 +745,11 @@ void VideoStreamEncoder::ReconfigureEncoder() {
   } else {
     encoder_initialized_ = true;
     encoder_->RegisterEncodeCompleteCallback(this);
-    frame_encoder_timer_.OnEncoderInit(send_codec_, HasInternalSource());
+    frame_encode_metadata_writer_.OnEncoderInit(send_codec_,
+                                                HasInternalSource());
   }
-  frame_encoder_timer_.Reset();
+  frame_encode_metadata_writer_.Reset();
   last_encode_info_ms_ = absl::nullopt;
 }
@@ -1074,7 +1075,7 @@ void VideoStreamEncoder::SetEncoderRates(
   if (settings_changes) {
     encoder_->SetRates(rate_settings);
-    frame_encoder_timer_.OnSetRates(
+    frame_encode_metadata_writer_.OnSetRates(
         rate_settings.bitrate,
         static_cast<uint32_t>(rate_settings.framerate_fps + 0.5));
   }
@@ -1334,8 +1335,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
   TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp",
                out_frame.timestamp());
-  frame_encoder_timer_.OnEncodeStarted(out_frame.timestamp(),
-                                       out_frame.render_time_ms());
+  frame_encode_metadata_writer_.OnEncodeStarted(out_frame);
   const int32_t encode_status = encoder_->Encode(out_frame, &next_frame_types_);
@@ -1405,9 +1405,7 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
   const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0);
   EncodedImage image_copy(encoded_image);
-  frame_encoder_timer_.FillTimingInfo(
-      spatial_idx, &image_copy,
-      rtc::TimeMicros() / rtc::kNumMicrosecsPerMillisec);
+  frame_encode_metadata_writer_.FillTimingInfo(spatial_idx, &image_copy);
   // Piggyback ALR experiment group id and simulcast id into the content type.
   const uint8_t experiment_id =
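On the send side, the two rewritten call sites above are now the whole story: OnEncodeStarted() snapshots the outgoing frame and FillTimingInfo() writes that snapshot back onto the encoded image, so individual encoders no longer copy rotation, color space, or capture timestamps themselves. The bookkeeping this implies looks roughly like the following (an illustrative sketch, not the actual FrameEncodeMetadataWriter internals, which also track per-layer timing state):

  // Illustrative: metadata snapshots keyed by RTP timestamp.
  struct FrameInfo {
    int64_t capture_time_ms;
    VideoRotation rotation;
    absl::optional<ColorSpace> color_space;
  };
  std::map<uint32_t, FrameInfo> in_flight_;

  void OnEncodeStarted(const VideoFrame& frame) {
    in_flight_[frame.timestamp()] = {frame.render_time_ms(),
                                     frame.rotation(), frame.color_space()};
  }

  void FillTimingInfo(size_t spatial_idx, EncodedImage* image) {
    (void)spatial_idx;  // The real writer keeps per-layer timing state.
    auto it = in_flight_.find(image->Timestamp());
    if (it == in_flight_.end())
      return;  // E.g. an image from an internal-source encoder.
    image->capture_time_ms_ = it->second.capture_time_ms;
    image->rotation_ = it->second.rotation;
    image->SetColorSpace(it->second.color_space);
    // Timing flags are stamped here too, from the writer's own clock; that
    // is why the wall-clock argument could be dropped at the call site above.
  }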
View file
@@ -37,7 +37,7 @@
 #include "rtc_base/synchronization/sequence_checker.h"
 #include "rtc_base/task_queue.h"
 #include "video/encoder_bitrate_adjuster.h"
-#include "video/frame_encode_timer.h"
+#include "video/frame_encode_metadata_writer.h"
 #include "video/overuse_frame_detector.h"
 namespace webrtc {
@@ -345,7 +345,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
   // turn this into a simple bool |pending_keyframe_request_|.
   std::vector<VideoFrameType> next_frame_types_ RTC_GUARDED_BY(&encoder_queue_);
-  FrameEncodeTimer frame_encoder_timer_;
+  FrameEncodeMetadataWriter frame_encode_metadata_writer_;
   // Experiment groups parsed from field trials for realtime video ([0]) and
   // screenshare ([1]). 0 means no group specified. Positive values are