Revert "Add Profile 2 configuration to VP9 Encoder and Decoder"

This reverts commit fc9c4e88b5.

Reason for revert: speculative revert. I suspect this change breaks the internal importing tests; I will reland it if it turns out not to be the culprit.

Original change's description:
> Add Profile 2 configuration to VP9 Encoder and Decoder
> 
> Bug: webrtc:9376
> Change-Id: I4f627fb2b6c146a90cfcaa815da459b09dc00003
> Reviewed-on: https://webrtc-review.googlesource.com/81980
> Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
> Reviewed-by: Niklas Enbom <niklas.enbom@webrtc.org>
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Reviewed-by: Jerome Jiang <jianj@google.com>
> Cr-Commit-Position: refs/heads/master@{#23917}

TBR=niklase@google.com,jianj@google.com,sprang@webrtc.org,marpan@google.com,niklas.enbom@webrtc.org,emircan@webrtc.org

Change-Id: I6a8c851827707eb861776591087e595de7206ae4
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:9376
Reviewed-on: https://webrtc-review.googlesource.com/88100
Reviewed-by: Qingsi Wang <qingsi@webrtc.org>
Commit-Queue: Qingsi Wang <qingsi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23920}
Author: Qingsi Wang <qingsi@webrtc.org>
Date: 2018-07-11 06:04:44 +00:00 (committed by Commit Bot)
Parent: 1a2cc0acba
Commit: 2d82adea03
13 changed files with 33 additions and 207 deletions

@ -57,7 +57,6 @@ class VideoQualityTestFixtureInterface {
bool automatic_scaling;
std::string clip_name; // "Generator" to generate frames instead.
size_t capture_device_index;
SdpVideoFormat::Parameters sdp_params;
} video[2];
struct Audio {
bool enabled;

@ -44,7 +44,7 @@ std::unique_ptr<VideoEncoder> InternalEncoderFactory::CreateVideoEncoder(
if (cricket::CodecNamesEq(format.name, cricket::kVp8CodecName))
return VP8Encoder::Create();
if (cricket::CodecNamesEq(format.name, cricket::kVp9CodecName))
return VP9Encoder::Create(cricket::VideoCodec(format));
return VP9Encoder::Create();
if (cricket::CodecNamesEq(format.name, cricket::kH264CodecName))
return H264Encoder::Create(cricket::VideoCodec(format));
RTC_LOG(LS_ERROR) << "Trying to created encoder of unsupported format "

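For context, the pre-revert factory path in the hunk above forwarded the SDP format parameters into the VP9 encoder, which is what allowed Profile 2 to be requested at creation time. A minimal caller-side sketch, using only names that appear elsewhere in this revert (the encoder_factory variable is hypothetical and not part of the change):

  // Sketch only, not part of this change: request VP9 Profile 2 by attaching
  // the profile-id fmtp parameter to the SdpVideoFormat before asking the
  // factory for an encoder. This only has an effect while the reverted
  // VP9Encoder::Create(cricket::VideoCodec(format)) path is in place.
  SdpVideoFormat format(cricket::kVp9CodecName);
  format.parameters[kVP9FmtpProfileId] =
      VP9ProfileToString(VP9Profile::kProfile2);
  std::unique_ptr<VideoEncoder> encoder =
      encoder_factory->CreateVideoEncoder(format);
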
@ -479,7 +479,6 @@ rtc_static_library("webrtc_vp9") {
":webrtc_vp9_helpers",
"..:module_api",
"../..:webrtc_common",
"../../api/video:video_frame_i010",
"../../api/video_codecs:video_codecs_api",
"../../common_video",
"../../media:rtc_media_base",
@ -725,7 +724,6 @@ if (rtc_include_tests) {
"../../media:rtc_h264_profile_id",
"../../media:rtc_internal_video_codecs",
"../../media:rtc_media_base",
"../../media:rtc_vp9_profile",
"../../rtc_base:rtc_base",
"../../test:fileutils",
"../../test:test_common",
@ -734,7 +732,6 @@ if (rtc_include_tests) {
"../rtp_rtcp:rtp_rtcp_format",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
data = video_coding_modules_tests_resources

@ -69,7 +69,8 @@ void VideoCodecUnitTest::SetUp() {
input_frame_generator_ = test::FrameGenerator::CreateSquareGenerator(
codec_settings_.width, codec_settings_.height,
test::FrameGenerator::OutputType::I420, absl::optional<int>());
absl::optional<test::FrameGenerator::OutputType>(),
absl::optional<int>());
encoder_ = CreateEncoder();
decoder_ = CreateDecoder();

@ -105,7 +105,6 @@ class VideoCodecUnitTest : public ::testing::Test {
std::unique_ptr<VideoEncoder> encoder_;
std::unique_ptr<VideoDecoder> decoder_;
std::unique_ptr<test::FrameGenerator> input_frame_generator_;
private:
FakeEncodeCompleteCallback encode_complete_callback_;
@ -125,6 +124,7 @@ class VideoCodecUnitTest : public ::testing::Test {
RTC_GUARDED_BY(decoded_frame_section_);
absl::optional<uint8_t> decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_);
std::unique_ptr<test::FrameGenerator> input_frame_generator_;
uint32_t last_input_frame_timestamp_;
};

@ -10,13 +10,11 @@
#include "api/video/i420_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/vp9_profile.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/codecs/vp9/svc_config.h"
#include "test/video_codec_settings.h"
#include "third_party/libyuv/include/libyuv/convert.h"
namespace webrtc {
@ -462,54 +460,4 @@ TEST_F(TestVp9ImplFrameDropping, PreEncodeFrameDropping) {
max_abs_framerate_error_fps);
}
class TestVp9ImplProfile2 : public TestVp9Impl {
protected:
void SetUp() override {
TestVp9Impl::SetUp();
input_frame_generator_ = test::FrameGenerator::CreateSquareGenerator(
codec_settings_.width, codec_settings_.height,
test::FrameGenerator::OutputType::I010, absl::optional<int>());
}
std::unique_ptr<VideoEncoder> CreateEncoder() override {
cricket::VideoCodec profile2_codec;
profile2_codec.SetParam(kVP9FmtpProfileId,
VP9ProfileToString(VP9Profile::kProfile2));
return VP9Encoder::Create(profile2_codec);
}
std::unique_ptr<VideoDecoder> CreateDecoder() override {
return VP9Decoder::Create();
}
};
// Disabled until https://bugs.chromium.org/p/webm/issues/detail?id=1544 is
// solved.
#if defined(WEBRTC_ANDROID)
#define MAYBE_EncodeDecode DISABLED_EncodeDecode
#else
#define MAYBE_EncodeDecode EncodeDecode
#endif
TEST_F(TestVp9ImplProfile2, MAYBE_EncodeDecode) {
VideoFrame* input_frame = NextInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
// First frame should be a key frame.
encoded_frame._frameType = kVideoFrameKey;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encoded_frame, false, nullptr, 0));
std::unique_ptr<VideoFrame> decoded_frame;
absl::optional<uint8_t> decoded_qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
// TODO(emircan): Add PSNR for different color depths.
EXPECT_GT(I420PSNR(*input_frame->video_frame_buffer()->ToI420(),
*decoded_frame->video_frame_buffer()->ToI420()),
31);
}
} // namespace webrtc

@ -21,7 +21,6 @@
#include "vpx/vpx_encoder.h"
#include "absl/memory/memory.h"
#include "api/video/i010_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@ -101,6 +100,7 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec)
is_flexible_mode_(false) {
memset(&codec_, 0, sizeof(codec_));
memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t));
RTC_DCHECK(VP9Profile::kProfile0 == profile_);
}
VP9EncoderImpl::~VP9EncoderImpl() {
@ -316,37 +316,15 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_._completeFrame = true;
// Creating a wrapper to the image - setting image data to nullptr. Actual
// pointer will be set in encode. Setting align to 1, as it is meaningless
// (actual memory is not allocated).
raw_ = vpx_img_wrap(nullptr, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1,
nullptr);
// Populate encoder configuration with default values.
if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
vpx_img_fmt img_fmt = VPX_IMG_FMT_NONE;
unsigned int bits_for_storage = 8;
switch (profile_) {
case VP9Profile::kProfile0:
img_fmt = VPX_IMG_FMT_I420;
bits_for_storage = 8;
config_->g_bit_depth = VPX_BITS_8;
config_->g_profile = 0;
config_->g_input_bit_depth = 8;
break;
case VP9Profile::kProfile2:
img_fmt = VPX_IMG_FMT_I42016;
bits_for_storage = 16;
config_->g_bit_depth = VPX_BITS_10;
config_->g_profile = 2;
config_->g_input_bit_depth = 10;
break;
}
// Creating a wrapper to the image - setting image data to nullptr. Actual
// pointer will be set in encode. Setting align to 1, as it is meaningless
// (actual memory is not allocated).
raw_ =
vpx_img_wrap(nullptr, img_fmt, codec_.width, codec_.height, 1, nullptr);
raw_->bit_depth = bits_for_storage;
config_->g_w = codec_.width;
config_->g_h = codec_.height;
config_->rc_target_bitrate = inst->startBitrate; // in kbit/s
@ -503,11 +481,7 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
const vpx_codec_err_t rv = vpx_codec_enc_init(
encoder_, vpx_codec_vp9_cx(), config_,
config_->g_bit_depth == VPX_BITS_8 ? 0 : VPX_CODEC_USE_HIGHBITDEPTH);
if (rv != VPX_CODEC_OK) {
RTC_LOG(LS_ERROR) << "Init error: " << vpx_codec_err_to_string(rv);
if (vpx_codec_enc_init(encoder_, vpx_codec_vp9_cx(), config_, 0)) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
@ -632,47 +606,16 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
// doing this.
input_image_ = &input_image;
// Keep reference to buffer until encode completes.
rtc::scoped_refptr<I420BufferInterface> i420_buffer;
rtc::scoped_refptr<I010BufferInterface> i010_buffer;
switch (profile_) {
case VP9Profile::kProfile0: {
i420_buffer = input_image.video_frame_buffer()->ToI420();
// Image in vpx_image_t format.
// Input image is const. VPX's raw image is not defined as const.
raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
break;
}
case VP9Profile::kProfile2: {
// We can inject kI010 frames directly for encode. All other formats
// should be converted to it.
switch (input_image.video_frame_buffer()->type()) {
case VideoFrameBuffer::Type::kI010: {
i010_buffer = input_image.video_frame_buffer()->GetI010();
break;
}
default: {
i010_buffer =
I010Buffer::Copy(*input_image.video_frame_buffer()->ToI420());
}
}
raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(
reinterpret_cast<const uint8_t*>(i010_buffer->DataY()));
raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(
reinterpret_cast<const uint8_t*>(i010_buffer->DataU()));
raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(
reinterpret_cast<const uint8_t*>(i010_buffer->DataV()));
raw_->stride[VPX_PLANE_Y] = i010_buffer->StrideY() * 2;
raw_->stride[VPX_PLANE_U] = i010_buffer->StrideU() * 2;
raw_->stride[VPX_PLANE_V] = i010_buffer->StrideV() * 2;
break;
}
}
rtc::scoped_refptr<I420BufferInterface> i420_buffer =
input_image.video_frame_buffer()->ToI420();
// Image in vpx_image_t format.
// Input image is const. VPX's raw image is not defined as const.
raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
vpx_enc_frame_flags_t flags = 0;
if (force_key_frame_) {
@ -682,13 +625,8 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
RTC_CHECK_GT(codec_.maxFramerate, 0);
uint32_t duration =
90000 / target_framerate_fps_.value_or(codec_.maxFramerate);
const vpx_codec_err_t rv = vpx_codec_encode(encoder_, raw_, timestamp_,
duration, flags, VPX_DL_REALTIME);
if (rv != VPX_CODEC_OK) {
RTC_LOG(LS_ERROR) << "Encoding error: " << vpx_codec_err_to_string(rv)
<< "\n"
<< "Details: " << vpx_codec_error(encoder_) << "\n"
<< vpx_codec_error_detail(encoder_);
if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
VPX_DL_REALTIME)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
timestamp_ += duration;
@ -1149,13 +1087,10 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
// vpx_codec_decode calls or vpx_codec_destroy).
Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer =
static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
// The buffer can be used directly by the VideoFrame (without copy) by
// using a Wrapped*Buffer.
rtc::scoped_refptr<VideoFrameBuffer> img_wrapped_buffer;
switch (img->bit_depth) {
case 8:
img_wrapped_buffer = WrapI420Buffer(
// using a WrappedI420Buffer.
rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
@ -1163,22 +1098,8 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
// WrappedI420Buffer's mechanism for allowing the release of its frame
// buffer is through a callback function. This is where we should
// release |img_buffer|.
rtc::KeepRefUntilDone(img_buffer));
break;
case 10:
img_wrapped_buffer = WrapI010Buffer(
img->d_w, img->d_h,
reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]),
img->stride[VPX_PLANE_Y] / 2,
reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]),
img->stride[VPX_PLANE_U] / 2,
reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
img->stride[VPX_PLANE_V] / 2, rtc::KeepRefUntilDone(img_buffer));
break;
default:
RTC_NOTREACHED();
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
rtc::KeepRefUntilDone(img_buffer)));
VideoFrame decoded_image(img_wrapped_buffer, timestamp,
0 /* render_time_ms */, webrtc::kVideoRotation_0);
decoded_image.set_ntp_time_ms(ntp_time_ms);

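For reference, the encoder-side hunks above remove a profile-dependent libvpx configuration. Condensed into one place, the reverted mapping from the negotiated VP9 profile to image format and bit depth looked roughly like this (a sketch of the removed code, not a replacement for it):

  // Condensed sketch of the reverted configuration (see the removed switch
  // over profile_ above). Profile 0 corresponds to the 8-bit I420 defaults;
  // Profile 2 uses 16-bit storage for 10-bit 4:2:0 input.
  vpx_img_fmt img_fmt = VPX_IMG_FMT_I420;
  unsigned int bits_for_storage = 8;
  if (profile_ == VP9Profile::kProfile2) {
    img_fmt = VPX_IMG_FMT_I42016;
    bits_for_storage = 16;
    config_->g_profile = 2;
    config_->g_bit_depth = VPX_BITS_10;
    config_->g_input_bit_depth = 10;
  }
  raw_ = vpx_img_wrap(nullptr, img_fmt, codec_.width, codec_.height, 1,
                      nullptr);
  raw_->bit_depth = bits_for_storage;
  // High bit depth also requires the VPX_CODEC_USE_HIGHBITDEPTH flag at init:
  vpx_codec_enc_init(encoder_, vpx_codec_vp9_cx(), config_,
                     config_->g_bit_depth == VPX_BITS_8
                         ? 0
                         : VPX_CODEC_USE_HIGHBITDEPTH);
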
@ -64,7 +64,6 @@ rtc_source_set("video_test_common") {
"../:typedefs",
"../api:libjingle_peerconnection_api",
"../api/video:video_frame",
"../api/video:video_frame_i010",
"../api/video:video_frame_i420",
"../api/video_codecs:video_codecs_api",
"../call:video_stream_api",

@ -15,7 +15,6 @@
#include <memory>
#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
@ -69,8 +68,7 @@ class SquareGenerator : public FrameGenerator {
rtc::scoped_refptr<VideoFrameBuffer> buffer = nullptr;
switch (type_) {
case OutputType::I420:
case OutputType::I010: {
case OutputType::I420: {
buffer = CreateI420Buffer(width_, height_);
break;
}
@ -92,10 +90,6 @@ class SquareGenerator : public FrameGenerator {
for (const auto& square : squares_)
square->Draw(buffer);
if (type_ == OutputType::I010) {
buffer = I010Buffer::Copy(*buffer->ToI420());
}
frame_.reset(
new VideoFrame(buffer, webrtc::kVideoRotation_0, 0 /* timestamp_us */));
return frame_.get();

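For reference, the removed I010 output type was produced by drawing the squares into an 8-bit I420 buffer and then converting it, as the deleted lines above show. A usage sketch in the spirit of the reverted tests (the pre-revert CreateSquareGenerator signature is taken from the hunks in this diff; the frame size is arbitrary):

  // Sketch only: request 10-bit (I010) square frames, as the reverted VP9
  // Profile 2 tests did. Internally the generator draws into I420 and then
  // converts via I010Buffer::Copy(*buffer->ToI420()).
  std::unique_ptr<test::FrameGenerator> generator =
      test::FrameGenerator::CreateSquareGenerator(
          640, 480, test::FrameGenerator::OutputType::I010,
          absl::optional<int>());
  VideoFrame* frame = generator->NextFrame();
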
@ -58,7 +58,7 @@ class FrameGenerator {
RTC_NOTREACHED();
}
enum class OutputType { I420, I420A, I010 };
enum class OutputType { I420, I420A };
// Creates a frame generator that produces frames with small squares that
// move randomly towards the lower right corner.

@ -203,7 +203,6 @@ if (rtc_include_tests) {
]
deps = [
":video_quality_test",
"../media:rtc_vp9_profile",
"../modules/pacing:pacing",
"../rtc_base:rtc_base_approved",
"../rtc_base/experiments:alr_experiment",

@ -9,7 +9,6 @@
*/
#include <stdio.h>
#include "media/base/vp9_profile.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/flags.h"
#include "test/field_trial.h"
@ -112,30 +111,6 @@ TEST(FullStackTest, ForemanCifPlr5Vp9) {
fixture->RunWithAnalyzer(foreman_cif);
}
// Disabled until https://bugs.chromium.org/p/webm/issues/detail?id=1544 is
// solved.
#if defined(WEBRTC_ANDROID)
#define MAYBE_GeneratorWithoutPacketLossVp9Profile2 \
DISABLED_GeneratorWithoutPacketLossVp9Profile2
#else
#define MAYBE_GeneratorWithoutPacketLossVp9Profile2 \
GeneratorWithoutPacketLossVp9Profile2
#endif
TEST(FullStackTest, MAYBE_GeneratorWithoutPacketLossVp9Profile2) {
auto fixture = CreateVideoQualityTestFixture();
SdpVideoFormat::Parameters vp92 = {
{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}};
ParamsWithLogging generator;
generator.call.send_side_bwe = true;
generator.video[0] = {
true, 352, 288, 30, 700000, 700000, 700000, false, "VP9",
1, 0, 0, false, false, false, "GeneratorI010", 0, vp92};
generator.analyzer = {"generator_net_delay_0_0_plr_0_VP9Profile2", 0.0, 0.0,
kFullStackTestDurationSecs};
fixture->RunWithAnalyzer(generator);
}
TEST(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;

@ -92,8 +92,9 @@ VideoQualityTest::TestVideoEncoderFactory::CreateVideoEncoder(
} else if (format.name == "multiplex") {
return absl::make_unique<MultiplexEncoderAdapter>(
&internal_encoder_factory_, SdpVideoFormat(cricket::kVp9CodecName));
} else {
return internal_encoder_factory_.CreateVideoEncoder(format);
}
return internal_encoder_factory_.CreateVideoEncoder(format);
}
VideoQualityTest::VideoQualityTest(
@ -390,6 +391,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
RTC_CHECK_GT(num_video_substreams, 0);
CreateVideoSendConfig(&video_send_configs_[video_idx], num_video_substreams,
total_streams_used, send_transport);
int payload_type;
if (params_.video[video_idx].codec == "H264") {
payload_type = kPayloadTypeH264;
@ -433,9 +435,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
video_encoder_configs_[video_idx].video_format.name =
params_.video[video_idx].codec;
video_encoder_configs_[video_idx].video_format.parameters =
params_.video[video_idx].sdp_params;
video_encoder_configs_[video_idx].codec_type =
PayloadStringToCodecType(params_.video[video_idx].codec);
@ -815,12 +814,6 @@ void VideoQualityTest::CreateCapturers() {
static_cast<int>(params_.video[video_idx].height),
test::FrameGenerator::OutputType::I420A, absl::nullopt,
params_.video[video_idx].fps, clock_));
} else if (params_.video[video_idx].clip_name == "GeneratorI010") {
video_capturers_[video_idx].reset(test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video[video_idx].width),
static_cast<int>(params_.video[video_idx].height),
test::FrameGenerator::OutputType::I010, absl::nullopt,
params_.video[video_idx].fps, clock_));
} else if (params_.video[video_idx].clip_name.empty()) {
video_capturers_[video_idx].reset(test::VcmCapturer::Create(
params_.video[video_idx].width, params_.video[video_idx].height,