webrtc/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
Rasmus Brandt 98a867ccd2 Rename VideoCodecTest to VideoCodecUnitTest.
The VideoCodecTest class is a fixture base class for the
libvpx-VP8, libvpx-VP9, and OpenH264 unit tests. It is unrelated
to the VideoProcessor tests, which we colloquially refer to as
the "codec test".

This rename is thus to reduce this confusion. It should have no
functional impact.

Bug: webrtc:8448
Change-Id: If73443bda5df0f29a71ce6ce069ac128795ff0ad
Reviewed-on: https://webrtc-review.googlesource.com/47160
Reviewed-by: Sergey Silkin <ssilkin@webrtc.org>
Commit-Queue: Rasmus Brandt <brandtr@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21867}
2018-02-02 10:27:33 +00:00

224 lines
9.4 KiB
C++

/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/mock_video_decoder_factory.h"
#include "api/test/mock_video_encoder_factory.h"
#include "api/video_codecs/sdp_video_format.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/mediaconstants.h"
#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
#include "modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h"
#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/ptr_util.h"
using testing::_;
using testing::Return;
namespace webrtc {
constexpr const char* kMultiplexAssociatedCodecName = cricket::kVp9CodecName;
const VideoCodecType kMultiplexAssociatedCodecType =
PayloadStringToCodecType(kMultiplexAssociatedCodecName);
// Fixture for the multiplex encoder/decoder adapter tests. Mock factories
// hand out real VP9 encoders/decoders, so frames are genuinely encoded and
// decoded rather than stubbed.
class TestMultiplexAdapter : public VideoCodecUnitTest {
public:
TestMultiplexAdapter()
: decoder_factory_(new webrtc::MockVideoDecoderFactory),
encoder_factory_(new webrtc::MockVideoEncoderFactory) {}
protected:
// Decoder under test: a multiplex adapter associated with the VP9 codec name.
std::unique_ptr<VideoDecoder> CreateDecoder() override {
return rtc::MakeUnique<MultiplexDecoderAdapter>(
decoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName));
}
// Encoder under test, mirroring CreateDecoder().
std::unique_ptr<VideoEncoder> CreateEncoder() override {
return rtc::MakeUnique<MultiplexEncoderAdapter>(
encoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName));
}
VideoCodec codec_settings() override {
VideoCodec codec_settings;
// Set the associated (VP9) codec type first so that the VP9() accessor
// used below refers to the correct per-codec settings.
codec_settings.codecType = kMultiplexAssociatedCodecType;
codec_settings.VP9()->numberOfTemporalLayers = 1;
codec_settings.VP9()->numberOfSpatialLayers = 1;
// The externally advertised type is multiplex; this deliberately
// overwrites the VP9 type assigned above.
codec_settings.codecType = webrtc::kVideoCodecMultiplex;
return codec_settings;
}
// Builds an I420A (YUV + alpha) frame from |input_frame_|. The Y plane is
// reused as the alpha plane, so alpha content equals luma content.
std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
input_frame_->video_frame_buffer()->ToI420();
rtc::scoped_refptr<I420ABufferInterface> yuva_buffer = WrapI420ABuffer(
yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
return rtc::WrapUnique<VideoFrame>(
new VideoFrame(yuva_buffer, 123 /* timestamp_us */,
345 /* render_time_ms */, kVideoRotation_0));
}
// Wraps the alpha plane of |yuva_frame| as the Y plane of a new I420 frame
// (U/V planes reused), so the alpha channel can be compared via I420PSNR().
// The wrapped buffer keeps a reference to the source buffer alive.
std::unique_ptr<VideoFrame> ExtractAXXFrame(const VideoFrame& yuva_frame) {
const I420ABufferInterface* yuva_buffer =
yuva_frame.video_frame_buffer()->GetI420A();
rtc::scoped_refptr<I420BufferInterface> axx_buffer = WrapI420Buffer(
yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(),
yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
yuva_buffer->DataV(), yuva_buffer->StrideV(),
rtc::KeepRefUntilDone(yuva_frame.video_frame_buffer()));
return rtc::WrapUnique<VideoFrame>(
new VideoFrame(axx_buffer, 123 /* timestamp_us */,
345 /* render_time_ms */, kVideoRotation_0));
}
private:
void SetUp() override {
EXPECT_CALL(*decoder_factory_, Die());
// The decoders/encoders will be owned by the caller of
// CreateVideoDecoder()/CreateVideoEncoder().
// Two of each are prepared — presumably one per multiplexed image
// component (YUV and alpha); TODO(review): confirm against the adapters.
VideoDecoder* decoder1 = VP9Decoder::Create().release();
VideoDecoder* decoder2 = VP9Decoder::Create().release();
EXPECT_CALL(*decoder_factory_, CreateVideoDecoderProxy(_))
.WillOnce(Return(decoder1))
.WillOnce(Return(decoder2));
EXPECT_CALL(*encoder_factory_, Die());
VideoEncoder* encoder1 = VP9Encoder::Create().release();
VideoEncoder* encoder2 = VP9Encoder::Create().release();
EXPECT_CALL(*encoder_factory_, CreateVideoEncoderProxy(_))
.WillOnce(Return(encoder1))
.WillOnce(Return(encoder2));
// Base-class setup runs last, after the factory expectations are in place.
VideoCodecUnitTest::SetUp();
}
const std::unique_ptr<webrtc::MockVideoDecoderFactory> decoder_factory_;
const std::unique_ptr<webrtc::MockVideoEncoderFactory> encoder_factory_;
};
// TODO(emircan): Currently VideoCodecUnitTest tests do a complete setup
// step that goes beyond constructing |decoder_|. Simplify these tests to do
// less.
TEST_F(TestMultiplexAdapter, ConstructAndDestructDecoder) {
// |decoder_| is created and initialized by the fixture; releasing it
// immediately must succeed without any frame having been decoded.
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
}
TEST_F(TestMultiplexAdapter, ConstructAndDestructEncoder) {
// |encoder_| is created and initialized by the fixture; releasing it
// immediately must succeed without any frame having been encoded.
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
}
// Round-trips a plain I420 frame (no alpha) through the multiplex
// encoder/decoder pair and checks the reconstruction quality.
TEST_F(TestMultiplexAdapter, EncodeDecodeI420Frame) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
EncodedImage encoded;
CodecSpecificInfo specific;
ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific));
EXPECT_EQ(kVideoCodecMultiplex, specific.codecType);
// Feed the bitstream back through the decoder.
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encoded, false, nullptr, &specific));
std::unique_ptr<VideoFrame> decoded;
rtc::Optional<uint8_t> qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded, &qp));
ASSERT_TRUE(decoded);
// The decoded frame should closely match the input.
EXPECT_GT(I420PSNR(input_frame_.get(), decoded.get()), 36);
}
// Round-trips an I420A (YUV + alpha) frame and checks both the color and the
// alpha channel reconstruction quality.
TEST_F(TestMultiplexAdapter, EncodeDecodeI420AFrame) {
std::unique_ptr<VideoFrame> input = CreateI420AInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input, nullptr, nullptr));
EncodedImage encoded;
CodecSpecificInfo specific;
ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific));
EXPECT_EQ(kVideoCodecMultiplex, specific.codecType);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encoded, false, nullptr, nullptr));
std::unique_ptr<VideoFrame> decoded;
rtc::Optional<uint8_t> qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded, &qp));
ASSERT_TRUE(decoded);
EXPECT_GT(I420PSNR(input.get(), decoded.get()), 36);
// Find PSNR for AXX bits.
std::unique_ptr<VideoFrame> alpha_in = ExtractAXXFrame(*input);
std::unique_ptr<VideoFrame> alpha_out = ExtractAXXFrame(*decoded);
EXPECT_GT(I420PSNR(alpha_in.get(), alpha_out.get()), 47);
}
// Verifies the packed bitstream layout for an alpha-less input: exactly one
// component, index 0, with a key frame and a non-null buffer.
TEST_F(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame_, nullptr, nullptr));
EncodedImage encoded;
CodecSpecificInfo specific;
ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific));
EXPECT_EQ(kVideoCodecMultiplex, specific.codecType);
EXPECT_EQ(0, specific.codecSpecific.generic.simulcast_idx);
const MultiplexImage& unpacked = MultiplexEncodedImagePacker::Unpack(encoded);
EXPECT_EQ(0, unpacked.image_index);
EXPECT_EQ(1, unpacked.component_count);
const MultiplexImageComponent& yuv_component = unpacked.image_components[0];
EXPECT_EQ(0, yuv_component.component_index);
EXPECT_NE(nullptr, yuv_component.encoded_image._buffer);
EXPECT_EQ(kVideoFrameKey, yuv_component.encoded_image._frameType);
}
// Verifies the packed bitstream layout for an I420A input: two components
// (YUV and alpha), each a key frame with a non-null buffer and an index
// matching its position.
TEST_F(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) {
std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*yuva_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx);
const MultiplexImage& unpacked_frame =
MultiplexEncodedImagePacker::Unpack(encoded_frame);
EXPECT_EQ(0, unpacked_frame.image_index);
EXPECT_EQ(2, unpacked_frame.component_count);
// Cast avoids a signed/unsigned mismatch between the size_t container size
// and the narrower component count; expected value goes first per the gtest
// convention used throughout this file.
EXPECT_EQ(static_cast<size_t>(unpacked_frame.component_count),
unpacked_frame.image_components.size());
for (int i = 0; i < unpacked_frame.component_count; ++i) {
const MultiplexImageComponent& component =
unpacked_frame.image_components[i];
EXPECT_EQ(i, component.component_index);
EXPECT_NE(nullptr, component.encoded_image._buffer);
EXPECT_EQ(kVideoFrameKey, component.encoded_image._frameType);
}
}
// Encodes several frames in a row and checks that the multiplex image index
// counts up, with only the first frame being a key frame.
TEST_F(TestMultiplexAdapter, ImageIndexIncreases) {
std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
const size_t kNumFramesToEncode = 3;
for (size_t frame_num = 0; frame_num < kNumFramesToEncode; ++frame_num) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*yuva_frame, nullptr, nullptr));
EncodedImage encoded;
CodecSpecificInfo specific;
ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific));
const MultiplexImage& unpacked =
MultiplexEncodedImagePacker::Unpack(encoded);
EXPECT_EQ(frame_num, unpacked.image_index);
EXPECT_EQ(frame_num == 0 ? kVideoFrameKey : kVideoFrameDelta,
encoded._frameType);
}
}
} // namespace webrtc