Add 4:2:0 and 4:2:2 10-bit H.264 decoding.

The 4:2:2 10-bit format is called I210 in the code and is implemented in I210Buffer; the 4:2:0 10-bit format reuses the already existing I010 format and is implemented in I010Buffer.

Bug: webrtc:13826
Change-Id: I6b6ed65b9fbb295386ea20f751bd0badc49ef21b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/256964
Reviewed-by: Niels Möller <nisse@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37252}
Sergio Garcia Murillo 2022-06-17 11:48:14 +02:00 committed by WebRTC LUCI CQ
parent 142104183d
commit 8545ebae28
17 changed files with 963 additions and 227 deletions
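
For orientation, a minimal sketch of how downstream code might consume the new 10-bit buffers (hypothetical sink code, not part of this change):

// Sketch: inspecting a decoded frame's buffer type (hypothetical caller).
void OnDecodedFrame(const webrtc::VideoFrame& frame) {
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      frame.video_frame_buffer();
  if (buffer->type() == webrtc::VideoFrameBuffer::Type::kI210) {
    // 4:2:2 10-bit: samples are uint16_t holding values in [0, 1023].
    const webrtc::I210BufferInterface* i210 = buffer->GetI210();
    uint16_t top_left_luma = i210->DataY()[0];
    (void)top_left_luma;
  } else {
    // Anything else can still be narrowed to 8-bit I420.
    rtc::scoped_refptr<webrtc::I420BufferInterface> i420 = buffer->ToI420();
  }
}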


@@ -90,6 +90,8 @@ rtc_library("video_frame_i010") {
sources = [
"i010_buffer.cc",
"i010_buffer.h",
"i210_buffer.cc",
"i210_buffer.h",
]
deps = [
":video_frame",


@@ -14,6 +14,10 @@ specific_include_rules = {
"+rtc_base/memory/aligned_malloc.h",
],
"i210_buffer\.h": [
"+rtc_base/memory/aligned_malloc.h",
],
"i420_buffer\.h": [
"+rtc_base/memory/aligned_malloc.h",
],

api/video/i210_buffer.cc (new file, 345 lines)

@@ -0,0 +1,345 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/i210_buffer.h"
#include <utility>
#include "api/make_ref_counted.h"
#include "api/video/i420_buffer.h"
#include "api/video/i422_buffer.h"
#include "rtc_base/checks.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
static const int kBufferAlignment = 64;
static const int kBytesPerPixel = 2;
namespace webrtc {
namespace {
int I210DataSize(int height, int stride_y, int stride_u, int stride_v) {
return kBytesPerPixel *
(stride_y * height + stride_u * height + stride_v * height);
}
void webrtcRotatePlane90_16(const uint16_t* src,
int src_stride,
uint16_t* dst,
int dst_stride,
int width,
int height) {
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
int dest_x = height - y - 1;
int dest_y = x;
dst[dest_x + dst_stride * dest_y] = src[x + src_stride * y];
}
}
}
void webrtcRotatePlane180_16(const uint16_t* src,
int src_stride,
uint16_t* dst,
int dst_stride,
int width,
int height) {
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
int dest_x = width - x - 1;
int dest_y = height - y - 1;
dst[dest_x + dst_stride * dest_y] = src[x + src_stride * y];
}
}
}
void webrtcRotatePlane270_16(const uint16_t* src,
int src_stride,
uint16_t* dst,
int dst_stride,
int width,
int height) {
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
int dest_x = y;
int dest_y = width - x - 1;
dst[dest_x + dst_stride * dest_y] = src[x + src_stride * y];
}
}
}
// TODO(sergio.garcia.murillo@gmail.com): Remove as soon as it is available in
// libyuv. Due to the rotate & scale required, this function may not be merged
// into libyuv immediately.
// https://bugs.chromium.org/p/libyuv/issues/detail?id=926
// This method assumes continuous allocation of the y-plane, possibly clobbering
// any padding between pixel rows.
int webrtcI210Rotate(const uint16_t* src_y,
int src_stride_y,
const uint16_t* src_u,
int src_stride_u,
const uint16_t* src_v,
int src_stride_v,
uint16_t* dst_y,
int dst_stride_y,
uint16_t* dst_u,
int dst_stride_u,
uint16_t* dst_v,
int dst_stride_v,
int width,
int height,
enum libyuv::RotationMode mode) {
int halfwidth = (width + 1) >> 1;
int halfheight = (height + 1) >> 1;
if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y ||
!dst_u || !dst_v || dst_stride_y < 0) {
return -1;
}
// Negative height means invert the image.
if (height < 0) {
height = -height;
src_y = src_y + (height - 1) * src_stride_y;
src_u = src_u + (height - 1) * src_stride_u;
src_v = src_v + (height - 1) * src_stride_v;
src_stride_y = -src_stride_y;
src_stride_u = -src_stride_u;
src_stride_v = -src_stride_v;
}
switch (mode) {
case libyuv::kRotate0:
// copy frame
libyuv::CopyPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
libyuv::CopyPlane_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth,
height);
libyuv::CopyPlane_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth,
height);
return 0;
case libyuv::kRotate90:
// We need to rotate and rescale; we use plane Y as temporary storage.
webrtcRotatePlane90_16(src_u, src_stride_u, dst_y, height, halfwidth,
height);
libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_u, halfheight,
halfheight, width, libyuv::kFilterBilinear);
webrtcRotatePlane90_16(src_v, src_stride_v, dst_y, height, halfwidth,
height);
libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_v, halfheight,
halfheight, width, libyuv::kFilterLinear);
webrtcRotatePlane90_16(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
return 0;
case libyuv::kRotate270:
// We need to rotate and rescale; we use plane Y as temporary storage.
webrtcRotatePlane270_16(src_u, src_stride_u, dst_y, height, halfwidth,
height);
libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_u, halfheight,
halfheight, width, libyuv::kFilterBilinear);
webrtcRotatePlane270_16(src_v, src_stride_v, dst_y, height, halfwidth,
height);
libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_v, halfheight,
halfheight, width, libyuv::kFilterLinear);
webrtcRotatePlane270_16(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
return 0;
case libyuv::kRotate180:
webrtcRotatePlane180_16(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
webrtcRotatePlane180_16(src_u, src_stride_u, dst_u, dst_stride_u,
halfwidth, height);
webrtcRotatePlane180_16(src_v, src_stride_v, dst_v, dst_stride_v,
halfwidth, height);
return 0;
default:
break;
}
return -1;
}
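
The rotate-then-scale dance above exists because 4:2:2 chroma planes change shape under 90/270-degree rotation; a worked size example (numbers only, following the code above):

// For a 1280x720 I210 frame rotated 90 degrees:
int width = 1280, height = 720;
int halfwidth = (width + 1) >> 1;                  // chroma planes are 640x720
int rotated_width = height;                        // frame becomes 720x1280
int rotated_halfwidth = (rotated_width + 1) >> 1;  // target chroma: 360x1280
// Rotating the 640x720 chroma plane alone yields 720x640, the wrong shape,
// so the code rotates into the Y plane as scratch and then ScalePlane_16
// resamples 720x640 -> 360x1280.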
} // namespace
I210Buffer::I210Buffer(int width,
int height,
int stride_y,
int stride_u,
int stride_v)
: width_(width),
height_(height),
stride_y_(stride_y),
stride_u_(stride_u),
stride_v_(stride_v),
data_(static_cast<uint16_t*>(
AlignedMalloc(I210DataSize(height, stride_y, stride_u, stride_v),
kBufferAlignment))) {
RTC_DCHECK_GT(width, 0);
RTC_DCHECK_GT(height, 0);
RTC_DCHECK_GE(stride_y, width);
RTC_DCHECK_GE(stride_u, (width + 1) / 2);
RTC_DCHECK_GE(stride_v, (width + 1) / 2);
}
I210Buffer::~I210Buffer() {}
// static
rtc::scoped_refptr<I210Buffer> I210Buffer::Create(int width, int height) {
return rtc::make_ref_counted<I210Buffer>(width, height, width,
(width + 1) / 2, (width + 1) / 2);
}
// static
rtc::scoped_refptr<I210Buffer> I210Buffer::Copy(
const I210BufferInterface& source) {
const int width = source.width();
const int height = source.height();
rtc::scoped_refptr<I210Buffer> buffer = Create(width, height);
RTC_CHECK_EQ(
0, libyuv::I210Copy(
source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
source.DataV(), source.StrideV(), buffer->MutableDataY(),
buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(), width, height));
return buffer;
}
// static
rtc::scoped_refptr<I210Buffer> I210Buffer::Copy(
const I420BufferInterface& source) {
const int width = source.width();
const int height = source.height();
auto i422buffer = I422Buffer::Copy(source);
rtc::scoped_refptr<I210Buffer> buffer = Create(width, height);
RTC_CHECK_EQ(0, libyuv::I422ToI210(i422buffer->DataY(), i422buffer->StrideY(),
i422buffer->DataU(), i422buffer->StrideU(),
i422buffer->DataV(), i422buffer->StrideV(),
buffer->MutableDataY(), buffer->StrideY(),
buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(),
width, height));
return buffer;
}
// static
rtc::scoped_refptr<I210Buffer> I210Buffer::Rotate(
const I210BufferInterface& src,
VideoRotation rotation) {
RTC_CHECK(src.DataY());
RTC_CHECK(src.DataU());
RTC_CHECK(src.DataV());
int rotated_width = src.width();
int rotated_height = src.height();
if (rotation == webrtc::kVideoRotation_90 ||
rotation == webrtc::kVideoRotation_270) {
std::swap(rotated_width, rotated_height);
}
rtc::scoped_refptr<webrtc::I210Buffer> buffer =
I210Buffer::Create(rotated_width, rotated_height);
RTC_CHECK_EQ(0,
webrtcI210Rotate(
src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
src.DataV(), src.StrideV(), buffer->MutableDataY(),
buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(), src.width(),
src.height(), static_cast<libyuv::RotationMode>(rotation)));
return buffer;
}
rtc::scoped_refptr<I420BufferInterface> I210Buffer::ToI420() {
rtc::scoped_refptr<I420Buffer> i420_buffer =
I420Buffer::Create(width(), height());
libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
i420_buffer->MutableDataY(), i420_buffer->StrideY(),
i420_buffer->MutableDataU(), i420_buffer->StrideU(),
i420_buffer->MutableDataV(), i420_buffer->StrideV(),
width(), height());
return i420_buffer;
}
int I210Buffer::width() const {
return width_;
}
int I210Buffer::height() const {
return height_;
}
const uint16_t* I210Buffer::DataY() const {
return data_.get();
}
const uint16_t* I210Buffer::DataU() const {
return data_.get() + stride_y_ * height_;
}
const uint16_t* I210Buffer::DataV() const {
return data_.get() + stride_y_ * height_ + stride_u_ * height_;
}
int I210Buffer::StrideY() const {
return stride_y_;
}
int I210Buffer::StrideU() const {
return stride_u_;
}
int I210Buffer::StrideV() const {
return stride_v_;
}
uint16_t* I210Buffer::MutableDataY() {
return const_cast<uint16_t*>(DataY());
}
uint16_t* I210Buffer::MutableDataU() {
return const_cast<uint16_t*>(DataU());
}
uint16_t* I210Buffer::MutableDataV() {
return const_cast<uint16_t*>(DataV());
}
void I210Buffer::CropAndScaleFrom(const I210BufferInterface& src,
int offset_x,
int offset_y,
int crop_width,
int crop_height) {
RTC_CHECK_LE(crop_width, src.width());
RTC_CHECK_LE(crop_height, src.height());
RTC_CHECK_LE(crop_width + offset_x, src.width());
RTC_CHECK_LE(crop_height + offset_y, src.height());
RTC_CHECK_GE(offset_x, 0);
RTC_CHECK_GE(offset_y, 0);
RTC_CHECK_GE(crop_width, 0);
RTC_CHECK_GE(crop_height, 0);
// Make sure offset is even so that u/v plane becomes aligned.
const int uv_offset_x = offset_x / 2;
const int uv_offset_y = offset_y;
offset_x = uv_offset_x * 2;
const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
const uint16_t* u_plane =
src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
const uint16_t* v_plane =
src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
int res = libyuv::I422Scale_16(
y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
StrideU(), MutableDataV(), StrideV(), width(), height(),
libyuv::kFilterBox);
RTC_DCHECK_EQ(res, 0);
}
void I210Buffer::ScaleFrom(const I210BufferInterface& src) {
CropAndScaleFrom(src, 0, 0, src.width(), src.height());
}
} // namespace webrtc
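
Since I210DataSize allocates one contiguous block, the plane accessors above amount to fixed offsets; a layout sketch for a concrete size (derived from the code above, offsets in uint16_t units):

// I210Buffer::Create(1280, 720): stride_y = 1280, stride_u = stride_v = 640.
// data_[0]                       ... Y plane, 1280 * 720 samples
// data_[1280 * 720]              ... U plane,  640 * 720 samples
// data_[1280 * 720 + 640 * 720]  ... V plane,  640 * 720 samples
// Total: 2 bytes * 720 * (1280 + 640 + 640) = 3,686,400 bytes, 64-byte aligned.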

api/video/i210_buffer.h (new file, 84 lines)

@@ -0,0 +1,84 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_VIDEO_I210_BUFFER_H_
#define API_VIDEO_I210_BUFFER_H_
#include <stdint.h>
#include <memory>
#include "api/scoped_refptr.h"
#include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"
#include "rtc_base/memory/aligned_malloc.h"
namespace webrtc {
// Plain I210 (yuv 422 planar 10 bits) buffer in standard memory.
class I210Buffer : public I210BufferInterface {
public:
// Create a new buffer.
static rtc::scoped_refptr<I210Buffer> Create(int width, int height);
// Create a new buffer and copy the pixel data.
static rtc::scoped_refptr<I210Buffer> Copy(const I210BufferInterface& buffer);
// Convert and put I420 buffer into a new buffer.
static rtc::scoped_refptr<I210Buffer> Copy(const I420BufferInterface& buffer);
// Return a rotated copy of `src`.
static rtc::scoped_refptr<I210Buffer> Rotate(const I210BufferInterface& src,
VideoRotation rotation);
// VideoFrameBuffer implementation.
rtc::scoped_refptr<I420BufferInterface> ToI420() override;
// PlanarYuv16BBuffer implementation.
int width() const override;
int height() const override;
const uint16_t* DataY() const override;
const uint16_t* DataU() const override;
const uint16_t* DataV() const override;
int StrideY() const override;
int StrideU() const override;
int StrideV() const override;
uint16_t* MutableDataY();
uint16_t* MutableDataU();
uint16_t* MutableDataV();
// Scale the cropped area of `src` to the size of `this` buffer, and
// write the result into `this`.
void CropAndScaleFrom(const I210BufferInterface& src,
int offset_x,
int offset_y,
int crop_width,
int crop_height);
// Scale all of `src` to the size of `this` buffer, with no cropping.
void ScaleFrom(const I210BufferInterface& src);
protected:
I210Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
~I210Buffer() override;
private:
const int width_;
const int height_;
const int stride_y_;
const int stride_u_;
const int stride_v_;
const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
};
} // namespace webrtc
#endif // API_VIDEO_I210_BUFFER_H_
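
A short usage sketch of this API (assumed test-style code, not from the commit):

#include "api/video/i210_buffer.h"

void ExampleUsage() {
  // 4:2:2 10-bit buffer: chroma planes are half width but full height.
  rtc::scoped_refptr<webrtc::I210Buffer> buf =
      webrtc::I210Buffer::Create(640, 480);
  buf->MutableDataY()[0] = 1023;  // Largest legal 10-bit sample.
  // Scale into a quarter-size buffer.
  rtc::scoped_refptr<webrtc::I210Buffer> small =
      webrtc::I210Buffer::Create(320, 240);
  small->ScaleFrom(*buf);
  // Narrow to 8-bit I420 when an 8-bit consumer needs the frame.
  rtc::scoped_refptr<webrtc::I420BufferInterface> i420 = buf->ToI420();
}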


@@ -31,129 +31,6 @@ namespace {
int I422DataSize(int height, int stride_y, int stride_u, int stride_v) {
return stride_y * height + stride_u * height + stride_v * height;
}
// TODO(sergio.garcia.murillo@gmail.com): Remove as soon as it is available in
// libyuv. Due to the rotate & scale required, this function may not be merged
// into libyuv immediately.
// https://bugs.chromium.org/p/libyuv/issues/detail?id=926
int webrtcI422Rotate(const uint8_t* src_y,
int src_stride_y,
const uint8_t* src_u,
int src_stride_u,
const uint8_t* src_v,
int src_stride_v,
uint8_t* dst_y,
int dst_stride_y,
uint8_t* dst_u,
int dst_stride_u,
uint8_t* dst_v,
int dst_stride_v,
int width,
int height,
enum libyuv::RotationMode mode) {
int halfwidth = (width + 1) >> 1;
int halfheight = (height + 1) >> 1;
if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y ||
!dst_u || !dst_v) {
return -1;
}
// Negative height means invert the image.
if (height < 0) {
height = -height;
src_y = src_y + (height - 1) * src_stride_y;
src_u = src_u + (height - 1) * src_stride_u;
src_v = src_v + (height - 1) * src_stride_v;
src_stride_y = -src_stride_y;
src_stride_u = -src_stride_u;
src_stride_v = -src_stride_v;
}
switch (mode) {
case libyuv::kRotate0:
// copy frame
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth,
height);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth,
height);
return 0;
case libyuv::kRotate90:
// We need to rotate and rescale; we use plane Y as temporary storage.
libyuv::RotatePlane90(src_u, src_stride_u, dst_y, height, halfwidth,
height);
libyuv::ScalePlane(dst_y, height, height, halfwidth, dst_u, halfheight,
halfheight, width, libyuv::kFilterBilinear);
libyuv::RotatePlane90(src_v, src_stride_v, dst_y, height, halfwidth,
height);
libyuv::ScalePlane(dst_y, height, height, halfwidth, dst_v, halfheight,
halfheight, width, libyuv::kFilterLinear);
libyuv::RotatePlane90(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
return 0;
case libyuv::kRotate270:
// We need to rotate and rescale; we use plane Y as temporary storage.
libyuv::RotatePlane270(src_u, src_stride_u, dst_y, height, halfwidth,
height);
libyuv::ScalePlane(dst_y, height, height, halfwidth, dst_u, halfheight,
halfheight, width, libyuv::kFilterBilinear);
libyuv::RotatePlane270(src_v, src_stride_v, dst_y, height, halfwidth,
height);
libyuv::ScalePlane(dst_y, height, height, halfwidth, dst_v, halfheight,
halfheight, width, libyuv::kFilterLinear);
libyuv::RotatePlane270(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
return 0;
case libyuv::kRotate180:
libyuv::RotatePlane180(src_y, src_stride_y, dst_y, dst_stride_y, width,
height);
libyuv::RotatePlane180(src_u, src_stride_u, dst_u, dst_stride_u,
halfwidth, height);
libyuv::RotatePlane180(src_v, src_stride_v, dst_v, dst_stride_v,
halfwidth, height);
return 0;
default:
break;
}
return -1;
}
// TODO(sergio.garcia.murillo@gmail.com): Remove this function with libyuv one
// as soon as the dependency is updated.
int webrtcI422Scale(const uint8_t* src_y,
int src_stride_y,
const uint8_t* src_u,
int src_stride_u,
const uint8_t* src_v,
int src_stride_v,
int src_width,
int src_height,
uint8_t* dst_y,
int dst_stride_y,
uint8_t* dst_u,
int dst_stride_u,
uint8_t* dst_v,
int dst_stride_v,
int dst_width,
int dst_height,
enum libyuv::FilterMode filtering) {
if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 ||
src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v ||
dst_width <= 0 || dst_height <= 0) {
return -1;
}
int src_halfwidth = (src_width + 1) >> 1;
int dst_halfwidth = (dst_width + 1) >> 1;
libyuv::ScalePlane(src_y, src_stride_y, src_width, src_height, dst_y,
dst_stride_y, dst_width, dst_height, filtering);
libyuv::ScalePlane(src_u, src_stride_u, src_halfwidth, src_height, dst_u,
dst_stride_u, dst_halfwidth, dst_height, filtering);
libyuv::ScalePlane(src_v, src_stride_v, src_halfwidth, src_height, dst_v,
dst_stride_v, dst_halfwidth, dst_height, filtering);
return 0;
}
} // namespace
I422Buffer::I422Buffer(int width, int height)
@@ -257,7 +134,7 @@ rtc::scoped_refptr<I422Buffer> I422Buffer::Rotate(
I422Buffer::Create(rotated_width, rotated_height);
RTC_CHECK_EQ(0,
- webrtcI422Rotate(
+ libyuv::I422Rotate(
src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
src.DataV(), src.StrideV(), buffer->MutableDataY(),
buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
@@ -343,8 +220,9 @@ void I422Buffer::CropAndScaleFrom(const I422BufferInterface& src,
src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
const uint8_t* v_plane =
src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
int res =
- webrtcI422Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
+ libyuv::I422Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
src.StrideV(), crop_width, crop_height, MutableDataY(),
StrideY(), MutableDataU(), StrideU(), MutableDataV(),
StrideV(), width(), height(), libyuv::kFilterBox);


@@ -12,6 +12,7 @@ rtc_library("rtc_api_video_unittests") {
testonly = true
sources = [
"color_space_unittest.cc",
"i210_buffer_unittest.cc",
"i422_buffer_unittest.cc",
"i444_buffer_unittest.cc",
"nv12_buffer_unittest.cc",
@@ -22,6 +23,7 @@ rtc_library("rtc_api_video_unittests") {
"..:video_adaptation",
"..:video_bitrate_allocation",
"..:video_frame",
"..:video_frame_i010",
"..:video_rtp_headers",
"../../../test:frame_utils",
"../../../test:test_support",


@@ -0,0 +1,126 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/i210_buffer.h"
#include "api/video/i420_buffer.h"
#include "test/frame_utils.h"
#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace {
int GetY(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
return buf->DataY()[row * buf->StrideY() + col];
}
int GetU(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
return buf->DataU()[row * buf->StrideU() + col];
}
int GetV(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
return buf->DataV()[row * buf->StrideV() + col];
}
void FillI210Buffer(rtc::scoped_refptr<I210Buffer> buf) {
const uint16_t Y = 4;
const uint16_t U = 8;
const uint16_t V = 16;
for (int row = 0; row < buf->height(); ++row) {
for (int col = 0; col < buf->width(); ++col) {
buf->MutableDataY()[row * buf->StrideY() + col] = Y;
}
}
for (int row = 0; row < buf->ChromaHeight(); ++row) {
for (int col = 0; col < buf->ChromaWidth(); ++col) {
buf->MutableDataU()[row * buf->StrideU() + col] = U;
buf->MutableDataV()[row * buf->StrideV() + col] = V;
}
}
}
} // namespace
TEST(I210BufferTest, InitialData) {
constexpr int stride = 3;
constexpr int halfstride = (stride + 1) >> 1;
constexpr int width = 3;
constexpr int halfwidth = (width + 1) >> 1;
constexpr int height = 3;
rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
EXPECT_EQ(width, i210_buffer->width());
EXPECT_EQ(height, i210_buffer->height());
EXPECT_EQ(stride, i210_buffer->StrideY());
EXPECT_EQ(halfstride, i210_buffer->StrideU());
EXPECT_EQ(halfstride, i210_buffer->StrideV());
EXPECT_EQ(halfwidth, i210_buffer->ChromaWidth());
EXPECT_EQ(height, i210_buffer->ChromaHeight());
}
TEST(I210BufferTest, ReadPixels) {
constexpr int width = 3;
constexpr int halfwidth = (width + 1) >> 1;
constexpr int height = 3;
rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
// Y = 4, U = 8, V = 16.
FillI210Buffer(i210_buffer);
for (int row = 0; row < height; row++) {
for (int col = 0; col < width; col++) {
EXPECT_EQ(4, GetY(i210_buffer, col, row));
}
}
for (int row = 0; row < height; row++) {
for (int col = 0; col < halfwidth; col++) {
EXPECT_EQ(8, GetU(i210_buffer, col, row));
EXPECT_EQ(16, GetV(i210_buffer, col, row));
}
}
}
TEST(I210BufferTest, ToI420) {
constexpr int width = 3;
constexpr int halfwidth = (width + 1) >> 1;
constexpr int height = 3;
constexpr int size = width * height;
constexpr int quartersize = (width + 1) / 2 * (height + 1) / 2;
rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
memset(reference->MutableDataY(), 1, size);
memset(reference->MutableDataU(), 2, quartersize);
memset(reference->MutableDataV(), 4, quartersize);
rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
// Y = 4, U = 8, V = 16.
FillI210Buffer(i210_buffer);
// Confirm YUV values are as expected.
for (int row = 0; row < height; row++) {
for (int col = 0; col < width; col++) {
EXPECT_EQ(4, GetY(i210_buffer, col, row));
}
}
for (int row = 0; row < height; row++) {
for (int col = 0; col < halfwidth; col++) {
EXPECT_EQ(8, GetU(i210_buffer, col, row));
EXPECT_EQ(16, GetV(i210_buffer, col, row));
}
}
rtc::scoped_refptr<I420BufferInterface> i420_buffer(i210_buffer->ToI420());
EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
EXPECT_EQ(height, i420_buffer->height());
EXPECT_EQ(width, i420_buffer->width());
}
} // namespace webrtc
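
The reference values in the ToI420 test line up because converting 10-bit samples to 8 bits drops the two least significant bits; a sketch of the relationship the expectations rely on (my reading of the conversion, stated as an assumption):

// Fill values 4/8/16 in 10 bits become 1/2/4 in 8 bits:
uint16_t y10 = 4, u10 = 8, v10 = 16;
uint8_t y8 = y10 >> 2;  // 1, matches memset(reference->MutableDataY(), 1, ...)
uint8_t u8 = u10 >> 2;  // 2
uint8_t v8 = v10 >> 2;  // 4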


@@ -58,6 +58,11 @@ const I010BufferInterface* VideoFrameBuffer::GetI010() const {
return static_cast<const I010BufferInterface*>(this);
}
const I210BufferInterface* VideoFrameBuffer::GetI210() const {
RTC_CHECK(type() == Type::kI210);
return static_cast<const I210BufferInterface*>(this);
}
const NV12BufferInterface* VideoFrameBuffer::GetNV12() const {
RTC_CHECK(type() == Type::kNV12);
return static_cast<const NV12BufferInterface*>(this);
@@ -87,6 +92,8 @@ const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type) {
return "kI422";
case VideoFrameBuffer::Type::kI010:
return "kI010";
case VideoFrameBuffer::Type::kI210:
return "kI210";
case VideoFrameBuffer::Type::kNV12:
return "kNV12";
default:
@@ -176,6 +183,18 @@ int I010BufferInterface::ChromaHeight() const {
return (height() + 1) / 2;
}
VideoFrameBuffer::Type I210BufferInterface::type() const {
return Type::kI210;
}
int I210BufferInterface::ChromaWidth() const {
return (width() + 1) / 2;
}
int I210BufferInterface::ChromaHeight() const {
return height();
}
VideoFrameBuffer::Type NV12BufferInterface::type() const {
return Type::kNV12;
}
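
A minimal sketch of the intended dispatch on the new type (hypothetical caller code):

void HandleBuffer(webrtc::VideoFrameBuffer* buffer) {
  if (buffer->type() == webrtc::VideoFrameBuffer::Type::kI210) {
    const webrtc::I210BufferInterface* i210 = buffer->GetI210();
    // 4:2:2: chroma is half width but full height, per the code above.
    bool full_height_chroma = i210->ChromaHeight() == i210->height();
    (void)full_height_chroma;
  }
}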


@@ -25,6 +25,7 @@ class I420ABufferInterface;
class I422BufferInterface;
class I444BufferInterface;
class I010BufferInterface;
class I210BufferInterface;
class NV12BufferInterface;
// Base class for frame buffers of different types of pixel format and storage.
@@ -56,6 +57,7 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface {
kI422,
kI444,
kI010,
kI210,
kNV12,
};
@@ -109,6 +111,7 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface {
const I422BufferInterface* GetI422() const;
const I444BufferInterface* GetI444() const;
const I010BufferInterface* GetI010() const;
const I210BufferInterface* GetI210() const;
const NV12BufferInterface* GetNV12() const;
// From a kNative frame, returns a VideoFrameBuffer with a pixel format in
@@ -218,7 +221,8 @@ class I444BufferInterface : public PlanarYuv8Buffer {
~I444BufferInterface() override {}
};
- // This interface represents 8-bit to 16-bit color depth formats: Type::kI010.
+ // This interface represents 8-bit to 16-bit color depth formats: Type::kI010 or
+ // Type::kI210.
class PlanarYuv16BBuffer : public PlanarYuvBuffer {
public:
// Returns pointer to the pixel data for a given plane. The memory is owned by
@@ -244,6 +248,19 @@ class I010BufferInterface : public PlanarYuv16BBuffer {
~I010BufferInterface() override {}
};
// Represents Type::kI210, allocates 16 bits per pixel and fills 10 least
// significant bits with color information.
class I210BufferInterface : public PlanarYuv16BBuffer {
public:
Type type() const override;
int ChromaWidth() const final;
int ChromaHeight() const final;
protected:
~I210BufferInterface() override {}
};
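
The "10 least significant bits" comment implies a simple range invariant; a one-line sketch (assumption, nothing in the interface enforces it):

// Samples occupy the low 10 bits of each uint16_t, so valid values are
// 0..1023 (2^10 - 1).
bool IsValid10BitSample(uint16_t sample) {
  return sample <= 1023;
}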
class BiplanarYuvBuffer : public VideoFrameBuffer {
public:
virtual int ChromaWidth() const = 0;


@@ -53,6 +53,7 @@ rtc_library("common_video") {
"../api/video:video_bitrate_allocation",
"../api/video:video_bitrate_allocator",
"../api/video:video_frame",
"../api/video:video_frame_i010",
"../api/video:video_rtp_headers",
"../api/video_codecs:bitstream_parser_api",
"../api/video_codecs:video_codecs_api",


@@ -89,6 +89,16 @@ rtc::scoped_refptr<I010BufferInterface> WrapI010Buffer(
int v_stride,
std::function<void()> no_longer_used);
rtc::scoped_refptr<I210BufferInterface> WrapI210Buffer(
int width,
int height,
const uint16_t* y_plane,
int y_stride,
const uint16_t* u_plane,
int u_stride,
const uint16_t* v_plane,
int v_stride,
std::function<void()> no_longer_used);
} // namespace webrtc
#endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_


@@ -16,6 +16,8 @@
#include <list>
#include "api/scoped_refptr.h"
#include "api/video/i010_buffer.h"
#include "api/video/i210_buffer.h"
#include "api/video/i420_buffer.h"
#include "api/video/i422_buffer.h"
#include "api/video/i444_buffer.h"
@@ -46,6 +48,8 @@ class VideoFrameBufferPool {
rtc::scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height);
rtc::scoped_refptr<I422Buffer> CreateI422Buffer(int width, int height);
rtc::scoped_refptr<I444Buffer> CreateI444Buffer(int width, int height);
rtc::scoped_refptr<I010Buffer> CreateI010Buffer(int width, int height);
rtc::scoped_refptr<I210Buffer> CreateI210Buffer(int width, int height);
rtc::scoped_refptr<NV12Buffer> CreateNV12Buffer(int width, int height);
// Changes the max amount of buffers in the pool to the new value.


@@ -211,6 +211,22 @@ rtc::scoped_refptr<I420BufferInterface> I010BufferBase::ToI420() {
return i420_buffer;
}
class I210BufferBase : public I210BufferInterface {
public:
rtc::scoped_refptr<I420BufferInterface> ToI420() final;
};
rtc::scoped_refptr<I420BufferInterface> I210BufferBase::ToI420() {
rtc::scoped_refptr<I420Buffer> i420_buffer =
I420Buffer::Create(width(), height());
libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
i420_buffer->MutableDataY(), i420_buffer->StrideY(),
i420_buffer->MutableDataU(), i420_buffer->StrideU(),
i420_buffer->MutableDataV(), i420_buffer->StrideV(),
width(), height());
return i420_buffer;
}
} // namespace
rtc::scoped_refptr<I420BufferInterface> WrapI420Buffer(
@@ -321,4 +337,20 @@ rtc::scoped_refptr<I010BufferInterface> WrapI010Buffer(
v_stride, no_longer_used));
}
rtc::scoped_refptr<I210BufferInterface> WrapI210Buffer(
int width,
int height,
const uint16_t* y_plane,
int y_stride,
const uint16_t* u_plane,
int u_stride,
const uint16_t* v_plane,
int v_stride,
std::function<void()> no_longer_used) {
return rtc::scoped_refptr<I210BufferInterface>(
rtc::make_ref_counted<WrappedYuv16BBuffer<I210BufferBase>>(
width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
v_stride, no_longer_used));
}
} // namespace webrtc
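
A usage sketch for the new wrapper (names hypothetical; strides are in uint16_t units):

rtc::scoped_refptr<webrtc::I210BufferInterface> WrapExternalI210(
    const uint16_t* y, const uint16_t* u, const uint16_t* v,
    int width, int height, std::function<void()> release) {
  return webrtc::WrapI210Buffer(width, height,
                                y, /*y_stride=*/width,
                                u, /*u_stride=*/(width + 1) / 2,
                                v, /*v_stride=*/(width + 1) / 2,
                                // Invoked when the last reference is dropped.
                                std::move(release));
}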


@@ -36,6 +36,14 @@ bool HasOneRef(const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
return static_cast<rtc::RefCountedObject<I422Buffer>*>(buffer.get())
->HasOneRef();
}
case VideoFrameBuffer::Type::kI010: {
return static_cast<rtc::RefCountedObject<I010Buffer>*>(buffer.get())
->HasOneRef();
}
case VideoFrameBuffer::Type::kI210: {
return static_cast<rtc::RefCountedObject<I210Buffer>*>(buffer.get())
->HasOneRef();
}
case VideoFrameBuffer::Type::kNV12: {
return static_cast<rtc::RefCountedObject<NV12Buffer>*>(buffer.get())
->HasOneRef();
@@ -219,6 +227,60 @@ rtc::scoped_refptr<NV12Buffer> VideoFrameBufferPool::CreateNV12Buffer(
return buffer;
}
rtc::scoped_refptr<I010Buffer> VideoFrameBufferPool::CreateI010Buffer(
int width,
int height) {
RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI010);
if (existing_buffer) {
// Cast is safe because the only way kI010 buffer is created is
// in the same function below, where |RefCountedObject<I010Buffer>|
// is created.
rtc::RefCountedObject<I010Buffer>* raw_buffer =
static_cast<rtc::RefCountedObject<I010Buffer>*>(existing_buffer.get());
// Creates a new scoped_refptr, which is also pointing to the same
// RefCountedObject as buffer, increasing ref count.
return rtc::scoped_refptr<I010Buffer>(raw_buffer);
}
if (buffers_.size() >= max_number_of_buffers_)
return nullptr;
// Allocate new buffer.
rtc::scoped_refptr<I010Buffer> buffer = I010Buffer::Create(width, height);
buffers_.push_back(buffer);
return buffer;
}
rtc::scoped_refptr<I210Buffer> VideoFrameBufferPool::CreateI210Buffer(
int width,
int height) {
RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
rtc::scoped_refptr<VideoFrameBuffer> existing_buffer =
GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI210);
if (existing_buffer) {
// Cast is safe because the only way kI210 buffer is created is
// in the same function below, where |RefCountedObject<I210Buffer>|
// is created.
rtc::RefCountedObject<I210Buffer>* raw_buffer =
static_cast<rtc::RefCountedObject<I210Buffer>*>(existing_buffer.get());
// Creates a new scoped_refptr, which is also pointing to the same
// RefCountedObject as buffer, increasing ref count.
return rtc::scoped_refptr<I210Buffer>(raw_buffer);
}
if (buffers_.size() >= max_number_of_buffers_)
return nullptr;
// Allocate new buffer.
rtc::scoped_refptr<I210Buffer> buffer = I210Buffer::Create(width, height);
buffers_.push_back(buffer);
return buffer;
}
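
How the pool is expected to behave for the new type, sketched (hypothetical caller; the cap and sizes are illustrative):

webrtc::VideoFrameBufferPool pool(/*zero_initialize=*/false,
                                  /*max_number_of_buffers=*/3);
rtc::scoped_refptr<webrtc::I210Buffer> frame =
    pool.CreateI210Buffer(1280, 720);
// While `frame` is still referenced, another call allocates a second buffer;
// once three are outstanding, CreateI210Buffer() returns nullptr. Releasing
// `frame` lets the pool hand the same allocation out again.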
rtc::scoped_refptr<VideoFrameBuffer> VideoFrameBufferPool::GetExistingBuffer(
int width,
int height,


@@ -79,13 +79,54 @@ TEST(TestVideoFrameBufferPool, ProducesNv12) {
EXPECT_NE(nullptr, buffer.get());
}
TEST(TestVideoFrameBufferPool, ProducesI422) {
VideoFrameBufferPool pool(false, 1);
auto buffer = pool.CreateI422Buffer(16, 16);
EXPECT_NE(nullptr, buffer.get());
}
TEST(TestVideoFrameBufferPool, ProducesI444) {
VideoFrameBufferPool pool(false, 1);
auto buffer = pool.CreateI444Buffer(16, 16);
EXPECT_NE(nullptr, buffer.get());
}
TEST(TestVideoFrameBufferPool, ProducesI010) {
VideoFrameBufferPool pool(false, 1);
auto buffer = pool.CreateI010Buffer(16, 16);
EXPECT_NE(nullptr, buffer.get());
}
TEST(TestVideoFrameBufferPool, ProducesI210) {
VideoFrameBufferPool pool(false, 1);
auto buffer = pool.CreateI210Buffer(16, 16);
EXPECT_NE(nullptr, buffer.get());
}
TEST(TestVideoFrameBufferPool, SwitchingPixelFormat) {
VideoFrameBufferPool pool(false, 1);
- auto buffer = pool.CreateNV12Buffer(16, 16);
+ auto bufferNV12 = pool.CreateNV12Buffer(16, 16);
EXPECT_EQ(nullptr, pool.CreateNV12Buffer(16, 16).get());
- auto buffer2 = pool.CreateI420Buffer(16, 16);
- EXPECT_NE(nullptr, buffer2.get());
+ auto bufferI420 = pool.CreateI420Buffer(16, 16);
+ EXPECT_NE(nullptr, bufferI420.get());
EXPECT_EQ(nullptr, pool.CreateI420Buffer(16, 16).get());
auto bufferI444 = pool.CreateI444Buffer(16, 16);
EXPECT_NE(nullptr, bufferI444.get());
EXPECT_EQ(nullptr, pool.CreateI444Buffer(16, 16).get());
auto bufferI422 = pool.CreateI422Buffer(16, 16);
EXPECT_NE(nullptr, bufferI422.get());
EXPECT_EQ(nullptr, pool.CreateI422Buffer(16, 16).get());
auto bufferI010 = pool.CreateI010Buffer(16, 16);
EXPECT_NE(nullptr, bufferI010.get());
EXPECT_EQ(nullptr, pool.CreateI010Buffer(16, 16).get());
auto bufferI210 = pool.CreateI210Buffer(16, 16);
EXPECT_NE(nullptr, bufferI210.get());
EXPECT_EQ(nullptr, pool.CreateI210Buffer(16, 16).get());
}
} // namespace webrtc


@@ -14,6 +14,7 @@
#include <string.h>
#include "api/video/i010_buffer.h"
#include "api/video/i210_buffer.h"
#include "api/video/i420_buffer.h"
#include "api/video/i422_buffer.h"
#include "api/video/i444_buffer.h"
@@ -44,6 +45,8 @@ SubSampling SubSamplingForType(VideoFrameBuffer::Type type) {
return {.x = 1, .y = 1};
case VideoFrameBuffer::Type::kI010:
return {.x = 2, .y = 2};
case VideoFrameBuffer::Type::kI210:
return {.x = 2, .y = 1};
default:
return {};
}
@@ -172,6 +175,17 @@ void CheckRotate(int width,
int x = corners[j].x * (rotated_width - 1);
int y = corners[j].y * (rotated_height - 1);
EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]);
if (rotated.type() == VideoFrameBuffer::Type::kI422 ||
rotated.type() == VideoFrameBuffer::Type::kI210) {
EXPECT_NEAR(colors[i].u,
rotated.DataU()[(x / plane_divider.x) +
(y / plane_divider.y) * rotated.StrideU()],
1);
EXPECT_NEAR(colors[i].v,
rotated.DataV()[(x / plane_divider.x) +
(y / plane_divider.y) * rotated.StrideV()],
1);
} else {
EXPECT_EQ(colors[i].u,
rotated.DataU()[(x / plane_divider.x) +
(y / plane_divider.y) * rotated.StrideU()]);
@@ -179,6 +193,7 @@
rotated.DataV()[(x / plane_divider.x) +
(y / plane_divider.y) * rotated.StrideV()]);
}
}
}
} // namespace
@@ -287,7 +302,8 @@ rtc::scoped_refptr<T> CreateAndFillBuffer() {
if (buf->type() == VideoFrameBuffer::Type::kI444) {
memset(buf->MutableDataU(), 2, 200);
memset(buf->MutableDataV(), 3, 200);
- } else if (buf->type() == VideoFrameBuffer::Type::kI422) {
+ } else if (buf->type() == VideoFrameBuffer::Type::kI422 ||
+            buf->type() == VideoFrameBuffer::Type::kI210) {
memset(buf->MutableDataU(), 2, 100);
memset(buf->MutableDataV(), 3, 100);
} else {
@@ -368,8 +384,8 @@ REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBuffer,
CropYNotCenter,
CropAndScale16x9);
- using TestTypesAll =
-     ::testing::Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer>;
+ using TestTypesAll = ::testing::
+     Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer, I210Buffer>;
INSTANTIATE_TYPED_TEST_SUITE_P(All, TestPlanarYuvBuffer, TestTypesAll);
template <class T>
@@ -387,7 +403,7 @@ TYPED_TEST_P(TestPlanarYuvBufferScale, Scale) {
REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBufferScale, Scale);
- using TestTypesScale = ::testing::Types<I420Buffer, I010Buffer>;
+ using TestTypesScale = ::testing::Types<I420Buffer, I010Buffer, I210Buffer>;
INSTANTIATE_TYPED_TEST_SUITE_P(All, TestPlanarYuvBufferScale, TestTypesScale);
template <class T>
@@ -411,8 +427,8 @@ TYPED_TEST_P(TestPlanarYuvBufferRotate, Rotates) {
REGISTER_TYPED_TEST_SUITE_P(TestPlanarYuvBufferRotate, Rotates);
- using TestTypesRotate =
-     ::testing::Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer>;
+ using TestTypesRotate = ::testing::
+     Types<I420Buffer, I010Buffer, I444Buffer, I422Buffer, I210Buffer>;
INSTANTIATE_TYPED_TEST_SUITE_P(Rotate,
TestPlanarYuvBufferRotate,
TestTypesRotate);


@@ -41,9 +41,10 @@ namespace webrtc {
namespace {
- constexpr std::array<AVPixelFormat, 6> kPixelFormatsSupported = {
-     AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P,
-     AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P};
+ constexpr std::array<AVPixelFormat, 8> kPixelFormatsSupported = {
+     AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P,
+     AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P,
+     AV_PIX_FMT_YUV420P10LE, AV_PIX_FMT_YUV422P10LE};
const size_t kYPlaneIndex = 0;
const size_t kUPlaneIndex = 1;
const size_t kVPlaneIndex = 2;
@@ -115,10 +116,13 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
// http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
// TODO(nisse): Delete that feature from the video pool, instead add
// an explicit call to InitializeData here.
- rtc::scoped_refptr<PlanarYuv8Buffer> frame_buffer;
+ rtc::scoped_refptr<PlanarYuvBuffer> frame_buffer;
rtc::scoped_refptr<I444Buffer> i444_buffer;
rtc::scoped_refptr<I420Buffer> i420_buffer;
rtc::scoped_refptr<I422Buffer> i422_buffer;
rtc::scoped_refptr<I010Buffer> i010_buffer;
rtc::scoped_refptr<I210Buffer> i210_buffer;
int bytes_per_pixel = 1;
switch (context->pix_fmt) {
case AV_PIX_FMT_YUV420P:
case AV_PIX_FMT_YUVJ420P:
@@ -160,6 +164,38 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
av_frame->linesize[kVPlaneIndex] = i422_buffer->StrideV();
frame_buffer = i422_buffer;
break;
case AV_PIX_FMT_YUV420P10LE:
i010_buffer =
decoder->ffmpeg_buffer_pool_.CreateI010Buffer(width, height);
// Set `av_frame` members as required by FFmpeg.
av_frame->data[kYPlaneIndex] =
reinterpret_cast<uint8_t*>(i010_buffer->MutableDataY());
av_frame->linesize[kYPlaneIndex] = i010_buffer->StrideY() * 2;
av_frame->data[kUPlaneIndex] =
reinterpret_cast<uint8_t*>(i010_buffer->MutableDataU());
av_frame->linesize[kUPlaneIndex] = i010_buffer->StrideU() * 2;
av_frame->data[kVPlaneIndex] =
reinterpret_cast<uint8_t*>(i010_buffer->MutableDataV());
av_frame->linesize[kVPlaneIndex] = i010_buffer->StrideV() * 2;
frame_buffer = i010_buffer;
bytes_per_pixel = 2;
break;
case AV_PIX_FMT_YUV422P10LE:
i210_buffer =
decoder->ffmpeg_buffer_pool_.CreateI210Buffer(width, height);
// Set `av_frame` members as required by FFmpeg.
av_frame->data[kYPlaneIndex] =
reinterpret_cast<uint8_t*>(i210_buffer->MutableDataY());
av_frame->linesize[kYPlaneIndex] = i210_buffer->StrideY() * 2;
av_frame->data[kUPlaneIndex] =
reinterpret_cast<uint8_t*>(i210_buffer->MutableDataU());
av_frame->linesize[kUPlaneIndex] = i210_buffer->StrideU() * 2;
av_frame->data[kVPlaneIndex] =
reinterpret_cast<uint8_t*>(i210_buffer->MutableDataV());
av_frame->linesize[kVPlaneIndex] = i210_buffer->StrideV() * 2;
frame_buffer = i210_buffer;
bytes_per_pixel = 2;
break;
default:
RTC_LOG(LS_ERROR) << "Unsupported buffer type " << context->pix_fmt
<< ". Check supported supported pixel formats!";
@@ -167,11 +203,14 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
return -1;
}
- int y_size = width * height;
- int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight();
+ int y_size = width * height * bytes_per_pixel;
+ int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight() *
+               bytes_per_pixel;
// DCHECK that we have a continuous buffer as is required.
- RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size);
- RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size);
+ RTC_DCHECK_EQ(av_frame->data[kUPlaneIndex],
+               av_frame->data[kYPlaneIndex] + y_size);
+ RTC_DCHECK_EQ(av_frame->data[kVPlaneIndex],
+               av_frame->data[kUPlaneIndex] + uv_size);
int total_size = y_size + 2 * uv_size;
av_frame->format = context->pix_fmt;
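
The * 2 and / 2 adjustments in this file come down to units: WebRTC's 16-bit buffers count strides in uint16_t samples while FFmpeg's linesize counts bytes. A two-line sketch of the rule (illustrative values):

int stride_in_samples = 640;                    // e.g. i210_buffer->StrideY()
int linesize_in_bytes = stride_in_samples * 2;  // what av_frame->linesize holds
// Hence StrideY() * 2 when filling av_frame above, and linesize / 2 when
// wrapping av_frame planes back into WrapI010Buffer/WrapI210Buffer below.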
@@ -360,18 +399,36 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
input_frame->video_frame_buffer();
- // Instantiate Planar YUV8 buffer according to video frame buffer type
+ // Instantiate Planar YUV buffer according to video frame buffer type
+ const webrtc::PlanarYuvBuffer* planar_yuv_buffer = nullptr;
const webrtc::PlanarYuv8Buffer* planar_yuv8_buffer = nullptr;
+ const webrtc::PlanarYuv16BBuffer* planar_yuv16_buffer = nullptr;
VideoFrameBuffer::Type video_frame_buffer_type = frame_buffer->type();
switch (video_frame_buffer_type) {
case VideoFrameBuffer::Type::kI420:
- planar_yuv8_buffer = frame_buffer->GetI420();
+ planar_yuv_buffer = frame_buffer->GetI420();
+ planar_yuv8_buffer =
+     reinterpret_cast<const webrtc::PlanarYuv8Buffer*>(planar_yuv_buffer);
break;
case VideoFrameBuffer::Type::kI444:
- planar_yuv8_buffer = frame_buffer->GetI444();
+ planar_yuv_buffer = frame_buffer->GetI444();
+ planar_yuv8_buffer =
+     reinterpret_cast<const webrtc::PlanarYuv8Buffer*>(planar_yuv_buffer);
break;
case VideoFrameBuffer::Type::kI422:
- planar_yuv8_buffer = frame_buffer->GetI422();
+ planar_yuv_buffer = frame_buffer->GetI422();
+ planar_yuv8_buffer =
+     reinterpret_cast<const webrtc::PlanarYuv8Buffer*>(planar_yuv_buffer);
break;
case VideoFrameBuffer::Type::kI010:
planar_yuv_buffer = frame_buffer->GetI010();
planar_yuv16_buffer = reinterpret_cast<const webrtc::PlanarYuv16BBuffer*>(
planar_yuv_buffer);
break;
case VideoFrameBuffer::Type::kI210:
planar_yuv_buffer = frame_buffer->GetI210();
planar_yuv16_buffer = reinterpret_cast<const webrtc::PlanarYuv16BBuffer*>(
planar_yuv_buffer);
break;
default:
// If this code is changed to allow other video frame buffer type,
@@ -389,25 +446,74 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
// When needed, FFmpeg applies cropping by moving plane pointers and adjusting
// frame width/height. Ensure that cropped buffers lie within the allocated
// memory.
- RTC_DCHECK_LE(av_frame_->width, planar_yuv8_buffer->width());
- RTC_DCHECK_LE(av_frame_->height, planar_yuv8_buffer->height());
+ RTC_DCHECK_LE(av_frame_->width, planar_yuv_buffer->width());
+ RTC_DCHECK_LE(av_frame_->height, planar_yuv_buffer->height());
+ switch (video_frame_buffer_type) {
+ case VideoFrameBuffer::Type::kI420:
+ case VideoFrameBuffer::Type::kI444:
+ case VideoFrameBuffer::Type::kI422: {
RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], planar_yuv8_buffer->DataY());
- RTC_DCHECK_LE(av_frame_->data[kYPlaneIndex] +
-               av_frame_->linesize[kYPlaneIndex] * av_frame_->height,
-               planar_yuv8_buffer->DataY() + planar_yuv8_buffer->StrideY() *
-                                             planar_yuv8_buffer->height());
+ RTC_DCHECK_LE(
+     av_frame_->data[kYPlaneIndex] +
+         av_frame_->linesize[kYPlaneIndex] * av_frame_->height,
+     planar_yuv8_buffer->DataY() +
+         planar_yuv8_buffer->StrideY() * planar_yuv8_buffer->height());
RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], planar_yuv8_buffer->DataU());
- RTC_DCHECK_LE(av_frame_->data[kUPlaneIndex] +
-               av_frame_->linesize[kUPlaneIndex] * av_frame_->height / 2,
-               planar_yuv8_buffer->DataU() + planar_yuv8_buffer->StrideU() *
-                                             planar_yuv8_buffer->height() / 2);
+ RTC_DCHECK_LE(
+     av_frame_->data[kUPlaneIndex] +
+         av_frame_->linesize[kUPlaneIndex] * planar_yuv8_buffer->ChromaHeight(),
+     planar_yuv8_buffer->DataU() +
+         planar_yuv8_buffer->StrideU() * planar_yuv8_buffer->ChromaHeight());
RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], planar_yuv8_buffer->DataV());
- RTC_DCHECK_LE(av_frame_->data[kVPlaneIndex] +
-               av_frame_->linesize[kVPlaneIndex] * av_frame_->height / 2,
-               planar_yuv8_buffer->DataV() + planar_yuv8_buffer->StrideV() *
-                                             planar_yuv8_buffer->height() / 2);
+ RTC_DCHECK_LE(
+     av_frame_->data[kVPlaneIndex] +
+         av_frame_->linesize[kVPlaneIndex] * planar_yuv8_buffer->ChromaHeight(),
+     planar_yuv8_buffer->DataV() +
+         planar_yuv8_buffer->StrideV() * planar_yuv8_buffer->ChromaHeight());
break;
}
case VideoFrameBuffer::Type::kI010:
case VideoFrameBuffer::Type::kI210: {
RTC_DCHECK_GE(
av_frame_->data[kYPlaneIndex],
reinterpret_cast<const uint8_t*>(planar_yuv16_buffer->DataY()));
RTC_DCHECK_LE(
av_frame_->data[kYPlaneIndex] +
av_frame_->linesize[kYPlaneIndex] * av_frame_->height,
reinterpret_cast<const uint8_t*>(planar_yuv16_buffer->DataY()) +
planar_yuv16_buffer->StrideY() * 2 *
planar_yuv16_buffer->height());
RTC_DCHECK_GE(
av_frame_->data[kUPlaneIndex],
reinterpret_cast<const uint8_t*>(planar_yuv16_buffer->DataU()));
RTC_DCHECK_LE(
av_frame_->data[kUPlaneIndex] +
av_frame_->linesize[kUPlaneIndex] *
planar_yuv16_buffer->ChromaHeight(),
reinterpret_cast<const uint8_t*>(planar_yuv16_buffer->DataU()) +
planar_yuv16_buffer->StrideU() * 2 *
planar_yuv16_buffer->ChromaHeight());
RTC_DCHECK_GE(
av_frame_->data[kVPlaneIndex],
reinterpret_cast<const uint8_t*>(planar_yuv16_buffer->DataV()));
RTC_DCHECK_LE(
av_frame_->data[kVPlaneIndex] +
av_frame_->linesize[kVPlaneIndex] *
planar_yuv16_buffer->ChromaHeight(),
reinterpret_cast<const uint8_t*>(planar_yuv16_buffer->DataV()) +
planar_yuv16_buffer->StrideV() * 2 *
planar_yuv16_buffer->ChromaHeight());
break;
}
default:
RTC_LOG(LS_ERROR) << "frame_buffer type: "
<< static_cast<int32_t>(video_frame_buffer_type)
<< " is not supported!";
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
rtc::scoped_refptr<webrtc::VideoFrameBuffer> cropped_buffer;
switch (video_frame_buffer_type) {
@@ -438,6 +544,30 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
// To keep reference alive.
[frame_buffer] {});
break;
case VideoFrameBuffer::Type::kI010:
cropped_buffer = WrapI010Buffer(
av_frame_->width, av_frame_->height,
reinterpret_cast<const uint16_t*>(av_frame_->data[kYPlaneIndex]),
av_frame_->linesize[kYPlaneIndex] / 2,
reinterpret_cast<const uint16_t*>(av_frame_->data[kUPlaneIndex]),
av_frame_->linesize[kUPlaneIndex] / 2,
reinterpret_cast<const uint16_t*>(av_frame_->data[kVPlaneIndex]),
av_frame_->linesize[kVPlaneIndex] / 2,
// To keep reference alive.
[frame_buffer] {});
break;
case VideoFrameBuffer::Type::kI210:
cropped_buffer = WrapI210Buffer(
av_frame_->width, av_frame_->height,
reinterpret_cast<const uint16_t*>(av_frame_->data[kYPlaneIndex]),
av_frame_->linesize[kYPlaneIndex] / 2,
reinterpret_cast<const uint16_t*>(av_frame_->data[kUPlaneIndex]),
av_frame_->linesize[kUPlaneIndex] / 2,
reinterpret_cast<const uint16_t*>(av_frame_->data[kVPlaneIndex]),
av_frame_->linesize[kVPlaneIndex] / 2,
// To keep reference alive.
[frame_buffer] {});
break;
default:
RTC_LOG(LS_ERROR) << "frame_buffer type: "
<< static_cast<int32_t>(video_frame_buffer_type)
@@ -446,60 +576,23 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
return WEBRTC_VIDEO_CODEC_ERROR;
}
- if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
+ // Preference for NV12 output format is ignored if actual format isn't
+ // trivially convertible to it.
+ if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12 &&
+     video_frame_buffer_type == VideoFrameBuffer::Type::kI420) {
auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer(
cropped_buffer->width(), cropped_buffer->height());
- const PlanarYuv8Buffer* cropped_planar_yuv8_buffer = nullptr;
- switch (video_frame_buffer_type) {
- case VideoFrameBuffer::Type::kI420:
- cropped_planar_yuv8_buffer = cropped_buffer->GetI420();
- libyuv::I420ToNV12(cropped_planar_yuv8_buffer->DataY(),
-                    cropped_planar_yuv8_buffer->StrideY(),
-                    cropped_planar_yuv8_buffer->DataU(),
-                    cropped_planar_yuv8_buffer->StrideU(),
-                    cropped_planar_yuv8_buffer->DataV(),
-                    cropped_planar_yuv8_buffer->StrideV(),
-                    nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                    nv12_buffer->MutableDataUV(),
-                    nv12_buffer->StrideUV(), planar_yuv8_buffer->width(),
-                    planar_yuv8_buffer->height());
- break;
- case VideoFrameBuffer::Type::kI444:
- cropped_planar_yuv8_buffer = cropped_buffer->GetI444();
- libyuv::I444ToNV12(cropped_planar_yuv8_buffer->DataY(),
-                    cropped_planar_yuv8_buffer->StrideY(),
-                    cropped_planar_yuv8_buffer->DataU(),
-                    cropped_planar_yuv8_buffer->StrideU(),
-                    cropped_planar_yuv8_buffer->DataV(),
-                    cropped_planar_yuv8_buffer->StrideV(),
-                    nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                    nv12_buffer->MutableDataUV(),
-                    nv12_buffer->StrideUV(), planar_yuv8_buffer->width(),
-                    planar_yuv8_buffer->height());
- break;
- case VideoFrameBuffer::Type::kI422:
- cropped_planar_yuv8_buffer = cropped_buffer->GetI422();
- // Swap src_u and src_v to implement I422ToNV12.
- libyuv::I422ToNV21(cropped_planar_yuv8_buffer->DataY(),
-                    cropped_planar_yuv8_buffer->StrideY(),
-                    cropped_planar_yuv8_buffer->DataV(),
-                    cropped_planar_yuv8_buffer->StrideV(),
-                    cropped_planar_yuv8_buffer->DataU(),
-                    cropped_planar_yuv8_buffer->StrideU(),
-                    nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                    nv12_buffer->MutableDataUV(),
-                    nv12_buffer->StrideUV(), planar_yuv8_buffer->width(),
-                    planar_yuv8_buffer->height());
- break;
- default:
- RTC_LOG(LS_ERROR) << "frame_buffer type: "
-                   << static_cast<int32_t>(video_frame_buffer_type)
-                   << " is not supported!";
- ReportError();
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
+ const PlanarYuv8Buffer* cropped_planar_yuv_buffer =
+     cropped_buffer->GetI420();
+ libyuv::I420ToNV12(cropped_planar_yuv_buffer->DataY(),
+                    cropped_planar_yuv_buffer->StrideY(),
+                    cropped_planar_yuv_buffer->DataU(),
+                    cropped_planar_yuv_buffer->StrideU(),
+                    cropped_planar_yuv_buffer->DataV(),
+                    cropped_planar_yuv_buffer->StrideV(),
+                    nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
+                    nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(),
+                    planar_yuv_buffer->width(), planar_yuv_buffer->height());
cropped_buffer = nv12_buffer;
}