Allow feeding a Receiver encoded video frame into a Sender Transform

Instead of crashing with a CHECK fail when an insertable stream of a
Video RTPSender is given a frame from an RTPReceiver's insertable
stream, construct a reasonable analogous sender frame and pass it
through to be sent.

A small step towards removing the split we have between Sender and
Receiver implementations of TransformableFrameInterface which just
confuses users of the API.

Counterpart to https://webrtc-review.googlesource.com/c/src/+/301181 in
the opposite direction.

Bug: chromium:1250638
Change-Id: If66da7d553f14979ff1c5b4e00bff715f58cfce0
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/303480
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Commit-Queue: Tony Herre <herre@google.com>
Reviewed-by: Palak Agarwal <agpalak@google.com>
Cr-Commit-Position: refs/heads/main@{#39963}
This commit is contained in:
Tony Herre 2023-04-26 12:01:22 +00:00 committed by WebRTC LUCI CQ
parent ea7f3d7230
commit 272b464e92
2 changed files with 77 additions and 12 deletions

View file

@@ -158,21 +158,34 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame(
// Forwards a transformed frame to the RTPSenderVideo for packetization.
//
// Runs on the transformation queue. Accepts frames from either direction:
// frames tagged Direction::kSender carry full sender-side state and are
// forwarded verbatim; frames tagged Direction::kReceiver (e.g. injected from
// an RTPReceiver's insertable stream) are converted into an analogous sender
// frame from their metadata instead of CHECK-failing.
//
// NOTE(review): the flattened diff retained the removed RTC_CHECK_EQ and the
// old unconditional SendVideo call ahead of the new if/else, which would send
// every sender frame twice and still crash on receiver frames; this body is
// the reconstructed post-commit function with that residue removed.
void RTPSenderVideoFrameTransformerDelegate::SendVideo(
    std::unique_ptr<TransformableFrameInterface> transformed_frame) const {
  RTC_DCHECK_RUN_ON(transformation_queue_.get());
  MutexLock lock(&sender_lock_);
  if (!sender_)
    return;
  if (transformed_frame->GetDirection() ==
      TransformableFrameInterface::Direction::kSender) {
    // Sender-originated frame: all sender-side fields (capture time, header,
    // expected retransmission time) are available directly on the frame.
    auto* transformed_video_frame =
        static_cast<TransformableVideoSenderFrame*>(transformed_frame.get());
    sender_->SendVideo(
        transformed_video_frame->GetPayloadType(),
        transformed_video_frame->GetCodecType(),
        transformed_video_frame->GetTimestamp(),
        transformed_video_frame->GetCaptureTimeMs(),
        transformed_video_frame->GetData(),
        transformed_video_frame->GetHeader(),
        transformed_video_frame->GetExpectedRetransmissionTimeMs(),
        transformed_video_frame->Metadata().GetCsrcs());
  } else {
    // Receiver-originated frame: rebuild the RTPVideoHeader from the frame's
    // metadata and give neutral defaults to the fields only a sender frame
    // has (capture_time_ms = 0, expected_retransmission_time_ms = nullopt).
    auto* transformed_video_frame =
        static_cast<TransformableVideoFrameInterface*>(transformed_frame.get());
    VideoFrameMetadata metadata = transformed_video_frame->Metadata();
    sender_->SendVideo(
        transformed_video_frame->GetPayloadType(), metadata.GetCodec(),
        transformed_video_frame->GetTimestamp(),
        /*capture_time_ms=*/0, transformed_video_frame->GetData(),
        RTPVideoHeader::FromMetadata(metadata),
        /*expected_retransmission_time_ms=*/absl::nullopt,
        metadata.GetCsrcs());
  }
}
void RTPSenderVideoFrameTransformerDelegate::SetVideoStructureUnderLock(

View file

@@ -12,6 +12,7 @@
#include <utility>
#include "api/test/mock_transformable_video_frame.h"
#include "rtc_base/event.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -22,6 +23,8 @@ namespace webrtc {
namespace {
using ::testing::_;
using ::testing::NiceMock;
using ::testing::Return;
using ::testing::SaveArg;
using ::testing::WithoutArgs;
@@ -217,5 +220,54 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, MetadataAfterSetMetadata) {
EXPECT_EQ(metadata.GetCsrcs(), actual_metadata.GetCsrcs());
}
// Regression test: a frame whose GetDirection() reports kReceiver (as handed
// out by an RTPReceiver's insertable stream) must be accepted by the sender
// delegate and surface as an RTPSenderVideo::SendVideo call, not a CHECK
// failure. Sender-only arguments are expected to be defaulted:
// capture_time_ms == 0 and expected_retransmission_time_ms == absl::nullopt.
TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
       ReceiverFrameConvertedToSenderFrame) {
  auto delegate = rtc::make_ref_counted<RTPSenderVideoFrameTransformerDelegate>(
      &test_sender_, frame_transformer_,
      /*ssrc=*/1111, /*csrcs=*/std::vector<uint32_t>(),
      time_controller_.CreateTaskQueueFactory().get());
  const uint8_t payload_type = 1;
  const uint32_t timestamp = 2;
  const std::vector<uint32_t> frame_csrcs = {123, 456, 789};
  // Mock frame tagged with the receiver direction — the case under test.
  auto mock_receiver_frame =
      std::make_unique<NiceMock<MockTransformableVideoFrame>>();
  ON_CALL(*mock_receiver_frame, GetDirection)
      .WillByDefault(Return(TransformableFrameInterface::Direction::kReceiver));
  // Metadata supplies the codec, codec specifics and CSRCs that the sender
  // path reconstructs the RTPVideoHeader from.
  VideoFrameMetadata metadata;
  metadata.SetCodec(kVideoCodecVP8);
  metadata.SetRTPVideoHeaderCodecSpecifics(RTPVideoHeaderVP8());
  metadata.SetCsrcs(frame_csrcs);
  ON_CALL(*mock_receiver_frame, Metadata).WillByDefault(Return(metadata));
  // Minimal 1-byte payload; the same view is matched in the expectation below.
  rtc::ArrayView<const uint8_t> buffer =
      (rtc::ArrayView<const uint8_t>)*EncodedImageBuffer::Create(1);
  ON_CALL(*mock_receiver_frame, GetData).WillByDefault(Return(buffer));
  ON_CALL(*mock_receiver_frame, GetPayloadType)
      .WillByDefault(Return(payload_type));
  ON_CALL(*mock_receiver_frame, GetTimestamp).WillByDefault(Return(timestamp));
  // Capture the sink callback registered during Init() so the mock frame can
  // be injected as if it had come back from the frame transformer.
  rtc::scoped_refptr<TransformedFrameCallback> callback;
  EXPECT_CALL(*frame_transformer_, RegisterTransformedFrameSinkCallback)
      .WillOnce(SaveArg<0>(&callback));
  delegate->Init();
  ASSERT_TRUE(callback);
  rtc::Event event;
  // Expect SendVideo with values from the mock frame's metadata plus the
  // defaulted sender-only arguments. The event signals completion because
  // SendVideo runs asynchronously on the delegate's transformation queue.
  EXPECT_CALL(test_sender_,
              SendVideo(payload_type, absl::make_optional(kVideoCodecVP8),
                        timestamp, /*capture_time_ms=*/0, buffer, _,
                        /*expected_retransmission_time_ms_=*/
                        (absl::optional<int64_t>)absl::nullopt, frame_csrcs))
      .WillOnce(WithoutArgs([&] {
        event.Set();
        return true;
      }));
  callback->OnTransformedFrame(std::move(mock_receiver_frame));
  // Wait (bounded) for the queued SendVideo to fire.
  event.Wait(TimeDelta::Seconds(1));
}
} // namespace
} // namespace webrtc