Renamed FrameObject to EncodedFrame.

The plan is to:
 1. Move FrameObject to api/video.
 2. Rename FrameObject to EncodedFrame.
 3. Move EncodedFrame out of the video_coding namespace.

This is the second CL of the plan above.
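
For context, the compatibility shim this step relies on boils down to the pattern below. This is a minimal sketch only (the real EncodedFrame inherits webrtc::VCMEncodedFrame and carries the frame metadata shown in the diff); it illustrates why downstream code that still spells the old name keeps compiling.

#include <cstdint>

namespace webrtc {
namespace video_coding {

// New canonical name (simplified; the real class inherits
// webrtc::VCMEncodedFrame and carries picture id, references, etc.).
class EncodedFrame {
 public:
  virtual ~EncodedFrame() = default;
  virtual bool GetBitstream(uint8_t* destination) const = 0;
};

// Temporary alias so downstream code that still says FrameObject
// keeps compiling until it migrates to the new name.
using FrameObject = EncodedFrame;

}  // namespace video_coding
}  // namespace webrtc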

Bug: webrtc:8909
Change-Id: I5e76a0a3b306156b8bc1de67834b4adf14bebef9
Reviewed-on: https://webrtc-review.googlesource.com/56182
Commit-Queue: Philip Eliasson <philipel@webrtc.org>
Commit-Queue: Tommi <tommi@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22158}
philipel 2018-02-22 14:35:06 +01:00 committed by Commit Bot
parent a5c735f5d9
commit e7c891f953
14 changed files with 49 additions and 46 deletions

View file

@ -16,14 +16,13 @@
namespace webrtc {
namespace video_coding {
// TODO(philipel): Rename FrameObject to EncodedFrame.
// TODO(philipel): Remove webrtc::VCMEncodedFrame inheritance.
class FrameObject : public webrtc::VCMEncodedFrame {
class EncodedFrame : public webrtc::VCMEncodedFrame {
public:
static const uint8_t kMaxFrameReferences = 5;
FrameObject() = default;
virtual ~FrameObject() {}
EncodedFrame() = default;
virtual ~EncodedFrame() {}
virtual bool GetBitstream(uint8_t* destination) const = 0;
@ -59,6 +58,9 @@ class FrameObject : public webrtc::VCMEncodedFrame {
bool inter_layer_predicted = false;
};
// TODO(philipel): Remove this when downstream projects have been updated.
using FrameObject = EncodedFrame;
} // namespace video_coding
} // namespace webrtc

View file

@ -60,7 +60,7 @@ FrameBuffer::~FrameBuffer() {}
FrameBuffer::ReturnReason FrameBuffer::NextFrame(
int64_t max_wait_time_ms,
std::unique_ptr<FrameObject>* frame_out,
std::unique_ptr<EncodedFrame>* frame_out,
bool keyframe_required) {
TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
int64_t latest_return_time_ms =
@ -106,7 +106,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
continue;
}
FrameObject* frame = frame_it->second.frame.get();
EncodedFrame* frame = frame_it->second.frame.get();
if (keyframe_required && !frame->is_keyframe())
continue;
@ -134,7 +134,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
rtc::CritScope lock(&crit_);
now_ms = clock_->TimeInMilliseconds();
if (next_frame_it_ != frames_.end()) {
std::unique_ptr<FrameObject> frame =
std::unique_ptr<EncodedFrame> frame =
std::move(next_frame_it_->second.frame);
if (!frame->delayed_by_retransmission()) {
@ -208,7 +208,8 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
return kTimeout;
}
bool FrameBuffer::HasBadRenderTiming(const FrameObject& frame, int64_t now_ms) {
bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
int64_t now_ms) {
// Assume that render timing errors are due to changes in the video stream.
int64_t render_time_ms = frame.RenderTimeMs();
const int64_t kMaxVideoDelayMs = 10000;
@ -255,7 +256,7 @@ void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
jitter_estimator_->UpdateRtt(rtt_ms);
}
bool FrameBuffer::ValidReferences(const FrameObject& frame) const {
bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
if (frame.picture_id < 0)
return false;
@ -275,7 +276,7 @@ bool FrameBuffer::ValidReferences(const FrameObject& frame) const {
return true;
}
void FrameBuffer::UpdatePlayoutDelays(const FrameObject& frame) {
void FrameBuffer::UpdatePlayoutDelays(const EncodedFrame& frame) {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdatePlayoutDelays");
PlayoutDelay playout_delay = frame.EncodedImage().playout_delay_;
if (playout_delay.min_ms >= 0)
@ -285,7 +286,7 @@ void FrameBuffer::UpdatePlayoutDelays(const FrameObject& frame) {
timing_->set_max_playout_delay(playout_delay.max_ms);
}
int64_t FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
RTC_DCHECK(frame);
if (stats_callback_)
@ -459,7 +460,7 @@ void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
}
}
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
FrameMap::iterator info) {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
FrameKey key(frame.picture_id, frame.spatial_layer);

View file

@ -16,7 +16,7 @@
#include <memory>
#include <utility>
#include "modules/video_coding/frame_object.h"
#include "api/video/encoded_frame.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/inter_frame_delay.h"
#include "rtc_base/constructormagic.h"
@ -47,7 +47,7 @@ class FrameBuffer {
// Insert a frame into the frame buffer. Returns the picture id
// of the last continuous frame or -1 if there is no continuous frame.
int64_t InsertFrame(std::unique_ptr<FrameObject> frame);
int64_t InsertFrame(std::unique_ptr<EncodedFrame> frame);
// Get the next frame for decoding. Will return at latest after
// |max_wait_time_ms|.
@ -57,7 +57,7 @@ class FrameBuffer {
// kTimeout.
// - If the FrameBuffer is stopped then it will return kStopped.
ReturnReason NextFrame(int64_t max_wait_time_ms,
std::unique_ptr<FrameObject>* frame_out,
std::unique_ptr<EncodedFrame>* frame_out,
bool keyframe_required = false);
// Tells the FrameBuffer which protection mode that is in use. Affects
@ -120,18 +120,18 @@ class FrameBuffer {
// If this frame is continuous or not.
bool continuous = false;
// The actual FrameObject.
std::unique_ptr<FrameObject> frame;
// The actual EncodedFrame.
std::unique_ptr<EncodedFrame> frame;
};
using FrameMap = std::map<FrameKey, FrameInfo>;
// Check that the references of |frame| are valid.
bool ValidReferences(const FrameObject& frame) const;
bool ValidReferences(const EncodedFrame& frame) const;
// Updates the minimal and maximal playout delays
// depending on the frame.
void UpdatePlayoutDelays(const FrameObject& frame)
void UpdatePlayoutDelays(const EncodedFrame& frame)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Update all directly dependent and indirectly dependent frames and mark
@ -151,7 +151,7 @@ class FrameBuffer {
// Update the corresponding FrameInfo of |frame| and all FrameInfos that
// |frame| references.
// Return false if |frame| will never be decodable, true otherwise.
bool UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
bool UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
FrameMap::iterator info)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
@ -161,7 +161,7 @@ class FrameBuffer {
void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
bool HasBadRenderTiming(const FrameObject& frame, int64_t now_ms)
bool HasBadRenderTiming(const EncodedFrame& frame, int64_t now_ms)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
FrameMap frames_ RTC_GUARDED_BY(crit_);
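
The InsertFrame()/NextFrame() comments above describe the buffer's producer/consumer contract. Below is a hedged usage sketch, not code from this CL: the function name, header path, and FrameBuffer construction are assumptions, and only the kStopped and kTimeout return reasons appear in this diff.

#include <cstdint>
#include <memory>
#include <utility>

#include "modules/video_coding/frame_buffer2.h"  // Assumed header for FrameBuffer.

// Hypothetical caller; constructing the FrameBuffer (clock, jitter estimator,
// timing, stats callback) is omitted here.
void FeedAndDecodeOnce(
    webrtc::video_coding::FrameBuffer* buffer,
    std::unique_ptr<webrtc::video_coding::EncodedFrame> frame) {
  // InsertFrame() returns the picture id of the last continuous frame,
  // or -1 if there is no continuous frame.
  const int64_t last_continuous_pid = buffer->InsertFrame(std::move(frame));
  if (last_continuous_pid != -1) {
    // A caller like VideoReceiveStream would notify the receiver here
    // (see FrameContinuous() in the later hunks).
  }

  // Wait at most 200 ms for the next decodable frame.
  std::unique_ptr<webrtc::video_coding::EncodedFrame> next;
  webrtc::video_coding::FrameBuffer::ReturnReason res =
      buffer->NextFrame(200, &next, /*keyframe_required=*/false);
  if (res == webrtc::video_coding::FrameBuffer::ReturnReason::kStopped)
    return;  // The buffer was stopped; bail out.
  if (next) {
    // Hand |next| to the decoder.
  }
}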

View file

@ -86,7 +86,7 @@ class VCMJitterEstimatorMock : public VCMJitterEstimator {
MOCK_METHOD1(GetJitterEstimate, int(double rttMultiplier));
};
class FrameObjectFake : public FrameObject {
class FrameObjectFake : public EncodedFrame {
public:
bool GetBitstream(uint8_t* destination) const override { return true; }
@ -155,7 +155,7 @@ class TestFrameBuffer2 : public ::testing::Test {
bool inter_layer_predicted,
T... refs) {
static_assert(sizeof...(refs) <= kMaxReferences,
"To many references specified for FrameObject.");
"To many references specified for EncodedFrame.");
std::array<uint16_t, sizeof...(refs)> references = {
{rtc::checked_cast<uint16_t>(refs)...}};
@ -174,7 +174,7 @@ class TestFrameBuffer2 : public ::testing::Test {
void ExtractFrame(int64_t max_wait_time = 0, bool keyframe_required = false) {
crit_.Enter();
if (max_wait_time == 0) {
std::unique_ptr<FrameObject> frame;
std::unique_ptr<EncodedFrame> frame;
FrameBuffer::ReturnReason res =
buffer_.NextFrame(0, &frame, keyframe_required);
if (res != FrameBuffer::ReturnReason::kStopped)
@ -213,7 +213,7 @@ class TestFrameBuffer2 : public ::testing::Test {
if (tfb->tear_down_)
return;
std::unique_ptr<FrameObject> frame;
std::unique_ptr<EncodedFrame> frame;
FrameBuffer::ReturnReason res =
tfb->buffer_.NextFrame(tfb->max_wait_time_, &frame);
if (res != FrameBuffer::ReturnReason::kStopped)
@ -228,7 +228,7 @@ class TestFrameBuffer2 : public ::testing::Test {
VCMTimingFake timing_;
::testing::NiceMock<VCMJitterEstimatorMock> jitter_estimator_;
FrameBuffer buffer_;
std::vector<std::unique_ptr<FrameObject>> frames_;
std::vector<std::unique_ptr<EncodedFrame>> frames_;
Random rand_;
::testing::NiceMock<VCMReceiveStatisticsCallbackMock> stats_callback_;

View file

@ -32,11 +32,11 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
VCMPacket* first_packet = packet_buffer_->GetPacket(first_seq_num);
RTC_CHECK(first_packet);
// RtpFrameObject members
// EncodedFrame members
frame_type_ = first_packet->frameType;
codec_type_ = first_packet->codec;
// TODO(philipel): Remove when encoded image is replaced by FrameObject.
// TODO(philipel): Remove when encoded image is replaced by EncodedFrame.
// VCMEncodedFrame members
CopyCodecSpecific(&first_packet->video_header);
_completeFrame = true;
@ -68,7 +68,7 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
_encodedWidth = first_packet->width;
_encodedHeight = first_packet->height;
// FrameObject members
// EncodedFrame members
timestamp = first_packet->timestamp;
VCMPacket* last_packet = packet_buffer_->GetPacket(last_seq_num);

View file

@ -21,7 +21,7 @@ namespace video_coding {
class PacketBuffer;
class RtpFrameObject : public FrameObject {
class RtpFrameObject : public EncodedFrame {
public:
RtpFrameObject(PacketBuffer* packet_buffer,
uint16_t first_seq_num,

View file

@ -30,7 +30,6 @@ class Clock;
namespace video_coding {
class FrameObject;
class RtpFrameObject;
// A received frame is a frame which has received all its packets.

View file

@ -26,7 +26,7 @@
namespace webrtc {
namespace video_coding {
class FrameObject;
class EncodedFrame;
class RtpFrameObject;
// A complete frame is a frame which has received all its packets and all its
@ -34,7 +34,7 @@ class RtpFrameObject;
class OnCompleteFrameCallback {
public:
virtual ~OnCompleteFrameCallback() {}
virtual void OnCompleteFrame(std::unique_ptr<FrameObject> frame) = 0;
virtual void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) = 0;
};
class RtpFrameReferenceFinder {
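
For classes that implement OnCompleteFrameCallback, the migration is mechanical: only the parameter type of the override changes from FrameObject to EncodedFrame. A minimal, hypothetical implementer under that assumption (the class name and header path are illustrative only):

#include <memory>
#include <utility>

#include "modules/video_coding/rtp_frame_reference_finder.h"  // Assumed header.

// Hypothetical consumer; mirrors the pattern the later hunks apply to
// RtpVideoStreamReceiver and VideoReceiveStream.
class FrameConsumer : public webrtc::video_coding::OnCompleteFrameCallback {
 public:
  void OnCompleteFrame(
      std::unique_ptr<webrtc::video_coding::EncodedFrame> frame) override {
    // Typically forwarded to a FrameBuffer; stored here for brevity.
    last_frame_ = std::move(frame);
  }

 private:
  std::unique_ptr<webrtc::video_coding::EncodedFrame> last_frame_;
};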

View file

@ -64,7 +64,7 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
uint16_t Rand() { return rand_.Rand<uint16_t>(); }
void OnCompleteFrame(std::unique_ptr<FrameObject> frame) override {
void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override {
int64_t pid = frame->picture_id;
uint16_t sidx = frame->spatial_layer;
auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
@ -260,8 +260,9 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
return f1.first < f2.first;
}
};
std::map<std::pair<int64_t, uint8_t>, std::unique_ptr<FrameObject>, FrameComp>
frames_from_callback_;
std::
map<std::pair<int64_t, uint8_t>, std::unique_ptr<EncodedFrame>, FrameComp>
frames_from_callback_;
};
TEST_F(TestRtpFrameReferenceFinder, PaddingPackets) {

View file

@ -395,7 +395,7 @@ void RtpVideoStreamReceiver::OnReceivedFrame(
}
void RtpVideoStreamReceiver::OnCompleteFrame(
std::unique_ptr<video_coding::FrameObject> frame) {
std::unique_ptr<video_coding::EncodedFrame> frame) {
{
rtc::CritScope lock(&last_seq_num_cs_);
video_coding::RtpFrameObject* rtp_frame =

View file

@ -135,7 +135,7 @@ class RtpVideoStreamReceiver : public RtpData,
// Implements OnCompleteFrameCallback.
void OnCompleteFrame(
std::unique_ptr<video_coding::FrameObject> frame) override;
std::unique_ptr<video_coding::EncodedFrame> frame) override;
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;

View file

@ -61,14 +61,14 @@ class MockOnCompleteFrameCallback
public:
MockOnCompleteFrameCallback() : buffer_(rtc::ByteBuffer::ORDER_NETWORK) {}
MOCK_METHOD1(DoOnCompleteFrame, void(video_coding::FrameObject* frame));
MOCK_METHOD1(DoOnCompleteFrame, void(video_coding::EncodedFrame* frame));
MOCK_METHOD1(DoOnCompleteFrameFailNullptr,
void(video_coding::FrameObject* frame));
void(video_coding::EncodedFrame* frame));
MOCK_METHOD1(DoOnCompleteFrameFailLength,
void(video_coding::FrameObject* frame));
void(video_coding::EncodedFrame* frame));
MOCK_METHOD1(DoOnCompleteFrameFailBitstream,
void(video_coding::FrameObject* frame));
void OnCompleteFrame(std::unique_ptr<video_coding::FrameObject> frame) {
void(video_coding::EncodedFrame* frame));
void OnCompleteFrame(std::unique_ptr<video_coding::EncodedFrame> frame) {
if (!frame) {
DoOnCompleteFrameFailNullptr(nullptr);
return;

View file

@ -355,7 +355,7 @@ void VideoReceiveStream::RequestKeyFrame() {
}
void VideoReceiveStream::OnCompleteFrame(
std::unique_ptr<video_coding::FrameObject> frame) {
std::unique_ptr<video_coding::EncodedFrame> frame) {
int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame));
if (last_continuous_pid != -1)
rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
@ -416,7 +416,7 @@ bool VideoReceiveStream::Decode() {
static const int kMaxWaitForKeyFrameMs = 200;
int wait_ms = keyframe_required_ ? kMaxWaitForKeyFrameMs : kMaxWaitForFrameMs;
std::unique_ptr<video_coding::FrameObject> frame;
std::unique_ptr<video_coding::EncodedFrame> frame;
// TODO(philipel): Call NextFrame with |keyframe_required| argument when
// downstream project has been fixed.
video_coding::FrameBuffer::ReturnReason res =

View file

@ -101,7 +101,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
// Implements video_coding::OnCompleteFrameCallback.
void OnCompleteFrame(
std::unique_ptr<video_coding::FrameObject> frame) override;
std::unique_ptr<video_coding::EncodedFrame> frame) override;
// Implements CallStatsObserver::OnRttUpdate
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;