Mirror of https://github.com/mollyim/webrtc.git (synced 2025-05-16 23:30:48 +01:00)
Running FrameBuffer on task queue.

This prepares for running WebRTC in simulated time, where event::Wait based
timing doesn't work.

Bug: webrtc:10365
Change-Id: Ia0f9b1cc8e3c8c27a38e45b40487050a4699d8cf
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/129962
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Sebastian Jansson <srte@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27422}
parent d98cbd8f91
commit 13943b7b7f

5 changed files with 519 additions and 255 deletions
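The heart of the change is a second, callback-based FrameBuffer::NextFrame that delivers frames on the buffer's own task queue instead of blocking the calling thread in event::Wait, which is what makes the buffer usable under simulated time. A minimal sketch of the two entry points, using only the signatures declared in the new frame_buffer2.h (the caller and the frame_buffer variable below are hypothetical):

  // Old, blocking style (still used when no task queue is configured).
  std::unique_ptr<video_coding::EncodedFrame> frame;
  video_coding::FrameBuffer::ReturnReason reason =
      frame_buffer->NextFrame(/*max_wait_time_ms=*/200, &frame);

  // New, callback style: the handler runs on the FrameBuffer's task queue,
  // so nothing sits in event::Wait while (simulated) time advances.
  frame_buffer->NextFrame(
      /*max_wait_time_ms=*/200, /*keyframe_required=*/false,
      [](std::unique_ptr<video_coding::EncodedFrame> frame,
         video_coding::FrameBuffer::ReturnReason reason) {
        // kFrameFound: decode |frame|; kTimeout: |frame| is null.
      });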
modules/video_coding/BUILD.gn

@@ -151,6 +151,7 @@ rtc_static_library("video_coding") {
     "..:module_api_public",
     "../../api:fec_controller_api",
     "../../api:rtp_headers",
+    "../../api/task_queue:global_task_queue_factory",
     "../../api/units:data_rate",
     "../../api/video:builtin_video_bitrate_allocator_factory",
     "../../api/video:encoded_frame",

@@ -170,6 +171,7 @@ rtc_static_library("video_coding") {
     "../../rtc_base/experiments:jitter_upper_bound_experiment",
     "../../rtc_base/experiments:rtt_mult_experiment",
     "../../rtc_base/system:fallthrough",
+    "../../rtc_base/task_utils:repeating_task",
     "../../rtc_base/third_party/base64",
     "../../rtc_base/time:timestamp_extrapolator",
     "../../system_wrappers",

modules/video_coding/frame_buffer2.cc

@@ -17,6 +17,8 @@
 #include <utility>
 #include <vector>
 
+#include "absl/memory/memory.h"
+#include "api/task_queue/global_task_queue_factory.h"
 #include "api/video/encoded_image.h"
 #include "api/video/video_timing.h"
 #include "modules/video_coding/include/video_coding_defines.h"
@@ -45,14 +47,30 @@ constexpr int kMaxFramesHistory = 1 << 13;
 constexpr int kMaxAllowedFrameDelayMs = 5;
 
 constexpr int64_t kLogNonDecodedIntervalMs = 5000;
+
+std::unique_ptr<TaskQueueBase, TaskQueueDeleter> CreateQueue(
+    TaskQueueFactory* task_queue_factory) {
+  if (!task_queue_factory)
+    task_queue_factory = &GlobalTaskQueueFactory();
+  return task_queue_factory->CreateTaskQueue("FrameBuffer",
+                                             TaskQueueFactory::Priority::HIGH);
+}
 }  // namespace
 
 FrameBuffer::FrameBuffer(Clock* clock,
+                         VCMJitterEstimator* jitter_estimator,
+                         VCMTiming* timing,
+                         VCMReceiveStatisticsCallback* stats_proxy)
+    : FrameBuffer(clock, nullptr, jitter_estimator, timing, stats_proxy) {}
+
+FrameBuffer::FrameBuffer(Clock* clock,
+                         TaskQueueFactory* task_queue_factory,
                          VCMJitterEstimator* jitter_estimator,
                          VCMTiming* timing,
                          VCMReceiveStatisticsCallback* stats_callback)
     : decoded_frames_history_(kMaxFramesHistory),
       clock_(clock),
+      use_task_queue_(task_queue_factory != nullptr),
       jitter_estimator_(jitter_estimator),
       timing_(timing),
       inter_frame_delay_(clock_->TimeInMilliseconds()),
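The buffer now has two constructors: the original four-argument form delegates with a null TaskQueueFactory, and CreateQueue() falls back to GlobalTaskQueueFactory() in that case, so a queue object always exists while use_task_queue_ records whether the caller actually opted in. A hedged construction sketch (the local variables are placeholders, not part of the patch):

  // Legacy behaviour: blocking NextFrame(), driven by new_continuous_frame_event_.
  video_coding::FrameBuffer legacy_buffer(clock, jitter_estimator, timing,
                                          stats_proxy);

  // Task-queue behaviour: frames are handed to the NextFrame() callback on the
  // high-priority "FrameBuffer" queue created from |task_queue_factory|.
  video_coding::FrameBuffer queued_buffer(clock, task_queue_factory,
                                          jitter_estimator, timing, stats_proxy);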
@@ -61,14 +79,69 @@ FrameBuffer::FrameBuffer(Clock* clock,
       stats_callback_(stats_callback),
       last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
       add_rtt_to_playout_delay_(
-          webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")) {}
+          webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")),
+      task_queue_(CreateQueue(task_queue_factory)) {}
 
 FrameBuffer::~FrameBuffer() {}
 
+void FrameBuffer::NextFrame(
+    int64_t max_wait_time_ms,
+    bool keyframe_required,
+    std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler) {
+  RTC_DCHECK(use_task_queue_);
+  TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
+  int64_t latest_return_time_ms =
+      clock_->TimeInMilliseconds() + max_wait_time_ms;
+  task_queue_.PostTask([=] {
+    RTC_DCHECK_RUN_ON(&task_queue_);
+    rtc::CritScope lock(&crit_);
+    if (stopped_) {
+      return;
+    }
+    latest_return_time_ms_ = latest_return_time_ms;
+    keyframe_required_ = keyframe_required;
+    frame_handler_ = handler;
+    NextFrameOnQueue();
+  });
+}
+
+void FrameBuffer::NextFrameOnQueue() {
+  RTC_DCHECK(use_task_queue_);
+  RTC_DCHECK(!callback_task_.Running());
+  int64_t wait_ms = UpdateFramesToDecode(clock_->TimeInMilliseconds());
+  callback_task_ = RepeatingTaskHandle::DelayedStart(
+      task_queue_.Get(), TimeDelta::ms(wait_ms), [this] {
+        // If this task has not been cancelled, we did not get any new frames
+        // while waiting. Continue with frame delivery.
+        RTC_DCHECK_RUN_ON(&task_queue_);
+        rtc::CritScope lock(&crit_);
+        if (!frames_to_decode_.empty()) {
+          // We have frames, deliver!
+          frame_handler_(absl::WrapUnique(GetFrameToDecode()), kFrameFound);
+          frame_handler_ = {};
+          callback_task_.Stop();
+          return TimeDelta::Zero();  // Ignored.
+        } else if (clock_->TimeInMilliseconds() >= latest_return_time_ms_) {
+          // We have timed out, signal this and stop repeating.
+          frame_handler_(nullptr, kTimeout);
+          frame_handler_ = {};
+          callback_task_.Stop();
+          return TimeDelta::Zero();  // Ignored.
+        } else {
+          // If there's no frames to decode and there is still time left, it
+          // means that the frame buffer was cleared between creation and
+          // execution of this task. Continue waiting for the remaining time.
+          int64_t wait_ms = UpdateFramesToDecode(clock_->TimeInMilliseconds());
+          return TimeDelta::ms(wait_ms);
+        }
+      });
+}
+
 FrameBuffer::ReturnReason FrameBuffer::NextFrame(
     int64_t max_wait_time_ms,
     std::unique_ptr<EncodedFrame>* frame_out,
     bool keyframe_required) {
+  RTC_DCHECK(!use_task_queue_);
   TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
   int64_t latest_return_time_ms =
       clock_->TimeInMilliseconds() + max_wait_time_ms;
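NextFrameOnQueue() leans on RepeatingTaskHandle: the delayed task returns the delay until its next run, delivery or timeout stops the repetition, and InsertFrame() can cancel and re-arm it early when a better frame becomes continuous. A minimal sketch of that pattern with the same rtc_base API (the Poller class is illustrative only, not part of the patch):

// Illustrative repeating-task pattern, mirroring NextFrameOnQueue().
class Poller {
 public:
  explicit Poller(rtc::TaskQueue* queue) : queue_(queue) {}

  void Start(int64_t first_wait_ms) {
    task_ = RepeatingTaskHandle::DelayedStart(
        queue_->Get(), TimeDelta::ms(first_wait_ms), [this] {
          if (ResultReady()) {
            Deliver();
            task_.Stop();              // No further runs.
            return TimeDelta::Zero();  // Ignored once stopped.
          }
          return TimeDelta::ms(10);    // Try again in 10 ms.
        });
  }

 private:
  bool ResultReady() const { return false; }  // Placeholder condition.
  void Deliver() {}
  rtc::TaskQueue* queue_;
  RepeatingTaskHandle task_;
};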
@@ -83,18 +156,42 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
       if (stopped_)
         return kStopped;
 
-      wait_ms = max_wait_time_ms;
-
-      // Need to hold |crit_| in order to access frames_to_decode_. therefore we
+      // Need to hold |crit_| in order to access the members. therefore we
       // set it here in the loop instead of outside the loop in order to not
       // acquire the lock unnecessarily.
+      keyframe_required_ = keyframe_required;
+      latest_return_time_ms_ = latest_return_time_ms;
+      wait_ms = UpdateFramesToDecode(now_ms);
+    }
+  } while (new_continuous_frame_event_.Wait(wait_ms));
+
+  {
+    rtc::CritScope lock(&crit_);
+
+    if (!frames_to_decode_.empty()) {
+      frame_out->reset(GetFrameToDecode());
+      return kFrameFound;
+    }
+  }
+
+  if (latest_return_time_ms - clock_->TimeInMilliseconds() > 0) {
+    // If |next_frame_it_ == frames_.end()| and there is still time left, it
+    // means that the frame buffer was cleared as the thread in this function
+    // was waiting to acquire |crit_| in order to return. Wait for the
+    // remaining time and then return.
+    return NextFrame(latest_return_time_ms - now_ms, frame_out);
+  }
+  return kTimeout;
+}
+
+int64_t FrameBuffer::UpdateFramesToDecode(int64_t now_ms) {
+  int64_t wait_ms = latest_return_time_ms_ - now_ms;
   frames_to_decode_.clear();
 
   // |last_continuous_frame_| may be empty below, but nullopt is smaller
   // than everything else and loop will immediately terminate as expected.
   for (auto frame_it = frames_.begin();
-       frame_it != frames_.end() &&
-       frame_it->first <= last_continuous_frame_;
+       frame_it != frames_.end() && frame_it->first <= last_continuous_frame_;
        ++frame_it) {
     if (!frame_it->second.continuous ||
         frame_it->second.num_missing_decodable > 0) {

@@ -103,7 +200,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
 
     EncodedFrame* frame = frame_it->second.frame.get();
 
-    if (keyframe_required && !frame->is_keyframe())
+    if (keyframe_required_ && !frame->is_keyframe())
       continue;
 
     auto last_decoded_frame_timestamp =

@@ -126,8 +223,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
     // Gather all remaining frames for the same superframe.
     std::vector<FrameMap::iterator> current_superframe;
     current_superframe.push_back(frame_it);
-    bool last_layer_completed =
-        frame_it->second.frame->is_last_spatial_layer;
+    bool last_layer_completed = frame_it->second.frame->is_last_spatial_layer;
     FrameMap::iterator next_frame_it = frame_it;
     while (true) {
       ++next_frame_it;

@@ -146,14 +242,12 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
       }
       // All frames in the superframe should have the same timestamp.
       if (frame->Timestamp() != next_frame_it->second.frame->Timestamp()) {
-        RTC_LOG(LS_WARNING)
-            << "Frames in a single superframe have different"
+        RTC_LOG(LS_WARNING) << "Frames in a single superframe have different"
                                " timestamps. Skipping undecodable superframe.";
         break;
       }
       current_superframe.push_back(next_frame_it);
-      last_layer_completed =
-          next_frame_it->second.frame->is_last_spatial_layer;
+      last_layer_completed = next_frame_it->second.frame->is_last_spatial_layer;
     }
     // Check if the current superframe is complete.
     // TODO(bugs.webrtc.org/10064): consider returning all available to

@@ -165,8 +259,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
     frames_to_decode_ = std::move(current_superframe);
 
     if (frame->RenderTime() == -1) {
-      frame->SetRenderTime(
-          timing_->RenderTimeMs(frame->Timestamp(), now_ms));
+      frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms));
     }
     wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);
 

@@ -180,19 +273,17 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
 
         break;
       }
-    }  // rtc::Critscope lock(&crit_);
-
-    wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms - now_ms);
+  wait_ms = std::min<int64_t>(wait_ms, latest_return_time_ms_ - now_ms);
   wait_ms = std::max<int64_t>(wait_ms, 0);
-  } while (new_continuous_frame_event_.Wait(wait_ms));
+  return wait_ms;
+}
 
-  {
-    rtc::CritScope lock(&crit_);
-    now_ms = clock_->TimeInMilliseconds();
+EncodedFrame* FrameBuffer::GetFrameToDecode() {
+  int64_t now_ms = clock_->TimeInMilliseconds();
   // TODO(ilnik): remove |frames_out| use frames_to_decode_ directly.
   std::vector<EncodedFrame*> frames_out;
 
-  if (!frames_to_decode_.empty()) {
+  RTC_DCHECK(!frames_to_decode_.empty());
   bool superframe_delayed_by_retransmission = false;
   size_t superframe_size = 0;
   EncodedFrame* first_frame = frames_to_decode_[0]->second.frame.get();

@@ -202,8 +293,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
   if (HasBadRenderTiming(*first_frame, now_ms)) {
     jitter_estimator_->Reset();
     timing_->Reset();
-    render_time_ms =
-        timing_->RenderTimeMs(first_frame->Timestamp(), now_ms);
+    render_time_ms = timing_->RenderTimeMs(first_frame->Timestamp(), now_ms);
   }
 
   for (FrameMap::iterator& frame_it : frames_to_decode_) {

@@ -212,14 +302,12 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
 
     frame->SetRenderTime(render_time_ms);
 
-    superframe_delayed_by_retransmission |=
-        frame->delayed_by_retransmission();
+    superframe_delayed_by_retransmission |= frame->delayed_by_retransmission();
     receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime());
     superframe_size += frame->size();
 
     PropagateDecodability(frame_it->second);
-    decoded_frames_history_.InsertDecoded(frame_it->first,
-                                          frame->Timestamp());
+    decoded_frames_history_.InsertDecoded(frame_it->first, frame->Timestamp());
 
     // Remove decoded frame and all undecoded frames before it.
     frames_.erase(frames_.begin(), ++frame_it);

@@ -248,25 +336,12 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
 
     UpdateJitterDelay();
     UpdateTimingFrameInfo();
-    }
-    if (!frames_out.empty()) {
-      if (frames_out.size() == 1) {
-        frame_out->reset(frames_out[0]);
-      } else {
-        frame_out->reset(CombineAndDeleteFrames(frames_out));
-      }
-      return kFrameFound;
-    }
-  }  // rtc::Critscope lock(&crit_)
 
-  if (latest_return_time_ms - now_ms > 0) {
-    // If |next_frame_it_ == frames_.end()| and there is still time left, it
-    // means that the frame buffer was cleared as the thread in this function
-    // was waiting to acquire |crit_| in order to return. Wait for the
-    // remaining time and then return.
-    return NextFrame(latest_return_time_ms - now_ms, frame_out);
+  if (frames_out.size() == 1) {
+    return frames_out[0];
+  } else {
+    return CombineAndDeleteFrames(frames_out);
   }
-  return kTimeout;
 }
 
 bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
@@ -297,33 +372,63 @@ bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
   return false;
 }
 
+void FrameBuffer::SafePost(std::function<void()> func) {
+  if (!use_task_queue_) {
+    func();
+  } else {
+    task_queue_.PostTask(func);
+  }
+}
+
 void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
   TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
+  SafePost([this, mode] {
     rtc::CritScope lock(&crit_);
     protection_mode_ = mode;
+  });
 }
 
 void FrameBuffer::Start() {
   TRACE_EVENT0("webrtc", "FrameBuffer::Start");
+  SafePost([this] {
     rtc::CritScope lock(&crit_);
     stopped_ = false;
+  });
 }
 
 void FrameBuffer::Stop() {
   TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
+  if (!use_task_queue_) {
     rtc::CritScope lock(&crit_);
     stopped_ = true;
     new_continuous_frame_event_.Set();
+  } else {
+    rtc::Event done;
+    task_queue_.PostTask([this, &done] {
+      rtc::CritScope lock(&crit_);
+      stopped_ = true;
+      if (frame_handler_) {
+        RTC_DCHECK(callback_task_.Running());
+        callback_task_.Stop();
+        frame_handler_ = {};
+      }
+      done.Set();
+    });
+    done.Wait(rtc::Event::kForever);
+  }
 }
 
 void FrameBuffer::Clear() {
+  SafePost([this] {
     rtc::CritScope lock(&crit_);
     ClearFramesAndHistory();
+  });
 }
 
 void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
+  SafePost([this, rtt_ms] {
    rtc::CritScope lock(&crit_);
    jitter_estimator_->UpdateRtt(rtt_ms);
+  });
 }
 
 bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
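In task-queue mode Stop() must guarantee that no frame handler fires after it returns, so it posts a task that marks the buffer stopped and cancels any pending callback, then blocks on an rtc::Event until that task has run; VideoReceiveStream::Stop() further down uses the same rendezvous for its decode queue. The pattern in isolation (SomeObject and its members are hypothetical):

void SomeObject::Stop() {
  rtc::Event done;
  queue_.PostTask([this, &done] {
    stopped_ = true;  // State is only touched on the queue.
    done.Set();       // Wake the caller.
  });
  // |done| lives on the caller's stack, so Stop() must not return before the
  // posted task has executed.
  done.Wait(rtc::Event::kForever);
}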
@@ -384,6 +489,22 @@ bool FrameBuffer::IsCompleteSuperFrame(const EncodedFrame& frame) {
   return true;
 }
 
+void FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame,
+                              std::function<void(int64_t)> picture_id_handler) {
+  struct InsertFrameTask {
+    void operator()() {
+      RTC_DCHECK_RUN_ON(&frame_buffer->task_queue_);
+      int64_t last_continuous_pid = frame_buffer->InsertFrame(std::move(frame));
+      picture_id_handler(last_continuous_pid);
+    }
+    FrameBuffer* frame_buffer;
+    std::unique_ptr<EncodedFrame> frame;
+    std::function<void(int64_t)> picture_id_handler;
+  };
+  task_queue_.PostTask(
+      InsertFrameTask{this, std::move(frame), std::move(picture_id_handler)});
+}
+
 int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
   TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
   RTC_DCHECK(frame);
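The asynchronous InsertFrame() wraps its arguments in a small callable struct: the struct owns the std::unique_ptr<EncodedFrame>, so the move-only payload is moved onto the queue by value and the picture-id callback later runs on the FrameBuffer's task queue. The same shape works for any move-only work item, roughly like this (Consumer and the names below are illustrative, not from the patch):

struct ProcessFrameTask {
  void operator()() { consumer->Process(std::move(frame)); }
  Consumer* consumer;
  std::unique_ptr<video_coding::EncodedFrame> frame;
};

// Posted exactly like InsertFrameTask above:
// queue.PostTask(ProcessFrameTask{&consumer, std::move(frame)});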
@@ -487,9 +608,14 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
       last_continuous_picture_id = last_continuous_frame_->picture_id;
 
       // Since we now have new continuous frames there might be a better frame
-      // to return from NextFrame. Signal that thread so that it again can choose
-      // which frame to return.
+      // to return from NextFrame.
+      if (!use_task_queue_) {
         new_continuous_frame_event_.Set();
+      } else if (callback_task_.Running()) {
+        RTC_CHECK(frame_handler_);
+        callback_task_.Stop();
+        NextFrameOnQueue();
+      }
     }
 
   return last_continuous_picture_id;

modules/video_coding/frame_buffer2.h

@@ -27,6 +27,8 @@
 #include "rtc_base/event.h"
 #include "rtc_base/experiments/rtt_mult_experiment.h"
 #include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/repeating_task.h"
 #include "rtc_base/thread_annotations.h"
 
 namespace webrtc {
@@ -45,7 +47,13 @@ class FrameBuffer {
   FrameBuffer(Clock* clock,
               VCMJitterEstimator* jitter_estimator,
               VCMTiming* timing,
-              VCMReceiveStatisticsCallback* stats_proxy);
+              VCMReceiveStatisticsCallback* stats_callback);
+
+  FrameBuffer(Clock* clock,
+              TaskQueueFactory* task_queue_factory,
+              VCMJitterEstimator* jitter_estimator,
+              VCMTiming* timing,
+              VCMReceiveStatisticsCallback* stats_callback);
 
   virtual ~FrameBuffer();
 

@@ -54,6 +62,9 @@ class FrameBuffer {
   // TODO(philipel): Return a VideoLayerFrameId and not only the picture id.
   int64_t InsertFrame(std::unique_ptr<EncodedFrame> frame);
 
+  void InsertFrame(std::unique_ptr<EncodedFrame> frame,
+                   std::function<void(int64_t)> picture_id_handler);
+
   // Get the next frame for decoding. Will return at latest after
   // |max_wait_time_ms|.
   // - If a frame is available within |max_wait_time_ms| it will return

@@ -64,6 +75,10 @@ class FrameBuffer {
   ReturnReason NextFrame(int64_t max_wait_time_ms,
                          std::unique_ptr<EncodedFrame>* frame_out,
                          bool keyframe_required = false);
+  void NextFrame(
+      int64_t max_wait_time_ms,
+      bool keyframe_required,
+      std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler);
 
   // Tells the FrameBuffer which protection mode that is in use. Affects
   // the frame timing.

@@ -115,9 +130,16 @@ class FrameBuffer {
 
   using FrameMap = std::map<VideoLayerFrameId, FrameInfo>;
 
+  void SafePost(std::function<void()> func);
+
   // Check that the references of |frame| are valid.
   bool ValidReferences(const EncodedFrame& frame) const;
 
+  void NextFrameOnQueue() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+  int64_t UpdateFramesToDecode(int64_t now_ms)
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+  EncodedFrame* GetFrameToDecode() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
   // Update all directly dependent and indirectly dependent frames and mark
   // them as continuous if all their references has been fulfilled.
   void PropagateContinuity(FrameMap::iterator start)

@@ -158,9 +180,19 @@ class FrameBuffer {
   FrameMap frames_ RTC_GUARDED_BY(crit_);
   DecodedFramesHistory decoded_frames_history_ RTC_GUARDED_BY(crit_);
 
+  // TODO(srte): Remove this lock when always running on task queue.
   rtc::CriticalSection crit_;
   Clock* const clock_;
+  const bool use_task_queue_;
+
+  RepeatingTaskHandle callback_task_ RTC_GUARDED_BY(crit_);
+  std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)>
+      frame_handler_ RTC_GUARDED_BY(crit_);
+  int64_t latest_return_time_ms_ RTC_GUARDED_BY(crit_);
+  bool keyframe_required_ RTC_GUARDED_BY(crit_);
+
   rtc::Event new_continuous_frame_event_;
+
   VCMJitterEstimator* const jitter_estimator_ RTC_GUARDED_BY(crit_);
   VCMTiming* const timing_ RTC_GUARDED_BY(crit_);
   VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(crit_);

@@ -174,6 +206,8 @@ class FrameBuffer {
 
   const bool add_rtt_to_playout_delay_;
 
+  // Defined last so it is destroyed before other members.
+  rtc::TaskQueue task_queue_;
   RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameBuffer);
 };
 
video/video_receive_stream.cc

@@ -56,6 +56,10 @@
 namespace webrtc {
 
 namespace {
+
+using video_coding::EncodedFrame;
+using ReturnReason = video_coding::FrameBuffer::ReturnReason;
+
 constexpr int kMinBaseMinimumDelayMs = 0;
 constexpr int kMaxBaseMinimumDelayMs = 10000;
 

@@ -184,6 +188,8 @@ VideoReceiveStream::VideoReceiveStream(
       num_cpu_cores_(num_cpu_cores),
       process_thread_(process_thread),
       clock_(clock),
+      use_task_queue_(
+          !field_trial::IsDisabled("WebRTC-Video-DecodeOnTaskQueue")),
       decode_thread_(&DecodeThreadFunction,
                      this,
                      "DecodingThread",
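On the receive-stream side the new path is enabled by default and guarded by a kill switch: use_task_queue_ stays true unless the WebRTC-Video-DecodeOnTaskQueue field trial is explicitly disabled. Assuming the usual field-trial plumbing in system_wrappers/include/field_trial.h, the legacy decode thread could be forced in a test roughly like this (sketch, not part of the patch):

// The string must outlive all field_trial lookups.
static const char kTrials[] = "WebRTC-Video-DecodeOnTaskQueue/Disabled/";
webrtc::field_trial::InitFieldTrialsFromString(kTrials);
// Any VideoReceiveStream created afterwards falls back to decode_thread_.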
@@ -212,7 +218,10 @@
                                       .value_or(kMaxWaitForKeyFrameMs)),
       max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials()
                                  .MaxWaitForFrameMs()
-                                 .value_or(kMaxWaitForFrameMs)) {
+                                 .value_or(kMaxWaitForFrameMs)),
+      decode_queue_(task_queue_factory_->CreateTaskQueue(
+          "DecodingQueue",
+          TaskQueueFactory::Priority::HIGH)) {
   RTC_LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString();
 
   RTC_DCHECK(config_.renderer);

@@ -237,7 +246,8 @@
 
   jitter_estimator_.reset(new VCMJitterEstimator(clock_));
   frame_buffer_.reset(new video_coding::FrameBuffer(
-      clock_, jitter_estimator_.get(), timing_.get(), &stats_proxy_));
+      clock_, use_task_queue_ ? task_queue_factory_ : nullptr,
+      jitter_estimator_.get(), timing_.get(), &stats_proxy_));
 
   process_thread_->RegisterModule(&rtp_stream_sync_, RTC_FROM_HERE);
 

@@ -308,7 +318,7 @@ void VideoReceiveStream::SetSync(Syncable* audio_syncable) {
 void VideoReceiveStream::Start() {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&worker_sequence_checker_);
 
-  if (decode_thread_.IsRunning()) {
+  if (decoder_running_) {
     return;
   }
 

@@ -387,7 +397,17 @@ void VideoReceiveStream::Start() {
   // Start the decode thread
   video_receiver_.DecoderThreadStarting();
   stats_proxy_.DecoderThreadStarting();
+  if (!use_task_queue_) {
     decode_thread_.Start();
+  } else {
+    decode_queue_.PostTask([this] {
+      RTC_DCHECK_RUN_ON(&decode_queue_);
+      RTC_DCHECK(decoder_stopped_);
+      decoder_stopped_ = false;
+      StartNextDecode();
+    });
+  }
+  decoder_running_ = true;
   rtp_video_stream_receiver_.StartReceive();
 }
 
@@ -401,13 +421,24 @@ void VideoReceiveStream::Stop() {
   frame_buffer_->Stop();
   call_stats_->DeregisterStatsObserver(this);
 
-  if (decode_thread_.IsRunning()) {
+  if (decoder_running_) {
     // TriggerDecoderShutdown will release any waiting decoder thread and make
     // it stop immediately, instead of waiting for a timeout. Needs to be called
     // before joining the decoder thread.
     video_receiver_.TriggerDecoderShutdown();
+    if (!use_task_queue_) {
       decode_thread_.Stop();
+    } else {
+      rtc::Event done;
+      decode_queue_.PostTask([this, &done] {
+        RTC_DCHECK_RUN_ON(&decode_queue_);
+        decoder_stopped_ = true;
+        done.Set();
+      });
+      done.Wait(rtc::Event::kForever);
+    }
+    decoder_running_ = false;
 
     video_receiver_.DecoderThreadStopped();
     stats_proxy_.DecoderThreadStopped();
     // Deregister external decoders so they are no longer running during

@@ -511,10 +542,17 @@ void VideoReceiveStream::OnCompleteFrame(
     frame_maximum_playout_delay_ms_ = playout_delay.max_ms;
     UpdatePlayoutDelays();
   }
+  if (!use_task_queue_) {
     int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame));
     if (last_continuous_pid != -1)
       rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
+  } else {
+    frame_buffer_->InsertFrame(
+        std::move(frame), [this](int64_t last_continuous_pid) {
+          if (last_continuous_pid != -1)
+            rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
+        });
+  }
 }
 
 void VideoReceiveStream::OnData(uint64_t channel_id,
@@ -562,6 +600,51 @@ void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
   UpdatePlayoutDelays();
 }
 
+int64_t VideoReceiveStream::GetWaitMs() const {
+  return keyframe_required_ ? max_wait_for_keyframe_ms_
+                            : max_wait_for_frame_ms_;
+}
+
+void VideoReceiveStream::StartNextDecode() {
+  RTC_DCHECK(use_task_queue_);
+  TRACE_EVENT0("webrtc", "VideoReceiveStream::StartNextDecode");
+
+  struct DecodeTask {
+    void operator()() {
+      RTC_DCHECK_RUN_ON(&stream->decode_queue_);
+      if (stream->decoder_stopped_)
+        return;
+      if (frame) {
+        stream->HandleEncodedFrame(std::move(frame));
+      } else {
+        stream->HandleFrameBufferTimeout();
+      }
+    }
+    VideoReceiveStream* stream;
+    std::unique_ptr<EncodedFrame> frame;
+  };
+
+  // TODO(philipel): Call NextFrame with |keyframe_required| argument set when
+  // downstream project has been fixed.
+  frame_buffer_->NextFrame(
+      GetWaitMs(), /*keyframe_required*/ false,
+      [this](std::unique_ptr<EncodedFrame> frame, ReturnReason res) {
+        RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout);
+        RTC_DCHECK_EQ(frame != nullptr, res == ReturnReason::kFrameFound);
+        decode_queue_.PostTask(DecodeTask{this, std::move(frame)});
+        // Start the next decode after a delay or when the previous decode is
+        // finished (as it will be blocked by the queue).
+        constexpr int kMinDecodeIntervalMs = 1;
+        decode_queue_.PostDelayedTask(
+            [this] {
+              RTC_DCHECK_RUN_ON(&decode_queue_);
+              if (!decoder_stopped_)
+                StartNextDecode();
+            },
+            kMinDecodeIntervalMs);
+      });
+}
+
 void VideoReceiveStream::DecodeThreadFunction(void* ptr) {
   ScopedRegisterThreadForDebugging thread_dbg(RTC_FROM_HERE);
   while (static_cast<VideoReceiveStream*>(ptr)->Decode()) {
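With the task queue there is no decode loop thread: StartNextDecode() asks the FrameBuffer for a frame, the result is posted to decode_queue_ as a DecodeTask, and the completion handler re-arms itself through a short PostDelayedTask, so decoding proceeds as a chain of tasks that simply stops once decoder_stopped_ is set. The skeleton of that self-scheduling loop, reduced to the calls used above (everything else is hypothetical):

// Self-scheduling work loop on a task queue instead of a dedicated thread.
void PumpOnce(rtc::TaskQueue* queue, bool* stopped) {
  queue->PostDelayedTask(
      [queue, stopped] {
        if (*stopped)
          return;                  // Chain ends; nothing re-posts.
        // ... perform one unit of work ...
        PumpOnce(queue, stopped);  // Schedule the next iteration.
      },
      /*milliseconds=*/1);
}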
@@ -569,23 +652,30 @@ void VideoReceiveStream::DecodeThreadFunction(void* ptr) {
 }
 
 bool VideoReceiveStream::Decode() {
+  RTC_DCHECK(!use_task_queue_);
   TRACE_EVENT0("webrtc", "VideoReceiveStream::Decode");
 
-  const int wait_ms =
-      keyframe_required_ ? max_wait_for_keyframe_ms_ : max_wait_for_frame_ms_;
   std::unique_ptr<video_coding::EncodedFrame> frame;
   // TODO(philipel): Call NextFrame with |keyframe_required| argument when
   // downstream project has been fixed.
   video_coding::FrameBuffer::ReturnReason res =
-      frame_buffer_->NextFrame(wait_ms, &frame);
+      frame_buffer_->NextFrame(GetWaitMs(), &frame);
 
-  if (res == video_coding::FrameBuffer::ReturnReason::kStopped) {
+  if (res == ReturnReason::kStopped) {
     return false;
   }
 
   if (frame) {
+    RTC_DCHECK_EQ(res, ReturnReason::kFrameFound);
+    HandleEncodedFrame(std::move(frame));
+  } else {
+    RTC_DCHECK_EQ(res, ReturnReason::kTimeout);
+    HandleFrameBufferTimeout();
+  }
+  return true;
+}
+
+void VideoReceiveStream::HandleEncodedFrame(
+    std::unique_ptr<EncodedFrame> frame) {
   int64_t now_ms = clock_->TimeInMilliseconds();
-  RTC_DCHECK_EQ(res, video_coding::FrameBuffer::ReturnReason::kFrameFound);
 
   // Current OnPreDecode only cares about QP for VP8.
   int qp = -1;

@@ -606,16 +696,16 @@ bool VideoReceiveStream::Decode() {
     if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME)
       RequestKeyFrame();
   } else if (!frame_decoded_ || !keyframe_required_ ||
-             (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ <
-              now_ms)) {
+             (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ < now_ms)) {
     keyframe_required_ = true;
     // TODO(philipel): Remove this keyframe request when downstream project
     // has been fixed.
     RequestKeyFrame();
     last_keyframe_request_ms_ = now_ms;
   }
-  } else {
-    RTC_DCHECK_EQ(res, video_coding::FrameBuffer::ReturnReason::kTimeout);
+}
+
+void VideoReceiveStream::HandleFrameBufferTimeout() {
   int64_t now_ms = clock_->TimeInMilliseconds();
   absl::optional<int64_t> last_packet_ms =
       rtp_video_stream_receiver_.LastReceivedPacketMs();

@@ -637,13 +727,11 @@ bool VideoReceiveStream::Decode() {
   if (stream_is_active && !receiving_keyframe &&
       (!config_.crypto_options.sframe.require_frame_encryption ||
        rtp_video_stream_receiver_.IsDecryptable())) {
-    RTC_LOG(LS_WARNING) << "No decodable frame in " << wait_ms
+    RTC_LOG(LS_WARNING) << "No decodable frame in " << GetWaitMs()
                         << " ms, requesting keyframe.";
     RequestKeyFrame();
   }
 }
-  return true;
-}
 
 void VideoReceiveStream::UpdatePlayoutDelays() const {
   const int minimum_delay_ms =

video/video_receive_stream.h

@@ -23,6 +23,7 @@
 #include "modules/video_coding/frame_buffer2.h"
 #include "modules/video_coding/video_coding_impl.h"
 #include "rtc_base/sequenced_task_checker.h"
+#include "rtc_base/task_queue.h"
 #include "system_wrappers/include/clock.h"
 #include "video/receive_statistics_proxy.h"
 #include "video/rtp_streams_synchronizer.h"

@@ -129,8 +130,13 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
   std::vector<webrtc::RtpSource> GetSources() const override;
 
  private:
+  int64_t GetWaitMs() const;
+  void StartNextDecode() RTC_RUN_ON(decode_queue_);
   static void DecodeThreadFunction(void* ptr);
   bool Decode();
+  void HandleEncodedFrame(std::unique_ptr<video_coding::EncodedFrame> frame);
+  void HandleFrameBufferTimeout();
+
   void UpdatePlayoutDelays() const
       RTC_EXCLUSIVE_LOCKS_REQUIRED(playout_delay_lock_);
 

@@ -146,10 +152,15 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
   ProcessThread* const process_thread_;
   Clock* const clock_;
 
+  const bool use_task_queue_;
+
   rtc::PlatformThread decode_thread_;
 
   CallStats* const call_stats_;
 
+  bool decoder_running_ RTC_GUARDED_BY(worker_sequence_checker_) = false;
+  bool decoder_stopped_ RTC_GUARDED_BY(decode_queue_) = true;
+
   ReceiveStatisticsProxy stats_proxy_;
   // Shared by media and rtx stream receivers, since the latter has no RtpRtcp
   // module of its own.

@@ -165,10 +176,10 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
   // TODO(nisse, philipel): Creation and ownership of video encoders should be
   // moved to the new VideoStreamDecoder.
   std::vector<std::unique_ptr<VideoDecoder>> video_decoders_;
+  std::unique_ptr<video_coding::FrameBuffer> frame_buffer_;
 
   // Members for the new jitter buffer experiment.
   std::unique_ptr<VCMJitterEstimator> jitter_estimator_;
-  std::unique_ptr<video_coding::FrameBuffer> frame_buffer_;
 
   std::unique_ptr<RtpStreamReceiverInterface> media_receiver_;
   std::unique_ptr<RtxReceiveStream> rtx_receive_stream_;

@@ -204,6 +215,9 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
 
   // Maximum delay as decided by the RTP playout delay extension.
   int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
 
+  // Defined last so they are destroyed before all other members.
+  rtc::TaskQueue decode_queue_;
 };
 }  // namespace internal
 }  // namespace webrtc