PipeWire capturer: copy content from PW buffer directly to DesktopFrame

This avoids an additional step where we originally copied content from
the PipeWire buffer to a temporary location and only then from there to
the DesktopFrame. This results in fewer copy operations and hopefully
faster screen sharing.
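
Roughly speaking, the path changes as sketched below (a simplified
sketch, not the exact code; src, src_stride, rect and size stand in
for values derived from the spa buffer and its metadata):

  // Before: two copies per frame.
  std::memcpy(tmp.get(), src, height * src_stride);    // PW buffer -> tmp
  frame->CopyPixelsFrom(tmp.get(), src_stride, rect);  // tmp -> DesktopFrame

  // After: one copy, straight into the frame.
  auto frame = std::make_unique<BasicDesktopFrame>(size);
  frame->CopyPixelsFrom(src, src_stride,
                        DesktopRect::MakeWH(size.width(), size.height()));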

I didn't take exact measurements, but simply running htop while sharing
a 4K screen shows the following results (CPU usage of the top 5
processes):
1) Without this change: 66%, 64%, 26%, 23%, 10%
2) With this change: 41%, 39%, 19%, 17%, 12%

Bug: webrtc:13239
Change-Id: I6a661ecc96bfeef370c1a5a3b9dc5e3c0fc665c8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/231684
Reviewed-by: Tommi <tommi@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35156}
Jan Grulich 2021-10-06 12:45:10 +02:00 committed by WebRTC LUCI CQ
parent 6d19d14c26
commit 3695640504
3 changed files with 31 additions and 57 deletions


@@ -537,8 +537,6 @@ void BaseCapturerPipeWire::HandleBuffer(pw_buffer* buffer) {
     return;
   }
   std::function<void()> cleanup;
-  const int32_t src_stride = spa_buffer->datas[0].chunk->stride;
-
   if (spa_buffer->datas[0].type == SPA_DATA_MemFd) {
     map.initialize(
         static_cast<uint8_t*>(
@@ -608,7 +606,6 @@ void BaseCapturerPipeWire::HandleBuffer(pw_buffer* buffer) {
     video_metadata_use = true;
   }

-  DesktopSize video_size_prev = video_size_;
   if (video_metadata_use) {
     video_size_ =
         DesktopSize(video_metadata_size->width, video_metadata_size->height);
@@ -616,54 +613,41 @@ void BaseCapturerPipeWire::HandleBuffer(pw_buffer* buffer) {
     video_size_ = desktop_size_;
   }

+  uint32_t y_offset = video_metadata_use && (video_metadata->region.position.y +
+                                                 video_size_.height() <=
+                                             desktop_size_.height())
+                          ? video_metadata->region.position.y
+                          : 0;
+  uint32_t x_offset = video_metadata_use && (video_metadata->region.position.x +
+                                                 video_size_.width() <=
+                                             desktop_size_.width())
+                          ? video_metadata->region.position.x
+                          : 0;
+
   webrtc::MutexLock lock(&current_frame_lock_);
-  if (!current_frame_ || !video_size_.equals(video_size_prev)) {
-    current_frame_ = std::make_unique<uint8_t[]>(
-        video_size_.width() * video_size_.height() * kBytesPerPixel);
-  }
-
-  const int32_t dst_stride = video_size_.width() * kBytesPerPixel;
+  uint8_t* updated_src = src + (spa_buffer->datas[0].chunk->stride * y_offset) +
+                         (kBytesPerPixel * x_offset);
+  current_frame_ = std::make_unique<BasicDesktopFrame>(
+      DesktopSize(video_size_.width(), video_size_.height()));
+  current_frame_->CopyPixelsFrom(
+      updated_src,
+      (spa_buffer->datas[0].chunk->stride - (kBytesPerPixel * x_offset)),
+      DesktopRect::MakeWH(video_size_.width(), video_size_.height()));
-  if (src_stride != (desktop_size_.width() * kBytesPerPixel)) {
-    RTC_LOG(LS_ERROR) << "Got buffer with stride different from screen stride: "
-                      << src_stride
-                      << " != " << (desktop_size_.width() * kBytesPerPixel);
-    portal_init_failed_ = true;
-    return;
-  }
-
-  // Adjust source content based on metadata video position
-  if (video_metadata_use &&
-      (video_metadata->region.position.y + video_size_.height() <=
-       desktop_size_.height())) {
-    src += src_stride * video_metadata->region.position.y;
-  }
-  const int x_offset =
-      video_metadata_use &&
-              (video_metadata->region.position.x + video_size_.width() <=
-               desktop_size_.width())
-          ? video_metadata->region.position.x * kBytesPerPixel
-          : 0;
-
-  uint8_t* dst = current_frame_.get();
-  for (int i = 0; i < video_size_.height(); ++i) {
-    // Adjust source content based on crop video position if needed
-    src += x_offset;
-    std::memcpy(dst, src, dst_stride);
-    // If both sides decided to go with the RGBx format we need to convert it to
-    // BGRx to match color format expected by WebRTC.
-    if (spa_video_format_.format == SPA_VIDEO_FORMAT_RGBx ||
-        spa_video_format_.format == SPA_VIDEO_FORMAT_RGBA) {
-      ConvertRGBxToBGRx(dst, dst_stride);
+  if (spa_video_format_.format == SPA_VIDEO_FORMAT_RGBx ||
+      spa_video_format_.format == SPA_VIDEO_FORMAT_RGBA) {
+    uint8_t* tmp_src = current_frame_->data();
+    for (int i = 0; i < video_size_.height(); ++i) {
+      // If both sides decided to go with the RGBx format we need to convert it
+      // to BGRx to match color format expected by WebRTC.
+      ConvertRGBxToBGRx(tmp_src, current_frame_->stride());
+      tmp_src += current_frame_->stride();
     }
-    src += src_stride - x_offset;
-    dst += dst_stride;
   }
 }

 void BaseCapturerPipeWire::ConvertRGBxToBGRx(uint8_t* frame, uint32_t size) {
   // Change color format for KDE KWin which uses RGBx and not BGRx
   for (uint32_t i = 0; i < size; i += 4) {
     uint8_t tempR = frame[i];
     uint8_t tempB = frame[i + 2];
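
Note (not part of the diff): the hunk above ends mid-loop. Presumably
the helper completes by writing the swapped channels back in place,
along these lines:

    frame[i] = tempB;      // blue moves into the red slot
    frame[i + 2] = tempR;  // red moves into the blue slot
  }
}
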
@@ -1103,18 +1087,7 @@ void BaseCapturerPipeWire::CaptureFrame() {
   }

   webrtc::MutexLock lock(&current_frame_lock_);
-  if (!current_frame_) {
-    callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr);
-    return;
-  }
-
-  DesktopSize frame_size = video_size_;
-  std::unique_ptr<DesktopFrame> result(new BasicDesktopFrame(frame_size));
-  result->CopyPixelsFrom(
-      current_frame_.get(), (frame_size.width() * kBytesPerPixel),
-      DesktopRect::MakeWH(frame_size.width(), frame_size.height()));
-  if (!result) {
+  if (!current_frame_ || !current_frame_->data()) {
     callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr);
     return;
   }
@@ -1122,7 +1095,7 @@ void BaseCapturerPipeWire::CaptureFrame() {
   // TODO(julien.isorce): http://crbug.com/945468. Set the icc profile on the
   // frame, see ScreenCapturerX11::CaptureFrame.
-  callback_->OnCaptureResult(Result::SUCCESS, std::move(result));
+  callback_->OnCaptureResult(Result::SUCCESS, std::move(current_frame_));
 }

 bool BaseCapturerPipeWire::GetSourceList(SourceList* sources) {
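
A note on the hand-off above: OnCaptureResult() now consumes
current_frame_ via std::move(), leaving the member null until
HandleBuffer() stores the next frame, which is exactly what the
!current_frame_ check guards against with ERROR_TEMPORARY. A minimal,
self-contained illustration of that unique_ptr transfer (toy types,
not the WebRTC API):

  #include <cassert>
  #include <memory>
  #include <utility>

  int main() {
    auto current_frame = std::make_unique<int>(42);  // stands in for the member
    auto delivered = std::move(current_frame);       // what the callback takes
    assert(current_frame == nullptr);  // empty until the next buffer arrives
    assert(*delivered == 42);          // the consumer now owns the frame
  }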


@@ -95,7 +95,7 @@ class BaseCapturerPipeWire : public DesktopCapturer {
   DesktopCaptureOptions options_ = {};

   webrtc::Mutex current_frame_lock_;
-  std::unique_ptr<uint8_t[]> current_frame_;
+  std::unique_ptr<BasicDesktopFrame> current_frame_;
   Callback* callback_ = nullptr;

   bool portal_init_failed_ = false;


@@ -20,6 +20,7 @@
 #include <xf86drm.h>

 #include "absl/memory/memory.h"
 #include "absl/types/optional.h"
+#include "rtc_base/checks.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/sanitizer.h"