PipeWire capturer: split xdg-desktop-portal and PipeWire implementations

Make PipeWire stream shared through DesktopCaptureOptions (similar to
X11 implementation sharing XDisplay) so we can implement better cursor
support with our own MouseCursorMonitor implementation.

Bug: webrtc:13429
Change-Id: I781482aa29cee0c105c42e5109f28e95dde9881b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/238174
Reviewed-by: Mark Foltz <mfoltz@chromium.org>
Commit-Queue: Mark Foltz <mfoltz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#35765}
Author: Jan Grulich — 2022-01-21 08:58:04 +01:00, committed by WebRTC LUCI CQ
Parent commit: b39fce858f
This commit: c0a4316825
11 changed files with 1601 additions and 1340 deletions

View File

@ -565,10 +565,13 @@ rtc_library("desktop_capture_generic") {
"linux/wayland/base_capturer_pipewire.h",
"linux/wayland/egl_dmabuf.cc",
"linux/wayland/egl_dmabuf.h",
"linux/wayland/screencast_portal.cc",
"linux/wayland/screencast_portal.h",
"linux/wayland/shared_screencast_stream.cc",
"linux/wayland/shared_screencast_stream.h",
]
configs += [
":pipewire_config",
":gio",
":pipewire",
":gbm",
@ -581,6 +584,8 @@ rtc_library("desktop_capture_generic") {
deps += [ ":pipewire_stubs" ]
}
public_configs += [ ":pipewire_config" ]
deps += [ "../../rtc_base:sanitizer" ]
}

View File

@ -14,6 +14,9 @@
#elif defined(WEBRTC_WIN)
#include "modules/desktop_capture/win/full_screen_win_application_handler.h"
#endif
#if defined(WEBRTC_USE_PIPEWIRE)
#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h"
#endif
#include "rtc_base/ref_counted_object.h"
@ -37,6 +40,9 @@ DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() {
#if defined(WEBRTC_USE_X11)
result.set_x_display(SharedXDisplay::CreateDefault());
#endif
#if defined(WEBRTC_USE_PIPEWIRE)
result.set_screencast_stream(SharedScreenCastStream::CreateDefault());
#endif
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
result.set_configuration_monitor(
rtc::make_ref_counted<DesktopConfigurationMonitor>());

View File

@ -17,6 +17,10 @@
#include "modules/desktop_capture/linux/x11/shared_x_display.h"
#endif
#if defined(WEBRTC_USE_PIPEWIRE)
#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h"
#endif
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
#include "modules/desktop_capture/mac/desktop_configuration_monitor.h"
#endif
@ -165,13 +169,26 @@ class RTC_EXPORT DesktopCaptureOptions {
#if defined(WEBRTC_USE_PIPEWIRE)
bool allow_pipewire() const { return allow_pipewire_; }
void set_allow_pipewire(bool allow) { allow_pipewire_ = allow; }
const rtc::scoped_refptr<SharedScreenCastStream>& screencast_stream() const {
return screencast_stream_;
}
void set_screencast_stream(
rtc::scoped_refptr<SharedScreenCastStream> stream) {
screencast_stream_ = stream;
}
#endif
private:
#if defined(WEBRTC_USE_X11)
rtc::scoped_refptr<SharedXDisplay> x_display_;
#endif
#if defined(WEBRTC_USE_PIPEWIRE)
// An instance of shared PipeWire ScreenCast stream we share between
// BaseCapturerPipeWire and MouseCursorMonitorPipeWire as cursor information
// is sent together with screen content.
rtc::scoped_refptr<SharedScreenCastStream> screencast_stream_;
#endif
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
rtc::scoped_refptr<DesktopConfigurationMonitor> configuration_monitor_;
bool allow_iosurface_ = false;

File diff suppressed because it is too large Load Diff

View File

@ -10,199 +10,38 @@
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_BASE_CAPTURER_PIPEWIRE_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_BASE_CAPTURER_PIPEWIRE_H_
#include <gio/gio.h>
#define typeof __typeof__
#include <pipewire/pipewire.h>
#include <spa/param/video/format-utils.h>
#include <spa/utils/result.h>
#include <memory>
#include "absl/types/optional.h"
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/linux/wayland/egl_dmabuf.h"
#include "modules/desktop_capture/linux/wayland/screencast_portal.h"
#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
class BaseCapturerPipeWire : public DesktopCapturer {
class BaseCapturerPipeWire : public DesktopCapturer,
public ScreenCastPortal::PortalNotifier {
public:
// Values are set based on source type property in
// xdg-desktop-portal/screencast
// https://github.com/flatpak/xdg-desktop-portal/blob/master/data/org.freedesktop.portal.ScreenCast.xml
enum class CaptureSourceType : uint32_t {
kScreen = 0b01,
kWindow = 0b10,
kAny = 0b11
};
enum class CursorMode : uint32_t {
kHidden = 0b01,
kEmbedded = 0b10,
kMetadata = 0b100
};
struct PipeWireVersion {
int major = 0;
int minor = 0;
int micro = 0;
};
explicit BaseCapturerPipeWire(CaptureSourceType source_type);
BaseCapturerPipeWire(const DesktopCaptureOptions& options);
~BaseCapturerPipeWire() override;
static std::unique_ptr<DesktopCapturer> CreateRawCapturer(
const DesktopCaptureOptions& options);
// DesktopCapturer interface.
void Start(Callback* delegate) override;
void CaptureFrame() override;
bool GetSourceList(SourceList* sources) override;
bool SelectSource(SourceId id) override;
// ScreenCastPortal::PortalNotifier interface.
void OnScreenCastRequestResult(ScreenCastPortal::RequestResponse result,
uint32_t stream_node_id,
int fd) override;
void OnScreenCastSessionClosed() override;
private:
// PipeWire types -->
struct pw_context* pw_context_ = nullptr;
struct pw_core* pw_core_ = nullptr;
struct pw_stream* pw_stream_ = nullptr;
struct pw_thread_loop* pw_main_loop_ = nullptr;
spa_hook spa_core_listener_;
spa_hook spa_stream_listener_;
// A number used to verify all previous methods and the resulting
// events have been handled.
int server_version_sync_ = 0;
// Version of the running PipeWire server we communicate with
PipeWireVersion pw_server_version_;
// Version of the library used to run our code
PipeWireVersion pw_client_version_;
// event handlers
pw_core_events pw_core_events_ = {};
pw_stream_events pw_stream_events_ = {};
struct spa_video_info_raw spa_video_format_;
guint32 pw_stream_node_id_ = 0;
gint32 pw_fd_ = -1;
CaptureSourceType capture_source_type_ =
BaseCapturerPipeWire::CaptureSourceType::kScreen;
// <-- end of PipeWire types
GDBusConnection* connection_ = nullptr;
GDBusProxy* proxy_ = nullptr;
GCancellable* cancellable_ = nullptr;
gchar* portal_handle_ = nullptr;
gchar* session_handle_ = nullptr;
gchar* sources_handle_ = nullptr;
gchar* start_handle_ = nullptr;
guint session_request_signal_id_ = 0;
guint sources_request_signal_id_ = 0;
guint start_request_signal_id_ = 0;
guint session_closed_signal_id_ = 0;
int64_t modifier_;
DesktopSize video_size_;
DesktopSize desktop_size_ = {};
DesktopCaptureOptions options_ = {};
webrtc::Mutex current_frame_lock_;
std::unique_ptr<BasicDesktopFrame> current_frame_;
Callback* callback_ = nullptr;
bool portal_init_failed_ = false;
std::unique_ptr<EglDmaBuf> egl_dmabuf_;
void Init();
void InitPortal();
void InitPipeWireTypes();
pw_stream* CreateReceivingStream();
void HandleBuffer(pw_buffer* buffer);
void ConvertRGBxToBGRx(uint8_t* frame, uint32_t size);
static void OnCoreError(void* data,
uint32_t id,
int seq,
int res,
const char* message);
static void OnCoreDone(void* user_data, uint32_t id, int seq);
static void OnCoreInfo(void* user_data, const pw_core_info* info);
static void OnStreamParamChanged(void* data,
uint32_t id,
const struct spa_pod* format);
static void OnStreamStateChanged(void* data,
pw_stream_state old_state,
pw_stream_state state,
const char* error_message);
static void OnStreamProcess(void* data);
static void OnNewBuffer(void* data, uint32_t id);
guint SetupRequestResponseSignal(const gchar* object_path,
GDBusSignalCallback callback);
static void OnProxyRequested(GObject* object,
GAsyncResult* result,
gpointer user_data);
static gchar* PrepareSignalHandle(GDBusConnection* connection,
const gchar* token);
void SessionRequest();
static void OnSessionRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
static void OnSessionRequestResponseSignal(GDBusConnection* connection,
const gchar* sender_name,
const gchar* object_path,
const gchar* interface_name,
const gchar* signal_name,
GVariant* parameters,
gpointer user_data);
static void OnSessionClosedSignal(GDBusConnection* connection,
const gchar* sender_name,
const gchar* object_path,
const gchar* interface_name,
const gchar* signal_name,
GVariant* parameters,
gpointer user_data);
void SourcesRequest();
static void OnSourcesRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
static void OnSourcesRequestResponseSignal(GDBusConnection* connection,
const gchar* sender_name,
const gchar* object_path,
const gchar* interface_name,
const gchar* signal_name,
GVariant* parameters,
gpointer user_data);
void StartRequest();
static void OnStartRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
static void OnStartRequestResponseSignal(GDBusConnection* connection,
const gchar* sender_name,
const gchar* object_path,
const gchar* interface_name,
const gchar* signal_name,
GVariant* parameters,
gpointer user_data);
void OpenPipeWireRemote();
static void OnOpenPipeWireRemoteRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
bool capturer_failed_ = false;
std::unique_ptr<ScreenCastPortal> screencast_portal_;
RTC_DISALLOW_COPY_AND_ASSIGN(BaseCapturerPipeWire);
};

View File

@ -0,0 +1,600 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/screencast_portal.h"
#include <gio/gunixfdlist.h>
#include <glib-object.h>
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
namespace webrtc {
// Well-known D-Bus names and object paths of the xdg-desktop-portal service
// and the interfaces this class talks to.
const char kDesktopBusName[] = "org.freedesktop.portal.Desktop";
const char kDesktopObjectPath[] = "/org/freedesktop/portal/desktop";
const char kDesktopRequestObjectPath[] =
    "/org/freedesktop/portal/desktop/request";
const char kSessionInterfaceName[] = "org.freedesktop.portal.Session";
const char kRequestInterfaceName[] = "org.freedesktop.portal.Request";
const char kScreenCastInterfaceName[] = "org.freedesktop.portal.ScreenCast";
// Minimal RAII owner for C-allocated GLib/GObject values. The primary
// template's destructor deliberately hits RTC_DCHECK_NOTREACHED(): only the
// explicit specializations below, which know the matching free routine, may
// ever be instantiated and destroyed.
template <class T>
class Scoped {
 public:
  Scoped() {}
  explicit Scoped(T* val) { ptr_ = val; }
  ~Scoped() { RTC_DCHECK_NOTREACHED(); }

  T* operator->() { return ptr_; }
  bool operator!() { return ptr_ == nullptr; }
  T* get() { return ptr_; }
  // Out-parameter adapter for C APIs that fill in a T** (e.g.
  // error.receive()). The held pointer must still be null; reassigning
  // through receive() after a value was stored would leak it.
  T** receive() {
    RTC_CHECK(!ptr_);
    return &ptr_;
  }
  // NOTE(review): takes ownership of `val` but does NOT free a previously
  // held pointer — callers must not reuse one Scoped for two allocations.
  Scoped& operator=(T* val) {
    ptr_ = val;
    return *this;
  }

 protected:
  T* ptr_ = nullptr;
};

// Destructor specializations: release the pointee with the GLib routine
// that matches how it was allocated.
template <>
Scoped<GError>::~Scoped() {
  if (ptr_) {
    g_error_free(ptr_);
  }
}

template <>
Scoped<char>::~Scoped() {
  if (ptr_) {
    g_free(ptr_);
  }
}

template <>
Scoped<GVariant>::~Scoped() {
  if (ptr_) {
    g_variant_unref(ptr_);
  }
}

template <>
Scoped<GVariantIter>::~Scoped() {
  if (ptr_) {
    g_variant_iter_free(ptr_);
  }
}

template <>
Scoped<GDBusMessage>::~Scoped() {
  if (ptr_) {
    g_object_unref(ptr_);
  }
}

template <>
Scoped<GUnixFDList>::~Scoped() {
  if (ptr_) {
    g_object_unref(ptr_);
  }
}
// `notifier` is stored, not owned; it must outlive this object because the
// async D-Bus callbacks dereference it.
ScreenCastPortal::ScreenCastPortal(CaptureSourceType source_type,
                                   PortalNotifier* notifier)
    : notifier_(notifier), capture_source_type_(source_type) {}
// Tears down the portal session: unsubscribes pending Request "Response"
// signal handlers, asks the portal to Close the session when one was
// established, cancels and releases the GDBus objects, and closes the
// PipeWire socket fd.
ScreenCastPortal::~ScreenCastPortal() {
  // Unsubscribe in reverse order of registration.
  if (start_request_signal_id_) {
    g_dbus_connection_signal_unsubscribe(connection_, start_request_signal_id_);
  }
  if (sources_request_signal_id_) {
    g_dbus_connection_signal_unsubscribe(connection_,
                                         sources_request_signal_id_);
  }
  if (session_request_signal_id_) {
    g_dbus_connection_signal_unsubscribe(connection_,
                                         session_request_signal_id_);
  }

  // Best-effort Session.Close; a failure is only logged.
  if (!session_handle_.empty()) {
    Scoped<GDBusMessage> message(
        g_dbus_message_new_method_call(kDesktopBusName, session_handle_.c_str(),
                                       kSessionInterfaceName, "Close"));
    if (message.get()) {
      Scoped<GError> error;
      g_dbus_connection_send_message(connection_, message.get(),
                                     G_DBUS_SEND_MESSAGE_FLAGS_NONE,
                                     /*out_serial=*/nullptr, error.receive());
      if (error.get()) {
        RTC_LOG(LS_ERROR) << "Failed to close the session: " << error->message;
      }
    }
  }

  // Cancel any in-flight async call before dropping the proxy.
  if (cancellable_) {
    g_cancellable_cancel(cancellable_);
    g_object_unref(cancellable_);
    cancellable_ = nullptr;
  }

  if (proxy_) {
    g_object_unref(proxy_);
    proxy_ = nullptr;
  }

  if (pw_fd_ != -1) {
    close(pw_fd_);
  }
}
// Kicks off the asynchronous portal handshake by creating a D-Bus proxy for
// org.freedesktop.portal.ScreenCast; the chain continues in
// OnProxyRequested().
void ScreenCastPortal::Start() {
  cancellable_ = g_cancellable_new();
  g_dbus_proxy_new_for_bus(
      G_BUS_TYPE_SESSION, G_DBUS_PROXY_FLAGS_NONE, /*info=*/nullptr,
      kDesktopBusName, kDesktopObjectPath, kScreenCastInterfaceName,
      cancellable_, reinterpret_cast<GAsyncReadyCallback>(OnProxyRequested),
      this);
}
// Reports a failed portal step to the notifier, forwarding whatever stream
// node id / fd have been obtained so far (0 and -1 when none).
void ScreenCastPortal::PortalFailed(RequestResponse result) {
  notifier_->OnScreenCastRequestResult(result, pw_stream_node_id_, pw_fd_);
}
// Subscribes `callback` to the org.freedesktop.portal.Request "Response"
// signal on `object_path` and returns the subscription id for later
// g_dbus_connection_signal_unsubscribe().
uint32_t ScreenCastPortal::SetupRequestResponseSignal(
    const char* object_path,
    GDBusSignalCallback callback) {
  return g_dbus_connection_signal_subscribe(
      connection_, kDesktopBusName, kRequestInterfaceName, "Response",
      object_path, /*arg0=*/nullptr, G_DBUS_SIGNAL_FLAGS_NO_MATCH_RULE,
      callback, this, /*user_data_free_func=*/nullptr);
}
// static
// Completion of g_dbus_proxy_new_for_bus() from Start(). On success stores
// the proxy and its connection and begins the session handshake.
void ScreenCastPortal::OnProxyRequested(GObject* /*object*/,
                                        GAsyncResult* result,
                                        gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  Scoped<GError> error;
  GDBusProxy* proxy = g_dbus_proxy_new_finish(result, error.receive());
  if (!proxy) {
    // Cancelled means our GCancellable fired (see the destructor); do not
    // touch `that` further.
    if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
      return;
    RTC_LOG(LS_ERROR) << "Failed to create a proxy for the screen cast portal: "
                      << error->message;
    that->PortalFailed(RequestResponse::kError);
    return;
  }
  that->proxy_ = proxy;
  that->connection_ = g_dbus_proxy_get_connection(that->proxy_);

  RTC_LOG(LS_INFO) << "Created proxy for the screen cast portal.";
  that->SessionRequest();
}
// static
// Builds the object path on which the portal will emit the Request
// "Response" signal:
//   /org/freedesktop/portal/desktop/request/<SENDER>/<token>
// where <SENDER> is our unique bus name with the leading ':' stripped and
// every '.' replaced by '_'.
std::string ScreenCastPortal::PrepareSignalHandle(GDBusConnection* connection,
                                                  const char* token) {
  Scoped<char> sender(
      g_strdup(g_dbus_connection_get_unique_name(connection) + 1));
  for (int i = 0; sender.get()[i]; ++i) {
    if (sender.get()[i] == '.') {
      sender.get()[i] = '_';
    }
  }
  // LEAK FIX: g_strconcat() returns a newly allocated string that must be
  // g_free()d. Own it with Scoped<char> so it is released after being
  // copied into the returned std::string; the raw pointer was leaked before.
  Scoped<char> handle(g_strconcat(kDesktopRequestObjectPath, "/", sender.get(),
                                  "/", token, /*end of varargs*/ nullptr));
  return handle.get();
}
void ScreenCastPortal::SessionRequest() {
GVariantBuilder builder;
Scoped<char> variant_string;
g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
variant_string =
g_strdup_printf("webrtc_session%d", g_random_int_range(0, G_MAXINT));
g_variant_builder_add(&builder, "{sv}", "session_handle_token",
g_variant_new_string(variant_string.get()));
variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
g_variant_builder_add(&builder, "{sv}", "handle_token",
g_variant_new_string(variant_string.get()));
portal_handle_ = PrepareSignalHandle(connection_, variant_string.get());
session_request_signal_id_ = SetupRequestResponseSignal(
portal_handle_.c_str(), OnSessionRequestResponseSignal);
RTC_LOG(LS_INFO) << "Screen cast session requested.";
g_dbus_proxy_call(proxy_, "CreateSession", g_variant_new("(a{sv})", &builder),
G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, cancellable_,
reinterpret_cast<GAsyncReadyCallback>(OnSessionRequested),
this);
}
// static
// Completion of the CreateSession call itself (the portal's answer arrives
// later via the Request "Response" signal); only validates that a Request
// handle came back.
void ScreenCastPortal::OnSessionRequested(GDBusProxy* proxy,
                                          GAsyncResult* result,
                                          gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  Scoped<GError> error;
  Scoped<GVariant> variant(
      g_dbus_proxy_call_finish(proxy, result, error.receive()));
  if (!variant) {
    if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
      return;
    RTC_LOG(LS_ERROR) << "Failed to create a screen cast session: "
                      << error->message;
    that->PortalFailed(RequestResponse::kError);
    return;
  }
  RTC_LOG(LS_INFO) << "Initializing the screen cast session.";

  Scoped<char> handle;
  // BUG FIX: pass handle.receive() (the address of the owned char*), not
  // &handle (the address of the Scoped wrapper itself), matching the other
  // g_variant_get_child() call sites in this file.
  g_variant_get_child(variant.get(), 0, "o", handle.receive());
  if (!handle) {
    RTC_LOG(LS_ERROR) << "Failed to initialize the screen cast session.";
    if (that->session_request_signal_id_) {
      g_dbus_connection_signal_unsubscribe(that->connection_,
                                           that->session_request_signal_id_);
      that->session_request_signal_id_ = 0;
    }
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  RTC_LOG(LS_INFO) << "Subscribing to the screen cast session.";
}
// static
// Response for the CreateSession request: stores the session handle,
// subscribes to the session's "Closed" signal and moves on to SelectSources.
void ScreenCastPortal::OnSessionRequestResponseSignal(
    GDBusConnection* connection,
    const char* sender_name,
    const char* object_path,
    const char* interface_name,
    const char* signal_name,
    GVariant* parameters,
    gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  RTC_LOG(LS_INFO)
      << "Received response for the screen cast session subscription.";

  uint32_t portal_response;
  Scoped<GVariant> response_data;
  g_variant_get(parameters, "(u@a{sv})", &portal_response,
                response_data.receive());
  Scoped<GVariant> session_handle(
      g_variant_lookup_value(response_data.get(), "session_handle", nullptr));
  // ROBUSTNESS FIX: only dup the string when the lookup succeeded —
  // g_variant_dup_string() must not be called with a null GVariant, which
  // happens when the response carries no "session_handle" entry.
  if (session_handle.get()) {
    that->session_handle_ =
        g_variant_dup_string(session_handle.get(), nullptr);
  }

  if (that->session_handle_.empty() || portal_response) {
    RTC_LOG(LS_ERROR)
        << "Failed to request the screen cast session subscription.";
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  that->session_closed_signal_id_ = g_dbus_connection_signal_subscribe(
      that->connection_, kDesktopBusName, kSessionInterfaceName, "Closed",
      that->session_handle_.c_str(), /*arg0=*/nullptr, G_DBUS_SIGNAL_FLAGS_NONE,
      OnSessionClosedSignal, that, /*user_data_free_func=*/nullptr);

  that->SourcesRequest();
}
// static
// Invoked when the portal closes the session (e.g. sharing was stopped on
// the portal side). Notifies the consumer and tears down session state.
void ScreenCastPortal::OnSessionClosedSignal(GDBusConnection* connection,
                                             const char* sender_name,
                                             const char* object_path,
                                             const char* interface_name,
                                             const char* signal_name,
                                             GVariant* parameters,
                                             gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  RTC_LOG(LS_INFO) << "Received closed signal from session.";

  that->notifier_->OnScreenCastSessionClosed();

  // Unsubscribe from the signal and free the session handle to avoid calling
  // Session::Close from the destructor since it's already closed
  g_dbus_connection_signal_unsubscribe(that->connection_,
                                       that->session_closed_signal_id_);
  // BUG FIX: actually clear the handle as the comment above promises —
  // otherwise the destructor would still send Close for a session that is
  // already gone.
  that->session_handle_.clear();
}
// Calls SelectSources on the session: requests the configured source type,
// disallows multi-selection and, when the portal advertises it, the desired
// cursor mode. Continues in OnSourcesRequested() and
// OnSourcesRequestResponseSignal().
void ScreenCastPortal::SourcesRequest() {
  GVariantBuilder builder;
  Scoped<char> variant_string;
  g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
  // We want to record monitor content.
  g_variant_builder_add(
      &builder, "{sv}", "types",
      g_variant_new_uint32(static_cast<uint32_t>(capture_source_type_)));
  // We don't want to allow selection of multiple sources.
  g_variant_builder_add(&builder, "{sv}", "multiple",
                        g_variant_new_boolean(false));

  Scoped<GVariant> variant(
      g_dbus_proxy_get_cached_property(proxy_, "AvailableCursorModes"));
  if (variant.get()) {
    uint32_t modes = 0;
    g_variant_get(variant.get(), "u", &modes);
    // Make request only if this mode is advertised by the portal
    // implementation.
    if (modes & static_cast<uint32_t>(cursor_mode_)) {
      g_variant_builder_add(
          &builder, "{sv}", "cursor_mode",
          g_variant_new_uint32(static_cast<uint32_t>(cursor_mode_)));
    }
  }

  variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
  g_variant_builder_add(&builder, "{sv}", "handle_token",
                        g_variant_new_string(variant_string.get()));

  sources_handle_ = PrepareSignalHandle(connection_, variant_string.get());
  sources_request_signal_id_ = SetupRequestResponseSignal(
      sources_handle_.c_str(), OnSourcesRequestResponseSignal);

  RTC_LOG(LS_INFO) << "Requesting sources from the screen cast session.";
  g_dbus_proxy_call(
      proxy_, "SelectSources",
      g_variant_new("(oa{sv})", session_handle_.c_str(), &builder),
      G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, cancellable_,
      reinterpret_cast<GAsyncReadyCallback>(OnSourcesRequested), this);
}
// static
// Completion of the SelectSources call itself (the user's answer arrives via
// the Request "Response" signal); only validates that a Request handle came
// back.
void ScreenCastPortal::OnSourcesRequested(GDBusProxy* proxy,
                                          GAsyncResult* result,
                                          gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  Scoped<GError> error;
  Scoped<GVariant> variant(
      g_dbus_proxy_call_finish(proxy, result, error.receive()));
  if (!variant) {
    if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
      return;
    RTC_LOG(LS_ERROR) << "Failed to request the sources: " << error->message;
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  RTC_LOG(LS_INFO) << "Sources requested from the screen cast session.";

  Scoped<char> handle;
  g_variant_get_child(variant.get(), 0, "o", handle.receive());
  if (!handle) {
    RTC_LOG(LS_ERROR) << "Failed to initialize the screen cast session.";
    if (that->sources_request_signal_id_) {
      g_dbus_connection_signal_unsubscribe(that->connection_,
                                           that->sources_request_signal_id_);
      that->sources_request_signal_id_ = 0;
    }
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  RTC_LOG(LS_INFO) << "Subscribed to sources signal.";
}
// static
// Handles the Request "Response" signal for SelectSources: on success moves
// on to the Start request, otherwise reports the failure to the notifier.
void ScreenCastPortal::OnSourcesRequestResponseSignal(
    GDBusConnection* connection,
    const char* sender_name,
    const char* object_path,
    const char* interface_name,
    const char* signal_name,
    GVariant* parameters,
    gpointer user_data) {
  ScreenCastPortal* portal = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(portal);

  RTC_LOG(LS_INFO) << "Received sources signal from session.";

  uint32_t portal_response = 0;
  g_variant_get(parameters, "(u@a{sv})", &portal_response, nullptr);
  if (portal_response == 0) {
    portal->StartRequest();
    return;
  }

  RTC_LOG(LS_ERROR)
      << "Failed to select sources for the screen cast session.";
  portal->PortalFailed(RequestResponse::kError);
}
// Calls Start on the session. Continues in OnStartRequested() and
// OnStartRequestResponseSignal(), which delivers the PipeWire stream info.
void ScreenCastPortal::StartRequest() {
  GVariantBuilder builder;
  Scoped<char> variant_string;
  g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
  variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
  g_variant_builder_add(&builder, "{sv}", "handle_token",
                        g_variant_new_string(variant_string.get()));

  start_handle_ = PrepareSignalHandle(connection_, variant_string.get());
  start_request_signal_id_ = SetupRequestResponseSignal(
      start_handle_.c_str(), OnStartRequestResponseSignal);

  // "Identifier for the application window", this is Wayland, so not "x11:...".
  const char parent_window[] = "";

  RTC_LOG(LS_INFO) << "Starting the screen cast session.";
  g_dbus_proxy_call(proxy_, "Start",
                    g_variant_new("(osa{sv})", session_handle_.c_str(),
                                  parent_window, &builder),
                    G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, cancellable_,
                    reinterpret_cast<GAsyncReadyCallback>(OnStartRequested),
                    this);
}
// static
// Completion of the Start call itself (the result arrives via the Request
// "Response" signal); only validates that a Request handle came back.
void ScreenCastPortal::OnStartRequested(GDBusProxy* proxy,
                                        GAsyncResult* result,
                                        gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  Scoped<GError> error;
  Scoped<GVariant> variant(
      g_dbus_proxy_call_finish(proxy, result, error.receive()));
  if (!variant) {
    if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
      return;
    RTC_LOG(LS_ERROR) << "Failed to start the screen cast session: "
                      << error->message;
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  RTC_LOG(LS_INFO) << "Initializing the start of the screen cast session.";

  Scoped<char> handle;
  g_variant_get_child(variant.get(), 0, "o", handle.receive());
  if (!handle) {
    RTC_LOG(LS_ERROR)
        << "Failed to initialize the start of the screen cast session.";
    if (that->start_request_signal_id_) {
      g_dbus_connection_signal_unsubscribe(that->connection_,
                                           that->start_request_signal_id_);
      that->start_request_signal_id_ = 0;
    }
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  RTC_LOG(LS_INFO) << "Subscribed to the start signal.";
}
// static
// Final Response of the Start request. On success reads the first entry of
// the "streams" array to obtain the PipeWire node id (and the actual source
// type when the portal reports it), then asks for the PipeWire remote fd.
void ScreenCastPortal::OnStartRequestResponseSignal(GDBusConnection* connection,
                                                    const char* sender_name,
                                                    const char* object_path,
                                                    const char* interface_name,
                                                    const char* signal_name,
                                                    GVariant* parameters,
                                                    gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  RTC_LOG(LS_INFO) << "Start signal received.";
  uint32_t portal_response;
  Scoped<GVariant> response_data;
  Scoped<GVariantIter> iter;
  g_variant_get(parameters, "(u@a{sv})", &portal_response,
                response_data.receive());
  if (portal_response || !response_data) {
    RTC_LOG(LS_ERROR) << "Failed to start the screen cast session.";
    // NOTE(review): the portal response code is cast directly onto
    // RequestResponse — assumes the portal's non-zero codes line up with
    // kUserCancelled/kError; confirm against the portal spec.
    that->PortalFailed(static_cast<RequestResponse>(portal_response));
    return;
  }

  // Array of PipeWire streams. See
  // https://github.com/flatpak/xdg-desktop-portal/blob/master/data/org.freedesktop.portal.ScreenCast.xml
  // documentation for <method name="Start">.
  if (g_variant_lookup(response_data.get(), "streams", "a(ua{sv})",
                       iter.receive())) {
    Scoped<GVariant> variant;

    while (g_variant_iter_next(iter.get(), "@(ua{sv})", variant.receive())) {
      uint32_t stream_id;
      uint32_t type;
      Scoped<GVariant> options;

      g_variant_get(variant.get(), "(u@a{sv})", &stream_id, options.receive());
      RTC_DCHECK(options.get());

      if (g_variant_lookup(options.get(), "source_type", "u", &type)) {
        that->capture_source_type_ =
            static_cast<ScreenCastPortal::CaptureSourceType>(type);
      }

      that->pw_stream_node_id_ = stream_id;

      // Only the first stream is used.
      break;
    }
  }

  that->OpenPipeWireRemote();
}
void ScreenCastPortal::OpenPipeWireRemote() {
GVariantBuilder builder;
g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
RTC_LOG(LS_INFO) << "Opening the PipeWire remote.";
g_dbus_proxy_call_with_unix_fd_list(
proxy_, "OpenPipeWireRemote",
g_variant_new("(oa{sv})", session_handle_.c_str(), &builder),
G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, /*fd_list=*/nullptr, cancellable_,
reinterpret_cast<GAsyncReadyCallback>(OnOpenPipeWireRemoteRequested),
this);
}
// static
// Completion of OpenPipeWireRemote: extracts the PipeWire socket fd from the
// returned fd list and reports success to the notifier.
void ScreenCastPortal::OnOpenPipeWireRemoteRequested(GDBusProxy* proxy,
                                                     GAsyncResult* result,
                                                     gpointer user_data) {
  ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
  RTC_DCHECK(that);

  Scoped<GError> error;
  Scoped<GUnixFDList> outlist;
  Scoped<GVariant> variant(g_dbus_proxy_call_with_unix_fd_list_finish(
      proxy, outlist.receive(), result, error.receive()));
  if (!variant) {
    if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
      return;
    RTC_LOG(LS_ERROR) << "Failed to open the PipeWire remote: "
                      << error->message;
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  // The reply is "(h)": an index into the attached fd list.
  int32_t index;
  g_variant_get(variant.get(), "(h)", &index);

  that->pw_fd_ = g_unix_fd_list_get(outlist.get(), index, error.receive());
  if (that->pw_fd_ == -1) {
    RTC_LOG(LS_ERROR) << "Failed to get file descriptor from the list: "
                      << error->message;
    that->PortalFailed(RequestResponse::kError);
    return;
  }

  // All information gathered: hand the stream node id and socket fd to the
  // consumer.
  that->notifier_->OnScreenCastRequestResult(
      ScreenCastPortal::RequestResponse::kSuccess, that->pw_stream_node_id_,
      that->pw_fd_);
}
} // namespace webrtc

View File

@ -0,0 +1,170 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_PORTAL_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_PORTAL_H_
#include <gio/gio.h>
#define typeof __typeof__
#include <string>
#include "absl/types/optional.h"
namespace webrtc {
// Wraps the xdg-desktop-portal ScreenCast D-Bus interface. Drives the
// asynchronous CreateSession -> SelectSources -> Start -> OpenPipeWireRemote
// handshake and reports the resulting PipeWire stream node id and socket fd
// through the PortalNotifier interface.
class ScreenCastPortal {
 public:
  // Values are set based on source type property in
  // xdg-desktop-portal/screencast
  // https://github.com/flatpak/xdg-desktop-portal/blob/master/data/org.freedesktop.portal.ScreenCast.xml
  enum class CaptureSourceType : uint32_t {
    kScreen = 0b01,
    kWindow = 0b10,
    kAnyScreenContent = kScreen | kWindow
  };

  // Values are set based on cursor mode property in
  // xdg-desktop-portal/screencast
  // https://github.com/flatpak/xdg-desktop-portal/blob/master/data/org.freedesktop.portal.ScreenCast.xml
  enum class CursorMode : uint32_t {
    // Mouse cursor will not be included in any form
    kHidden = 0b01,
    // Mouse cursor will be part of the screen content
    kEmbedded = 0b10,
    // Mouse cursor information will be sent separately in form of metadata
    kMetadata = 0b100
  };

  // Outcome of the portal interaction, reported to the PortalNotifier.
  enum class RequestResponse {
    // Success, the request is carried out.
    kSuccess,
    // The user cancelled the interaction.
    kUserCancelled,
    // The user interaction was ended in some other way.
    kError,
    kMaxValue = kError
  };

  // Interface that must be implemented by the ScreenCastPortal consumers.
  class PortalNotifier {
   public:
    virtual void OnScreenCastRequestResult(RequestResponse result,
                                           uint32_t stream_node_id,
                                           int fd) = 0;
    virtual void OnScreenCastSessionClosed() = 0;

   protected:
    PortalNotifier() = default;
    virtual ~PortalNotifier() = default;
  };

  explicit ScreenCastPortal(CaptureSourceType source_type,
                            PortalNotifier* notifier);
  ~ScreenCastPortal();

  // Initialize ScreenCastPortal with series of DBus calls where we try to
  // obtain all the required information, like PipeWire file descriptor and
  // PipeWire stream node ID.
  //
  // The observer will return whether the communication with xdg-desktop-portal
  // was successful and only then you will be able to get all the required
  // information in order to continue working with PipeWire.
  void Start();

 private:
  PortalNotifier* notifier_;

  // A PipeWire stream ID of stream we will be connecting to
  uint32_t pw_stream_node_id_ = 0;
  // A file descriptor of PipeWire socket
  int pw_fd_ = -1;

  CaptureSourceType capture_source_type_ =
      ScreenCastPortal::CaptureSourceType::kScreen;
  CursorMode cursor_mode_ = ScreenCastPortal::CursorMode::kEmbedded;

  GDBusConnection* connection_ = nullptr;
  GDBusProxy* proxy_ = nullptr;
  GCancellable* cancellable_ = nullptr;
  // Object paths of the portal Request handles created during the handshake.
  std::string portal_handle_;
  std::string session_handle_;
  std::string sources_handle_;
  std::string start_handle_;
  // Subscription ids of the Request "Response" / Session "Closed" signals.
  guint session_request_signal_id_ = 0;
  guint sources_request_signal_id_ = 0;
  guint start_request_signal_id_ = 0;
  guint session_closed_signal_id_ = 0;

  // Reports a failed portal step to the notifier.
  void PortalFailed(RequestResponse result);

  uint32_t SetupRequestResponseSignal(const char* object_path,
                                      GDBusSignalCallback callback);

  static void OnProxyRequested(GObject* object,
                               GAsyncResult* result,
                               gpointer user_data);

  static std::string PrepareSignalHandle(GDBusConnection* connection,
                                         const char* token);

  void SessionRequest();
  static void OnSessionRequested(GDBusProxy* proxy,
                                 GAsyncResult* result,
                                 gpointer user_data);
  static void OnSessionRequestResponseSignal(GDBusConnection* connection,
                                             const char* sender_name,
                                             const char* object_path,
                                             const char* interface_name,
                                             const char* signal_name,
                                             GVariant* parameters,
                                             gpointer user_data);
  static void OnSessionClosedSignal(GDBusConnection* connection,
                                    const char* sender_name,
                                    const char* object_path,
                                    const char* interface_name,
                                    const char* signal_name,
                                    GVariant* parameters,
                                    gpointer user_data);

  void SourcesRequest();
  static void OnSourcesRequested(GDBusProxy* proxy,
                                 GAsyncResult* result,
                                 gpointer user_data);
  static void OnSourcesRequestResponseSignal(GDBusConnection* connection,
                                             const char* sender_name,
                                             const char* object_path,
                                             const char* interface_name,
                                             const char* signal_name,
                                             GVariant* parameters,
                                             gpointer user_data);

  void StartRequest();
  static void OnStartRequested(GDBusProxy* proxy,
                               GAsyncResult* result,
                               gpointer user_data);
  static void OnStartRequestResponseSignal(GDBusConnection* connection,
                                           const char* sender_name,
                                           const char* object_path,
                                           const char* interface_name,
                                           const char* signal_name,
                                           GVariant* parameters,
                                           gpointer user_data);

  void OpenPipeWireRemote();
  static void OnOpenPipeWireRemoteRequested(GDBusProxy* proxy,
                                            GAsyncResult* result,
                                            gpointer user_data);
};
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_PORTAL_H_

View File

@ -0,0 +1,710 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h"

#include <libdrm/drm_fourcc.h>
#include <pipewire/pipewire.h>
#include <spa/param/format-utils.h>
#include <spa/param/props.h>
#include <spa/param/video/format-utils.h>
#include <spa/utils/result.h>
#include <sys/mman.h>

#include <cerrno>
#include <cstring>
#include <memory>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

#include "absl/types/optional.h"
#include "modules/desktop_capture/linux/wayland/egl_dmabuf.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/string_encode.h"
#include "rtc_base/string_to_number.h"
#include "rtc_base/synchronization/mutex.h"

#if defined(WEBRTC_DLOPEN_PIPEWIRE)
#include "modules/desktop_capture/linux/wayland/pipewire_stubs.h"

using modules_desktop_capture_linux_wayland::InitializeStubs;
using modules_desktop_capture_linux_wayland::kModuleDrm;
using modules_desktop_capture_linux_wayland::kModulePipewire;
using modules_desktop_capture_linux_wayland::StubPathMap;
#endif  // defined(WEBRTC_DLOPEN_PIPEWIRE)
namespace webrtc {

// Captured frames are always 32 bits per pixel (BGRx/RGBx/BGRA/RGBA).
const int kBytesPerPixel = 4;

#if defined(WEBRTC_DLOPEN_PIPEWIRE)
// Library sonames used when PipeWire and DRM are loaded at runtime via stubs.
const char kPipeWireLib[] = "libpipewire-0.3.so.0";
const char kDrmLib[] = "libdrm.so.2";
#endif

// Older PipeWire headers do not define these SPA property flags; provide the
// bit values locally so the code still builds against them.
#if !PW_CHECK_VERSION(0, 3, 29)
#define SPA_POD_PROP_FLAG_MANDATORY (1u << 3)
#endif
#if !PW_CHECK_VERSION(0, 3, 33)
#define SPA_POD_PROP_FLAG_DONT_FIXATE (1u << 4)
#endif

// Holder for a "major.minor.micro" PipeWire version triple.
// {0, 0, 0} is used as the "unknown/invalid" value by the comparison
// operators below.
struct PipeWireVersion {
  int major = 0;
  int minor = 0;
  int micro = 0;
};

// Minimum server version that supports DMA-BUF buffer sharing.
constexpr PipeWireVersion kDmaBufMinVersion = {0, 3, 24};
// Minimum version that supports negotiating DMA-BUF format modifiers.
constexpr PipeWireVersion kDmaBufModifierMinVersion = {0, 3, 33};
// Parses a "major.minor.micro" version string as reported by PipeWire.
// Returns {0, 0, 0} (the invalid version) when the string does not consist
// of exactly three numeric components.
PipeWireVersion ParsePipeWireVersion(const char* version) {
  std::vector<std::string> parsed_version;
  rtc::split(version, '.', &parsed_version);

  if (parsed_version.size() != 3) {
    return {};
  }

  const absl::optional<int> major =
      rtc::StringToNumber<int>(parsed_version.at(0));
  const absl::optional<int> minor =
      rtc::StringToNumber<int>(parsed_version.at(1));
  const absl::optional<int> micro =
      rtc::StringToNumber<int>(parsed_version.at(2));

  // Return invalid version if we failed to parse it
  if (!major || !minor || !micro) {
    return {0, 0, 0};
  }

  // BUG FIX: the original returned {major, micro, micro}, silently dropping
  // the minor component, which broke every version comparison against
  // kDmaBufMinVersion / kDmaBufModifierMinVersion.
  return {major.value(), minor.value(), micro.value()};
}
// Builds a SPA_PARAM_EnumFormat pod describing one supported video format.
// When `modifiers` is non-empty a DMA-BUF modifier choice is attached so the
// server can pick a modifier both sides support; an empty list advertises
// plain shared-memory buffers. The returned pod points into `builder`'s
// storage and is only valid as long as that storage is.
spa_pod* BuildFormat(spa_pod_builder* builder,
                     uint32_t format,
                     const std::vector<uint64_t>& modifiers) {
  bool first = true;
  spa_pod_frame frames[2];
  // Accept any stream size from 1x1 up to the maximum expressible rectangle.
  spa_rectangle pw_min_screen_bounds = spa_rectangle{1, 1};
  spa_rectangle pw_max_screen_bounds = spa_rectangle{UINT32_MAX, UINT32_MAX};

  spa_pod_builder_push_object(builder, &frames[0], SPA_TYPE_OBJECT_Format,
                              SPA_PARAM_EnumFormat);
  spa_pod_builder_add(builder, SPA_FORMAT_mediaType,
                      SPA_POD_Id(SPA_MEDIA_TYPE_video), 0);
  spa_pod_builder_add(builder, SPA_FORMAT_mediaSubtype,
                      SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw), 0);
  spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_format, SPA_POD_Id(format), 0);

  if (modifiers.size()) {
    // The modifier property is mandatory (DMA-BUF only) but not fixated,
    // so the server may narrow the choice during negotiation.
    spa_pod_builder_prop(
        builder, SPA_FORMAT_VIDEO_modifier,
        SPA_POD_PROP_FLAG_MANDATORY | SPA_POD_PROP_FLAG_DONT_FIXATE);
    spa_pod_builder_push_choice(builder, &frames[1], SPA_CHOICE_Enum, 0);
    // modifiers from the array
    for (int64_t val : modifiers) {
      spa_pod_builder_long(builder, val);
      // Add the first modifier twice as the very first value is the default
      // option
      if (first) {
        spa_pod_builder_long(builder, val);
        first = false;
      }
    }
    spa_pod_builder_pop(builder, &frames[1]);
  }

  spa_pod_builder_add(
      builder, SPA_FORMAT_VIDEO_size,
      SPA_POD_CHOICE_RANGE_Rectangle(
          &pw_min_screen_bounds, &pw_min_screen_bounds, &pw_max_screen_bounds),
      0);

  return static_cast<spa_pod*>(spa_pod_builder_pop(builder, &frames[0]));
}
// RAII guard that holds the PipeWire thread-loop lock for its lifetime.
// Locking the loop is required before touching PipeWire objects from any
// thread other than the loop thread itself.
class PipeWireThreadLoopLock {
 public:
  explicit PipeWireThreadLoopLock(pw_thread_loop* loop) : loop_(loop) {
    pw_thread_loop_lock(loop_);
  }
  ~PipeWireThreadLoopLock() { pw_thread_loop_unlock(loop_); }

  // A scoped lock must not be copied: a copy would unlock the loop twice.
  PipeWireThreadLoopLock(const PipeWireThreadLoopLock&) = delete;
  PipeWireThreadLoopLock& operator=(const PipeWireThreadLoopLock&) = delete;

 private:
  pw_thread_loop* const loop_;
};
// Owns an mmap()ed region and unmaps it on destruction. The file descriptor
// is stored for reference only; it is not closed here.
class ScopedBuf {
 public:
  ScopedBuf() {}
  ScopedBuf(uint8_t* map, int map_size, int fd)
      : map_(map), map_size_(map_size), fd_(fd) {}
  ~ScopedBuf() {
    if (map_ != MAP_FAILED) {
      munmap(map_, map_size_);
    }
  }

  // Copying would cause the same region to be munmap()ed twice.
  ScopedBuf(const ScopedBuf&) = delete;
  ScopedBuf& operator=(const ScopedBuf&) = delete;

  // True when a valid mapping is held.
  operator bool() { return map_ != MAP_FAILED; }

  // Takes ownership of a new mapping, releasing any previously held one
  // (the original leaked an existing mapping on re-initialization).
  void initialize(uint8_t* map, int map_size, int fd) {
    if (map_ != MAP_FAILED) {
      munmap(map_, map_size_);
    }
    map_ = map;
    map_size_ = map_size;
    fd_ = fd;
  }

  uint8_t* get() { return map_; }

 protected:
  uint8_t* map_ = static_cast<uint8_t*>(MAP_FAILED);
  // Brace-initialized so a default-constructed ScopedBuf has no
  // indeterminate members.
  int map_size_ = 0;
  int fd_ = -1;
};
// Private implementation of the shared PipeWire screen-cast stream. Owns the
// PipeWire thread loop, context, core and stream objects, and produces
// BasicDesktopFrames from incoming stream buffers.
class SharedScreenCastStreamPrivate {
 public:
  SharedScreenCastStreamPrivate();
  ~SharedScreenCastStreamPrivate();

  // See SharedScreenCastStream for the public contract of these methods.
  bool StartScreenCastStream(uint32_t stream_node_id, int fd);
  void StopScreenCastStream();
  std::unique_ptr<BasicDesktopFrame> CaptureFrame();

 private:
  // PipeWire node id and remote fd handed over by the portal.
  uint32_t pw_stream_node_id_ = 0;
  int pw_fd_ = -1;

  // Size of the stream as negotiated with the server.
  DesktopSize desktop_size_ = {};
  // Size of the actual video content (may be smaller when cropped via
  // SPA_META_VideoCrop metadata).
  DesktopSize video_size_;

  // Guards current_frame_, which is written on the PipeWire loop thread and
  // consumed from the capturer thread.
  webrtc::Mutex current_frame_lock_;
  std::unique_ptr<BasicDesktopFrame> current_frame_;

  // DRM modifier of the negotiated format (DRM_FORMAT_MOD_INVALID when the
  // stream uses shared-memory buffers instead of DMA-BUF).
  int64_t modifier_;
  std::unique_ptr<EglDmaBuf> egl_dmabuf_;

  // PipeWire types
  struct pw_context* pw_context_ = nullptr;
  struct pw_core* pw_core_ = nullptr;
  struct pw_stream* pw_stream_ = nullptr;
  struct pw_thread_loop* pw_main_loop_ = nullptr;

  spa_hook spa_core_listener_;
  spa_hook spa_stream_listener_;

  // A number used to verify all previous methods and the resulting
  // events have been handled.
  int server_version_sync_ = 0;
  // Version of the running PipeWire server we communicate with
  PipeWireVersion pw_server_version_;
  // Version of the library used to run our code
  PipeWireVersion pw_client_version_;

  // event handlers
  pw_core_events pw_core_events_ = {};
  pw_stream_events pw_stream_events_ = {};

  struct spa_video_info_raw spa_video_format_;

  // Copies (or converts) the received buffer into current_frame_.
  void ProcessBuffer(pw_buffer* buffer);
  void ConvertRGBxToBGRx(uint8_t* frame, uint32_t size);

  // PipeWire callbacks
  static void OnCoreError(void* data,
                          uint32_t id,
                          int seq,
                          int res,
                          const char* message);
  static void OnCoreDone(void* user_data, uint32_t id, int seq);
  static void OnCoreInfo(void* user_data, const pw_core_info* info);
  static void OnStreamParamChanged(void* data,
                                   uint32_t id,
                                   const struct spa_pod* format);
  static void OnStreamStateChanged(void* data,
                                   pw_stream_state old_state,
                                   pw_stream_state state,
                                   const char* error_message);
  static void OnStreamProcess(void* data);
};
// Compares two PipeWire versions lexicographically. An unknown current
// version ({0, 0, 0}) never satisfies a requirement, so false is returned
// for it.
bool operator>=(const PipeWireVersion& current_pw_version,
                const PipeWireVersion& required_pw_version) {
  const bool version_unknown = current_pw_version.major == 0 &&
                               current_pw_version.minor == 0 &&
                               current_pw_version.micro == 0;
  if (version_unknown) {
    return false;
  }

  return std::tie(current_pw_version.major, current_pw_version.minor,
                  current_pw_version.micro) >=
         std::tie(required_pw_version.major, required_pw_version.minor,
                  required_pw_version.micro);
}
// Mirror of operator>= above: an unknown current version ({0, 0, 0})
// satisfies no bound, so false is returned for it.
bool operator<=(const PipeWireVersion& current_pw_version,
                const PipeWireVersion& required_pw_version) {
  const bool version_unknown = current_pw_version.major == 0 &&
                               current_pw_version.minor == 0 &&
                               current_pw_version.micro == 0;
  if (version_unknown) {
    return false;
  }

  return std::tie(current_pw_version.major, current_pw_version.minor,
                  current_pw_version.micro) <=
         std::tie(required_pw_version.major, required_pw_version.minor,
                  required_pw_version.micro);
}
// static
// Invoked by PipeWire when the remote core reports an error.
void SharedScreenCastStreamPrivate::OnCoreError(void* data,
                                                uint32_t id,
                                                int seq,
                                                int res,
                                                const char* message) {
  auto* stream = static_cast<SharedScreenCastStreamPrivate*>(data);
  RTC_DCHECK(stream);

  RTC_LOG(LS_ERROR) << "PipeWire remote error: " << message;
}
// static
// Records the server version so later format negotiation can decide whether
// DMA-BUF support may be used.
void SharedScreenCastStreamPrivate::OnCoreInfo(void* data,
                                               const pw_core_info* info) {
  auto* that = static_cast<SharedScreenCastStreamPrivate*>(data);
  RTC_DCHECK(that);

  that->pw_server_version_ = ParsePipeWireVersion(info->version);
}
void SharedScreenCastStreamPrivate::OnCoreDone(void* data,
uint32_t id,
int seq) {
const SharedScreenCastStreamPrivate* stream =
static_cast<SharedScreenCastStreamPrivate*>(data);
RTC_DCHECK(stream);
if (id == PW_ID_CORE && stream->server_version_sync_ == seq) {
pw_thread_loop_signal(stream->pw_main_loop_, false);
}
}
// static
void SharedScreenCastStreamPrivate::OnStreamStateChanged(
void* data,
pw_stream_state old_state,
pw_stream_state state,
const char* error_message) {
SharedScreenCastStreamPrivate* that =
static_cast<SharedScreenCastStreamPrivate*>(data);
RTC_DCHECK(that);
switch (state) {
case PW_STREAM_STATE_ERROR:
RTC_LOG(LS_ERROR) << "PipeWire stream state error: " << error_message;
break;
case PW_STREAM_STATE_PAUSED:
case PW_STREAM_STATE_STREAMING:
case PW_STREAM_STATE_UNCONNECTED:
case PW_STREAM_STATE_CONNECTING:
break;
}
}
// static
void SharedScreenCastStreamPrivate::OnStreamParamChanged(
void* data,
uint32_t id,
const struct spa_pod* format) {
SharedScreenCastStreamPrivate* that =
static_cast<SharedScreenCastStreamPrivate*>(data);
RTC_DCHECK(that);
RTC_LOG(LS_INFO) << "PipeWire stream format changed.";
if (!format || id != SPA_PARAM_Format) {
return;
}
spa_format_video_raw_parse(format, &that->spa_video_format_);
auto width = that->spa_video_format_.size.width;
auto height = that->spa_video_format_.size.height;
auto stride = SPA_ROUND_UP_N(width * kBytesPerPixel, 4);
auto size = height * stride;
that->desktop_size_ = DesktopSize(width, height);
uint8_t buffer[1024] = {};
auto builder = spa_pod_builder{buffer, sizeof(buffer)};
// Setup buffers and meta header for new format.
// When SPA_FORMAT_VIDEO_modifier is present we can use DMA-BUFs as
// the server announces support for it.
// See https://github.com/PipeWire/pipewire/blob/master/doc/dma-buf.dox
const bool has_modifier =
spa_pod_find_prop(format, nullptr, SPA_FORMAT_VIDEO_modifier);
that->modifier_ =
has_modifier ? that->spa_video_format_.modifier : DRM_FORMAT_MOD_INVALID;
std::vector<const spa_pod*> params;
const int buffer_types =
has_modifier || (that->pw_server_version_ >= kDmaBufMinVersion)
? (1 << SPA_DATA_DmaBuf) | (1 << SPA_DATA_MemFd) |
(1 << SPA_DATA_MemPtr)
: (1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr);
params.push_back(reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
&builder, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers,
SPA_PARAM_BUFFERS_size, SPA_POD_Int(size), SPA_PARAM_BUFFERS_stride,
SPA_POD_Int(stride), SPA_PARAM_BUFFERS_buffers,
SPA_POD_CHOICE_RANGE_Int(8, 1, 32), SPA_PARAM_BUFFERS_dataType,
SPA_POD_CHOICE_FLAGS_Int(buffer_types))));
params.push_back(reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
&builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type,
SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size,
SPA_POD_Int(sizeof(struct spa_meta_header)))));
params.push_back(reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
&builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type,
SPA_POD_Id(SPA_META_VideoCrop), SPA_PARAM_META_size,
SPA_POD_Int(sizeof(struct spa_meta_region)))));
pw_stream_update_params(that->pw_stream_, params.data(), params.size());
}
// static
void SharedScreenCastStreamPrivate::OnStreamProcess(void* data) {
SharedScreenCastStreamPrivate* that =
static_cast<SharedScreenCastStreamPrivate*>(data);
RTC_DCHECK(that);
struct pw_buffer* next_buffer;
struct pw_buffer* buffer = nullptr;
next_buffer = pw_stream_dequeue_buffer(that->pw_stream_);
while (next_buffer) {
buffer = next_buffer;
next_buffer = pw_stream_dequeue_buffer(that->pw_stream_);
if (next_buffer) {
pw_stream_queue_buffer(that->pw_stream_, buffer);
}
}
if (!buffer) {
return;
}
that->ProcessBuffer(buffer);
pw_stream_queue_buffer(that->pw_stream_, buffer);
}
// All PipeWire state is created lazily in StartScreenCastStream().
SharedScreenCastStreamPrivate::SharedScreenCastStreamPrivate() {}
SharedScreenCastStreamPrivate::~SharedScreenCastStreamPrivate() {
  // Teardown order matters: stop the loop thread first so no callbacks run
  // while the stream/core/context are destroyed, and destroy the loop itself
  // last because the context was created on it.
  if (pw_main_loop_) {
    pw_thread_loop_stop(pw_main_loop_);
  }

  if (pw_stream_) {
    pw_stream_destroy(pw_stream_);
  }

  if (pw_core_) {
    pw_core_disconnect(pw_core_);
  }

  if (pw_context_) {
    pw_context_destroy(pw_context_);
  }

  if (pw_main_loop_) {
    pw_thread_loop_destroy(pw_main_loop_);
  }
}
// Connects to the PipeWire server over `fd` and starts consuming the stream
// identified by `stream_node_id`. Returns false when any setup step fails;
// partially created objects are released by the destructor.
bool SharedScreenCastStreamPrivate::StartScreenCastStream(
    uint32_t stream_node_id,
    int fd) {
#if defined(WEBRTC_DLOPEN_PIPEWIRE)
  StubPathMap paths;

  // Check if the PipeWire and DRM libraries are available.
  paths[kModulePipewire].push_back(kPipeWireLib);
  paths[kModuleDrm].push_back(kDrmLib);
  if (!InitializeStubs(paths)) {
    RTC_LOG(LS_ERROR) << "Failed to load the PipeWire library and symbols.";
    return false;
  }
#endif  // defined(WEBRTC_DLOPEN_PIPEWIRE)
  egl_dmabuf_ = std::make_unique<EglDmaBuf>();

  pw_stream_node_id_ = stream_node_id;
  pw_fd_ = fd;

  pw_init(/*argc=*/nullptr, /*argv=*/nullptr);

  pw_main_loop_ = pw_thread_loop_new("pipewire-main-loop", nullptr);

  pw_context_ =
      pw_context_new(pw_thread_loop_get_loop(pw_main_loop_), nullptr, 0);
  if (!pw_context_) {
    RTC_LOG(LS_ERROR) << "Failed to create PipeWire context";
    return false;
  }

  if (pw_thread_loop_start(pw_main_loop_) < 0) {
    RTC_LOG(LS_ERROR) << "Failed to start main PipeWire loop";
    return false;
  }

  pw_client_version_ = ParsePipeWireVersion(pw_get_library_version());

  // Initialize event handlers, remote end and stream-related.
  pw_core_events_.version = PW_VERSION_CORE_EVENTS;
  pw_core_events_.info = &OnCoreInfo;
  pw_core_events_.done = &OnCoreDone;
  pw_core_events_.error = &OnCoreError;

  pw_stream_events_.version = PW_VERSION_STREAM_EVENTS;
  pw_stream_events_.state_changed = &OnStreamStateChanged;
  pw_stream_events_.param_changed = &OnStreamParamChanged;
  pw_stream_events_.process = &OnStreamProcess;

  {
    // All PipeWire objects below must be created with the loop locked.
    PipeWireThreadLoopLock thread_loop_lock(pw_main_loop_);

    pw_core_ = pw_context_connect_fd(pw_context_, pw_fd_, nullptr, 0);
    if (!pw_core_) {
      RTC_LOG(LS_ERROR) << "Failed to connect PipeWire context";
      return false;
    }

    pw_core_add_listener(pw_core_, &spa_core_listener_, &pw_core_events_, this);

    // Block until the server has processed our sync request; OnCoreDone()
    // signals the loop once the server version (OnCoreInfo) is known.
    server_version_sync_ =
        pw_core_sync(pw_core_, PW_ID_CORE, server_version_sync_);
    pw_thread_loop_wait(pw_main_loop_);

    pw_properties* reuseProps =
        pw_properties_new_string("pipewire.client.reuse=1");
    pw_stream_ = pw_stream_new(pw_core_, "webrtc-consume-stream", reuseProps);
    if (!pw_stream_) {
      RTC_LOG(LS_ERROR) << "Failed to create PipeWire stream";
      return false;
    }

    pw_stream_add_listener(pw_stream_, &spa_stream_listener_,
                           &pw_stream_events_, this);
    uint8_t buffer[2048] = {};

    std::vector<uint64_t> modifiers;

    spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)};
    std::vector<const spa_pod*> params;
    const bool has_required_pw_client_version =
        pw_client_version_ >= kDmaBufModifierMinVersion;
    const bool has_required_pw_server_version =
        pw_server_version_ >= kDmaBufModifierMinVersion;
    for (uint32_t format : {SPA_VIDEO_FORMAT_BGRA, SPA_VIDEO_FORMAT_RGBA,
                            SPA_VIDEO_FORMAT_BGRx, SPA_VIDEO_FORMAT_RGBx}) {
      // Modifiers can be used with PipeWire >= 0.3.33
      if (has_required_pw_client_version && has_required_pw_server_version) {
        modifiers = egl_dmabuf_->QueryDmaBufModifiers(format);

        if (!modifiers.empty()) {
          params.push_back(BuildFormat(&builder, format, modifiers));
        }
      }

      // Always offer the plain (no-modifier) variant as a fallback.
      params.push_back(BuildFormat(&builder, format, /*modifiers=*/{}));
    }

    if (pw_stream_connect(pw_stream_, PW_DIRECTION_INPUT, pw_stream_node_id_,
                          PW_STREAM_FLAG_AUTOCONNECT, params.data(),
                          params.size()) != 0) {
      RTC_LOG(LS_ERROR) << "Could not connect receiving stream.";
      return false;
    }

    RTC_LOG(LS_INFO) << "PipeWire remote opened.";
  }

  return true;
}
// Disconnects from the PipeWire stream. Safe to call even when the stream
// was never successfully created.
void SharedScreenCastStreamPrivate::StopScreenCastStream() {
  if (!pw_stream_) {
    return;
  }
  pw_stream_disconnect(pw_stream_);
}
// Hands the most recently captured frame to the caller, or nullptr when no
// new frame has arrived since the previous call (ownership is transferred,
// so a second call without a fresh buffer yields nullptr).
std::unique_ptr<BasicDesktopFrame>
SharedScreenCastStreamPrivate::CaptureFrame() {
  webrtc::MutexLock lock(&current_frame_lock_);

  const bool have_frame = current_frame_ && current_frame_->data();
  if (!have_frame) {
    return nullptr;
  }

  return std::move(current_frame_);
}
// Converts an incoming PipeWire buffer (MemFd, DmaBuf or MemPtr backed) into
// current_frame_, honoring the optional video-crop metadata. Runs on the
// PipeWire loop thread.
void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) {
  spa_buffer* spa_buffer = buffer->buffer;
  ScopedBuf map;
  std::unique_ptr<uint8_t[]> src_unique_ptr;
  uint8_t* src = nullptr;

  if (spa_buffer->datas[0].chunk->size == 0) {
    RTC_LOG(LS_ERROR) << "Failed to get video stream: Zero size.";
    return;
  }

  if (spa_buffer->datas[0].type == SPA_DATA_MemFd) {
    // Shared-memory file descriptor: map the whole region read-only; the
    // ScopedBuf unmaps it when this function returns.
    map.initialize(
        static_cast<uint8_t*>(
            mmap(nullptr,
                 spa_buffer->datas[0].maxsize + spa_buffer->datas[0].mapoffset,
                 PROT_READ, MAP_PRIVATE, spa_buffer->datas[0].fd, 0)),
        spa_buffer->datas[0].maxsize + spa_buffer->datas[0].mapoffset,
        spa_buffer->datas[0].fd);

    if (!map) {
      RTC_LOG(LS_ERROR) << "Failed to mmap the memory: "
                        << std::strerror(errno);
      return;
    }

    src = SPA_MEMBER(map.get(), spa_buffer->datas[0].mapoffset, uint8_t);
  } else if (spa_buffer->datas[0].type == SPA_DATA_DmaBuf) {
    // DMA-BUF: import the planes through EGL and read the pixels back into
    // CPU memory.
    const uint n_planes = spa_buffer->n_datas;

    if (!n_planes) {
      return;
    }

    std::vector<EglDmaBuf::PlaneData> plane_datas;
    for (uint32_t i = 0; i < n_planes; ++i) {
      EglDmaBuf::PlaneData data = {
          static_cast<int32_t>(spa_buffer->datas[i].fd),
          static_cast<uint32_t>(spa_buffer->datas[i].chunk->stride),
          static_cast<uint32_t>(spa_buffer->datas[i].chunk->offset)};
      plane_datas.push_back(data);
    }

    src_unique_ptr = egl_dmabuf_->ImageFromDmaBuf(
        desktop_size_, spa_video_format_.format, plane_datas, modifier_);
    src = src_unique_ptr.get();
  } else if (spa_buffer->datas[0].type == SPA_DATA_MemPtr) {
    // Plain pointer into memory that is already mapped for us.
    src = static_cast<uint8_t*>(spa_buffer->datas[0].data);
  }

  if (!src) {
    return;
  }

  struct spa_meta_region* video_metadata =
      static_cast<struct spa_meta_region*>(spa_buffer_find_meta_data(
          spa_buffer, SPA_META_VideoCrop, sizeof(*video_metadata)));

  // Video size from metadata is bigger than an actual video stream size.
  // The metadata are wrong or we should up-scale the video...in both cases
  // just quit now.
  if (video_metadata && (video_metadata->region.size.width >
                             static_cast<uint32_t>(desktop_size_.width()) ||
                         video_metadata->region.size.height >
                             static_cast<uint32_t>(desktop_size_.height()))) {
    RTC_LOG(LS_ERROR) << "Stream metadata sizes are wrong!";
    return;
  }

  // Use video metadata when video size from metadata is set and smaller than
  // video stream size, so we need to adjust it.
  bool video_metadata_use = false;
  const struct spa_rectangle* video_metadata_size =
      video_metadata ? &video_metadata->region.size : nullptr;

  if (video_metadata_size && video_metadata_size->width != 0 &&
      video_metadata_size->height != 0 &&
      (static_cast<int>(video_metadata_size->width) < desktop_size_.width() ||
       static_cast<int>(video_metadata_size->height) <
           desktop_size_.height())) {
    video_metadata_use = true;
  }

  if (video_metadata_use) {
    video_size_ =
        DesktopSize(video_metadata_size->width, video_metadata_size->height);
  } else {
    video_size_ = desktop_size_;
  }

  // Apply the crop offsets only when the cropped region fits entirely inside
  // the stream; otherwise fall back to the top-left corner.
  uint32_t y_offset = video_metadata_use && (video_metadata->region.position.y +
                                                 video_size_.height() <=
                                             desktop_size_.height())
                          ? video_metadata->region.position.y
                          : 0;
  uint32_t x_offset = video_metadata_use && (video_metadata->region.position.x +
                                                 video_size_.width() <=
                                             desktop_size_.width())
                          ? video_metadata->region.position.x
                          : 0;

  webrtc::MutexLock lock(&current_frame_lock_);

  // Advance the source pointer to the first pixel of the cropped region.
  uint8_t* updated_src = src + (spa_buffer->datas[0].chunk->stride * y_offset) +
                         (kBytesPerPixel * x_offset);
  current_frame_ = std::make_unique<BasicDesktopFrame>(
      DesktopSize(video_size_.width(), video_size_.height()));
  current_frame_->CopyPixelsFrom(
      updated_src,
      (spa_buffer->datas[0].chunk->stride - (kBytesPerPixel * x_offset)),
      DesktopRect::MakeWH(video_size_.width(), video_size_.height()));

  if (spa_video_format_.format == SPA_VIDEO_FORMAT_RGBx ||
      spa_video_format_.format == SPA_VIDEO_FORMAT_RGBA) {
    uint8_t* tmp_src = current_frame_->data();
    for (int i = 0; i < video_size_.height(); ++i) {
      // If both sides decided to go with the RGBx format we need to convert it
      // to BGRx to match color format expected by WebRTC.
      ConvertRGBxToBGRx(tmp_src, current_frame_->stride());
      tmp_src += current_frame_->stride();
    }
  }
}
// Swaps the R and B channels in place over `size` bytes of 32-bit pixel
// data, converting RGBx/RGBA into the BGRx/BGRA ordering WebRTC expects.
void SharedScreenCastStreamPrivate::ConvertRGBxToBGRx(uint8_t* frame,
                                                      uint32_t size) {
  for (uint32_t i = 0; i < size; i += 4) {
    std::swap(frame[i], frame[i + 2]);
  }
}
SharedScreenCastStream::SharedScreenCastStream()
    : private_(std::make_unique<SharedScreenCastStreamPrivate>()) {}

SharedScreenCastStream::~SharedScreenCastStream() {}

// static
rtc::scoped_refptr<SharedScreenCastStream>
SharedScreenCastStream::CreateDefault() {
  return new SharedScreenCastStream();
}

// The public methods below simply forward to the private implementation.
bool SharedScreenCastStream::StartScreenCastStream(uint32_t stream_node_id,
                                                   int fd) {
  return private_->StartScreenCastStream(stream_node_id, fd);
}

void SharedScreenCastStream::StopScreenCastStream() {
  private_->StopScreenCastStream();
}

std::unique_ptr<BasicDesktopFrame> SharedScreenCastStream::CaptureFrame() {
  return private_->CaptureFrame();
}
} // namespace webrtc

View File

@ -0,0 +1,48 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SHARED_SCREENCAST_STREAM_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SHARED_SCREENCAST_STREAM_H_
#include <memory>
#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "modules/desktop_capture/desktop_frame.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
class SharedScreenCastStreamPrivate;

// Ref-counted wrapper around a PipeWire screen-cast stream. The stream is
// shared through DesktopCaptureOptions so multiple consumers (e.g. a
// capturer and a mouse cursor monitor) can use one PipeWire connection.
class RTC_EXPORT SharedScreenCastStream
    : public rtc::RefCountedNonVirtual<SharedScreenCastStream> {
 public:
  static rtc::scoped_refptr<SharedScreenCastStream> CreateDefault();

  // Connects to the PipeWire node `stream_node_id` over the already opened
  // PipeWire remote `fd`. Returns false when the stream cannot be started.
  bool StartScreenCastStream(uint32_t stream_node_id, int fd);
  void StopScreenCastStream();

  // Returns the most recently captured frame, or nullptr when no new frame
  // is available.
  std::unique_ptr<BasicDesktopFrame> CaptureFrame();

  ~SharedScreenCastStream();

 protected:
  SharedScreenCastStream();

 private:
  SharedScreenCastStream(const SharedScreenCastStream&) = delete;
  SharedScreenCastStream& operator=(const SharedScreenCastStream&) = delete;

  std::unique_ptr<SharedScreenCastStreamPrivate> private_;
};
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SHARED_SCREENCAST_STREAM_H_

View File

@ -28,7 +28,7 @@ std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawScreenCapturer(
const DesktopCaptureOptions& options) {
#if defined(WEBRTC_USE_PIPEWIRE)
if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) {
return BaseCapturerPipeWire::CreateRawCapturer(options);
return std::make_unique<BaseCapturerPipeWire>(options);
}
#endif // defined(WEBRTC_USE_PIPEWIRE)

View File

@ -28,7 +28,7 @@ std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawWindowCapturer(
const DesktopCaptureOptions& options) {
#if defined(WEBRTC_USE_PIPEWIRE)
if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) {
return BaseCapturerPipeWire::CreateRawCapturer(options);
return std::make_unique<BaseCapturerPipeWire>(options);
}
#endif // defined(WEBRTC_USE_PIPEWIRE)