Add pipewire/portal video capture support

This makes it possible to access cameras through xdg-desktop-portal and
pipewire.

For pipewire, shared state is needed between the enumeration and the
creation of the camera object. So a new API is added with a shared options
object that holds this state and can be used to choose which backend to try.

Bug: webrtc:13177
Change-Id: Iaad2333b41e4e6fb112f4558ea4b623e59afcbd1
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/261620
Reviewed-by: Alexander Cooper <alcooper@chromium.org>
Commit-Queue: Alexander Cooper <alcooper@chromium.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#39251}
Author: Michael Olbrich, 2023-01-30 14:10:49 +01:00; committed by WebRTC LUCI CQ
parent fad9a6dae7
commit f0be3bee1f
15 changed files with 1477 additions and 4 deletions
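
As a usage sketch (not part of the change itself): the same VideoCaptureOptions instance is configured, initialized asynchronously, and then passed to both device enumeration and capturer creation. The factory and options calls below come from the headers touched in this commit; the helper names and buffer sizes are illustrative only.

#include <memory>

#include "api/scoped_refptr.h"
#include "modules/video_capture/video_capture_factory.h"
#include "modules/video_capture/video_capture_options.h"

// Receives the asynchronous result of VideoCaptureOptions::Init().
class InitCallback : public webrtc::VideoCaptureOptions::Callback {
 public:
  void OnInitialized(webrtc::VideoCaptureOptions::Status status) override {
    ready_ = (status == webrtc::VideoCaptureOptions::Status::SUCCESS);
  }
  bool ready() const { return ready_; }

 private:
  bool ready_ = false;
};

// Opens the first camera reported for the given options. The same `options`
// object must be used for enumeration and creation because it owns the
// shared PipeWire session state.
rtc::scoped_refptr<webrtc::VideoCaptureModule> OpenFirstCamera(
    webrtc::VideoCaptureOptions* options) {
  std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
      webrtc::VideoCaptureFactory::CreateDeviceInfo(options));
  if (!info || info->NumberOfDevices() == 0)
    return nullptr;
  char name[256] = {};
  char unique_id[256] = {};
  if (info->GetDeviceName(0, name, sizeof(name), unique_id,
                          sizeof(unique_id)) != 0)
    return nullptr;
  return webrtc::VideoCaptureFactory::Create(options, unique_id);
}

Before calling OpenFirstCamera(), the application would call options->set_allow_pipewire(true) (or options->set_allow_v4l2(true)) and options->Init(&callback), waiting until OnInitialized() reports Status::SUCCESS; with PipeWire, the portal permission request happens during Init().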


@@ -33,6 +33,7 @@ int pw_stream_set_active(pw_stream *stream, bool active);
int pw_stream_update_params(pw_stream *stream, const spa_pod **params, uint32_t n_params);
uint32_t pw_stream_get_node_id(pw_stream *stream);
pw_stream_state pw_stream_get_state(pw_stream *stream, const char **error);
const char * pw_stream_state_as_string(enum pw_stream_state state);
// thread-loop.h
void pw_thread_loop_destroy(pw_thread_loop *loop);
@@ -50,3 +51,6 @@ void pw_context_destroy(pw_context *context);
pw_context *pw_context_new(pw_loop *main_loop, pw_properties *props, size_t user_data_size);
pw_core * pw_context_connect(pw_context *context, pw_properties *properties, size_t user_data_size);
pw_core * pw_context_connect_fd(pw_context *context, int fd, pw_properties *properties, size_t user_data_size);
// proxy.h
void pw_proxy_destroy(struct pw_proxy *proxy);


@@ -60,11 +60,16 @@ if (!build_with_chromium) {
"../../rtc_base:refcount",
"../../rtc_base:stringutils",
"../../rtc_base/synchronization:mutex",
"../../rtc_base/system:rtc_export",
"../../system_wrappers",
]
sources = [
"video_capture_options.cc",
"video_capture_options.h",
]
if (is_linux || is_chromeos) {
sources += [
"linux/device_info_linux.cc",
"linux/device_info_v4l2.cc",
"linux/device_info_v4l2.h",
@@ -73,9 +78,31 @@ if (!build_with_chromium) {
"linux/video_capture_v4l2.h",
]
deps += [ "../../media:rtc_media_base" ]
if (rtc_use_pipewire) {
sources += [
"linux/device_info_pipewire.cc",
"linux/device_info_pipewire.h",
"linux/pipewire_session.cc",
"linux/pipewire_session.h",
"linux/video_capture_pipewire.cc",
"linux/video_capture_pipewire.h",
]
configs += [ "../portal:pipewire_base" ]
public_configs = [ "../portal:pipewire_config" ]
deps += [
"../../api:refcountedbase",
"../../common_video",
"../../media:rtc_media_base",
"../portal",
]
}
}
if (is_win) {
sources += [
"windows/device_info_ds.cc",
"windows/device_info_ds.h",
"windows/help_functions_ds.cc",
@@ -104,7 +131,7 @@ if (!build_with_chromium) {
}
}
if (is_fuchsia) {
sources += [ "video_capture_factory_null.cc" ]
}
if (build_with_mozilla && is_android) {
@@ -114,7 +141,7 @@ if (!build_with_chromium) {
"/nsprpub/pr/include",
]
sources += [
"android/device_info_android.cc",
"android/video_capture_android.cc",
]


@@ -20,10 +20,14 @@
#include <vector>
#if defined(WEBRTC_USE_PIPEWIRE)
#include "modules/video_capture/linux/device_info_pipewire.h"
#endif
#include "modules/video_capture/linux/device_info_v4l2.h" #include "modules/video_capture/linux/device_info_v4l2.h"
#include "modules/video_capture/video_capture.h" #include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_defines.h" #include "modules/video_capture/video_capture_defines.h"
#include "modules/video_capture/video_capture_impl.h" #include "modules/video_capture/video_capture_impl.h"
#include "modules/video_capture/video_capture_options.h"
#include "rtc_base/logging.h" #include "rtc_base/logging.h"
namespace webrtc { namespace webrtc {
@ -31,5 +35,18 @@ namespace videocapturemodule {
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
return new videocapturemodule::DeviceInfoV4l2(); return new videocapturemodule::DeviceInfoV4l2();
} }
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
VideoCaptureOptions* options) {
#if defined(WEBRTC_USE_PIPEWIRE)
if (options->allow_pipewire()) {
return new videocapturemodule::DeviceInfoPipeWire(options);
}
#endif
if (options->allow_v4l2())
return new videocapturemodule::DeviceInfoV4l2();
return nullptr;
}
} // namespace videocapturemodule
} // namespace webrtc


@@ -0,0 +1,111 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_capture/linux/device_info_pipewire.h"
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <vector>
#include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_defines.h"
#include "modules/video_capture/video_capture_impl.h"
#include "modules/video_capture/video_capture_options.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace videocapturemodule {
DeviceInfoPipeWire::DeviceInfoPipeWire(VideoCaptureOptions* options)
: DeviceInfoImpl(), pipewire_session_(options->pipewire_session()) {}
int32_t DeviceInfoPipeWire::Init() {
return 0;
}
DeviceInfoPipeWire::~DeviceInfoPipeWire() = default;
uint32_t DeviceInfoPipeWire::NumberOfDevices() {
return pipewire_session_->nodes().size();
}
int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber,
char* deviceNameUTF8,
uint32_t deviceNameLength,
char* deviceUniqueIdUTF8,
uint32_t deviceUniqueIdUTF8Length,
char* productUniqueIdUTF8,
uint32_t productUniqueIdUTF8Length) {
if (deviceNumber >= NumberOfDevices())
return -1;
const PipeWireNode& node = pipewire_session_->nodes().at(deviceNumber);
if (deviceNameLength <= node.display_name().length()) {
RTC_LOG(LS_INFO) << "deviceNameUTF8 buffer passed is too small";
return -1;
}
if (deviceUniqueIdUTF8Length <= node.unique_id().length()) {
RTC_LOG(LS_INFO) << "deviceUniqueIdUTF8 buffer passed is too small";
return -1;
}
if (productUniqueIdUTF8 &&
productUniqueIdUTF8Length <= node.model_id().length()) {
RTC_LOG(LS_INFO) << "productUniqueIdUTF8 buffer passed is too small";
return -1;
}
memset(deviceNameUTF8, 0, deviceNameLength);
node.display_name().copy(deviceNameUTF8, deviceNameLength);
memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
node.unique_id().copy(deviceUniqueIdUTF8, deviceUniqueIdUTF8Length);
if (productUniqueIdUTF8) {
memset(productUniqueIdUTF8, 0, productUniqueIdUTF8Length);
node.model_id().copy(productUniqueIdUTF8, productUniqueIdUTF8Length);
}
return 0;
}
int32_t DeviceInfoPipeWire::CreateCapabilityMap(
const char* deviceUniqueIdUTF8) {
for (auto& node : pipewire_session_->nodes()) {
if (node.unique_id().compare(deviceUniqueIdUTF8) != 0)
continue;
_captureCapabilities = node.capabilities();
_lastUsedDeviceNameLength = node.unique_id().length();
_lastUsedDeviceName = static_cast<char*>(
realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1));
memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8,
_lastUsedDeviceNameLength + 1);
return _captureCapabilities.size();
}
return -1;
}
int32_t DeviceInfoPipeWire::DisplayCaptureSettingsDialogBox(
const char* /*deviceUniqueIdUTF8*/,
const char* /*dialogTitleUTF8*/,
void* /*parentWindow*/,
uint32_t /*positionX*/,
uint32_t /*positionY*/) {
return -1;
}
} // namespace videocapturemodule
} // namespace webrtc


@@ -0,0 +1,51 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_PIPEWIRE_H_
#define MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_PIPEWIRE_H_
#include <stdint.h>
#include "modules/video_capture/device_info_impl.h"
#include "modules/video_capture/linux/pipewire_session.h"
namespace webrtc {
namespace videocapturemodule {
class DeviceInfoPipeWire : public DeviceInfoImpl {
public:
explicit DeviceInfoPipeWire(VideoCaptureOptions* options);
~DeviceInfoPipeWire() override;
uint32_t NumberOfDevices() override;
int32_t GetDeviceName(uint32_t deviceNumber,
char* deviceNameUTF8,
uint32_t deviceNameLength,
char* deviceUniqueIdUTF8,
uint32_t deviceUniqueIdUTF8Length,
char* productUniqueIdUTF8 = nullptr,
uint32_t productUniqueIdUTF8Length = 0) override;
/*
* Fills the member variable _captureCapabilities with capabilities for the
* given device name.
*/
int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override
RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);
int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/,
const char* /*dialogTitleUTF8*/,
void* /*parentWindow*/,
uint32_t /*positionX*/,
uint32_t /*positionY*/) override;
int32_t Init() override;
private:
rtc::scoped_refptr<PipeWireSession> pipewire_session_;
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_PIPEWIRE_H_


@@ -0,0 +1,530 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_capture/linux/pipewire_session.h"
#include <gio/gunixfdlist.h>
#include <spa/monitor/device.h>
#include <spa/param/format-utils.h>
#include <spa/param/format.h>
#include <spa/param/video/raw.h>
#include <spa/pod/parser.h>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/portal/pipewire_utils.h"
#include "modules/portal/xdg_desktop_portal_utils.h"
#include "modules/video_capture/device_info_impl.h"
#include "rtc_base/string_encode.h"
#include "rtc_base/string_to_number.h"
namespace webrtc {
namespace videocapturemodule {
using xdg_portal::RequestSessionProxy;
constexpr char kCameraInterfaceName[] = "org.freedesktop.portal.Camera";
VideoType PipeWireRawFormatToVideoType(uint32_t id) {
switch (id) {
case SPA_VIDEO_FORMAT_I420:
return VideoType::kI420;
case SPA_VIDEO_FORMAT_NV12:
return VideoType::kNV12;
case SPA_VIDEO_FORMAT_YUY2:
return VideoType::kYUY2;
case SPA_VIDEO_FORMAT_UYVY:
return VideoType::kUYVY;
case SPA_VIDEO_FORMAT_RGB:
return VideoType::kRGB24;
default:
return VideoType::kUnknown;
}
}
PipeWireNode::PipeWireNode(PipeWireSession* session,
uint32_t id,
const spa_dict* props)
: session_(session),
id_(id),
display_name_(spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)),
unique_id_(rtc::ToString(id)) {
RTC_LOG(LS_VERBOSE) << "Found Camera: " << display_name_;
proxy_ = static_cast<pw_proxy*>(pw_registry_bind(
session_->pw_registry_, id, PW_TYPE_INTERFACE_Node, PW_VERSION_NODE, 0));
static const pw_node_events node_events{
.version = PW_VERSION_NODE_EVENTS,
.info = OnNodeInfo,
.param = OnNodeParam,
};
pw_node_add_listener(proxy_, &node_listener_, &node_events, this);
}
PipeWireNode::~PipeWireNode() {
pw_proxy_destroy(proxy_);
spa_hook_remove(&node_listener_);
}
// static
void PipeWireNode::OnNodeInfo(void* data, const pw_node_info* info) {
PipeWireNode* that = static_cast<PipeWireNode*>(data);
if (info->change_mask & PW_NODE_CHANGE_MASK_PROPS) {
const char* vid_str;
const char* pid_str;
absl::optional<int> vid;
absl::optional<int> pid;
vid_str = spa_dict_lookup(info->props, SPA_KEY_DEVICE_VENDOR_ID);
pid_str = spa_dict_lookup(info->props, SPA_KEY_DEVICE_PRODUCT_ID);
vid = vid_str ? rtc::StringToNumber<int>(vid_str) : absl::nullopt;
pid = pid_str ? rtc::StringToNumber<int>(pid_str) : absl::nullopt;
if (vid && pid) {
char model_str[10];
snprintf(model_str, sizeof(model_str), "%04x:%04x", vid.value(),
pid.value());
that->model_id_ = model_str;
}
} else if (info->change_mask & PW_NODE_CHANGE_MASK_PARAMS) {
for (uint32_t i = 0; i < info->n_params; i++) {
uint32_t id = info->params[i].id;
if (id == SPA_PARAM_EnumFormat &&
info->params[i].flags & SPA_PARAM_INFO_READ) {
pw_node_enum_params(that->proxy_, 0, id, 0, UINT32_MAX, nullptr);
break;
}
}
that->session_->PipeWireSync();
}
}
// static
void PipeWireNode::OnNodeParam(void* data,
int seq,
uint32_t id,
uint32_t index,
uint32_t next,
const spa_pod* param) {
PipeWireNode* that = static_cast<PipeWireNode*>(data);
auto* obj = reinterpret_cast<const spa_pod_object*>(param);
const spa_pod_prop* prop = nullptr;
VideoCaptureCapability cap;
spa_pod* val;
uint32_t n_items, choice;
cap.videoType = VideoType::kUnknown;
cap.maxFPS = 0;
prop = spa_pod_object_find_prop(obj, prop, SPA_FORMAT_VIDEO_framerate);
if (prop) {
val = spa_pod_get_values(&prop->value, &n_items, &choice);
if (val->type == SPA_TYPE_Fraction) {
spa_fraction* fract;
fract = static_cast<spa_fraction*>(SPA_POD_BODY(val));
if (choice == SPA_CHOICE_None)
cap.maxFPS = 1.0 * fract[0].num / fract[0].denom;
else if (choice == SPA_CHOICE_Range && fract[1].num > 0)
cap.maxFPS = 1.0 * fract[1].num / fract[1].denom;
}
}
prop = spa_pod_object_find_prop(obj, prop, SPA_FORMAT_VIDEO_size);
if (!prop)
return;
val = spa_pod_get_values(&prop->value, &n_items, &choice);
if (val->type != SPA_TYPE_Rectangle)
return;
if (choice != SPA_CHOICE_None)
return;
if (!ParseFormat(param, &cap))
return;
spa_rectangle* rect;
rect = static_cast<spa_rectangle*>(SPA_POD_BODY(val));
cap.width = rect[0].width;
cap.height = rect[0].height;
RTC_LOG(LS_VERBOSE) << "Found Format(" << that->display_name_
<< "): " << static_cast<int>(cap.videoType) << "("
<< cap.width << "x" << cap.height << "@" << cap.maxFPS
<< ")";
that->capabilities_.push_back(cap);
}
// static
bool PipeWireNode::ParseFormat(const spa_pod* param,
VideoCaptureCapability* cap) {
auto* obj = reinterpret_cast<const spa_pod_object*>(param);
uint32_t media_type, media_subtype;
if (spa_format_parse(param, &media_type, &media_subtype) < 0) {
RTC_LOG(LS_ERROR) << "Failed to parse video format.";
return false;
}
if (media_type != SPA_MEDIA_TYPE_video)
return false;
if (media_subtype == SPA_MEDIA_SUBTYPE_raw) {
const spa_pod_prop* prop = nullptr;
uint32_t n_items, choice;
spa_pod* val;
uint32_t* id;
prop = spa_pod_object_find_prop(obj, prop, SPA_FORMAT_VIDEO_format);
if (!prop)
return false;
val = spa_pod_get_values(&prop->value, &n_items, &choice);
if (val->type != SPA_TYPE_Id)
return false;
if (choice != SPA_CHOICE_None)
return false;
id = static_cast<uint32_t*>(SPA_POD_BODY(val));
cap->videoType = PipeWireRawFormatToVideoType(id[0]);
if (cap->videoType == VideoType::kUnknown) {
RTC_LOG(LS_INFO) << "Unsupported PipeWire pixel format " << id[0];
return false;
}
} else if (media_subtype == SPA_MEDIA_SUBTYPE_mjpg) {
cap->videoType = VideoType::kMJPEG;
} else {
RTC_LOG(LS_INFO) << "Unsupported PipeWire media subtype " << media_subtype;
}
return cap->videoType != VideoType::kUnknown;
}
PipeWireSession::PipeWireSession()
: status_(VideoCaptureOptions::Status::UNINITIALIZED) {}
PipeWireSession::~PipeWireSession() {
Cleanup();
}
void PipeWireSession::Init(VideoCaptureOptions::Callback* callback) {
callback_ = callback;
cancellable_ = g_cancellable_new();
Scoped<GError> error;
RequestSessionProxy(kCameraInterfaceName, OnProxyRequested, cancellable_,
this);
}
// static
void PipeWireSession::OnProxyRequested(GObject* gobject,
GAsyncResult* result,
gpointer user_data) {
PipeWireSession* that = static_cast<PipeWireSession*>(user_data);
Scoped<GError> error;
GDBusProxy* proxy = g_dbus_proxy_new_finish(result, error.receive());
if (!proxy) {
// Ignore the error caused by user cancelling the request via `cancellable_`
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to get a proxy for the portal: "
<< error->message;
that->Finish(VideoCaptureOptions::Status::DENIED);
return;
}
RTC_LOG(LS_VERBOSE) << "Successfully created proxy for the portal.";
that->ProxyRequested(proxy);
}
void PipeWireSession::ProxyRequested(GDBusProxy* proxy) {
GVariantBuilder builder;
Scoped<char> variant_string;
std::string access_handle;
proxy_ = proxy;
connection_ = g_dbus_proxy_get_connection(proxy);
g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
variant_string =
g_strdup_printf("capture%d", g_random_int_range(0, G_MAXINT));
g_variant_builder_add(&builder, "{sv}", "handle_token",
g_variant_new_string(variant_string.get()));
access_handle =
xdg_portal::PrepareSignalHandle(variant_string.get(), connection_);
access_request_signal_id_ = xdg_portal::SetupRequestResponseSignal(
access_handle.c_str(), OnResponseSignalEmitted, this, connection_);
RTC_LOG(LS_VERBOSE) << "Requesting camera access from the portal.";
g_dbus_proxy_call(proxy_, "AccessCamera", g_variant_new("(a{sv})", &builder),
G_DBUS_CALL_FLAGS_NONE, /*timeout_msec=*/-1, cancellable_,
reinterpret_cast<GAsyncReadyCallback>(OnAccessResponse),
this);
}
// static
void PipeWireSession::OnAccessResponse(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data) {
PipeWireSession* that = static_cast<PipeWireSession*>(user_data);
RTC_DCHECK(that);
Scoped<GError> error;
Scoped<GVariant> variant(
g_dbus_proxy_call_finish(proxy, result, error.receive()));
if (!variant) {
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to access portal:" << error->message;
if (that->access_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(that->connection_,
that->access_request_signal_id_);
that->access_request_signal_id_ = 0;
}
that->Finish(VideoCaptureOptions::Status::ERROR);
}
}
// static
void PipeWireSession::OnResponseSignalEmitted(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data) {
PipeWireSession* that = static_cast<PipeWireSession*>(user_data);
RTC_DCHECK(that);
uint32_t portal_response;
g_variant_get(parameters, "(u@a{sv})", &portal_response, nullptr);
if (portal_response) {
RTC_LOG(LS_INFO) << "Camera access denied by the XDG portal.";
that->Finish(VideoCaptureOptions::Status::DENIED);
return;
}
RTC_LOG(LS_VERBOSE) << "Camera access granted by the XDG portal.";
GVariantBuilder builder;
g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
g_dbus_proxy_call(
that->proxy_, "OpenPipeWireRemote", g_variant_new("(a{sv})", &builder),
G_DBUS_CALL_FLAGS_NONE, /*timeout_msec=*/-1, that->cancellable_,
reinterpret_cast<GAsyncReadyCallback>(OnOpenResponse), that);
}
void PipeWireSession::OnOpenResponse(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data) {
PipeWireSession* that = static_cast<PipeWireSession*>(user_data);
RTC_DCHECK(that);
Scoped<GError> error;
Scoped<GUnixFDList> outlist;
Scoped<GVariant> variant(g_dbus_proxy_call_with_unix_fd_list_finish(
proxy, outlist.receive(), result, error.receive()));
if (!variant) {
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to open PipeWire remote:" << error->message;
if (that->access_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(that->connection_,
that->access_request_signal_id_);
that->access_request_signal_id_ = 0;
}
that->Finish(VideoCaptureOptions::Status::ERROR);
return;
}
int32_t index;
g_variant_get(variant.get(), "(h)", &index);
int fd = g_unix_fd_list_get(outlist.get(), index, error.receive());
if (fd == -1) {
RTC_LOG(LS_ERROR) << "Failed to get file descriptor from the list: "
<< error->message;
that->Finish(VideoCaptureOptions::Status::ERROR);
return;
}
if (!InitializePipeWire()) {
that->Finish(VideoCaptureOptions::Status::UNAVAILABLE);
return;
}
if (!that->StartPipeWire(fd))
that->Finish(VideoCaptureOptions::Status::ERROR);
}
void PipeWireSession::StopDBus() {
if (access_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(connection_,
access_request_signal_id_);
access_request_signal_id_ = 0;
}
if (cancellable_) {
g_cancellable_cancel(cancellable_);
g_object_unref(cancellable_);
cancellable_ = nullptr;
}
if (proxy_) {
g_object_unref(proxy_);
proxy_ = nullptr;
connection_ = nullptr;
}
}
bool PipeWireSession::StartPipeWire(int fd) {
pw_init(/*argc=*/nullptr, /*argv=*/nullptr);
pw_main_loop_ = pw_thread_loop_new("pipewire-main-loop", nullptr);
pw_context_ =
pw_context_new(pw_thread_loop_get_loop(pw_main_loop_), nullptr, 0);
if (!pw_context_) {
RTC_LOG(LS_ERROR) << "Failed to create PipeWire context";
return false;
}
pw_core_ = pw_context_connect_fd(pw_context_, fd, nullptr, 0);
if (!pw_core_) {
RTC_LOG(LS_ERROR) << "Failed to connect PipeWire context";
return false;
}
static const pw_core_events core_events{
.version = PW_VERSION_CORE_EVENTS,
.done = &OnCoreDone,
.error = &OnCoreError,
};
pw_core_add_listener(pw_core_, &core_listener_, &core_events, this);
static const pw_registry_events registry_events{
.version = PW_VERSION_REGISTRY_EVENTS,
.global = OnRegistryGlobal,
.global_remove = OnRegistryGlobalRemove,
};
pw_registry_ = pw_core_get_registry(pw_core_, PW_VERSION_REGISTRY, 0);
pw_registry_add_listener(pw_registry_, &registry_listener_, &registry_events,
this);
PipeWireSync();
if (pw_thread_loop_start(pw_main_loop_) < 0) {
RTC_LOG(LS_ERROR) << "Failed to start main PipeWire loop";
return false;
}
return true;
}
void PipeWireSession::StopPipeWire() {
if (pw_main_loop_)
pw_thread_loop_stop(pw_main_loop_);
if (pw_core_) {
pw_core_disconnect(pw_core_);
pw_core_ = nullptr;
}
if (pw_context_) {
pw_context_destroy(pw_context_);
pw_context_ = nullptr;
}
if (pw_main_loop_) {
pw_thread_loop_destroy(pw_main_loop_);
pw_main_loop_ = nullptr;
}
}
void PipeWireSession::PipeWireSync() {
sync_seq_ = pw_core_sync(pw_core_, PW_ID_CORE, sync_seq_);
}
// static
void PipeWireSession::OnCoreError(void* data,
uint32_t id,
int seq,
int res,
const char* message) {
RTC_LOG(LS_ERROR) << "PipeWire remote error: " << message;
}
// static
void PipeWireSession::OnCoreDone(void* data, uint32_t id, int seq) {
PipeWireSession* that = static_cast<PipeWireSession*>(data);
if (id == PW_ID_CORE) {
if (seq == that->sync_seq_) {
RTC_LOG(LS_VERBOSE) << "Enumerating PipeWire camera devices complete.";
that->Finish(VideoCaptureOptions::Status::SUCCESS);
}
}
}
// static
void PipeWireSession::OnRegistryGlobal(void* data,
uint32_t id,
uint32_t permissions,
const char* type,
uint32_t version,
const spa_dict* props) {
PipeWireSession* that = static_cast<PipeWireSession*>(data);
if (type != absl::string_view(PW_TYPE_INTERFACE_Node))
return;
if (!spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION))
return;
that->nodes_.emplace_back(that, id, props);
that->PipeWireSync();
}
// static
void PipeWireSession::OnRegistryGlobalRemove(void* data, uint32_t id) {
PipeWireSession* that = static_cast<PipeWireSession*>(data);
for (auto it = that->nodes_.begin(); it != that->nodes_.end(); ++it) {
if ((*it).id() == id) {
that->nodes_.erase(it);
break;
}
}
}
void PipeWireSession::Finish(VideoCaptureOptions::Status status) {
if (callback_) {
callback_->OnInitialized(status);
callback_ = nullptr;
}
}
void PipeWireSession::Cleanup() {
StopPipeWire();
StopDBus();
}
} // namespace videocapturemodule
} // namespace webrtc


@@ -0,0 +1,150 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_
#define MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_
#include <gio/gio.h>
#include <pipewire/core.h>
#include <pipewire/pipewire.h>
#include <deque>
#include <string>
#include <vector>
#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_options.h"
namespace webrtc {
namespace videocapturemodule {
class PipeWireSession;
class VideoCaptureModulePipeWire;
// PipeWireNode objects are the local representation of PipeWire node objects.
// The portal API ensures that only camera nodes are visible to the client,
// so each of them represents one camera that is available via PipeWire.
class PipeWireNode {
public:
PipeWireNode(PipeWireSession* session, uint32_t id, const spa_dict* props);
~PipeWireNode();
uint32_t id() const { return id_; }
std::string display_name() const { return display_name_; }
std::string unique_id() const { return unique_id_; }
std::string model_id() const { return model_id_; }
std::vector<VideoCaptureCapability> capabilities() const {
return capabilities_;
}
private:
static void OnNodeInfo(void* data, const pw_node_info* info);
static void OnNodeParam(void* data,
int seq,
uint32_t id,
uint32_t index,
uint32_t next,
const spa_pod* param);
static bool ParseFormat(const spa_pod* param, VideoCaptureCapability* cap);
pw_proxy* proxy_;
spa_hook node_listener_;
PipeWireSession* session_;
uint32_t id_;
std::string display_name_;
std::string unique_id_;
std::string model_id_;
std::vector<VideoCaptureCapability> capabilities_;
};
class PipeWireSession : public rtc::RefCountedNonVirtual<PipeWireSession> {
public:
PipeWireSession();
~PipeWireSession();
void Init(VideoCaptureOptions::Callback* callback);
void CancelInit();
const std::deque<PipeWireNode>& nodes() const { return nodes_; }
friend class PipeWireNode;
friend class VideoCaptureModulePipeWire;
private:
static void OnProxyRequested(GObject* object,
GAsyncResult* result,
gpointer user_data);
void ProxyRequested(GDBusProxy* proxy);
static void OnAccessResponse(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
static void OnResponseSignalEmitted(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data);
static void OnOpenResponse(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
void StopDBus();
bool StartPipeWire(int fd);
void StopPipeWire();
void PipeWireSync();
static void OnCoreError(void* data,
uint32_t id,
int seq,
int res,
const char* message);
static void OnCoreDone(void* data, uint32_t id, int seq);
static void OnRegistryGlobal(void* data,
uint32_t id,
uint32_t permissions,
const char* type,
uint32_t version,
const spa_dict* props);
static void OnRegistryGlobalRemove(void* data, uint32_t id);
void Finish(VideoCaptureOptions::Status status);
void Cleanup();
VideoCaptureOptions::Callback* callback_ = nullptr;
GDBusConnection* connection_ = nullptr;
GDBusProxy* proxy_ = nullptr;
GCancellable* cancellable_ = nullptr;
guint access_request_signal_id_ = 0;
VideoCaptureOptions::Status status_;
struct pw_thread_loop* pw_main_loop_ = nullptr;
struct pw_context* pw_context_ = nullptr;
struct pw_core* pw_core_ = nullptr;
struct spa_hook core_listener_;
struct pw_registry* pw_registry_ = nullptr;
struct spa_hook registry_listener_;
int sync_seq_ = 0;
std::deque<PipeWireNode> nodes_;
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_


@@ -24,8 +24,12 @@
#include "api/scoped_refptr.h"
#include "media/base/video_common.h"
#if defined(WEBRTC_USE_PIPEWIRE)
#include "modules/video_capture/linux/video_capture_pipewire.h"
#endif
#include "modules/video_capture/linux/video_capture_v4l2.h" #include "modules/video_capture/linux/video_capture_v4l2.h"
#include "modules/video_capture/video_capture.h" #include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_options.h"
#include "rtc_base/logging.h" #include "rtc_base/logging.h"
namespace webrtc { namespace webrtc {
@ -39,5 +43,26 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
return implementation; return implementation;
} }
rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
VideoCaptureOptions* options,
const char* deviceUniqueId) {
#if defined(WEBRTC_USE_PIPEWIRE)
if (options->allow_pipewire()) {
auto implementation =
rtc::make_ref_counted<VideoCaptureModulePipeWire>(options);
if (implementation->Init(deviceUniqueId) == 0)
return implementation;
}
#endif
if (options->allow_v4l2()) {
auto implementation = rtc::make_ref_counted<VideoCaptureModuleV4L2>();
if (implementation->Init(deviceUniqueId) == 0)
return implementation;
}
return nullptr;
}
} // namespace videocapturemodule
} // namespace webrtc


@@ -0,0 +1,331 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_capture/linux/video_capture_pipewire.h"
#include <spa/param/format.h>
#include <spa/param/video/format-utils.h>
#include <spa/pod/builder.h>
#include <spa/utils/result.h>
#include <vector>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/portal/pipewire_utils.h"
#include "rtc_base/logging.h"
#include "rtc_base/string_to_number.h"
namespace webrtc {
namespace videocapturemodule {
struct {
uint32_t spa_format;
VideoType video_type;
} constexpr kSupportedFormats[] = {
{SPA_VIDEO_FORMAT_I420, VideoType::kI420},
{SPA_VIDEO_FORMAT_NV12, VideoType::kNV12},
{SPA_VIDEO_FORMAT_YUY2, VideoType::kYUY2},
{SPA_VIDEO_FORMAT_UYVY, VideoType::kUYVY},
{SPA_VIDEO_FORMAT_RGB, VideoType::kRGB24},
};
VideoType VideoCaptureModulePipeWire::PipeWireRawFormatToVideoType(
uint32_t spa_format) {
for (const auto& spa_and_pixel_format : kSupportedFormats) {
if (spa_and_pixel_format.spa_format == spa_format)
return spa_and_pixel_format.video_type;
}
RTC_LOG(LS_INFO) << "Unsupported pixel format: " << spa_format;
return VideoType::kUnknown;
}
VideoCaptureModulePipeWire::VideoCaptureModulePipeWire(
VideoCaptureOptions* options)
: VideoCaptureImpl(), session_(options->pipewire_session()) {}
VideoCaptureModulePipeWire::~VideoCaptureModulePipeWire() {
StopCapture();
}
int32_t VideoCaptureModulePipeWire::Init(const char* deviceUniqueId) {
absl::optional<int> id;
id = rtc::StringToNumber<int>(deviceUniqueId);
if (id == absl::nullopt)
return -1;
node_id_ = id.value();
return 0;
}
static spa_pod* BuildFormat(spa_pod_builder* builder,
uint32_t format,
uint32_t width,
uint32_t height,
float frame_rate) {
spa_pod_frame frames[2];
spa_pod_builder_push_object(builder, &frames[0], SPA_TYPE_OBJECT_Format,
SPA_PARAM_EnumFormat);
spa_pod_builder_add(builder, SPA_FORMAT_mediaType,
SPA_POD_Id(SPA_MEDIA_TYPE_video), SPA_FORMAT_mediaSubtype,
SPA_POD_Id(format), 0);
if (format == SPA_MEDIA_SUBTYPE_raw) {
spa_pod_builder_prop(builder, SPA_FORMAT_VIDEO_format, 0);
spa_pod_builder_push_choice(builder, &frames[1], SPA_CHOICE_Enum, 0);
spa_pod_builder_id(builder, kSupportedFormats[0].spa_format);
for (const auto& spa_and_pixel_format : kSupportedFormats)
spa_pod_builder_id(builder, spa_and_pixel_format.spa_format);
spa_pod_builder_pop(builder, &frames[1]);
}
spa_rectangle preferred_size = spa_rectangle{width, height};
spa_rectangle min_size = spa_rectangle{1, 1};
spa_rectangle max_size = spa_rectangle{4096, 4096};
spa_pod_builder_add(
builder, SPA_FORMAT_VIDEO_size,
SPA_POD_CHOICE_RANGE_Rectangle(&preferred_size, &min_size, &max_size), 0);
spa_fraction preferred_frame_rate =
spa_fraction{static_cast<uint32_t>(frame_rate), 1};
spa_fraction min_frame_rate = spa_fraction{0, 1};
spa_fraction max_frame_rate = spa_fraction{INT32_MAX, 1};
spa_pod_builder_add(
builder, SPA_FORMAT_VIDEO_framerate,
SPA_POD_CHOICE_RANGE_Fraction(&preferred_frame_rate, &min_frame_rate,
&max_frame_rate),
0);
return static_cast<spa_pod*>(spa_pod_builder_pop(builder, &frames[0]));
}
int32_t VideoCaptureModulePipeWire::StartCapture(
const VideoCaptureCapability& capability) {
uint8_t buffer[1024] = {};
RTC_LOG(LS_VERBOSE) << "Creating new PipeWire stream for node " << node_id_;
PipeWireThreadLoopLock thread_loop_lock(session_->pw_main_loop_);
pw_properties* reuse_props =
pw_properties_new_string("pipewire.client.reuse=1");
stream_ = pw_stream_new(session_->pw_core_, "camera-stream", reuse_props);
if (!stream_) {
RTC_LOG(LS_ERROR) << "Failed to create camera stream!";
return -1;
}
static const pw_stream_events stream_events{
.version = PW_VERSION_STREAM_EVENTS,
.state_changed = &OnStreamStateChanged,
.param_changed = &OnStreamParamChanged,
.process = &OnStreamProcess,
};
pw_stream_add_listener(stream_, &stream_listener_, &stream_events, this);
spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)};
std::vector<const spa_pod*> params;
uint32_t width = capability.width;
uint32_t height = capability.height;
uint32_t frame_rate = capability.maxFPS;
bool prefer_jpeg = (width > 640) || (height > 480);
params.push_back(
BuildFormat(&builder, SPA_MEDIA_SUBTYPE_raw, width, height, frame_rate));
params.insert(
prefer_jpeg ? params.begin() : params.end(),
BuildFormat(&builder, SPA_MEDIA_SUBTYPE_mjpg, width, height, frame_rate));
int res = pw_stream_connect(
stream_, PW_DIRECTION_INPUT, node_id_,
static_cast<enum pw_stream_flags>(PW_STREAM_FLAG_AUTOCONNECT |
PW_STREAM_FLAG_DONT_RECONNECT |
PW_STREAM_FLAG_MAP_BUFFERS),
params.data(), params.size());
if (res != 0) {
RTC_LOG(LS_ERROR) << "Could not connect to camera stream: "
<< spa_strerror(res);
return -1;
}
return 0;
}
int32_t VideoCaptureModulePipeWire::StopCapture() {
PipeWireThreadLoopLock thread_loop_lock(session_->pw_main_loop_);
if (stream_) {
pw_stream_destroy(stream_);
stream_ = nullptr;
}
return 0;
}
bool VideoCaptureModulePipeWire::CaptureStarted() {
return started_;
}
int32_t VideoCaptureModulePipeWire::CaptureSettings(
VideoCaptureCapability& settings) {
settings = frameInfo_;
return 0;
}
void VideoCaptureModulePipeWire::OnStreamParamChanged(
void* data,
uint32_t id,
const struct spa_pod* format) {
VideoCaptureModulePipeWire* that =
static_cast<VideoCaptureModulePipeWire*>(data);
RTC_DCHECK(that);
if (format && id == SPA_PARAM_Format)
that->OnFormatChanged(format);
}
void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) {
uint32_t media_type, media_subtype;
if (spa_format_parse(format, &media_type, &media_subtype) < 0) {
RTC_LOG(LS_ERROR) << "Failed to parse video format.";
return;
}
switch (media_subtype) {
case SPA_MEDIA_SUBTYPE_raw: {
struct spa_video_info_raw f;
spa_format_video_raw_parse(format, &f);
frameInfo_.width = f.size.width;
frameInfo_.height = f.size.height;
frameInfo_.videoType = PipeWireRawFormatToVideoType(f.format);
frameInfo_.maxFPS = f.framerate.num / f.framerate.denom;
break;
}
case SPA_MEDIA_SUBTYPE_mjpg: {
struct spa_video_info_mjpg f;
spa_format_video_mjpg_parse(format, &f);
frameInfo_.width = f.size.width;
frameInfo_.height = f.size.height;
frameInfo_.videoType = VideoType::kMJPEG;
frameInfo_.maxFPS = f.framerate.num / f.framerate.denom;
break;
}
default:
frameInfo_.videoType = VideoType::kUnknown;
}
if (frameInfo_.videoType == VideoType::kUnknown) {
RTC_LOG(LS_ERROR) << "Unsupported video format.";
return;
}
RTC_LOG(LS_VERBOSE) << "Configured capture format = "
<< static_cast<int>(frameInfo_.videoType);
uint8_t buffer[1024] = {};
auto builder = spa_pod_builder{buffer, sizeof(buffer)};
// Setup buffers and meta header for new format.
std::vector<const spa_pod*> params;
spa_pod_frame frame;
spa_pod_builder_push_object(&builder, &frame, SPA_TYPE_OBJECT_ParamBuffers,
SPA_PARAM_Buffers);
if (media_subtype == SPA_MEDIA_SUBTYPE_raw) {
// Enforce stride without padding.
size_t stride;
switch (frameInfo_.videoType) {
case VideoType::kI420:
case VideoType::kNV12:
stride = frameInfo_.width;
break;
case VideoType::kYUY2:
case VideoType::kUYVY:
stride = frameInfo_.width * 2;
break;
case VideoType::kRGB24:
stride = frameInfo_.width * 3;
break;
default:
RTC_LOG(LS_ERROR) << "Unsupported video format.";
return;
}
spa_pod_builder_add(&builder, SPA_PARAM_BUFFERS_stride, SPA_POD_Int(stride),
0);
}
spa_pod_builder_add(
&builder, SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(8, 1, 32),
SPA_PARAM_BUFFERS_dataType,
SPA_POD_CHOICE_FLAGS_Int((1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr)),
0);
params.push_back(
static_cast<spa_pod*>(spa_pod_builder_pop(&builder, &frame)));
params.push_back(reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
&builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type,
SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size,
SPA_POD_Int(sizeof(struct spa_meta_header)))));
pw_stream_update_params(stream_, params.data(), params.size());
}
void VideoCaptureModulePipeWire::OnStreamStateChanged(
void* data,
pw_stream_state old_state,
pw_stream_state state,
const char* error_message) {
VideoCaptureModulePipeWire* that =
static_cast<VideoCaptureModulePipeWire*>(data);
RTC_DCHECK(that);
switch (state) {
case PW_STREAM_STATE_STREAMING:
that->started_ = true;
break;
case PW_STREAM_STATE_ERROR:
RTC_LOG(LS_ERROR) << "PipeWire stream state error: " << error_message;
[[fallthrough]];
case PW_STREAM_STATE_PAUSED:
case PW_STREAM_STATE_UNCONNECTED:
case PW_STREAM_STATE_CONNECTING:
that->started_ = false;
break;
}
RTC_LOG(LS_VERBOSE) << "PipeWire stream state change: "
<< pw_stream_state_as_string(old_state) << " -> "
<< pw_stream_state_as_string(state);
}
void VideoCaptureModulePipeWire::OnStreamProcess(void* data) {
VideoCaptureModulePipeWire* that =
static_cast<VideoCaptureModulePipeWire*>(data);
RTC_DCHECK(that);
that->ProcessBuffers();
}
void VideoCaptureModulePipeWire::ProcessBuffers() {
while (pw_buffer* buffer = pw_stream_dequeue_buffer(stream_)) {
struct spa_meta_header* h;
h = static_cast<struct spa_meta_header*>(
spa_buffer_find_meta_data(buffer->buffer, SPA_META_Header, sizeof(*h)));
if (h->flags & SPA_META_HEADER_FLAG_CORRUPTED) {
RTC_LOG(LS_INFO) << "Dropping corruped frame.";
} else {
IncomingFrame(static_cast<unsigned char*>(buffer->buffer->datas[0].data),
buffer->buffer->datas[0].chunk->size, frameInfo_);
}
pw_stream_queue_buffer(stream_, buffer);
}
}
} // namespace videocapturemodule
} // namespace webrtc


@@ -0,0 +1,57 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_PIPEWIRE_H_
#define MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_PIPEWIRE_H_
#include "modules/video_capture/linux/pipewire_session.h"
#include "modules/video_capture/video_capture_defines.h"
#include "modules/video_capture/video_capture_impl.h"
namespace webrtc {
namespace videocapturemodule {
class VideoCaptureModulePipeWire : public VideoCaptureImpl {
public:
explicit VideoCaptureModulePipeWire(VideoCaptureOptions* options);
~VideoCaptureModulePipeWire() override;
int32_t Init(const char* deviceUniqueId);
int32_t StartCapture(const VideoCaptureCapability& capability) override;
int32_t StopCapture() override;
bool CaptureStarted() override;
int32_t CaptureSettings(VideoCaptureCapability& settings) override;
static VideoType PipeWireRawFormatToVideoType(uint32_t format);
private:
static void OnStreamParamChanged(void* data,
uint32_t id,
const struct spa_pod* format);
static void OnStreamStateChanged(void* data,
pw_stream_state old_state,
pw_stream_state state,
const char* error_message);
static void OnStreamProcess(void* data);
void OnFormatChanged(const struct spa_pod* format);
void ProcessBuffers();
rtc::scoped_refptr<PipeWireSession> session_;
int node_id_;
VideoCaptureCapability frameInfo_;
bool started_;
struct pw_stream* stream_;
struct spa_hook stream_listener_;
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_PIPEWIRE_H_


@@ -23,6 +23,19 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureFactory::Create(
#endif
}
rtc::scoped_refptr<VideoCaptureModule> VideoCaptureFactory::Create(
VideoCaptureOptions* options,
const char* deviceUniqueIdUTF8) {
// This is only implemented on desktop Linux; note that WEBRTC_LINUX is
// defined for Android as well, hence the extra check.
#if !defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID)
return nullptr;
#else
return videocapturemodule::VideoCaptureImpl::Create(options,
deviceUniqueIdUTF8);
#endif
}
VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo() {
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_MAC)
return nullptr;
@@ -31,4 +44,15 @@ VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo() {
#endif
}
VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo(
VideoCaptureOptions* options) {
// This is only implemented on desktop Linux; note that WEBRTC_LINUX is
// defined for Android as well, hence the extra check.
#if !defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID)
return nullptr;
#else
return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(options);
#endif
}
} // namespace webrtc


@@ -20,6 +20,8 @@
namespace webrtc {
class VideoCaptureOptions;
class VideoCaptureFactory {
public:
// Create a video capture module object
@@ -28,8 +30,13 @@ class VideoCaptureFactory {
// Available names can be found by using GetDeviceName
static rtc::scoped_refptr<VideoCaptureModule> Create(
const char* deviceUniqueIdUTF8);
static rtc::scoped_refptr<VideoCaptureModule> Create(
VideoCaptureOptions* options,
const char* deviceUniqueIdUTF8);
static VideoCaptureModule::DeviceInfo* CreateDeviceInfo();
static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
VideoCaptureOptions* options);
private:
~VideoCaptureFactory();


@@ -29,6 +29,8 @@
namespace webrtc {
class VideoCaptureOptions;
namespace videocapturemodule {
// Class definitions
class VideoCaptureImpl : public VideoCaptureModule {
@@ -42,8 +44,12 @@ class VideoCaptureImpl : public VideoCaptureModule {
*/
static rtc::scoped_refptr<VideoCaptureModule> Create(
const char* deviceUniqueIdUTF8);
static rtc::scoped_refptr<VideoCaptureModule> Create(
VideoCaptureOptions* options,
const char* deviceUniqueIdUTF8);
static DeviceInfo* CreateDeviceInfo();
static DeviceInfo* CreateDeviceInfo(VideoCaptureOptions* options);
// Helpers for converting between (integral) degrees and
// VideoRotation values. Return 0 on success.


@@ -0,0 +1,55 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_capture/video_capture_options.h"
#if defined(WEBRTC_USE_PIPEWIRE)
#include "modules/video_capture/linux/pipewire_session.h"
#endif
namespace webrtc {
VideoCaptureOptions::VideoCaptureOptions() {}
VideoCaptureOptions::VideoCaptureOptions(const VideoCaptureOptions& options) =
default;
VideoCaptureOptions::VideoCaptureOptions(VideoCaptureOptions&& options) =
default;
VideoCaptureOptions::~VideoCaptureOptions() {}
VideoCaptureOptions& VideoCaptureOptions::operator=(
const VideoCaptureOptions& options) = default;
VideoCaptureOptions& VideoCaptureOptions::operator=(
VideoCaptureOptions&& options) = default;
void VideoCaptureOptions::Init(Callback* callback) {
#if defined(WEBRTC_USE_PIPEWIRE)
if (allow_pipewire_) {
pipewire_session_ =
rtc::make_ref_counted<videocapturemodule::PipeWireSession>();
pipewire_session_->Init(callback);
return;
}
#endif
#if defined(WEBRTC_LINUX)
if (!allow_v4l2_)
callback->OnInitialized(Status::UNAVAILABLE);
else
#endif
callback->OnInitialized(Status::SUCCESS);
}
#if defined(WEBRTC_USE_PIPEWIRE)
rtc::scoped_refptr<videocapturemodule::PipeWireSession>
VideoCaptureOptions::pipewire_session() {
return pipewire_session_;
}
#endif
} // namespace webrtc


@@ -0,0 +1,78 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_OPTIONS_H_
#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_OPTIONS_H_
#include "api/scoped_refptr.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
#if defined(WEBRTC_USE_PIPEWIRE)
namespace videocapturemodule {
class PipeWireSession;
}
#endif
// An object that stores initialization parameters for video (camera)
// capturers.
class RTC_EXPORT VideoCaptureOptions {
public:
VideoCaptureOptions();
VideoCaptureOptions(const VideoCaptureOptions& options);
VideoCaptureOptions(VideoCaptureOptions&& options);
~VideoCaptureOptions();
VideoCaptureOptions& operator=(const VideoCaptureOptions& options);
VideoCaptureOptions& operator=(VideoCaptureOptions&& options);
enum class Status {
SUCCESS,
UNINITIALIZED,
UNAVAILABLE,
DENIED,
ERROR,
MAX_VALUE = ERROR
};
class Callback {
public:
virtual void OnInitialized(Status status) = 0;
protected:
virtual ~Callback() = default;
};
void Init(Callback* callback);
#if defined(WEBRTC_LINUX)
bool allow_v4l2() const { return allow_v4l2_; }
void set_allow_v4l2(bool allow) { allow_v4l2_ = allow; }
#endif
#if defined(WEBRTC_USE_PIPEWIRE)
bool allow_pipewire() const { return allow_pipewire_; }
void set_allow_pipewire(bool allow) { allow_pipewire_ = allow; }
rtc::scoped_refptr<videocapturemodule::PipeWireSession> pipewire_session();
#endif
private:
#if defined(WEBRTC_LINUX)
bool allow_v4l2_ = false;
#endif
#if defined(WEBRTC_USE_PIPEWIRE)
bool allow_pipewire_ = false;
rtc::scoped_refptr<videocapturemodule::PipeWireSession> pipewire_session_;
#endif
};
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_OPTIONS_H_
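
To close the loop with DeviceInfoPipeWire::CreateCapabilityMap() above, here is a hedged sketch of querying the advertised capabilities through the generic VideoCaptureModule::DeviceInfo interface; the helper name and logging are illustrative only, while the DeviceInfo methods are the existing WebRTC API.

#include <memory>

#include "modules/video_capture/video_capture_factory.h"
#include "modules/video_capture/video_capture_options.h"
#include "rtc_base/logging.h"

// Logs every capability reported for a device. `device_unique_id` is the
// unique id returned by DeviceInfo::GetDeviceName(); for the PipeWire
// backend it is the node id rendered as a string.
void LogCapabilities(webrtc::VideoCaptureOptions* options,
                     const char* device_unique_id) {
  std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
      webrtc::VideoCaptureFactory::CreateDeviceInfo(options));
  if (!info)
    return;
  int32_t n = info->NumberOfCapabilities(device_unique_id);
  for (int32_t i = 0; i < n; ++i) {
    webrtc::VideoCaptureCapability cap;
    if (info->GetCapability(device_unique_id, i, cap) == 0) {
      RTC_LOG(LS_INFO) << cap.width << "x" << cap.height << "@" << cap.maxFPS
                       << " type=" << static_cast<int>(cap.videoType);
    }
  }
}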