From bd9e4a95eb6b3d0a4e686f32b1fe5a0bf433fa02 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Henrik=20Bostr=C3=B6m?=
Date: Mon, 22 Mar 2021 12:24:30 +0100
Subject: [PATCH] Support native scaling of VideoFrameBuffers in
 LibvpxVp9Encoder.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This CL is part of Optimized Scaling efforts. In Chromium, the native
frame buffer is getting an optimized CropAndScale() implementation. To
support HW accelerated scaling, returning pre-scaled images and
skipping unnecessary intermediate downscales, WebRTC needs to 1) use
CropAndScale() instead of libyuv::XXXXScale and 2) only map buffers it
actually intends to encode.

- To achieve this, WebRTC encoders are updated to map kNative video
  buffers so that in a follow-up CL VideoStreamEncoder can stop mapping
  intermediate buffer sizes.

In this CL, LibvpxVp9Encoder is updated to map kNative buffers of pixel
formats it supports and to convert with ToI420() if the kNative buffer
is something else. A fake native buffer that keeps track of which
resolutions were mapped, MappableNativeBuffer, is added. Because VP9 is
currently an SVC encoder and not a simulcast encoder, it does not need
to invoke CropAndScale().

This CL also fixes MultiplexEncoderAdapter. Because it simply forwards
frames, it only cares about the pixel format when
|supports_augmented_data_| is true, so this is the only case where we
map the buffer. Because this encoder is not used with kNative in
practice, we don't attempt to make this path optimal.

Bug: webrtc:12469, chromium:1157072
Change-Id: I74edf85b18eccd0d250776bbade7a6444478efce
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/212580
Commit-Queue: Henrik Boström
Reviewed-by: Ilya Nikolaevskiy
Reviewed-by: Evan Shrubsole
Cr-Commit-Position: refs/heads/master@{#33526}
---
 modules/video_coding/BUILD.gn                 |   1 +
 .../multiplex/augmented_video_frame_buffer.cc |   8 +
 .../include/augmented_video_frame_buffer.h    |   6 +
 .../multiplex/multiplex_encoder_adapter.cc    |  48 +++--
 .../codecs/vp9/libvpx_vp9_encoder.cc          | 119 ++++++++---
 .../codecs/vp9/libvpx_vp9_encoder.h           |   6 +
 .../codecs/vp9/test/vp9_impl_unittest.cc      |  26 +++
 test/BUILD.gn                                 |   8 +-
 test/mappable_native_buffer.cc                | 186 ++++++++++++++++++
 test/mappable_native_buffer.h                 | 122 ++++++++++++
 10 files changed, 488 insertions(+), 42 deletions(-)
 create mode 100644 test/mappable_native_buffer.cc
 create mode 100644 test/mappable_native_buffer.h
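To illustrate the intended call pattern described above (a sketch only, not
part of the patch; EncodeMapped() is a hypothetical encode step, while
CropAndScale() and GetMappedFrameBuffer() are the VideoFrameBuffer methods
this CL builds on):

    // Cropping/scaling a native buffer is cheap: no pixel data is touched,
    // and an optimized implementation may return a pre-scaled HW buffer.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled =
        native_buffer->CropAndScale(0, 0, native_buffer->width(),
                                    native_buffer->height(), 640, 360);
    // Pixels are only mapped for the resolution that is actually encoded.
    webrtc::VideoFrameBuffer::Type kFormats[] = {
        webrtc::VideoFrameBuffer::Type::kI420,
        webrtc::VideoFrameBuffer::Type::kNV12};
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> mapped =
        scaled->GetMappedFrameBuffer(kFormats);
    if (!mapped)
      mapped = scaled->ToI420();  // Fallback for unsupported formats.
    EncodeMapped(mapped);         // Hypothetical.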
diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn
index 88c3cf034f..a9aa74c65c 100644
--- a/modules/video_coding/BUILD.gn
+++ b/modules/video_coding/BUILD.gn
@@ -600,6 +600,7 @@ rtc_library("webrtc_vp9") {
     "//third_party/libyuv",
   ]
   absl_deps = [
+    "//third_party/abseil-cpp/absl/algorithm:container",
     "//third_party/abseil-cpp/absl/memory",
     "//third_party/abseil-cpp/absl/strings:strings",
   ]
diff --git a/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc b/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc
index b48996cbcf..8740884f5b 100644
--- a/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc
+++ b/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc
@@ -54,4 +54,12 @@ int AugmentedVideoFrameBuffer::height() const {
 rtc::scoped_refptr<I420BufferInterface> AugmentedVideoFrameBuffer::ToI420() {
   return video_frame_buffer_->ToI420();
 }
+
+const I420BufferInterface* AugmentedVideoFrameBuffer::GetI420() const {
+  // TODO(https://crbug.com/webrtc/12021): When AugmentedVideoFrameBuffer is
+  // updated to implement the buffer interfaces of relevant
+  // VideoFrameBuffer::Types, stop overriding GetI420() as a workaround to
+  // AugmentedVideoFrameBuffer not being the type that is returned by type().
+  return video_frame_buffer_->GetI420();
+}
 }  // namespace webrtc
diff --git a/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h b/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h
index c45ab3b2a4..d711cd07da 100644
--- a/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h
+++ b/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h
@@ -45,6 +45,12 @@ class AugmentedVideoFrameBuffer : public VideoFrameBuffer {
   // Get the I420 buffer from the underlying frame buffer.
   rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+  // Returns GetI420() of the underlying VideoFrameBuffer.
+  // TODO(hbos): AugmentedVideoFrameBuffer should not return a type (such as
+  // kI420) without also implementing that type's interface (i.e.
+  // I420BufferInterface). Either implement all possible Type's interfaces or
+  // return kNative.
+  const I420BufferInterface* GetI420() const final;
 
  private:
   uint16_t augmenting_data_size_;
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
index b08c5b1fc4..db525b8f98 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
@@ -157,20 +157,38 @@ int MultiplexEncoderAdapter::Encode(
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
 
+  // The input image is forwarded as-is, unless it is a native buffer and
+  // |supports_augmented_data_| is true, in which case we need to map it in
+  // order to access the underlying AugmentedVideoFrameBuffer.
+  VideoFrame forwarded_image = input_image;
+  if (supports_augmented_data_ &&
+      forwarded_image.video_frame_buffer()->type() ==
+          VideoFrameBuffer::Type::kNative) {
+    auto info = GetEncoderInfo();
+    rtc::scoped_refptr<VideoFrameBuffer> mapped_buffer =
+        forwarded_image.video_frame_buffer()->GetMappedFrameBuffer(
+            info.preferred_pixel_formats);
+    if (!mapped_buffer) {
+      // Unable to map the buffer.
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    forwarded_image.set_video_frame_buffer(std::move(mapped_buffer));
+  }
+
   std::vector<VideoFrameType> adjusted_frame_types;
   if (key_frame_interval_ > 0 && picture_index_ % key_frame_interval_ == 0) {
     adjusted_frame_types.push_back(VideoFrameType::kVideoFrameKey);
   } else {
     adjusted_frame_types.push_back(VideoFrameType::kVideoFrameDelta);
   }
-  const bool has_alpha = input_image.video_frame_buffer()->type() ==
+  const bool has_alpha = forwarded_image.video_frame_buffer()->type() ==
                          VideoFrameBuffer::Type::kI420A;
   std::unique_ptr<uint8_t[]> augmenting_data = nullptr;
   uint16_t augmenting_data_length = 0;
   AugmentedVideoFrameBuffer* augmented_video_frame_buffer = nullptr;
   if (supports_augmented_data_) {
     augmented_video_frame_buffer = static_cast<AugmentedVideoFrameBuffer*>(
-        input_image.video_frame_buffer().get());
+        forwarded_image.video_frame_buffer().get());
     augmenting_data_length =
         augmented_video_frame_buffer->GetAugmentingDataSize();
     augmenting_data =
@@ -185,7 +203,7 @@ int MultiplexEncoderAdapter::Encode(
     MutexLock lock(&mutex_);
     stashed_images_.emplace(
         std::piecewise_construct,
-        std::forward_as_tuple(input_image.timestamp()),
+        std::forward_as_tuple(forwarded_image.timestamp()),
         std::forward_as_tuple(
             picture_index_, has_alpha ? kAlphaCodecStreams : 1,
             std::move(augmenting_data), augmenting_data_length));
@@ -194,7 +212,8 @@ int MultiplexEncoderAdapter::Encode(
   ++picture_index_;
 
   // Encode YUV
-  int rv = encoders_[kYUVStream]->Encode(input_image, &adjusted_frame_types);
+  int rv =
+      encoders_[kYUVStream]->Encode(forwarded_image, &adjusted_frame_types);
 
   // If we do not receive an alpha frame, we send a single frame for this
   // |picture_index_|. The receiver will receive |frame_count| as 1 which
@@ -206,23 +225,24 @@ int MultiplexEncoderAdapter::Encode(
   rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
       supports_augmented_data_
           ? augmented_video_frame_buffer->GetVideoFrameBuffer()
-          : input_image.video_frame_buffer();
+          : forwarded_image.video_frame_buffer();
   const I420ABufferInterface* yuva_buffer = frame_buffer->GetI420A();
   rtc::scoped_refptr<I420BufferInterface> alpha_buffer =
-      WrapI420Buffer(input_image.width(), input_image.height(),
+      WrapI420Buffer(forwarded_image.width(), forwarded_image.height(),
                      yuva_buffer->DataA(), yuva_buffer->StrideA(),
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideU(),
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideV(),
                      // To keep reference alive.
                      [frame_buffer] {});
-  VideoFrame alpha_image = VideoFrame::Builder()
-                               .set_video_frame_buffer(alpha_buffer)
-                               .set_timestamp_rtp(input_image.timestamp())
-                               .set_timestamp_ms(input_image.render_time_ms())
-                               .set_rotation(input_image.rotation())
-                               .set_id(input_image.id())
-                               .set_packet_infos(input_image.packet_infos())
-                               .build();
+  VideoFrame alpha_image =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(alpha_buffer)
+          .set_timestamp_rtp(forwarded_image.timestamp())
+          .set_timestamp_ms(forwarded_image.render_time_ms())
+          .set_rotation(forwarded_image.rotation())
+          .set_id(forwarded_image.id())
+          .set_packet_infos(forwarded_image.packet_infos())
+          .build();
   rv = encoders_[kAXXStream]->Encode(alpha_image, &adjusted_frame_types);
   return rv;
 }
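The mapping above relies on EncoderInfo::preferred_pixel_formats, which tells
a native buffer which formats GetMappedFrameBuffer() should produce. Roughly
(a sketch; SomeEncoder is hypothetical, the field is the real
VideoEncoder::EncoderInfo member):

    webrtc::VideoEncoder::EncoderInfo SomeEncoder::GetEncoderInfo() const {
      EncoderInfo info;
      // Formats this encoder can consume without conversion.
      info.preferred_pixel_formats = {webrtc::VideoFrameBuffer::Type::kI420,
                                      webrtc::VideoFrameBuffer::Type::kNV12};
      return info;
    }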
diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
index 7af3a9d810..888c7e9760 100644
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
@@ -18,6 +18,7 @@
 #include
 #include
+#include "absl/algorithm/container.h"
 #include "absl/memory/memory.h"
 #include "absl/strings/match.h"
 #include "api/video/color_space.h"
@@ -1040,37 +1041,17 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image,
   // doing this.
   input_image_ = &input_image;
 
-  // Keep reference to buffer until encode completes.
-  rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer;
+  // In case we need to map the buffer, |mapped_buffer| is used to keep it
+  // alive through reference counting until after encoding has finished.
+  rtc::scoped_refptr<VideoFrameBuffer> mapped_buffer;
   const I010BufferInterface* i010_buffer;
   rtc::scoped_refptr<const I010BufferInterface> i010_copy;
   switch (profile_) {
     case VP9Profile::kProfile0: {
-      if (input_image.video_frame_buffer()->type() ==
-          VideoFrameBuffer::Type::kNV12) {
-        const NV12BufferInterface* nv12_buffer =
-            input_image.video_frame_buffer()->GetNV12();
-        video_frame_buffer = nv12_buffer;
-        MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12);
-        raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(nv12_buffer->DataY());
-        raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(nv12_buffer->DataUV());
-        raw_->planes[VPX_PLANE_V] = raw_->planes[VPX_PLANE_U] + 1;
-        raw_->stride[VPX_PLANE_Y] = nv12_buffer->StrideY();
-        raw_->stride[VPX_PLANE_U] = nv12_buffer->StrideUV();
-        raw_->stride[VPX_PLANE_V] = nv12_buffer->StrideUV();
-      } else {
-        rtc::scoped_refptr<I420BufferInterface> i420_buffer =
-            input_image.video_frame_buffer()->ToI420();
-        video_frame_buffer = i420_buffer;
-        MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420);
-        // Image in vpx_image_t format.
-        // Input image is const. VPX's raw image is not defined as const.
-        raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
-        raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
-        raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
-        raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
-        raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
-        raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
+      mapped_buffer =
+          PrepareBufferForProfile0(input_image.video_frame_buffer());
+      if (!mapped_buffer) {
+        return WEBRTC_VIDEO_CODEC_ERROR;
       }
       break;
     }
@@ -1892,6 +1873,90 @@ void LibvpxVp9Encoder::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) {
   // else no-op since the image is already in the right format.
 }
 
+rtc::scoped_refptr<VideoFrameBuffer> LibvpxVp9Encoder::PrepareBufferForProfile0(
+    rtc::scoped_refptr<VideoFrameBuffer> buffer) {
+  absl::InlinedVector<VideoFrameBuffer::Type, kMaxPreferredPixelFormats>
+      supported_formats = {VideoFrameBuffer::Type::kI420,
+                           VideoFrameBuffer::Type::kNV12};
+
+  rtc::scoped_refptr<VideoFrameBuffer> mapped_buffer;
+  if (buffer->type() != VideoFrameBuffer::Type::kNative) {
+    // |buffer| is already mapped.
+    mapped_buffer = buffer;
+  } else {
+    // Attempt to map to one of the supported formats.
+    mapped_buffer = buffer->GetMappedFrameBuffer(supported_formats);
+  }
+  if (!mapped_buffer ||
+      (absl::c_find(supported_formats, mapped_buffer->type()) ==
+           supported_formats.end() &&
+       mapped_buffer->type() != VideoFrameBuffer::Type::kI420A)) {
+    // Unknown pixel format or unable to map: convert to I420 and prepare that
+    // buffer instead to ensure Scale() is safe to use.
+    rtc::scoped_refptr<VideoFrameBuffer> converted_buffer = buffer->ToI420();
+    if (!converted_buffer) {
+      RTC_LOG(LS_ERROR) << "Failed to convert "
+                        << VideoFrameBufferTypeToString(buffer->type())
+                        << " image to I420. Can't encode frame.";
+      return {};
+    }
+    // The buffer should now be a mapped I420 or I420A format, but some buffer
+    // implementations incorrectly return the wrong buffer format, such as
+    // kNative. As a workaround to this, we retry GetMappedFrameBuffer+ToI420.
+    // TODO(https://crbug.com/webrtc/12602): When Android buffers have a
+    // correct ToI420() implementation, remove this workaround.
+    if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
+        converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
+      if (converted_buffer->type() == VideoFrameBuffer::Type::kNative) {
+        auto mapped_converted_buffer =
+            converted_buffer->GetMappedFrameBuffer(supported_formats);
+        if (mapped_converted_buffer)
+          converted_buffer = mapped_converted_buffer;
+      }
+      if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 &&
+          converted_buffer->type() != VideoFrameBuffer::Type::kI420A) {
+        converted_buffer = converted_buffer->ToI420();
+      }
+      RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
+                converted_buffer->type() == VideoFrameBuffer::Type::kI420A);
+    }
+    // Because |buffer| had to be converted, use |converted_buffer| instead.
+    buffer = mapped_buffer = converted_buffer;
+  }
+
+  // Prepare |raw_| from |mapped_buffer|.
+  switch (mapped_buffer->type()) {
+    case VideoFrameBuffer::Type::kI420:
+    case VideoFrameBuffer::Type::kI420A: {
+      MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420);
+      const I420BufferInterface* i420_buffer = mapped_buffer->GetI420();
+      RTC_DCHECK(i420_buffer);
+      raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
+      raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
+      raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
+      raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
+      raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
+      raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
+      break;
+    }
+    case VideoFrameBuffer::Type::kNV12: {
+      MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12);
+      const NV12BufferInterface* nv12_buffer = mapped_buffer->GetNV12();
+      RTC_DCHECK(nv12_buffer);
+      raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(nv12_buffer->DataY());
+      raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(nv12_buffer->DataUV());
+      raw_->planes[VPX_PLANE_V] = raw_->planes[VPX_PLANE_U] + 1;
+      raw_->stride[VPX_PLANE_Y] = nv12_buffer->StrideY();
+      raw_->stride[VPX_PLANE_U] = nv12_buffer->StrideUV();
+      raw_->stride[VPX_PLANE_V] = nv12_buffer->StrideUV();
+      break;
+    }
+    default:
+      RTC_NOTREACHED();
+  }
+  return mapped_buffer;
+}
+
 }  // namespace webrtc
 
 #endif  // RTC_ENABLE_VP9
diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h
index 086b4464bb..cf328b2c8e 100644
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h
@@ -103,6 +103,12 @@ class LibvpxVp9Encoder : public VP9Encoder {
   size_t SteadyStateSize(int sid, int tid);
 
   void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt);
+  // Prepares |raw_| to reference image data of |buffer|, or of mapped or
+  // scaled versions of |buffer|. Returns the buffer that got referenced as a
+  // result, allowing the caller to keep a reference to it until after encoding
+  // has finished. On failure to convert the buffer, null is returned.
+  rtc::scoped_refptr<VideoFrameBuffer> PrepareBufferForProfile0(
+      rtc::scoped_refptr<VideoFrameBuffer> buffer);
 
   const std::unique_ptr<LibvpxInterface> libvpx_;
 
   EncodedImage encoded_image_;
diff --git a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
index 4ffcf13c00..853d2df873 100644
--- a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
+++ b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
@@ -30,6 +30,7 @@
 #include "test/field_trial.h"
 #include "test/gmock.h"
 #include "test/gtest.h"
+#include "test/mappable_native_buffer.h"
 #include "test/video_codec_settings.h"
 
 namespace webrtc {
@@ -158,6 +159,31 @@ TEST_P(TestVp9ImplForPixelFormat, EncodeDecode) {
             color_space.chroma_siting_vertical());
 }
 
+TEST_P(TestVp9ImplForPixelFormat, EncodeNativeBuffer) {
+  VideoFrame input_frame = NextInputFrame();
+  // Replace the input frame with a fake native buffer of the same size and
+  // underlying pixel format. The test verifies that non-I420 buffers are not
+  // converted with ToI420(), ensuring zero-conversion.
+  input_frame = test::CreateMappableNativeFrame(
+      input_frame.ntp_time_ms(), input_frame.video_frame_buffer()->type(),
+      input_frame.width(), input_frame.height());
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr));
+  EncodedImage encoded_frame;
+  CodecSpecificInfo codec_specific_info;
+  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+
+  // After encoding, we expect a single mapping to have happened.
+  rtc::scoped_refptr<test::MappableNativeBuffer> mappable_buffer =
+      test::GetMappableNativeBufferFromVideoFrame(input_frame);
+  std::vector<rtc::scoped_refptr<VideoFrameBuffer>> mapped_buffers =
+      mappable_buffer->GetMappedFramedBuffers();
+  ASSERT_EQ(mapped_buffers.size(), 1u);
+  EXPECT_EQ(mapped_buffers[0]->type(), mappable_buffer->mappable_type());
+  EXPECT_EQ(mapped_buffers[0]->width(), input_frame.width());
+  EXPECT_EQ(mapped_buffers[0]->height(), input_frame.height());
+  EXPECT_FALSE(mappable_buffer->DidConvertToI420());
+}
+
 TEST_P(TestVp9ImplForPixelFormat, DecodedColorSpaceFromBitstream) {
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
   EncodedImage encoded_frame;
diff --git a/test/BUILD.gn b/test/BUILD.gn
index db508ca71e..988d15fd30 100644
--- a/test/BUILD.gn
+++ b/test/BUILD.gn
@@ -100,6 +100,8 @@ rtc_library("video_test_common") {
     "frame_forwarder.h",
     "frame_generator_capturer.cc",
     "frame_generator_capturer.h",
+    "mappable_native_buffer.cc",
+    "mappable_native_buffer.h",
     "test_video_capturer.cc",
     "test_video_capturer.h",
     "video_codec_settings.h",
@@ -108,6 +110,7 @@ rtc_library("video_test_common") {
   deps = [
     ":fileutils",
     ":frame_utils",
+    "../api:array_view",
     "../api:create_frame_generator",
     "../api:frame_generator_api",
     "../api:scoped_refptr",
@@ -129,7 +132,10 @@ rtc_library("video_test_common") {
     "../rtc_base/task_utils:repeating_task",
     "../system_wrappers",
   ]
-  absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+  absl_deps = [
+    "//third_party/abseil-cpp/absl/algorithm:container",
+    "//third_party/abseil-cpp/absl/strings",
+  ]
 }
 
 if (!build_with_chromium) {
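The new test buffer below makes expectations like those in EncodeNativeBuffer
easy to express in any encoder test. A minimal usage sketch (assuming a gtest
environment and some |encoder| under test):

    VideoFrame frame = test::CreateMappableNativeFrame(
        /*ntp_time_ms=*/1, VideoFrameBuffer::Type::kNV12, 1280, 720);
    encoder->Encode(frame, nullptr);
    rtc::scoped_refptr<test::MappableNativeBuffer> buffer =
        test::GetMappableNativeBufferFromVideoFrame(frame);
    // Exactly one resolution was mapped, and no NV12-to-I420 conversion ran.
    EXPECT_EQ(buffer->GetMappedFramedBuffers().size(), 1u);
    EXPECT_FALSE(buffer->DidConvertToI420());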
diff --git a/test/mappable_native_buffer.cc b/test/mappable_native_buffer.cc
new file mode 100644
index 0000000000..cff58ff8e7
--- /dev/null
+++ b/test/mappable_native_buffer.cc
@@ -0,0 +1,186 @@
+/*
+ *  Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/mappable_native_buffer.h"
+
+#include "absl/algorithm/container.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_rotation.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+class NV12BufferWithDidConvertToI420 : public NV12Buffer {
+ public:
+  NV12BufferWithDidConvertToI420(int width, int height)
+      : NV12Buffer(width, height), did_convert_to_i420_(false) {}
+
+  bool did_convert_to_i420() const { return did_convert_to_i420_; }
+
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override {
+    did_convert_to_i420_ = true;
+    return NV12Buffer::ToI420();
+  }
+
+ private:
+  bool did_convert_to_i420_;
+};
+
+}  // namespace
+
+VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms,
+                                     VideoFrameBuffer::Type mappable_type,
+                                     int width,
+                                     int height) {
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(
+              new rtc::RefCountedObject<MappableNativeBuffer>(
+                  mappable_type, width, height))
+          .set_timestamp_rtp(99)
+          .set_timestamp_ms(99)
+          .set_rotation(kVideoRotation_0)
+          .build();
+  frame.set_ntp_time_ms(ntp_time_ms);
+  return frame;
+}
+
+rtc::scoped_refptr<MappableNativeBuffer> GetMappableNativeBufferFromVideoFrame(
+    const VideoFrame& frame) {
+  return static_cast<MappableNativeBuffer*>(frame.video_frame_buffer().get());
+}
+
+MappableNativeBuffer::ScaledBuffer::ScaledBuffer(
+    rtc::scoped_refptr<MappableNativeBuffer> parent,
+    int width,
+    int height)
+    : parent_(std::move(parent)), width_(width), height_(height) {}
+
+MappableNativeBuffer::ScaledBuffer::~ScaledBuffer() {}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+MappableNativeBuffer::ScaledBuffer::CropAndScale(int offset_x,
+                                                 int offset_y,
+                                                 int crop_width,
+                                                 int crop_height,
+                                                 int scaled_width,
+                                                 int scaled_height) {
+  return rtc::scoped_refptr<VideoFrameBuffer>(
+      new rtc::RefCountedObject<ScaledBuffer>(parent_, scaled_width,
+                                              scaled_height));
+}
+
+rtc::scoped_refptr<I420BufferInterface>
+MappableNativeBuffer::ScaledBuffer::ToI420() {
+  return parent_->GetOrCreateMappedBuffer(width_, height_)->ToI420();
+}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+MappableNativeBuffer::ScaledBuffer::GetMappedFrameBuffer(
+    rtc::ArrayView<VideoFrameBuffer::Type> types) {
+  if (absl::c_find(types, parent_->mappable_type_) == types.end())
+    return nullptr;
+  return parent_->GetOrCreateMappedBuffer(width_, height_);
+}
+
+MappableNativeBuffer::MappableNativeBuffer(VideoFrameBuffer::Type mappable_type,
+                                           int width,
+                                           int height)
+    : mappable_type_(mappable_type), width_(width), height_(height) {
+  RTC_DCHECK(mappable_type_ == VideoFrameBuffer::Type::kI420 ||
+             mappable_type_ == VideoFrameBuffer::Type::kNV12);
+}
+
+MappableNativeBuffer::~MappableNativeBuffer() {}
+
+rtc::scoped_refptr<VideoFrameBuffer> MappableNativeBuffer::CropAndScale(
+    int offset_x,
+    int offset_y,
+    int crop_width,
+    int crop_height,
+    int scaled_width,
+    int scaled_height) {
+  return FullSizeBuffer()->CropAndScale(
+      offset_x, offset_y, crop_width, crop_height, scaled_width,
+      scaled_height);
+}
+
+rtc::scoped_refptr<I420BufferInterface> MappableNativeBuffer::ToI420() {
+  return FullSizeBuffer()->ToI420();
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> MappableNativeBuffer::GetMappedFrameBuffer(
+    rtc::ArrayView<VideoFrameBuffer::Type> types) {
+  return FullSizeBuffer()->GetMappedFrameBuffer(types);
+}
+
+std::vector<rtc::scoped_refptr<VideoFrameBuffer>>
+MappableNativeBuffer::GetMappedFramedBuffers() const {
+  MutexLock lock(&lock_);
+  return mapped_buffers_;
+}
+
+bool MappableNativeBuffer::DidConvertToI420() const {
+  if (mappable_type_ != VideoFrameBuffer::Type::kNV12)
+    return false;
+  MutexLock lock(&lock_);
+  for (auto& mapped_buffer : mapped_buffers_) {
+    if (static_cast<NV12BufferWithDidConvertToI420*>(mapped_buffer.get())
+            ->did_convert_to_i420()) {
+      return true;
+    }
+  }
+  return false;
+}
+
+rtc::scoped_refptr<MappableNativeBuffer::ScaledBuffer>
+MappableNativeBuffer::FullSizeBuffer() {
+  return rtc::scoped_refptr<ScaledBuffer>(
+      new rtc::RefCountedObject<ScaledBuffer>(this, width_, height_));
+}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+MappableNativeBuffer::GetOrCreateMappedBuffer(int width, int height) {
+  MutexLock lock(&lock_);
+  for (auto& mapped_buffer : mapped_buffers_) {
+    if (mapped_buffer->width() == width && mapped_buffer->height() == height) {
+      return mapped_buffer;
+    }
+  }
+  rtc::scoped_refptr<VideoFrameBuffer> mapped_buffer;
+  switch (mappable_type_) {
+    case VideoFrameBuffer::Type::kI420: {
+      rtc::scoped_refptr<I420Buffer> i420_buffer =
+          I420Buffer::Create(width, height);
+      I420Buffer::SetBlack(i420_buffer);
+      mapped_buffer = i420_buffer;
+      break;
+    }
+    case VideoFrameBuffer::Type::kNV12: {
+      rtc::scoped_refptr<NV12BufferWithDidConvertToI420> nv12_buffer;
+      nv12_buffer = new rtc::RefCountedObject<NV12BufferWithDidConvertToI420>(
+          width, height);
+      nv12_buffer->InitializeData();
+      mapped_buffer = nv12_buffer;
+      break;
+    }
+    default:
+      RTC_NOTREACHED();
+  }
+  mapped_buffers_.push_back(mapped_buffer);
+  return mapped_buffer;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/test/mappable_native_buffer.h b/test/mappable_native_buffer.h
new file mode 100644
index 0000000000..add22029c7
--- /dev/null
+++ b/test/mappable_native_buffer.h
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MAPPABLE_NATIVE_BUFFER_H_
+#define TEST_MAPPABLE_NATIVE_BUFFER_H_
+
+#include <utility>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/video/video_frame.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace test {
+
+class MappableNativeBuffer;
+
+VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms,
+                                     VideoFrameBuffer::Type mappable_type,
+                                     int width,
+                                     int height);
+
+rtc::scoped_refptr<MappableNativeBuffer> GetMappableNativeBufferFromVideoFrame(
+    const VideoFrame& frame);
+
+// A for-testing native buffer that is scalable and mappable. The contents of
+// the buffer are black and the pixels are created upon mapping. Mapped buffers
+// are stored inside MappableNativeBuffer, allowing tests to verify which
+// resolutions were mapped, e.g. when passing them in to an encoder or other
+// modules.
+class MappableNativeBuffer : public VideoFrameBuffer {
+ public:
+  // ToI420() conversions of mapped buffers are tracked; tests can use
+  // DidConvertToI420() to ensure zero-copy operation on non-I420 buffers.
+  MappableNativeBuffer(VideoFrameBuffer::Type mappable_type,
+                       int width,
+                       int height);
+  ~MappableNativeBuffer() override;
+
+  VideoFrameBuffer::Type mappable_type() const { return mappable_type_; }
+
+  VideoFrameBuffer::Type type() const override { return Type::kNative; }
+  int width() const override { return width_; }
+  int height() const override { return height_; }
+
+  rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
+                                                    int offset_y,
+                                                    int crop_width,
+                                                    int crop_height,
+                                                    int scaled_width,
+                                                    int scaled_height) override;
+
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+  rtc::scoped_refptr<VideoFrameBuffer> GetMappedFrameBuffer(
+      rtc::ArrayView<VideoFrameBuffer::Type> types) override;
+
+  // Gets all the buffers that have been mapped so far, including mappings of
+  // cropped and scaled buffers.
+  std::vector<rtc::scoped_refptr<VideoFrameBuffer>> GetMappedFramedBuffers()
+      const;
+  bool DidConvertToI420() const;
+
+ private:
+  friend class rtc::RefCountedObject<MappableNativeBuffer>;
+
+  class ScaledBuffer : public VideoFrameBuffer {
+   public:
+    ScaledBuffer(rtc::scoped_refptr<MappableNativeBuffer> parent,
+                 int width,
+                 int height);
+    ~ScaledBuffer() override;
+
+    VideoFrameBuffer::Type type() const override { return Type::kNative; }
+    int width() const override { return width_; }
+    int height() const override { return height_; }
+
+    rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(
+        int offset_x,
+        int offset_y,
+        int crop_width,
+        int crop_height,
+        int scaled_width,
+        int scaled_height) override;
+
+    rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+    rtc::scoped_refptr<VideoFrameBuffer> GetMappedFrameBuffer(
+        rtc::ArrayView<VideoFrameBuffer::Type> types) override;
+
+   private:
+    friend class rtc::RefCountedObject<ScaledBuffer>;
+
+    const rtc::scoped_refptr<MappableNativeBuffer> parent_;
+    const int width_;
+    const int height_;
+  };
+
+  rtc::scoped_refptr<ScaledBuffer> FullSizeBuffer();
+  rtc::scoped_refptr<VideoFrameBuffer> GetOrCreateMappedBuffer(int width,
+                                                               int height);
+
+  const VideoFrameBuffer::Type mappable_type_;
+  const int width_;
+  const int height_;
+  mutable Mutex lock_;
+  std::vector<rtc::scoped_refptr<VideoFrameBuffer>> mapped_buffers_
+      RTC_GUARDED_BY(&lock_);
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // TEST_MAPPABLE_NATIVE_BUFFER_H_
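Because ScaledBuffer only records the target resolution, chained scaling stays
free until pixels are actually requested, mirroring the behavior this CL
targets in production. A sketch of what this enables (kFormats as in the
earlier examples; not part of the patch):

    // No pixel data is created by these calls...
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled =
        buffer->CropAndScale(0, 0, 1280, 720, 640, 360)
            ->CropAndScale(0, 0, 640, 360, 320, 180);
    // ...only the final 320x180 mapping allocates (black) pixels, which the
    // test can then observe via GetMappedFramedBuffers().
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> mapped =
        scaled->GetMappedFrameBuffer(kFormats);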