Add alpha channel to VideoFrameBuffer containers

- Add alpha plane accessors (DataA()/StrideA()) via a new I420ABufferInterface
  in the PlanarYuvBuffer hierarchy.
- Add WrapI420ABuffer(), which creates a container that implements these
  accessors.
- Demonstrate usage in StereoDecoderAdapter (see the sketch below).
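
A minimal usage sketch for reference (illustrative only: the WrapWithAlpha()
helper and its arguments are hypothetical, while WrapI420ABuffer(), GetI420A(),
DataA()/StrideA() and rtc::KeepRefUntilDone() come from this CL):

#include "api/video/video_frame_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/keep_ref_until_done.h"

// Hypothetical helper: wrap an existing I420 buffer plus a caller-owned alpha
// plane (no copy), then read the alpha plane back through the new accessors.
// |a_plane| must stay valid until the returned buffer is destroyed.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> WrapWithAlpha(
    rtc::scoped_refptr<webrtc::I420BufferInterface> yuv,
    const uint8_t* a_plane,
    int a_stride) {
  rtc::scoped_refptr<webrtc::I420ABufferInterface> yuva =
      webrtc::WrapI420ABuffer(
          yuv->width(), yuv->height(), yuv->DataY(), yuv->StrideY(),
          yuv->DataU(), yuv->StrideU(), yuv->DataV(), yuv->StrideV(),
          a_plane, a_stride,
          rtc::KeepRefUntilDone(yuv));  // Keeps |yuv| alive while wrapped.

  // Consumers branch on type() before touching the alpha plane.
  if (yuva->type() == webrtc::VideoFrameBuffer::Type::kI420A) {
    const webrtc::I420ABufferInterface* with_alpha = yuva->GetI420A();
    RTC_DCHECK(with_alpha->DataA() != nullptr);
    RTC_DCHECK_GT(with_alpha->StrideA(), 0);
  }
  return yuva;
}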

This CL is step 2 of adding alpha channel support over the wire in WebRTC.
See https://webrtc-review.googlesource.com/c/src/+/7800 for an experimental
CL that gives an idea of how it will come together.
Design Doc: https://goo.gl/sFeSUT

Bug: webrtc:7671
Change-Id: Id5691cde00088ec811b63d89080d33ad2d6e3939
Reviewed-on: https://webrtc-review.googlesource.com/21130
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20635}
Author: Emircan Uysaler, 2017-11-09 12:33:24 -08:00 (committed by Commit Bot)
Commit: 574eaa4cda (parent: 66cebbda35)
7 changed files with 174 additions and 27 deletions


@@ -27,6 +27,16 @@ rtc::scoped_refptr<const I420BufferInterface> VideoFrameBuffer::GetI420()
   return static_cast<const I420BufferInterface*>(this);
 }
 
+I420ABufferInterface* VideoFrameBuffer::GetI420A() {
+  RTC_CHECK(type() == Type::kI420A);
+  return static_cast<I420ABufferInterface*>(this);
+}
+
+const I420ABufferInterface* VideoFrameBuffer::GetI420A() const {
+  RTC_CHECK(type() == Type::kI420A);
+  return static_cast<const I420ABufferInterface*>(this);
+}
+
 I444BufferInterface* VideoFrameBuffer::GetI444() {
   RTC_CHECK(type() == Type::kI444);
   return static_cast<I444BufferInterface*>(this);
@@ -53,6 +63,10 @@ rtc::scoped_refptr<I420BufferInterface> I420BufferInterface::ToI420() {
   return this;
 }
 
+VideoFrameBuffer::Type I420ABufferInterface::type() const {
+  return Type::kI420A;
+}
+
 VideoFrameBuffer::Type I444BufferInterface::type() const {
   return Type::kI444;
 }


@@ -19,6 +19,7 @@
 namespace webrtc {
 
 class I420BufferInterface;
+class I420ABufferInterface;
 class I444BufferInterface;
 
 // Base class for frame buffers of different types of pixel format and storage.
@@ -44,6 +45,7 @@ class VideoFrameBuffer : public rtc::RefCountInterface
   enum class Type {
     kNative,
     kI420,
+    kI420A,
     kI444,
   };
@@ -67,6 +69,8 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
   // removed.
   rtc::scoped_refptr<I420BufferInterface> GetI420();
   rtc::scoped_refptr<const I420BufferInterface> GetI420() const;
+  I420ABufferInterface* GetI420A();
+  const I420ABufferInterface* GetI420A() const;
   I444BufferInterface* GetI444();
   const I444BufferInterface* GetI444() const;
@@ -97,7 +101,7 @@ class PlanarYuvBuffer : public VideoFrameBuffer {
 class I420BufferInterface : public PlanarYuvBuffer {
  public:
-  Type type() const final;
+  Type type() const override;
 
   int ChromaWidth() const final;
   int ChromaHeight() const final;
@@ -108,6 +112,16 @@ class I420BufferInterface : public PlanarYuvBuffer {
   ~I420BufferInterface() override {}
 };
 
+class I420ABufferInterface : public I420BufferInterface {
+ public:
+  Type type() const final;
+  virtual const uint8_t* DataA() const = 0;
+  virtual int StrideA() const = 0;
+
+ protected:
+  ~I420ABufferInterface() override {}
+};
+
 class I444BufferInterface : public PlanarYuvBuffer {
  public:
   Type type() const final;


@@ -78,6 +78,19 @@ rtc::scoped_refptr<I444BufferInterface> WrapI444Buffer(
     int v_stride,
     const rtc::Callback0<void>& no_longer_used);
 
+rtc::scoped_refptr<I420ABufferInterface> WrapI420ABuffer(
+    int width,
+    int height,
+    const uint8_t* y_plane,
+    int y_stride,
+    const uint8_t* u_plane,
+    int u_stride,
+    const uint8_t* v_plane,
+    int v_stride,
+    const uint8_t* a_plane,
+    int a_stride,
+    const rtc::Callback0<void>& no_longer_used);
+
 rtc::scoped_refptr<PlanarYuvBuffer> WrapYuvBuffer(
     VideoFrameBuffer::Type type,
     int width,


@@ -96,6 +96,8 @@ class WrappedYuvBuffer : public Base {
         v_stride_(v_stride),
         no_longer_used_cb_(no_longer_used) {}
 
+  ~WrappedYuvBuffer() override { no_longer_used_cb_(); }
+
   int width() const override { return width_; }
 
   int height() const override { return height_; }
@@ -115,8 +117,6 @@ class WrappedYuvBuffer : public Base {
  private:
   friend class rtc::RefCountedObject<WrappedYuvBuffer>;
 
-  ~WrappedYuvBuffer() override { no_longer_used_cb_(); }
-
   const int width_;
   const int height_;
   const uint8_t* const y_plane_;
@@ -128,6 +128,41 @@ class WrappedYuvBuffer : public Base {
   rtc::Callback0<void> no_longer_used_cb_;
 };
 
+// Template to implement a wrapped buffer for a I4??BufferInterface.
+template <typename BaseWithA>
+class WrappedYuvaBuffer : public WrappedYuvBuffer<BaseWithA> {
+ public:
+  WrappedYuvaBuffer(int width,
+                    int height,
+                    const uint8_t* y_plane,
+                    int y_stride,
+                    const uint8_t* u_plane,
+                    int u_stride,
+                    const uint8_t* v_plane,
+                    int v_stride,
+                    const uint8_t* a_plane,
+                    int a_stride,
+                    const rtc::Callback0<void>& no_longer_used)
+      : WrappedYuvBuffer<BaseWithA>(width,
+                                    height,
+                                    y_plane,
+                                    y_stride,
+                                    u_plane,
+                                    u_stride,
+                                    v_plane,
+                                    v_stride,
+                                    no_longer_used),
+        a_plane_(a_plane),
+        a_stride_(a_stride) {}
+
+  const uint8_t* DataA() const override { return a_plane_; }
+  int StrideA() const override { return a_stride_; }
+
+ private:
+  const uint8_t* const a_plane_;
+  const int a_stride_;
+};
+
 rtc::scoped_refptr<I420BufferInterface> WrapI420Buffer(
     int width,
     int height,
@@ -144,6 +179,24 @@ rtc::scoped_refptr<I420BufferInterface> WrapI420Buffer(
           v_stride, no_longer_used));
 }
 
+rtc::scoped_refptr<I420ABufferInterface> WrapI420ABuffer(
+    int width,
+    int height,
+    const uint8_t* y_plane,
+    int y_stride,
+    const uint8_t* u_plane,
+    int u_stride,
+    const uint8_t* v_plane,
+    int v_stride,
+    const uint8_t* a_plane,
+    int a_stride,
+    const rtc::Callback0<void>& no_longer_used) {
+  return rtc::scoped_refptr<I420ABufferInterface>(
+      new rtc::RefCountedObject<WrappedYuvaBuffer<I420ABufferInterface>>(
+          width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
+          v_stride, a_plane, a_stride, no_longer_used));
+}
+
 rtc::scoped_refptr<I444BufferInterface> WrapI444Buffer(
     int width,
     int height,


@@ -11,6 +11,7 @@
 #include "modules/video_coding/codecs/stereo/include/stereo_decoder_adapter.h"
 
 #include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
 #include "api/video_codecs/sdp_video_format.h"
 #include "common_video/include/video_frame.h"
 #include "common_video/include/video_frame_buffer.h"
@@ -18,6 +19,11 @@
 #include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/logging.h"
 
+namespace {
+void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
+                    rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
+}  // anonymous namespace
+
 namespace webrtc {
 
 class StereoDecoderAdapter::AdapterDecodedImageCallback
@@ -173,11 +179,22 @@ void StereoDecoderAdapter::MergeAlphaImages(
     VideoFrame* alpha_decodedImage,
     const rtc::Optional<int32_t>& alpha_decode_time_ms,
     const rtc::Optional<uint8_t>& alpha_qp) {
-  // TODO(emircan): Merge the output and put in a VideoFrame container that can
-  // transport I420A.
-  decoded_complete_callback_->Decoded(*decodedImage, decode_time_ms, qp);
-  decoded_complete_callback_->Decoded(*alpha_decodedImage, alpha_decode_time_ms,
-                                      alpha_qp);
+  rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
+      decodedImage->video_frame_buffer()->ToI420();
+  rtc::scoped_refptr<webrtc::I420BufferInterface> alpha_buffer =
+      alpha_decodedImage->video_frame_buffer()->ToI420();
+  RTC_DCHECK_EQ(yuv_buffer->width(), alpha_buffer->width());
+  RTC_DCHECK_EQ(yuv_buffer->height(), alpha_buffer->height());
+  rtc::scoped_refptr<I420ABufferInterface> merged_buffer = WrapI420ABuffer(
+      yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
+      yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
+      yuv_buffer->DataV(), yuv_buffer->StrideV(), alpha_buffer->DataY(),
+      alpha_buffer->StrideY(),
+      rtc::Bind(&KeepBufferRefs, yuv_buffer, alpha_buffer));
+
+  VideoFrame merged_image(merged_buffer, decodedImage->timestamp(),
+                          0 /* render_time_ms */, decodedImage->rotation());
+  decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
 }
 
 }  // namespace webrtc


@@ -83,28 +83,30 @@ int StereoEncoderAdapter::Encode(const VideoFrame& input_image,
   if (!encoded_complete_callback_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  // TODO(emircan): Extract alpha and create an alpha frame with dummy planes.
-  // Since we don't have a way of transporting alpha yet, put a dummy output for
-  // alpha consisting of YXX.
-  // Encode AXX
-  rtc::scoped_refptr<I420BufferInterface> yuva_buffer =
-      input_image.video_frame_buffer()->ToI420();
-  rtc::scoped_refptr<WrappedI420Buffer> alpha_buffer(
-      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-          input_image.width(), input_image.height(), yuva_buffer->DataY(),
-          yuva_buffer->StrideY(), stereo_dummy_planes_.data(),
-          yuva_buffer->StrideU(), stereo_dummy_planes_.data(),
-          yuva_buffer->StrideV(),
-          rtc::KeepRefUntilDone(input_image.video_frame_buffer())));
-  VideoFrame alpha_image(alpha_buffer, input_image.timestamp(),
-                         input_image.render_time_ms(), input_image.rotation());
-  encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info, frame_types);
 
   // Encode YUV
   int rv = encoders_[kYUVStream]->Encode(input_image, codec_specific_info,
                                          frame_types);
+  if (rv)
+    return rv;
+
+  const bool has_alpha = input_image.video_frame_buffer()->type() ==
+                         VideoFrameBuffer::Type::kI420A;
+  if (!has_alpha)
+    return rv;
+
+  // Encode AXX
+  const I420ABufferInterface* yuva_buffer =
+      input_image.video_frame_buffer()->GetI420A();
+  rtc::scoped_refptr<I420BufferInterface> alpha_buffer =
+      WrapI420Buffer(input_image.width(), input_image.height(),
+                     yuva_buffer->DataA(), yuva_buffer->StrideA(),
+                     stereo_dummy_planes_.data(), yuva_buffer->StrideU(),
+                     stereo_dummy_planes_.data(), yuva_buffer->StrideV(),
+                     rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
+  VideoFrame alpha_image(alpha_buffer, input_image.timestamp(),
+                         input_image.render_time_ms(), input_image.rotation());
+  rv = encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info,
+                                     frame_types);
   return rv;
 }
@@ -158,6 +160,9 @@ EncodedImageCallback::Result StereoEncoderAdapter::OnEncodedImage(
     const EncodedImage& encodedImage,
     const CodecSpecificInfo* codecSpecificInfo,
     const RTPFragmentationHeader* fragmentation) {
+  if (stream_idx == kAXXStream)
+    return EncodedImageCallback::Result(EncodedImageCallback::Result::OK);
+
   // TODO(emircan): Fill |codec_specific_info| with stereo parameters.
   encoded_complete_callback_->OnEncodedImage(encodedImage, codecSpecificInfo,
                                              fragmentation);


@@ -10,11 +10,14 @@
 #include "api/test/mock_video_decoder_factory.h"
 #include "api/test/mock_video_encoder_factory.h"
+#include "common_video/include/video_frame_buffer.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "modules/video_coding/codecs/stereo/include/stereo_decoder_adapter.h"
 #include "modules/video_coding/codecs/stereo/include/stereo_encoder_adapter.h"
 #include "modules/video_coding/codecs/test/video_codec_test.h"
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "rtc_base/keep_ref_until_done.h"
+#include "rtc_base/ptr_util.h"
 
 using testing::_;
 using testing::Return;
@@ -44,6 +47,18 @@ class TestStereoAdapter : public VideoCodecTest {
     return codec_settings;
   }
 
+  std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
+    rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
+        input_frame_->video_frame_buffer()->ToI420();
+    rtc::scoped_refptr<I420ABufferInterface> yuva_buffer = WrapI420ABuffer(
+        yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
+        yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
+        yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
+        yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
+    return rtc::WrapUnique<VideoFrame>(
+        new VideoFrame(yuva_buffer, kVideoRotation_0, 0));
+  }
+
  private:
   void SetUp() override {
     EXPECT_CALL(*decoder_factory_, Die());
@@ -93,4 +108,20 @@ TEST_F(TestStereoAdapter, EncodeDecodeI420Frame) {
   EXPECT_GT(I420PSNR(input_frame_.get(), decoded_frame.get()), 36);
 }
 
+TEST_F(TestStereoAdapter, EncodeDecodeI420AFrame) {
+  std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*yuva_frame, nullptr, nullptr));
+  EncodedImage encoded_frame;
+  CodecSpecificInfo codec_specific_info;
+  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            decoder_->Decode(encoded_frame, false, nullptr));
+  std::unique_ptr<VideoFrame> decoded_frame;
+  rtc::Optional<uint8_t> decoded_qp;
+  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
+  ASSERT_TRUE(decoded_frame);
+  EXPECT_GT(I420PSNR(yuva_frame.get(), decoded_frame.get()), 36);
+}
+
 }  // namespace webrtc