From 0186d2d62665d7e4dbc7b144c12a720193c5b1a3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Erik=20Spr=C3=A5ng?=
Date: Wed, 9 Dec 2020 20:58:21 +0100
Subject: [PATCH] Splits vp9_impl into libvpx_vp9_encoder and libvpx_vp9_decoder.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Also moves the LibvpxVp8Interface from codecs/vp8 to codecs/interface and
drops vp8 from the name.

Follow-up CLs will wire up actual usage in the new classes through the
interface so that we can unit test them more easily.

Bug: webrtc:12274
Change-Id: I95f66e90245d9320e5fc23cdc845fbeb2648b38b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/196522
Commit-Queue: Erik Språng
Reviewed-by: Sergey Silkin
Cr-Commit-Position: refs/heads/master@{#32816}
---
 modules/video_coding/BUILD.gn                 |  35 +-
 .../{vp8 => interface}/libvpx_interface.cc    |  12 +-
 .../{vp8 => interface}/libvpx_interface.h     |  10 +-
 .../mock_libvpx_interface.h                   |  10 +-
 .../codecs/vp8/libvpx_vp8_encoder.cc          |   6 +-
 .../codecs/vp8/libvpx_vp8_encoder.h           |   2 +-
 .../codecs/vp8/test/vp8_impl_unittest.cc      |  20 +-
 .../codecs/vp9/libvpx_vp9_decoder.cc          | 400 +++++++++++++++
 .../codecs/vp9/libvpx_vp9_decoder.h           |  69 +++
 .../{vp9_impl.cc => libvpx_vp9_encoder.cc}    | 469 ++----------------
 .../vp9/{vp9_impl.h => libvpx_vp9_encoder.h}  |  62 +--
 modules/video_coding/codecs/vp9/vp9.cc        |  14 +-
 12 files changed, 602 insertions(+), 507 deletions(-)
 rename modules/video_coding/codecs/{vp8 => interface}/libvpx_interface.cc (96%)
 rename modules/video_coding/codecs/{vp8 => interface}/libvpx_interface.h (93%)
 rename modules/video_coding/codecs/{vp8/test => interface}/mock_libvpx_interface.h (91%)
 create mode 100644 modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
 create mode 100644 modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h
 rename modules/video_coding/codecs/vp9/{vp9_impl.cc => libvpx_vp9_encoder.cc} (81%)
 rename modules/video_coding/codecs/vp9/{vp9_impl.h => libvpx_vp9_encoder.h} (82%)

diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn
index fe2388c6cd..2d636cd5d1 100644
--- a/modules/video_coding/BUILD.gn
+++ b/modules/video_coding/BUILD.gn
@@ -446,14 +446,26 @@ rtc_library("webrtc_multiplex") {
   ]
 }
 
+# This target defines a bare-bones interface towards libvpx, used by the
+# VP8 and VP9 wrappers below.
+rtc_library("webrtc_libvpx_interface") {
+  visibility = [ "*" ]
+  sources = [
+    "codecs/interface/libvpx_interface.cc",
+    "codecs/interface/libvpx_interface.h",
+  ]
+  deps = [ "../../rtc_base:checks" ]
+  if (rtc_build_libvpx) {
+    deps += [ rtc_libvpx_dir ]
+  }
+}
+
 # This target includes the internal SW codec.
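The point of splitting out this interface target is dependency injection: a codec wrapper takes its libvpx calls through LibvpxInterface, so unit tests can hand it a gmock fake instead of the real library. The VP8 tests touched further down in this patch already do exactly that; the following is a minimal sketch of that pattern (the specific expectation is only an illustration, the available mock methods are the ones declared in the mock header moved by this patch):

  #include <memory>

  #include "modules/video_coding/codecs/interface/mock_libvpx_interface.h"
  #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
  #include "test/gmock.h"
  #include "test/gtest.h"

  TEST(LibvpxVp8EncoderTest, CallsLibvpxThroughInjectedInterface) {
    // The encoder takes ownership of the mock; keep a raw pointer around so
    // expectations can still be set on it.
    auto* const vpx = new ::testing::NiceMock<webrtc::MockLibvpxInterface>();
    webrtc::LibvpxVp8Encoder encoder(
        std::unique_ptr<webrtc::LibvpxInterface>(vpx),
        webrtc::VP8Encoder::Settings());
    // Every libvpx call the encoder makes is now observable, e.g.:
    EXPECT_CALL(*vpx, codec_enc_init).Times(::testing::AnyNumber());
  }

The new LibvpxVp9Encoder below takes the interface in its constructor but, per the commit message, only starts routing its libvpx calls through it (and gaining tests like the above) in follow-up CLs.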
rtc_library("webrtc_vp8") { visibility = [ "*" ] poisonous = [ "software_video_codecs" ] sources = [ "codecs/vp8/include/vp8.h", - "codecs/vp8/libvpx_interface.cc", - "codecs/vp8/libvpx_interface.h", "codecs/vp8/libvpx_vp8_decoder.cc", "codecs/vp8/libvpx_vp8_decoder.h", "codecs/vp8/libvpx_vp8_encoder.cc", @@ -464,6 +476,7 @@ rtc_library("webrtc_vp8") { ":codec_globals_headers", ":video_codec_interface", ":video_coding_utility", + ":webrtc_libvpx_interface", ":webrtc_vp8_temporal_layers", "../../api:fec_controller_api", "../../api:scoped_refptr", @@ -546,16 +559,19 @@ rtc_library("webrtc_vp9") { poisonous = [ "software_video_codecs" ] sources = [ "codecs/vp9/include/vp9.h", + "codecs/vp9/libvpx_vp9_decoder.cc", + "codecs/vp9/libvpx_vp9_decoder.h", + "codecs/vp9/libvpx_vp9_encoder.cc", + "codecs/vp9/libvpx_vp9_encoder.h", "codecs/vp9/vp9.cc", "codecs/vp9/vp9_frame_buffer_pool.cc", "codecs/vp9/vp9_frame_buffer_pool.h", - "codecs/vp9/vp9_impl.cc", - "codecs/vp9/vp9_impl.h", ] deps = [ ":video_codec_interface", ":video_coding_utility", + ":webrtc_libvpx_interface", ":webrtc_vp9_helpers", "../../api:fec_controller_api", "../../api:scoped_refptr", @@ -651,6 +667,15 @@ if (rtc_include_tests) { ] } + rtc_library("mock_libvpx_interface") { + testonly = true + sources = [ "codecs/interface/mock_libvpx_interface.h" ] + deps = [ + ":webrtc_libvpx_interface", + "../../test:test_support", + ] + } + rtc_library("simulcast_test_fixture_impl") { testonly = true sources = [ @@ -821,7 +846,6 @@ if (rtc_include_tests) { "codecs/multiplex/test/multiplex_adapter_unittest.cc", "codecs/test/video_encoder_decoder_instantiation_tests.cc", "codecs/test/videocodec_test_libvpx.cc", - "codecs/vp8/test/mock_libvpx_interface.h", "codecs/vp8/test/vp8_impl_unittest.cc", "codecs/vp9/test/vp9_impl_unittest.cc", ] @@ -836,6 +860,7 @@ if (rtc_include_tests) { deps = [ ":encoded_video_frame_producer", + ":mock_libvpx_interface", ":video_codec_interface", ":video_codecs_test_framework", ":video_coding_utility", diff --git a/modules/video_coding/codecs/vp8/libvpx_interface.cc b/modules/video_coding/codecs/interface/libvpx_interface.cc similarity index 96% rename from modules/video_coding/codecs/vp8/libvpx_interface.cc rename to modules/video_coding/codecs/interface/libvpx_interface.cc index 7bf6117157..8ab7bf5f99 100644 --- a/modules/video_coding/codecs/vp8/libvpx_interface.cc +++ b/modules/video_coding/codecs/interface/libvpx_interface.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/codecs/vp8/libvpx_interface.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include @@ -16,10 +16,10 @@ namespace webrtc { namespace { -class LibvpxVp8Facade : public LibvpxInterface { +class LibvpxFacade : public LibvpxInterface { public: - LibvpxVp8Facade() = default; - ~LibvpxVp8Facade() override = default; + LibvpxFacade() = default; + ~LibvpxFacade() override = default; vpx_image_t* img_alloc(vpx_image_t* img, vpx_img_fmt_t fmt, @@ -203,8 +203,8 @@ class LibvpxVp8Facade : public LibvpxInterface { } // namespace -std::unique_ptr LibvpxInterface::CreateEncoder() { - return std::make_unique(); +std::unique_ptr LibvpxInterface::Create() { + return std::make_unique(); } } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/libvpx_interface.h b/modules/video_coding/codecs/interface/libvpx_interface.h similarity index 93% rename from modules/video_coding/codecs/vp8/libvpx_interface.h rename to modules/video_coding/codecs/interface/libvpx_interface.h index 3da38ea24a..f087ff383b 100644 --- a/modules/video_coding/codecs/vp8/libvpx_interface.h +++ b/modules/video_coding/codecs/interface/libvpx_interface.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_INTERFACE_H_ -#define MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_INTERFACE_H_ +#ifndef MODULES_VIDEO_CODING_CODECS_INTERFACE_LIBVPX_INTERFACE_H_ +#define MODULES_VIDEO_CODING_CODECS_INTERFACE_LIBVPX_INTERFACE_H_ #include @@ -22,7 +22,7 @@ namespace webrtc { -// This interface is a proxy to to the static libvpx functions, so that they +// This interface is a proxy to the static libvpx functions, so that they // can be mocked for testing. Currently supports VP8 encoder functions. // TODO(sprang): Extend this to VP8 decoder and VP9 encoder/decoder too. class LibvpxInterface { @@ -96,9 +96,9 @@ class LibvpxInterface { virtual const char* codec_error_detail(vpx_codec_ctx_t* ctx) const = 0; // Returns interface wrapping the actual libvpx functions. - static std::unique_ptr CreateEncoder(); + static std::unique_ptr Create(); }; } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_INTERFACE_H_ +#endif // MODULES_VIDEO_CODING_CODECS_INTERFACE_LIBVPX_INTERFACE_H_ diff --git a/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h b/modules/video_coding/codecs/interface/mock_libvpx_interface.h similarity index 91% rename from modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h rename to modules/video_coding/codecs/interface/mock_libvpx_interface.h index 697b44b9d5..33681cb784 100644 --- a/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h +++ b/modules/video_coding/codecs/interface/mock_libvpx_interface.h @@ -8,16 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_VIDEO_CODING_CODECS_VP8_TEST_MOCK_LIBVPX_INTERFACE_H_ -#define MODULES_VIDEO_CODING_CODECS_VP8_TEST_MOCK_LIBVPX_INTERFACE_H_ +#ifndef MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ +#define MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ -#include "modules/video_coding/codecs/vp8/libvpx_interface.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "test/gmock.h" #include "test/gtest.h" namespace webrtc { -class MockLibvpxVp8Interface : public LibvpxInterface { +class MockLibvpxInterface : public LibvpxInterface { public: MOCK_METHOD( vpx_image_t*, @@ -107,4 +107,4 @@ class MockLibvpxVp8Interface : public LibvpxInterface { } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_VP8_TEST_MOCK_LIBVPX_INTERFACE_H_ +#endif // MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 9f604975df..7713a0d3d0 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -177,13 +177,13 @@ absl::optional GetRequestedResolutionAlignmentOverride() { } // namespace std::unique_ptr VP8Encoder::Create() { - return std::make_unique(LibvpxInterface::CreateEncoder(), + return std::make_unique(LibvpxInterface::Create(), VP8Encoder::Settings()); } std::unique_ptr VP8Encoder::Create( VP8Encoder::Settings settings) { - return std::make_unique(LibvpxInterface::CreateEncoder(), + return std::make_unique(LibvpxInterface::Create(), std::move(settings)); } @@ -193,7 +193,7 @@ std::unique_ptr VP8Encoder::Create( VP8Encoder::Settings settings; settings.frame_buffer_controller_factory = std::move(frame_buffer_controller_factory); - return std::make_unique(LibvpxInterface::CreateEncoder(), + return std::make_unique(LibvpxInterface::Create(), std::move(settings)); } diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h index c08b9b0883..bfe4275f50 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h @@ -21,8 +21,8 @@ #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/codecs/vp8/libvpx_interface.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/framerate_controller.h" #include "rtc_base/experiments/cpu_speed_experiment.h" diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc index 2d09eb0116..94ea1794ef 100644 --- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc +++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc @@ -20,10 +20,10 @@ #include "api/video_codecs/vp8_temporal_layers.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "common_video/test/utilities.h" +#include "modules/video_coding/codecs/interface/mock_libvpx_interface.h" #include "modules/video_coding/codecs/test/video_codec_unittest.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h" -#include 
"modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "rtc_base/time_utils.h" #include "test/field_trial.h" @@ -120,7 +120,7 @@ class TestVp8Impl : public VideoCodecUnitTest { TEST_F(TestVp8Impl, ErrorResilienceDisabledForNoTemporalLayers) { codec_settings_.simulcastStream[0].numberOfTemporalLayers = 1; - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); EXPECT_CALL(*vpx, @@ -134,7 +134,7 @@ TEST_F(TestVp8Impl, DefaultErrorResilienceEnabledForTemporalLayers) { codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2; codec_settings_.VP8()->numberOfTemporalLayers = 2; - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); EXPECT_CALL(*vpx, @@ -153,7 +153,7 @@ TEST_F(TestVp8Impl, codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2; codec_settings_.VP8()->numberOfTemporalLayers = 2; - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); EXPECT_CALL(*vpx, @@ -166,7 +166,7 @@ TEST_F(TestVp8Impl, } TEST_F(TestVp8Impl, SetRates) { - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, @@ -472,7 +472,7 @@ TEST_F(TestVp8Impl, DontDropKeyframes) { } TEST_F(TestVp8Impl, KeepsTimestampOnReencode) { - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); @@ -512,7 +512,7 @@ TEST_F(TestVp8Impl, KeepsTimestampOnReencode) { } TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) { - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); @@ -534,7 +534,7 @@ TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) { "WebRTC-VP8-GetEncoderInfoOverride/" "requested_resolution_alignment:10/"); - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); @@ -543,7 +543,7 @@ TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) { TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsEmptyResolutionBitrateLimitsByDefault) { - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), VP8Encoder::Settings()); @@ -563,7 +563,7 @@ TEST(LibvpxVp8EncoderTest, VP8Encoder::Settings settings; settings.resolution_bitrate_limits = resolution_bitrate_limits; - auto* const vpx = new NiceMock(); + auto* const vpx = new NiceMock(); LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), std::move(settings)); diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc new file mode 100644 index 0000000000..45e0a0b4ab --- /dev/null +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc @@ -0,0 +1,400 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + * + */ + +#ifdef RTC_ENABLE_VP9 + +#include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h" + +#include + +#include "absl/strings/match.h" +#include "api/transport/field_trial_based_config.h" +#include "api/video/color_space.h" +#include "api/video/i010_buffer.h" +#include "common_video/include/video_frame_buffer.h" +#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "rtc_base/checks.h" +#include "rtc_base/keep_ref_until_done.h" +#include "rtc_base/logging.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "vpx/vp8dx.h" +#include "vpx/vpx_decoder.h" + +namespace webrtc { +namespace { + +// Helper class for extracting VP9 colorspace. +ColorSpace ExtractVP9ColorSpace(vpx_color_space_t space_t, + vpx_color_range_t range_t, + unsigned int bit_depth) { + ColorSpace::PrimaryID primaries = ColorSpace::PrimaryID::kUnspecified; + ColorSpace::TransferID transfer = ColorSpace::TransferID::kUnspecified; + ColorSpace::MatrixID matrix = ColorSpace::MatrixID::kUnspecified; + switch (space_t) { + case VPX_CS_BT_601: + case VPX_CS_SMPTE_170: + primaries = ColorSpace::PrimaryID::kSMPTE170M; + transfer = ColorSpace::TransferID::kSMPTE170M; + matrix = ColorSpace::MatrixID::kSMPTE170M; + break; + case VPX_CS_SMPTE_240: + primaries = ColorSpace::PrimaryID::kSMPTE240M; + transfer = ColorSpace::TransferID::kSMPTE240M; + matrix = ColorSpace::MatrixID::kSMPTE240M; + break; + case VPX_CS_BT_709: + primaries = ColorSpace::PrimaryID::kBT709; + transfer = ColorSpace::TransferID::kBT709; + matrix = ColorSpace::MatrixID::kBT709; + break; + case VPX_CS_BT_2020: + primaries = ColorSpace::PrimaryID::kBT2020; + switch (bit_depth) { + case 8: + transfer = ColorSpace::TransferID::kBT709; + break; + case 10: + transfer = ColorSpace::TransferID::kBT2020_10; + break; + default: + RTC_NOTREACHED(); + break; + } + matrix = ColorSpace::MatrixID::kBT2020_NCL; + break; + case VPX_CS_SRGB: + primaries = ColorSpace::PrimaryID::kBT709; + transfer = ColorSpace::TransferID::kIEC61966_2_1; + matrix = ColorSpace::MatrixID::kBT709; + break; + default: + break; + } + + ColorSpace::RangeID range = ColorSpace::RangeID::kInvalid; + switch (range_t) { + case VPX_CR_STUDIO_RANGE: + range = ColorSpace::RangeID::kLimited; + break; + case VPX_CR_FULL_RANGE: + range = ColorSpace::RangeID::kFull; + break; + default: + break; + } + return ColorSpace(primaries, transfer, matrix, range); +} + +} // namespace + +LibvpxVp9Decoder::LibvpxVp9Decoder() + : LibvpxVp9Decoder(FieldTrialBasedConfig()) {} +LibvpxVp9Decoder::LibvpxVp9Decoder(const WebRtcKeyValueConfig& trials) + : decode_complete_callback_(nullptr), + inited_(false), + decoder_(nullptr), + key_frame_required_(true), + preferred_output_format_( + absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled") + ? VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} + +LibvpxVp9Decoder::~LibvpxVp9Decoder() { + inited_ = true; // in order to do the actual release + Release(); + int num_buffers_in_use = libvpx_buffer_pool_.GetNumBuffersInUse(); + if (num_buffers_in_use > 0) { + // The frame buffers are reference counted and frames are exposed after + // decoding. There may be valid usage cases where previous frames are still + // referenced after ~LibvpxVp9Decoder that is not a leak. 
+ RTC_LOG(LS_INFO) << num_buffers_in_use + << " Vp9FrameBuffers are still " + "referenced during ~LibvpxVp9Decoder."; + } +} + +int LibvpxVp9Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) { + int ret_val = Release(); + if (ret_val < 0) { + return ret_val; + } + + if (decoder_ == nullptr) { + decoder_ = new vpx_codec_ctx_t; + } + vpx_codec_dec_cfg_t cfg; + memset(&cfg, 0, sizeof(cfg)); + +#ifdef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION + // We focus on webrtc fuzzing here, not libvpx itself. Use single thread for + // fuzzing, because: + // - libvpx's VP9 single thread decoder is more fuzzer friendly. It detects + // errors earlier than the multi-threads version. + // - Make peak CPU usage under control (not depending on input) + cfg.threads = 1; +#else + if (!inst) { + // No config provided - don't know resolution to decode yet. + // Set thread count to one in the meantime. + cfg.threads = 1; + } else { + // We want to use multithreading when decoding high resolution videos. But + // not too many in order to avoid overhead when many stream are decoded + // concurrently. + // Set 2 thread as target for 1280x720 pixel count, and then scale up + // linearly from there - but cap at physical core count. + // For common resolutions this results in: + // 1 for 360p + // 2 for 720p + // 4 for 1080p + // 8 for 1440p + // 18 for 4K + int num_threads = + std::max(1, 2 * (inst->width * inst->height) / (1280 * 720)); + cfg.threads = std::min(number_of_cores, num_threads); + current_codec_ = *inst; + } +#endif + + num_cores_ = number_of_cores; + + vpx_codec_flags_t flags = 0; + if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { + return WEBRTC_VIDEO_CODEC_MEMORY; + } + + if (!libvpx_buffer_pool_.InitializeVpxUsePool(decoder_)) { + return WEBRTC_VIDEO_CODEC_MEMORY; + } + + inited_ = true; + // Always start with a complete key frame. + key_frame_required_ = true; + if (inst && inst->buffer_pool_size) { + if (!libvpx_buffer_pool_.Resize(*inst->buffer_pool_size) || + !output_buffer_pool_.Resize(*inst->buffer_pool_size)) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + } + + vpx_codec_err_t status = + vpx_codec_control(decoder_, VP9D_SET_LOOP_FILTER_OPT, 1); + if (status != VPX_CODEC_OK) { + RTC_LOG(LS_ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. " + << vpx_codec_error(decoder_); + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + + return WEBRTC_VIDEO_CODEC_OK; +} + +int LibvpxVp9Decoder::Decode(const EncodedImage& input_image, + bool missing_frames, + int64_t /*render_time_ms*/) { + if (!inited_) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + if (decode_complete_callback_ == nullptr) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + + if (input_image._frameType == VideoFrameType::kVideoFrameKey) { + absl::optional frame_info = + vp9::ParseIntraFrameInfo(input_image.data(), input_image.size()); + if (frame_info) { + if (frame_info->frame_width != current_codec_.width || + frame_info->frame_height != current_codec_.height) { + // Resolution has changed, tear down and re-init a new decoder in + // order to get correct sizing. + Release(); + current_codec_.width = frame_info->frame_width; + current_codec_.height = frame_info->frame_height; + int reinit_status = InitDecode(¤t_codec_, num_cores_); + if (reinit_status != WEBRTC_VIDEO_CODEC_OK) { + RTC_LOG(LS_WARNING) << "Failed to re-init decoder."; + return reinit_status; + } + } + } else { + RTC_LOG(LS_WARNING) << "Failed to parse VP9 header from key-frame."; + } + } + + // Always start with a complete key frame. 
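// (Illustrative aside, not part of this change: a worked example of the
// InitDecode() thread heuristic above. num_threads =
// std::max(1, 2 * (width * height) / (1280 * 720)), then capped by
// number_of_cores:
//   1920x1080 -> 2 * 2073600 / 921600 = 4 threads (integer division);
//   640x360   -> 2 * 230400 / 921600  = 0, clamped to 1 thread;
//   3840x2160 -> 18, so an 8-core machine ends up with cfg.threads = 8.)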
+ if (key_frame_required_) { + if (input_image._frameType != VideoFrameType::kVideoFrameKey) + return WEBRTC_VIDEO_CODEC_ERROR; + key_frame_required_ = false; + } + vpx_codec_iter_t iter = nullptr; + vpx_image_t* img; + const uint8_t* buffer = input_image.data(); + if (input_image.size() == 0) { + buffer = nullptr; // Triggers full frame concealment. + } + // During decode libvpx may get and release buffers from + // |libvpx_buffer_pool_|. In practice libvpx keeps a few (~3-4) buffers alive + // at a time. + if (vpx_codec_decode(decoder_, buffer, + static_cast(input_image.size()), 0, + VPX_DL_REALTIME)) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. + // It may be released by libvpx during future vpx_codec_decode or + // vpx_codec_destroy calls. + img = vpx_codec_get_frame(decoder_, &iter); + int qp; + vpx_codec_err_t vpx_ret = + vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); + RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); + int ret = + ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace()); + if (ret != 0) { + return ret; + } + return WEBRTC_VIDEO_CODEC_OK; +} + +int LibvpxVp9Decoder::ReturnFrame( + const vpx_image_t* img, + uint32_t timestamp, + int qp, + const webrtc::ColorSpace* explicit_color_space) { + if (img == nullptr) { + // Decoder OK and nullptr image => No show frame. + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + + // This buffer contains all of |img|'s image data, a reference counted + // Vp9FrameBuffer. (libvpx is done with the buffers after a few + // vpx_codec_decode calls or vpx_codec_destroy). + Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer = + static_cast(img->fb_priv); + + // The buffer can be used directly by the VideoFrame (without copy) by + // using a Wrapped*Buffer. + rtc::scoped_refptr img_wrapped_buffer; + switch (img->bit_depth) { + case 8: + if (img->fmt == VPX_IMG_FMT_I420) { + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + rtc::scoped_refptr nv12_buffer = + output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); + if (!nv12_buffer.get()) { + // Buffer pool is full. + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + img_wrapped_buffer = nv12_buffer; + libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + nv12_buffer->MutableDataY(), + nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), + nv12_buffer->StrideUV(), img->d_w, img->d_h); + // No holding onto img_buffer as it's no longer needed and can be + // reused. + } else { + img_wrapped_buffer = WrapI420Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI420Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release |img_buffer|. + rtc::KeepRefUntilDone(img_buffer)); + } + } else if (img->fmt == VPX_IMG_FMT_I444) { + img_wrapped_buffer = WrapI444Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI444Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release |img_buffer|. 
+ rtc::KeepRefUntilDone(img_buffer)); + } else { + RTC_LOG(LS_ERROR) + << "Unsupported pixel format produced by the decoder: " + << static_cast(img->fmt); + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + break; + case 10: + img_wrapped_buffer = WrapI010Buffer( + img->d_w, img->d_h, + reinterpret_cast(img->planes[VPX_PLANE_Y]), + img->stride[VPX_PLANE_Y] / 2, + reinterpret_cast(img->planes[VPX_PLANE_U]), + img->stride[VPX_PLANE_U] / 2, + reinterpret_cast(img->planes[VPX_PLANE_V]), + img->stride[VPX_PLANE_V] / 2, rtc::KeepRefUntilDone(img_buffer)); + break; + default: + RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: " + << img->bit_depth; + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + + auto builder = VideoFrame::Builder() + .set_video_frame_buffer(img_wrapped_buffer) + .set_timestamp_rtp(timestamp); + if (explicit_color_space) { + builder.set_color_space(*explicit_color_space); + } else { + builder.set_color_space( + ExtractVP9ColorSpace(img->cs, img->range, img->bit_depth)); + } + VideoFrame decoded_image = builder.build(); + + decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp); + return WEBRTC_VIDEO_CODEC_OK; +} + +int LibvpxVp9Decoder::RegisterDecodeCompleteCallback( + DecodedImageCallback* callback) { + decode_complete_callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int LibvpxVp9Decoder::Release() { + int ret_val = WEBRTC_VIDEO_CODEC_OK; + + if (decoder_ != nullptr) { + if (inited_) { + // When a codec is destroyed libvpx will release any buffers of + // |libvpx_buffer_pool_| it is currently using. + if (vpx_codec_destroy(decoder_)) { + ret_val = WEBRTC_VIDEO_CODEC_MEMORY; + } + } + delete decoder_; + decoder_ = nullptr; + } + // Releases buffers from the pool. Any buffers not in use are deleted. Buffers + // still referenced externally are deleted once fully released, not returning + // to the pool. + libvpx_buffer_pool_.ClearPool(); + output_buffer_pool_.Release(); + inited_ = false; + return ret_val; +} + +const char* LibvpxVp9Decoder::ImplementationName() const { + return "libvpx"; +} + +} // namespace webrtc + +#endif // RTC_ENABLE_VP9 diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h new file mode 100644 index 0000000000..59d207aec2 --- /dev/null +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ * + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_DECODER_H_ +#define MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_DECODER_H_ + +#ifdef RTC_ENABLE_VP9 + +#include "api/transport/webrtc_key_value_config.h" +#include "api/video_codecs/video_decoder.h" +#include "common_video/include/video_frame_buffer_pool.h" +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" +#include "vpx/vp8cx.h" + +namespace webrtc { + +class LibvpxVp9Decoder : public VP9Decoder { + public: + LibvpxVp9Decoder(); + explicit LibvpxVp9Decoder(const WebRtcKeyValueConfig& trials); + + virtual ~LibvpxVp9Decoder(); + + int InitDecode(const VideoCodec* inst, int number_of_cores) override; + + int Decode(const EncodedImage& input_image, + bool missing_frames, + int64_t /*render_time_ms*/) override; + + int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override; + + int Release() override; + + const char* ImplementationName() const override; + + private: + int ReturnFrame(const vpx_image_t* img, + uint32_t timestamp, + int qp, + const webrtc::ColorSpace* explicit_color_space); + + // Memory pool used to share buffers between libvpx and webrtc. + Vp9FrameBufferPool libvpx_buffer_pool_; + // Buffer pool used to allocate additionally needed NV12 buffers. + VideoFrameBufferPool output_buffer_pool_; + DecodedImageCallback* decode_complete_callback_; + bool inited_; + vpx_codec_ctx_t* decoder_; + bool key_frame_required_; + VideoCodec current_codec_; + int num_cores_; + + // Decoder should produce this format if possible. + const VideoFrameBuffer::Type preferred_output_format_; +}; +} // namespace webrtc + +#endif // RTC_ENABLE_VP9 + +#endif // MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_DECODER_H_ diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc similarity index 81% rename from modules/video_coding/codecs/vp9/vp9_impl.cc rename to modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index 74fe565d10..2f2fa6ccd2 100644 --- a/modules/video_coding/codecs/vp9/vp9_impl.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,7 +11,7 @@ #ifdef RTC_ENABLE_VP9 -#include "modules/video_coding/codecs/vp9/vp9_impl.h" +#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include #include @@ -20,7 +20,6 @@ #include "absl/memory/memory.h" #include "absl/strings/match.h" -#include "api/transport/field_trial_based_config.h" #include "api/video/color_space.h" #include "api/video/i010_buffer.h" #include "common_video/include/video_frame_buffer.h" @@ -35,15 +34,12 @@ #include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" -#include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "vpx/vp8cx.h" -#include "vpx/vp8dx.h" -#include "vpx/vpx_decoder.h" #include "vpx/vpx_encoder.h" namespace webrtc { @@ -66,68 +62,6 @@ const int kMaxAllowedPidDiff = 30; constexpr int kLowVp9QpThreshold = 149; constexpr int kHighVp9QpThreshold = 205; -// Helper class for extracting VP9 colorspace. -ColorSpace ExtractVP9ColorSpace(vpx_color_space_t space_t, - vpx_color_range_t range_t, - unsigned int bit_depth) { - ColorSpace::PrimaryID primaries = ColorSpace::PrimaryID::kUnspecified; - ColorSpace::TransferID transfer = ColorSpace::TransferID::kUnspecified; - ColorSpace::MatrixID matrix = ColorSpace::MatrixID::kUnspecified; - switch (space_t) { - case VPX_CS_BT_601: - case VPX_CS_SMPTE_170: - primaries = ColorSpace::PrimaryID::kSMPTE170M; - transfer = ColorSpace::TransferID::kSMPTE170M; - matrix = ColorSpace::MatrixID::kSMPTE170M; - break; - case VPX_CS_SMPTE_240: - primaries = ColorSpace::PrimaryID::kSMPTE240M; - transfer = ColorSpace::TransferID::kSMPTE240M; - matrix = ColorSpace::MatrixID::kSMPTE240M; - break; - case VPX_CS_BT_709: - primaries = ColorSpace::PrimaryID::kBT709; - transfer = ColorSpace::TransferID::kBT709; - matrix = ColorSpace::MatrixID::kBT709; - break; - case VPX_CS_BT_2020: - primaries = ColorSpace::PrimaryID::kBT2020; - switch (bit_depth) { - case 8: - transfer = ColorSpace::TransferID::kBT709; - break; - case 10: - transfer = ColorSpace::TransferID::kBT2020_10; - break; - default: - RTC_NOTREACHED(); - break; - } - matrix = ColorSpace::MatrixID::kBT2020_NCL; - break; - case VPX_CS_SRGB: - primaries = ColorSpace::PrimaryID::kBT709; - transfer = ColorSpace::TransferID::kIEC61966_2_1; - matrix = ColorSpace::MatrixID::kBT709; - break; - default: - break; - } - - ColorSpace::RangeID range = ColorSpace::RangeID::kInvalid; - switch (range_t) { - case VPX_CR_STUDIO_RANGE: - range = ColorSpace::RangeID::kLimited; - break; - case VPX_CR_FULL_RANGE: - range = ColorSpace::RangeID::kFull; - break; - default: - break; - } - return ColorSpace(primaries, transfer, matrix, range); -} - std::pair GetActiveLayers( const VideoBitrateAllocation& allocation) { for (size_t sl_idx = 0; sl_idx < kMaxSpatialLayers; ++sl_idx) { @@ -251,18 +185,17 @@ vpx_svc_ref_frame_config_t Vp9References( } // namespace -void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, - void* user_data) { - VP9EncoderImpl* enc = static_cast(user_data); +void LibvpxVp9Encoder::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, + void* user_data) { + LibvpxVp9Encoder* enc = static_cast(user_data); 
enc->GetEncodedLayerFrame(pkt); } -VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec) - : VP9EncoderImpl(codec, FieldTrialBasedConfig()) {} - -VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec, - const WebRtcKeyValueConfig& trials) - : encoded_image_(), +LibvpxVp9Encoder::LibvpxVp9Encoder(const cricket::VideoCodec& codec, + std::unique_ptr interface, + const WebRtcKeyValueConfig& trials) + : libvpx_(std::move(interface)), + encoded_image_(), encoded_complete_callback_(nullptr), profile_( ParseSdpForVP9Profile(codec.params).value_or(VP9Profile::kProfile0)), @@ -311,15 +244,15 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec, memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); } -VP9EncoderImpl::~VP9EncoderImpl() { +LibvpxVp9Encoder::~LibvpxVp9Encoder() { Release(); } -void VP9EncoderImpl::SetFecControllerOverride(FecControllerOverride*) { +void LibvpxVp9Encoder::SetFecControllerOverride(FecControllerOverride*) { // Ignored. } -int VP9EncoderImpl::Release() { +int LibvpxVp9Encoder::Release() { int ret_val = WEBRTC_VIDEO_CODEC_OK; if (encoder_ != nullptr) { @@ -343,13 +276,13 @@ int VP9EncoderImpl::Release() { return ret_val; } -bool VP9EncoderImpl::ExplicitlyConfiguredSpatialLayers() const { +bool LibvpxVp9Encoder::ExplicitlyConfiguredSpatialLayers() const { // We check target_bitrate_bps of the 0th layer to see if the spatial layers // (i.e. bitrates) were explicitly configured. return codec_.spatialLayers[0].targetBitrate > 0; } -bool VP9EncoderImpl::SetSvcRates( +bool LibvpxVp9Encoder::SetSvcRates( const VideoBitrateAllocation& bitrate_allocation) { std::pair current_layers = GetActiveLayers(current_bitrate_allocation_); @@ -481,7 +414,7 @@ bool VP9EncoderImpl::SetSvcRates( return true; } -void VP9EncoderImpl::SetRates(const RateControlParameters& parameters) { +void LibvpxVp9Encoder::SetRates(const RateControlParameters& parameters) { if (!inited_) { RTC_LOG(LS_WARNING) << "SetRates() calll while uninitialzied."; return; @@ -504,8 +437,8 @@ void VP9EncoderImpl::SetRates(const RateControlParameters& parameters) { } // TODO(eladalon): s/inst/codec_settings/g. -int VP9EncoderImpl::InitEncode(const VideoCodec* inst, - const Settings& settings) { +int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, + const Settings& settings) { if (inst == nullptr) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -707,9 +640,9 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, return InitAndSetControlSettings(inst); } -int VP9EncoderImpl::NumberOfThreads(int width, - int height, - int number_of_cores) { +int LibvpxVp9Encoder::NumberOfThreads(int width, + int height, + int number_of_cores) { // Keep the number of encoder threads equal to the possible number of column // tiles, which is (1, 2, 4, 8). See comments below for VP9E_SET_TILE_COLUMNS. if (width * height >= 1280 * 720 && number_of_cores > 4) { @@ -729,7 +662,7 @@ int VP9EncoderImpl::NumberOfThreads(int width, } } -int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { +int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { // Set QP-min/max per spatial and temporal layer. int tot_num_layers = num_spatial_layers_ * num_temporal_layers_; for (int i = 0; i < tot_num_layers; ++i) { @@ -885,7 +818,7 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { // Register callback for getting each spatial layer. 
vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = { - VP9EncoderImpl::EncoderOutputCodedPacketCallback, + LibvpxVp9Encoder::EncoderOutputCodedPacketCallback, reinterpret_cast(this)}; vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, reinterpret_cast(&cbp)); @@ -918,7 +851,7 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { return WEBRTC_VIDEO_CODEC_OK; } -uint32_t VP9EncoderImpl::MaxIntraTarget(uint32_t optimal_buffer_size) { +uint32_t LibvpxVp9Encoder::MaxIntraTarget(uint32_t optimal_buffer_size) { // Set max to the optimal buffer level (normalized by target BR), // and scaled by a scale_par. // Max target size = scale_par * optimal_buffer_size * targetBR[Kbps]. @@ -933,8 +866,8 @@ uint32_t VP9EncoderImpl::MaxIntraTarget(uint32_t optimal_buffer_size) { return (target_pct < min_intra_size) ? min_intra_size : target_pct; } -int VP9EncoderImpl::Encode(const VideoFrame& input_image, - const std::vector* frame_types) { +int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, + const std::vector* frame_types) { if (!inited_) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } @@ -1231,10 +1164,10 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, return WEBRTC_VIDEO_CODEC_OK; } -void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, - absl::optional* spatial_idx, - const vpx_codec_cx_pkt& pkt, - uint32_t timestamp) { +void LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, + absl::optional* spatial_idx, + const vpx_codec_cx_pkt& pkt, + uint32_t timestamp) { RTC_CHECK(codec_specific != nullptr); codec_specific->codecType = kVideoCodecVP9; CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9); @@ -1372,10 +1305,10 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, } } -void VP9EncoderImpl::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, - const size_t pic_num, - const bool inter_layer_predicted, - CodecSpecificInfoVP9* vp9_info) { +void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, + const size_t pic_num, + const bool inter_layer_predicted, + CodecSpecificInfoVP9* vp9_info) { vpx_svc_layer_id_t layer_id = {0}; vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); @@ -1491,8 +1424,8 @@ void VP9EncoderImpl::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, static_cast(layer_id.temporal_layer_id)); } -void VP9EncoderImpl::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, - const size_t pic_num) { +void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, + const size_t pic_num) { vpx_svc_layer_id_t layer_id = {0}; vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); @@ -1531,7 +1464,7 @@ void VP9EncoderImpl::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, } } -vpx_svc_ref_frame_config_t VP9EncoderImpl::SetReferences( +vpx_svc_ref_frame_config_t LibvpxVp9Encoder::SetReferences( bool is_key_pic, size_t first_active_spatial_layer_id) { // kRefBufIdx, kUpdBufIdx need to be updated to support longer GOFs. 
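A note on the callback plumbing renamed above: libvpx delivers each coded packet through a plain C function pointer plus a void* cookie, so EncoderOutputCodedPacketCallback() is a static trampoline that recovers the encoder instance from user_data and forwards to GetEncodedLayerFrame(), once per spatial layer of every encoded frame. A condensed sketch of the pattern, with class and method names that are illustrative rather than taken from the patch:

  #include "vpx/vp8cx.h"
  #include "vpx/vpx_encoder.h"

  class Vp9PacketSink {
   public:
    void RegisterWith(vpx_codec_ctx_t* ctx) {
      // libvpx stores the {function, user data} pair and invokes it for each
      // coded packet, i.e. per spatial layer of every encoded frame.
      vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = {
          &Vp9PacketSink::OnPacketTrampoline, reinterpret_cast<void*>(this)};
      vpx_codec_control(ctx, VP9E_REGISTER_CX_CALLBACK,
                        reinterpret_cast<void*>(&cbp));
    }

   private:
    static void OnPacketTrampoline(vpx_codec_cx_pkt* pkt, void* user_data) {
      // Recover the instance hidden in the void* cookie and forward.
      static_cast<Vp9PacketSink*>(user_data)->OnPacket(pkt);
    }
    void OnPacket(vpx_codec_cx_pkt* pkt) { /* consume one layer frame */ }
  };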
@@ -1625,7 +1558,7 @@ vpx_svc_ref_frame_config_t VP9EncoderImpl::SetReferences( return ref_config; } -int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { +int LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { RTC_DCHECK_EQ(pkt->kind, VPX_CODEC_CX_FRAME_PKT); if (pkt->data.frame.sz == 0) { @@ -1686,7 +1619,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { return WEBRTC_VIDEO_CODEC_OK; } -void VP9EncoderImpl::DeliverBufferedFrame(bool end_of_picture) { +void LibvpxVp9Encoder::DeliverBufferedFrame(bool end_of_picture) { if (encoded_image_.size() > 0) { if (num_spatial_layers_ > 1) { // Restore frame dropping settings, as dropping may be temporary forbidden @@ -1727,13 +1660,13 @@ void VP9EncoderImpl::DeliverBufferedFrame(bool end_of_picture) { } } -int VP9EncoderImpl::RegisterEncodeCompleteCallback( +int LibvpxVp9Encoder::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) { encoded_complete_callback_ = callback; return WEBRTC_VIDEO_CODEC_OK; } -VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const { +VideoEncoder::EncoderInfo LibvpxVp9Encoder::GetEncoderInfo() const { EncoderInfo info; info.supports_native_handle = false; info.implementation_name = "libvpx"; @@ -1783,7 +1716,7 @@ VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const { return info; } -size_t VP9EncoderImpl::SteadyStateSize(int sid, int tid) { +size_t LibvpxVp9Encoder::SteadyStateSize(int sid, int tid) { const size_t bitrate_bps = current_bitrate_allocation_.GetBitrate( sid, tid == kNoTemporalIdx ? 0 : tid); const float fps = (codec_.mode == VideoCodecMode::kScreensharing) @@ -1799,8 +1732,8 @@ size_t VP9EncoderImpl::SteadyStateSize(int sid, int tid) { } // static -VP9EncoderImpl::VariableFramerateExperiment -VP9EncoderImpl::ParseVariableFramerateConfig( +LibvpxVp9Encoder::VariableFramerateExperiment +LibvpxVp9Encoder::ParseVariableFramerateConfig( const WebRtcKeyValueConfig& trials) { FieldTrialFlag enabled = FieldTrialFlag("Enabled"); FieldTrialParameter framerate_limit("min_fps", 5.0); @@ -1822,8 +1755,8 @@ VP9EncoderImpl::ParseVariableFramerateConfig( } // static -VP9EncoderImpl::QualityScalerExperiment -VP9EncoderImpl::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { +LibvpxVp9Encoder::QualityScalerExperiment +LibvpxVp9Encoder::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { FieldTrialFlag disabled = FieldTrialFlag("Disabled"); FieldTrialParameter low_qp("low_qp", kLowVp9QpThreshold); FieldTrialParameter high_qp("hihg_qp", kHighVp9QpThreshold); @@ -1839,7 +1772,7 @@ VP9EncoderImpl::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { return config; } -void VP9EncoderImpl::UpdatePerformanceFlags() { +void LibvpxVp9Encoder::UpdatePerformanceFlags() { const auto find_speed = [&](int min_pixel_count) { RTC_DCHECK(!performance_flags_.settings_by_resolution.empty()); auto it = @@ -1860,8 +1793,8 @@ void VP9EncoderImpl::UpdatePerformanceFlags() { } // static -VP9EncoderImpl::PerformanceFlags -VP9EncoderImpl::ParsePerformanceFlagsFromTrials( +LibvpxVp9Encoder::PerformanceFlags +LibvpxVp9Encoder::ParsePerformanceFlagsFromTrials( const WebRtcKeyValueConfig& trials) { struct Params : public PerformanceFlags::ParameterSet { int min_pixel_count = 0; @@ -1910,7 +1843,8 @@ VP9EncoderImpl::ParsePerformanceFlagsFromTrials( } // static -VP9EncoderImpl::PerformanceFlags VP9EncoderImpl::GetDefaultPerformanceFlags() { +LibvpxVp9Encoder::PerformanceFlags +LibvpxVp9Encoder::GetDefaultPerformanceFlags() { 
PerformanceFlags flags; flags.use_per_layer_speed = false; #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) @@ -1927,7 +1861,7 @@ VP9EncoderImpl::PerformanceFlags VP9EncoderImpl::GetDefaultPerformanceFlags() { return flags; } -void VP9EncoderImpl::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { +void LibvpxVp9Encoder::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { if (!raw_) { raw_ = vpx_img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, nullptr); } else if (raw_->fmt != fmt) { @@ -1939,305 +1873,6 @@ void VP9EncoderImpl::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { // else no-op since the image is already in the right format. } -VP9DecoderImpl::VP9DecoderImpl() : VP9DecoderImpl(FieldTrialBasedConfig()) {} -VP9DecoderImpl::VP9DecoderImpl(const WebRtcKeyValueConfig& trials) - : decode_complete_callback_(nullptr), - inited_(false), - decoder_(nullptr), - key_frame_required_(true), - preferred_output_format_( - absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled") - ? VideoFrameBuffer::Type::kNV12 - : VideoFrameBuffer::Type::kI420) {} - -VP9DecoderImpl::~VP9DecoderImpl() { - inited_ = true; // in order to do the actual release - Release(); - int num_buffers_in_use = libvpx_buffer_pool_.GetNumBuffersInUse(); - if (num_buffers_in_use > 0) { - // The frame buffers are reference counted and frames are exposed after - // decoding. There may be valid usage cases where previous frames are still - // referenced after ~VP9DecoderImpl that is not a leak. - RTC_LOG(LS_INFO) << num_buffers_in_use - << " Vp9FrameBuffers are still " - "referenced during ~VP9DecoderImpl."; - } -} - -int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { - int ret_val = Release(); - if (ret_val < 0) { - return ret_val; - } - - if (decoder_ == nullptr) { - decoder_ = new vpx_codec_ctx_t; - } - vpx_codec_dec_cfg_t cfg; - memset(&cfg, 0, sizeof(cfg)); - -#ifdef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION - // We focus on webrtc fuzzing here, not libvpx itself. Use single thread for - // fuzzing, because: - // - libvpx's VP9 single thread decoder is more fuzzer friendly. It detects - // errors earlier than the multi-threads version. - // - Make peak CPU usage under control (not depending on input) - cfg.threads = 1; -#else - if (!inst) { - // No config provided - don't know resolution to decode yet. - // Set thread count to one in the meantime. - cfg.threads = 1; - } else { - // We want to use multithreading when decoding high resolution videos. But - // not too many in order to avoid overhead when many stream are decoded - // concurrently. - // Set 2 thread as target for 1280x720 pixel count, and then scale up - // linearly from there - but cap at physical core count. - // For common resolutions this results in: - // 1 for 360p - // 2 for 720p - // 4 for 1080p - // 8 for 1440p - // 18 for 4K - int num_threads = - std::max(1, 2 * (inst->width * inst->height) / (1280 * 720)); - cfg.threads = std::min(number_of_cores, num_threads); - current_codec_ = *inst; - } -#endif - - num_cores_ = number_of_cores; - - vpx_codec_flags_t flags = 0; - if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { - return WEBRTC_VIDEO_CODEC_MEMORY; - } - - if (!libvpx_buffer_pool_.InitializeVpxUsePool(decoder_)) { - return WEBRTC_VIDEO_CODEC_MEMORY; - } - - inited_ = true; - // Always start with a complete key frame. 
- key_frame_required_ = true; - if (inst && inst->buffer_pool_size) { - if (!libvpx_buffer_pool_.Resize(*inst->buffer_pool_size) || - !output_buffer_pool_.Resize(*inst->buffer_pool_size)) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - } - - vpx_codec_err_t status = - vpx_codec_control(decoder_, VP9D_SET_LOOP_FILTER_OPT, 1); - if (status != VPX_CODEC_OK) { - RTC_LOG(LS_ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. " - << vpx_codec_error(decoder_); - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::Decode(const EncodedImage& input_image, - bool missing_frames, - int64_t /*render_time_ms*/) { - if (!inited_) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - if (decode_complete_callback_ == nullptr) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - if (input_image._frameType == VideoFrameType::kVideoFrameKey) { - absl::optional frame_info = - vp9::ParseIntraFrameInfo(input_image.data(), input_image.size()); - if (frame_info) { - if (frame_info->frame_width != current_codec_.width || - frame_info->frame_height != current_codec_.height) { - // Resolution has changed, tear down and re-init a new decoder in - // order to get correct sizing. - Release(); - current_codec_.width = frame_info->frame_width; - current_codec_.height = frame_info->frame_height; - int reinit_status = InitDecode(¤t_codec_, num_cores_); - if (reinit_status != WEBRTC_VIDEO_CODEC_OK) { - RTC_LOG(LS_WARNING) << "Failed to re-init decoder."; - return reinit_status; - } - } - } else { - RTC_LOG(LS_WARNING) << "Failed to parse VP9 header from key-frame."; - } - } - - // Always start with a complete key frame. - if (key_frame_required_) { - if (input_image._frameType != VideoFrameType::kVideoFrameKey) - return WEBRTC_VIDEO_CODEC_ERROR; - key_frame_required_ = false; - } - vpx_codec_iter_t iter = nullptr; - vpx_image_t* img; - const uint8_t* buffer = input_image.data(); - if (input_image.size() == 0) { - buffer = nullptr; // Triggers full frame concealment. - } - // During decode libvpx may get and release buffers from - // |libvpx_buffer_pool_|. In practice libvpx keeps a few (~3-4) buffers alive - // at a time. - if (vpx_codec_decode(decoder_, buffer, - static_cast(input_image.size()), 0, - VPX_DL_REALTIME)) { - return WEBRTC_VIDEO_CODEC_ERROR; - } - // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. - // It may be released by libvpx during future vpx_codec_decode or - // vpx_codec_destroy calls. - img = vpx_codec_get_frame(decoder_, &iter); - int qp; - vpx_codec_err_t vpx_ret = - vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); - RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); - int ret = - ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace()); - if (ret != 0) { - return ret; - } - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::ReturnFrame( - const vpx_image_t* img, - uint32_t timestamp, - int qp, - const webrtc::ColorSpace* explicit_color_space) { - if (img == nullptr) { - // Decoder OK and nullptr image => No show frame. - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - - // This buffer contains all of |img|'s image data, a reference counted - // Vp9FrameBuffer. (libvpx is done with the buffers after a few - // vpx_codec_decode calls or vpx_codec_destroy). - Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer = - static_cast(img->fb_priv); - - // The buffer can be used directly by the VideoFrame (without copy) by - // using a Wrapped*Buffer. 
- rtc::scoped_refptr img_wrapped_buffer; - switch (img->bit_depth) { - case 8: - if (img->fmt == VPX_IMG_FMT_I420) { - if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { - rtc::scoped_refptr nv12_buffer = - output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); - if (!nv12_buffer.get()) { - // Buffer pool is full. - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - img_wrapped_buffer = nv12_buffer; - libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - nv12_buffer->MutableDataY(), - nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), - nv12_buffer->StrideUV(), img->d_w, img->d_h); - // No holding onto img_buffer as it's no longer needed and can be - // reused. - } else { - img_wrapped_buffer = WrapI420Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI420Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release |img_buffer|. - rtc::KeepRefUntilDone(img_buffer)); - } - } else if (img->fmt == VPX_IMG_FMT_I444) { - img_wrapped_buffer = WrapI444Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI444Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release |img_buffer|. - rtc::KeepRefUntilDone(img_buffer)); - } else { - RTC_LOG(LS_ERROR) - << "Unsupported pixel format produced by the decoder: " - << static_cast(img->fmt); - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - break; - case 10: - img_wrapped_buffer = WrapI010Buffer( - img->d_w, img->d_h, - reinterpret_cast(img->planes[VPX_PLANE_Y]), - img->stride[VPX_PLANE_Y] / 2, - reinterpret_cast(img->planes[VPX_PLANE_U]), - img->stride[VPX_PLANE_U] / 2, - reinterpret_cast(img->planes[VPX_PLANE_V]), - img->stride[VPX_PLANE_V] / 2, rtc::KeepRefUntilDone(img_buffer)); - break; - default: - RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: " - << img->bit_depth; - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - - auto builder = VideoFrame::Builder() - .set_video_frame_buffer(img_wrapped_buffer) - .set_timestamp_rtp(timestamp); - if (explicit_color_space) { - builder.set_color_space(*explicit_color_space); - } else { - builder.set_color_space( - ExtractVP9ColorSpace(img->cs, img->range, img->bit_depth)); - } - VideoFrame decoded_image = builder.build(); - - decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp); - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) { - decode_complete_callback_ = callback; - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::Release() { - int ret_val = WEBRTC_VIDEO_CODEC_OK; - - if (decoder_ != nullptr) { - if (inited_) { - // When a codec is destroyed libvpx will release any buffers of - // |libvpx_buffer_pool_| it is currently using. - if (vpx_codec_destroy(decoder_)) { - ret_val = WEBRTC_VIDEO_CODEC_MEMORY; - } - } - delete decoder_; - decoder_ = nullptr; - } - // Releases buffers from the pool. Any buffers not in use are deleted. 
Buffers - // still referenced externally are deleted once fully released, not returning - // to the pool. - libvpx_buffer_pool_.ClearPool(); - output_buffer_pool_.Release(); - inited_ = false; - return ret_val; -} - -const char* VP9DecoderImpl::ImplementationName() const { - return "libvpx"; -} - } // namespace webrtc #endif // RTC_ENABLE_VP9 diff --git a/modules/video_coding/codecs/vp9/vp9_impl.h b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h similarity index 82% rename from modules/video_coding/codecs/vp9/vp9_impl.h rename to modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h index 075a214628..037c760c17 100644 --- a/modules/video_coding/codecs/vp9/vp9_impl.h +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -9,14 +9,13 @@ * */ -#ifndef MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_ -#define MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_ +#ifndef MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_ENCODER_H_ +#define MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_ENCODER_H_ #ifdef RTC_ENABLE_VP9 #include #include -#include #include #include "api/fec_controller_override.h" @@ -24,23 +23,22 @@ #include "api/video_codecs/video_encoder.h" #include "common_video/include/video_frame_buffer_pool.h" #include "media/base/vp9_profile.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/framerate_controller.h" #include "vpx/vp8cx.h" -#include "vpx/vpx_decoder.h" -#include "vpx/vpx_encoder.h" namespace webrtc { -class VP9EncoderImpl : public VP9Encoder { +class LibvpxVp9Encoder : public VP9Encoder { public: - explicit VP9EncoderImpl(const cricket::VideoCodec& codec); - VP9EncoderImpl(const cricket::VideoCodec& codec, - const WebRtcKeyValueConfig& trials); + LibvpxVp9Encoder(const cricket::VideoCodec& codec, + std::unique_ptr interface, + const WebRtcKeyValueConfig& trials); - ~VP9EncoderImpl() override; + ~LibvpxVp9Encoder() override; void SetFecControllerOverride( FecControllerOverride* fec_controller_override) override; @@ -105,6 +103,7 @@ class VP9EncoderImpl : public VP9Encoder { void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt); + const std::unique_ptr libvpx_; EncodedImage encoded_image_; CodecSpecificInfo codec_specific_; EncodedImageCallback* encoded_complete_callback_; @@ -233,47 +232,8 @@ class VP9EncoderImpl : public VP9Encoder { bool config_changed_; }; -class VP9DecoderImpl : public VP9Decoder { - public: - VP9DecoderImpl(); - explicit VP9DecoderImpl(const WebRtcKeyValueConfig& trials); - - virtual ~VP9DecoderImpl(); - - int InitDecode(const VideoCodec* inst, int number_of_cores) override; - - int Decode(const EncodedImage& input_image, - bool missing_frames, - int64_t /*render_time_ms*/) override; - - int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override; - - int Release() override; - - const char* ImplementationName() const override; - - private: - int ReturnFrame(const vpx_image_t* img, - uint32_t timestamp, - int qp, - const webrtc::ColorSpace* explicit_color_space); - - // Memory pool used to share buffers between libvpx and 
webrtc.
-  Vp9FrameBufferPool libvpx_buffer_pool_;
-  // Buffer pool used to allocate additionally needed NV12 buffers.
-  VideoFrameBufferPool output_buffer_pool_;
-  DecodedImageCallback* decode_complete_callback_;
-  bool inited_;
-  vpx_codec_ctx_t* decoder_;
-  bool key_frame_required_;
-  VideoCodec current_codec_;
-  int num_cores_;
-
-  // Decoder should produce this format if possible.
-  const VideoFrameBuffer::Type preferred_output_format_;
-};
 }  // namespace webrtc

 #endif  // RTC_ENABLE_VP9

-#endif  // MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_
+#endif  // MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_ENCODER_H_
diff --git a/modules/video_coding/codecs/vp9/vp9.cc b/modules/video_coding/codecs/vp9/vp9.cc
index 9b0585c059..1efb1b4f9f 100644
--- a/modules/video_coding/codecs/vp9/vp9.cc
+++ b/modules/video_coding/codecs/vp9/vp9.cc
@@ -12,8 +12,11 @@
 #include <memory>
+#include "api/transport/field_trial_based_config.h"
 #include "api/video_codecs/sdp_video_format.h"
-#include "modules/video_coding/codecs/vp9/vp9_impl.h"
+#include "media/base/vp9_profile.h"
+#include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h"
+#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h"
 #include "rtc_base/checks.h"
 #include "vpx/vp8cx.h"
 #include "vpx/vp8dx.h"
@@ -63,7 +66,9 @@ std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs() {

 std::unique_ptr<VP9Encoder> VP9Encoder::Create() {
 #ifdef RTC_ENABLE_VP9
-  return std::make_unique<VP9EncoderImpl>(cricket::VideoCodec());
+  return std::make_unique<LibvpxVp9Encoder>(cricket::VideoCodec(),
+                                            LibvpxInterface::Create(),
+                                            FieldTrialBasedConfig());
 #else
   RTC_NOTREACHED();
   return nullptr;
@@ -73,7 +78,8 @@ std::unique_ptr<VP9Encoder> VP9Encoder::Create(
     const cricket::VideoCodec& codec) {
 #ifdef RTC_ENABLE_VP9
-  return std::make_unique<VP9EncoderImpl>(codec);
+  return std::make_unique<LibvpxVp9Encoder>(codec, LibvpxInterface::Create(),
+                                            FieldTrialBasedConfig());
 #else
   RTC_NOTREACHED();
   return nullptr;
@@ -82,7 +88,7 @@ std::unique_ptr<VP9Encoder> VP9Encoder::Create(

 std::unique_ptr<VP9Decoder> VP9Decoder::Create() {
 #ifdef RTC_ENABLE_VP9
-  return std::make_unique<VP9DecoderImpl>();
+  return std::make_unique<LibvpxVp9Decoder>();
 #else
   RTC_NOTREACHED();
   return nullptr;
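One behavioral detail worth calling out in the decoder extracted above: its default constructor reads field trials via FieldTrialBasedConfig, and the "WebRTC-NV12Decode" trial selects whether 8-bit I420 output is copied into NV12 buffers from |output_buffer_pool_| instead of being wrapped directly. A small sketch of how a test might exercise that path, using the ScopedFieldTrials helper the VP8 tests in this patch already use (illustrative only):

  #include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h"
  #include "test/field_trial.h"

  void PreferNv12Output() {
    // Make FieldTrialBasedConfig report "Enabled" for WebRTC-NV12Decode.
    webrtc::test::ScopedFieldTrials trials("WebRTC-NV12Decode/Enabled/");
    webrtc::LibvpxVp9Decoder decoder;
    // preferred_output_format_ is now kNV12, so decoded 8-bit I420 frames
    // are converted into pooled NV12 buffers rather than wrapped in place.
  }

Whether the NV12 conversion or the zero-copy WrapI420Buffer() path is taken still depends on the bitstream (bit depth and pixel format), as the switch in ReturnFrame() above shows.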