diff --git a/api/video_codecs/vp9_profile.cc b/api/video_codecs/vp9_profile.cc index 5e2bd53a86..7e627cc080 100644 --- a/api/video_codecs/vp9_profile.cc +++ b/api/video_codecs/vp9_profile.cc @@ -28,6 +28,8 @@ std::string VP9ProfileToString(VP9Profile profile) { return "1"; case VP9Profile::kProfile2: return "2"; + case VP9Profile::kProfile3: + return "3"; } return "0"; } @@ -44,6 +46,8 @@ absl::optional<VP9Profile> StringToVP9Profile(const std::string& str) { return VP9Profile::kProfile1; case 2: return VP9Profile::kProfile2; + case 3: + return VP9Profile::kProfile3; default: return absl::nullopt; } diff --git a/api/video_codecs/vp9_profile.h b/api/video_codecs/vp9_profile.h index e632df437b..b570bc3bb6 100644 --- a/api/video_codecs/vp9_profile.h +++ b/api/video_codecs/vp9_profile.h @@ -26,6 +26,7 @@ enum class VP9Profile { kProfile0, kProfile1, kProfile2, + kProfile3, }; // Helper functions to convert VP9Profile to std::string. Returns "0" by diff --git a/media/base/media_constants.cc b/media/base/media_constants.cc index da5e7a8adf..2f29a2036b 100644 --- a/media/base/media_constants.cc +++ b/media/base/media_constants.cc @@ -114,6 +114,8 @@ const char kH264FmtpSpsPpsIdrInKeyframe[] = "sps-pps-idr-in-keyframe"; const char kH264ProfileLevelConstrainedBaseline[] = "42e01f"; const char kH264ProfileLevelConstrainedHigh[] = "640c1f"; +const char kVP9ProfileId[] = "profile-id"; + const int kDefaultVideoMaxFramerate = 60; const size_t kConferenceMaxNumSpatialLayers = 3; diff --git a/media/base/media_constants.h b/media/base/media_constants.h index 16c5db92b9..f843d50ce5 100644 --- a/media/base/media_constants.h +++ b/media/base/media_constants.h @@ -134,6 +134,8 @@ extern const char kH264FmtpSpsPpsIdrInKeyframe[]; extern const char kH264ProfileLevelConstrainedBaseline[]; extern const char kH264ProfileLevelConstrainedHigh[]; +extern const char kVP9ProfileId[]; + extern const int kDefaultVideoMaxFramerate; extern const size_t kConferenceMaxNumSpatialLayers; diff --git
a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 0ce0f0c63b..561eba6e25 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -133,17 +133,25 @@ bool IsCodecValidForLowerRange(const VideoCodec& codec) { absl::EqualsIgnoreCase(codec.name, kAv1xCodecName)) { return true; } else if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) { - std::string profileLevelId; - std::string packetizationMode; + std::string profile_level_id; + std::string packetization_mode; - if (codec.GetParam(kH264FmtpProfileLevelId, &profileLevelId)) { - if (absl::StartsWithIgnoreCase(profileLevelId, "4d00")) { - if (codec.GetParam(kH264FmtpPacketizationMode, &packetizationMode)) { - return packetizationMode == "0"; + if (codec.GetParam(kH264FmtpProfileLevelId, &profile_level_id)) { + if (absl::StartsWithIgnoreCase(profile_level_id, "4d00")) { + if (codec.GetParam(kH264FmtpPacketizationMode, &packetization_mode)) { + return packetization_mode == "0"; } } // H264 with YUV444. - return absl::StartsWithIgnoreCase(profileLevelId, "f400"); + return absl::StartsWithIgnoreCase(profile_level_id, "f400"); + } + } else if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) { + std::string profile_id; + + if (codec.GetParam(kVP9ProfileId, &profile_id)) { + if (profile_id.compare("1") == 0 || profile_id.compare("3") == 0) { + return true; + } } } return false; diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc index ffb4705e4f..843c9bd2ee 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc @@ -281,55 +281,59 @@ int LibvpxVp9Decoder::ReturnFrame( // The buffer can be used directly by the VideoFrame (without copy) by // using a Wrapped*Buffer. 
rtc::scoped_refptr<VideoFrameBuffer> img_wrapped_buffer; - switch (img->bit_depth) { - case 8: - if (img->fmt == VPX_IMG_FMT_I420) { - if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { - rtc::scoped_refptr<NV12Buffer> nv12_buffer = - output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); - if (!nv12_buffer.get()) { - // Buffer pool is full. - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - img_wrapped_buffer = nv12_buffer; - libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - nv12_buffer->MutableDataY(), - nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), - nv12_buffer->StrideUV(), img->d_w, img->d_h); - // No holding onto img_buffer as it's no longer needed and can be - // reused. - } else { - img_wrapped_buffer = WrapI420Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI420Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release `img_buffer`. - [img_buffer] {}); + switch (img->fmt) { + case VPX_IMG_FMT_I420: + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + rtc::scoped_refptr<NV12Buffer> nv12_buffer = + output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); + if (!nv12_buffer.get()) { + // Buffer pool is full.
+ return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } - } else if (img->fmt == VPX_IMG_FMT_I444) { - img_wrapped_buffer = WrapI444Buffer( + img_wrapped_buffer = nv12_buffer; + libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), + nv12_buffer->StrideUV(), img->d_w, img->d_h); + // No holding onto img_buffer as it's no longer needed and can be + // reused. + } else { + img_wrapped_buffer = WrapI420Buffer( img->d_w, img->d_h, img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - // WrappedI444Buffer's mechanism for allowing the release of its + // WrappedI420Buffer's mechanism for allowing the release of its // frame buffer is through a callback function. This is where we // should release `img_buffer`. [img_buffer] {}); - } else { - RTC_LOG(LS_ERROR) - << "Unsupported pixel format produced by the decoder: " - << static_cast<int>(img->fmt); - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } break; - case 10: + case VPX_IMG_FMT_I422: + img_wrapped_buffer = WrapI422Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI422Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release `img_buffer`.
+ [img_buffer] {}); + break; + case VPX_IMG_FMT_I444: + img_wrapped_buffer = WrapI444Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI444Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release `img_buffer`. + [img_buffer] {}); + break; + case VPX_IMG_FMT_I42016: img_wrapped_buffer = WrapI010Buffer( img->d_w, img->d_h, reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]), @@ -339,9 +343,19 @@ int LibvpxVp9Decoder::ReturnFrame( reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]), img->stride[VPX_PLANE_U] / 2, reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]), img->stride[VPX_PLANE_V] / 2, [img_buffer] {}); break; + case VPX_IMG_FMT_I42216: + img_wrapped_buffer = WrapI210Buffer( + img->d_w, img->d_h, + reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]), + img->stride[VPX_PLANE_Y] / 2, + reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]), + img->stride[VPX_PLANE_U] / 2, + reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]), + img->stride[VPX_PLANE_V] / 2, [img_buffer] {}); + break; default: - RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: " - << img->bit_depth; + RTC_LOG(LS_ERROR) << "Unsupported pixel format produced by the decoder: " + << static_cast<int>(img->fmt); return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index 58beacd213..b252fb708a 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -628,6 +628,10 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, config_->g_profile = 2; config_->g_input_bit_depth = 10; break; + case VP9Profile::kProfile3: + // Encoding of profile 3 is not implemented. + RTC_DCHECK_NOTREACHED(); + break; } // Creating a wrapper to the image - setting image data to nullptr.
Actual @@ -1194,6 +1198,10 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, raw_->stride[VPX_PLANE_V] = i010_buffer->StrideV() * 2; break; } + case VP9Profile::kProfile3: { + RTC_DCHECK_NOTREACHED(); + break; + } } vpx_enc_frame_flags_t flags = 0; diff --git a/modules/video_coding/codecs/vp9/vp9.cc b/modules/video_coding/codecs/vp9/vp9.cc index 42d6987285..7a7a7d6c8f 100644 --- a/modules/video_coding/codecs/vp9/vp9.cc +++ b/modules/video_coding/codecs/vp9/vp9.cc @@ -54,12 +54,15 @@ std::vector<SdpVideoFormat> SupportedVP9Codecs() { std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs() { #ifdef RTC_ENABLE_VP9 std::vector<SdpVideoFormat> supported_formats = SupportedVP9Codecs(); - // The WebRTC internal decoder supports VP9 profile 1. However, there's - // currently no way of sending VP9 profile 1 using the internal encoder. + // The WebRTC internal decoder supports VP9 profiles 1 and 3. However, there's + // currently no way of sending VP9 profile 1 or 3 using the internal encoder. // It would require extended support for I444, I422, and I440 buffers. supported_formats.push_back(SdpVideoFormat( cricket::kVp9CodecName, {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}})); + supported_formats.push_back(SdpVideoFormat( + cricket::kVp9CodecName, + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile3)}})); return supported_formats; #else return std::vector<SdpVideoFormat>();