Add 4:2:2 8-bit and 10-bit decoding support
Bug: webrtc:14195
Change-Id: I2048d567850ae669d76d9e593752683f3c76499f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/266180
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37306}
parent 0ed3a2b6cb
commit 179f40e81a
@@ -28,6 +28,8 @@ std::string VP9ProfileToString(VP9Profile profile) {
       return "1";
     case VP9Profile::kProfile2:
       return "2";
+    case VP9Profile::kProfile3:
+      return "3";
   }
   return "0";
 }
@@ -44,6 +46,8 @@ absl::optional<VP9Profile> StringToVP9Profile(const std::string& str) {
       return VP9Profile::kProfile1;
     case 2:
       return VP9Profile::kProfile2;
+    case 3:
+      return VP9Profile::kProfile3;
     default:
       return absl::nullopt;
   }
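Taken together, the two hunks above let profile 3 round-trip through the string helpers. A minimal sketch of the resulting behavior, assuming these helpers live in api/video_codecs/vp9_profile.h (file paths are not shown in this diff):

    #include <iostream>

    #include "api/video_codecs/vp9_profile.h"

    int main() {
      // "3" now parses to kProfile3 instead of falling through to
      // absl::nullopt.
      absl::optional<webrtc::VP9Profile> profile =
          webrtc::StringToVP9Profile("3");
      if (profile.has_value()) {
        std::cout << webrtc::VP9ProfileToString(*profile) << "\n";  // "3"
      }
      return 0;
    }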
@@ -26,6 +26,7 @@ enum class VP9Profile {
   kProfile0,
   kProfile1,
   kProfile2,
+  kProfile3,
 };
 
 // Helper functions to convert VP9Profile to std::string. Returns "0" by
@@ -114,6 +114,8 @@ const char kH264FmtpSpsPpsIdrInKeyframe[] = "sps-pps-idr-in-keyframe";
 const char kH264ProfileLevelConstrainedBaseline[] = "42e01f";
 const char kH264ProfileLevelConstrainedHigh[] = "640c1f";
 
+const char kVP9ProfileId[] = "profile-id";
+
 const int kDefaultVideoMaxFramerate = 60;
 
 const size_t kConferenceMaxNumSpatialLayers = 3;
@@ -134,6 +134,8 @@ extern const char kH264FmtpSpsPpsIdrInKeyframe[];
 extern const char kH264ProfileLevelConstrainedBaseline[];
 extern const char kH264ProfileLevelConstrainedHigh[];
 
+extern const char kVP9ProfileId[];
+
 extern const int kDefaultVideoMaxFramerate;
 
 extern const size_t kConferenceMaxNumSpatialLayers;
@@ -133,17 +133,25 @@ bool IsCodecValidForLowerRange(const VideoCodec& codec) {
       absl::EqualsIgnoreCase(codec.name, kAv1xCodecName)) {
     return true;
   } else if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) {
-    std::string profileLevelId;
-    std::string packetizationMode;
+    std::string profile_level_id;
+    std::string packetization_mode;
 
-    if (codec.GetParam(kH264FmtpProfileLevelId, &profileLevelId)) {
-      if (absl::StartsWithIgnoreCase(profileLevelId, "4d00")) {
-        if (codec.GetParam(kH264FmtpPacketizationMode, &packetizationMode)) {
-          return packetizationMode == "0";
+    if (codec.GetParam(kH264FmtpProfileLevelId, &profile_level_id)) {
+      if (absl::StartsWithIgnoreCase(profile_level_id, "4d00")) {
+        if (codec.GetParam(kH264FmtpPacketizationMode, &packetization_mode)) {
+          return packetization_mode == "0";
         }
       }
       // H264 with YUV444.
-      return absl::StartsWithIgnoreCase(profileLevelId, "f400");
+      return absl::StartsWithIgnoreCase(profile_level_id, "f400");
     }
+  } else if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) {
+    std::string profile_id;
+
+    if (codec.GetParam(kVP9ProfileId, &profile_id)) {
+      if (profile_id.compare("1") == 0 || profile_id.compare("3") == 0) {
+        return true;
+      }
+    }
   }
   return false;
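For reference, the new VP9 branch above inspects the "profile-id" fmtp parameter, so a codec negotiated with profile 1 or 3 now qualifies for the lower payload-type range. A hypothetical input, assuming cricket::VideoCodec from media/base/codec.h (the payload type 98 is arbitrary, and the helper itself is file-local):

    cricket::VideoCodec vp9(98, cricket::kVp9CodecName);
    vp9.SetParam(cricket::kVP9ProfileId, "3");
    std::string profile_id;
    if (vp9.GetParam(cricket::kVP9ProfileId, &profile_id)) {
      // profile_id is "3": IsCodecValidForLowerRange() would return true;
      // "0" or "2" would leave the codec in the regular payload-type range.
    }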
@@ -281,55 +281,59 @@ int LibvpxVp9Decoder::ReturnFrame(
   // The buffer can be used directly by the VideoFrame (without copy) by
   // using a Wrapped*Buffer.
   rtc::scoped_refptr<VideoFrameBuffer> img_wrapped_buffer;
-  switch (img->bit_depth) {
-    case 8:
-      if (img->fmt == VPX_IMG_FMT_I420) {
-        if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
-          rtc::scoped_refptr<NV12Buffer> nv12_buffer =
-              output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h);
-          if (!nv12_buffer.get()) {
-            // Buffer pool is full.
-            return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
-          }
-          img_wrapped_buffer = nv12_buffer;
-          libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
-                             img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
-                             img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-                             nv12_buffer->MutableDataY(),
-                             nv12_buffer->StrideY(),
-                             nv12_buffer->MutableDataUV(),
-                             nv12_buffer->StrideUV(), img->d_w, img->d_h);
-          // No holding onto img_buffer as it's no longer needed and can be
-          // reused.
-        } else {
-          img_wrapped_buffer = WrapI420Buffer(
-              img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
-              img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
-              img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
-              img->stride[VPX_PLANE_V],
-              // WrappedI420Buffer's mechanism for allowing the release of its
-              // frame buffer is through a callback function. This is where we
-              // should release `img_buffer`.
-              [img_buffer] {});
-        }
-      } else if (img->fmt == VPX_IMG_FMT_I444) {
-        img_wrapped_buffer = WrapI444Buffer(
-            img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
-            img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
-            img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
-            img->stride[VPX_PLANE_V],
-            // WrappedI444Buffer's mechanism for allowing the release of its
-            // frame buffer is through a callback function. This is where we
-            // should release `img_buffer`.
-            [img_buffer] {});
-      } else {
-        RTC_LOG(LS_ERROR)
-            << "Unsupported pixel format produced by the decoder: "
-            << static_cast<int>(img->fmt);
-        return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
-      }
-      break;
-    case 10:
+  switch (img->fmt) {
+    case VPX_IMG_FMT_I420:
+      if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
+        rtc::scoped_refptr<NV12Buffer> nv12_buffer =
+            output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h);
+        if (!nv12_buffer.get()) {
+          // Buffer pool is full.
+          return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+        }
+        img_wrapped_buffer = nv12_buffer;
+        libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
+                           img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
+                           img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+                           nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
+                           nv12_buffer->MutableDataUV(),
+                           nv12_buffer->StrideUV(), img->d_w, img->d_h);
+        // No holding onto img_buffer as it's no longer needed and can be
+        // reused.
+      } else {
+        img_wrapped_buffer = WrapI420Buffer(
+            img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+            img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+            img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+            img->stride[VPX_PLANE_V],
+            // WrappedI420Buffer's mechanism for allowing the release of its
+            // frame buffer is through a callback function. This is where we
+            // should release `img_buffer`.
+            [img_buffer] {});
+      }
+      break;
+    case VPX_IMG_FMT_I422:
+      img_wrapped_buffer = WrapI422Buffer(
+          img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+          img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+          img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+          img->stride[VPX_PLANE_V],
+          // WrappedI444Buffer's mechanism for allowing the release of its
+          // frame buffer is through a callback function. This is where we
+          // should release `img_buffer`.
+          [img_buffer] {});
+      break;
+    case VPX_IMG_FMT_I444:
+      img_wrapped_buffer = WrapI444Buffer(
+          img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+          img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+          img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+          img->stride[VPX_PLANE_V],
+          // WrappedI444Buffer's mechanism for allowing the release of its
+          // frame buffer is through a callback function. This is where we
+          // should release `img_buffer`.
+          [img_buffer] {});
+      break;
+    case VPX_IMG_FMT_I42016:
       img_wrapped_buffer = WrapI010Buffer(
           img->d_w, img->d_h,
           reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]),
@@ -339,9 +343,19 @@ int LibvpxVp9Decoder::ReturnFrame(
           reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
           img->stride[VPX_PLANE_V] / 2, [img_buffer] {});
       break;
+    case VPX_IMG_FMT_I42216:
+      img_wrapped_buffer = WrapI210Buffer(
+          img->d_w, img->d_h,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]),
+          img->stride[VPX_PLANE_Y] / 2,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]),
+          img->stride[VPX_PLANE_U] / 2,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
+          img->stride[VPX_PLANE_V] / 2, [img_buffer] {});
+      break;
     default:
-      RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: "
-                        << img->bit_depth;
+      RTC_LOG(LS_ERROR) << "Unsupported pixel format produced by the decoder: "
+                        << static_cast<int>(img->fmt);
       return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
   }
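A note on the repeated `/ 2` in the high-bit-depth cases above: libvpx reports plane strides in bytes, while WrapI010Buffer and WrapI210Buffer take strides counted in uint16_t samples. A minimal sketch of that relationship, with hypothetical indices `row` and `col` (`img` is the `vpx_image_t*` from the diff):

    // In VPX_IMG_FMT_I42216 every 10-bit sample occupies two bytes, so a
    // row of img->stride[VPX_PLANE_Y] bytes holds stride / 2 samples.
    const uint16_t* y_plane =
        reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]);
    const int y_stride_samples = img->stride[VPX_PLANE_Y] / 2;
    int row = 0, col = 0;  // Hypothetical sample position.
    uint16_t luma = y_plane[row * y_stride_samples + col];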
@@ -628,6 +628,10 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst,
       config_->g_profile = 2;
       config_->g_input_bit_depth = 10;
       break;
+    case VP9Profile::kProfile3:
+      // Encoding of profile 3 is not implemented.
+      RTC_DCHECK_NOTREACHED();
+      break;
   }
 
   // Creating a wrapper to the image - setting image data to nullptr. Actual
@@ -1194,6 +1198,10 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image,
       raw_->stride[VPX_PLANE_V] = i010_buffer->StrideV() * 2;
       break;
     }
+    case VP9Profile::kProfile3: {
+      RTC_DCHECK_NOTREACHED();
+      break;
+    }
   }
 
   vpx_enc_frame_flags_t flags = 0;
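For orientation, the two encoder hunks keep profile 3 unreachable on the send side; only profile 2 switches libvpx into 10-bit mode. A simplified consolidation of the InitEncode switch, assuming `config_` points at a `vpx_codec_enc_cfg_t` as elsewhere in this file (profile 1 handling is omitted since it is not encodable either):

    switch (profile) {
      case VP9Profile::kProfile0:
        break;  // 8-bit 4:2:0; the default libvpx config already fits.
      case VP9Profile::kProfile2:
        config_->g_profile = 2;           // VP9 bitstream profile.
        config_->g_input_bit_depth = 10;  // 10-bit input samples.
        break;
      case VP9Profile::kProfile3:
        // Encoding of profile 3 is not implemented.
        RTC_DCHECK_NOTREACHED();
        break;
      default:
        break;
    }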
@@ -54,12 +54,15 @@ std::vector<SdpVideoFormat> SupportedVP9Codecs() {
 std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs() {
 #ifdef RTC_ENABLE_VP9
   std::vector<SdpVideoFormat> supported_formats = SupportedVP9Codecs();
-  // The WebRTC internal decoder supports VP9 profile 1. However, there's
-  // currently no way of sending VP9 profile 1 using the internal encoder.
+  // The WebRTC internal decoder supports VP9 profile 1 and 3. However, there's
+  // currently no way of sending VP9 profile 1 or 3 using the internal encoder.
   // It would require extended support for I444, I422, and I440 buffers.
   supported_formats.push_back(SdpVideoFormat(
       cricket::kVp9CodecName,
       {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}}));
+  supported_formats.push_back(SdpVideoFormat(
+      cricket::kVp9CodecName,
+      {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile3)}}));
   return supported_formats;
 #else
   return std::vector<SdpVideoFormat>();
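The net effect of this last hunk is that SupportedVP9DecoderCodecs() advertises decode-only entries for profiles 1 and 3 alongside the encodable formats. A quick inspection sketch, assuming the declaration lives in modules/video_coding/codecs/vp9/include/vp9.h:

    #include <iostream>

    #include "modules/video_coding/codecs/vp9/include/vp9.h"

    int main() {
      for (const webrtc::SdpVideoFormat& format :
           webrtc::SupportedVP9DecoderCodecs()) {
        // One line per format, e.g. the VP9 entry carrying profile-id 3.
        std::cout << format.ToString() << std::endl;
      }
      return 0;
    }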