Added EncodedImage::GetBufferPaddingBytes.
The FFmpeg video decoder requires up to 8 additional bytes to be allocated for its encoded image buffer input, due to optimized byte readers over-reading on some platforms. We plan to use FFmpeg for a soon-to-land H.264 enc/dec. This CL adds support for padding encoded image buffers based on codec type, and makes sure calls to VCMEncodedFrame::VerifyAndAllocate use the padding. All padding constants are 0, but making H.264 pad with 8 bytes will be a one-line change. Also, added -framework CoreFoundation to webrtc_h264_video_toolbox, which was missing. BUG=chromium:468365 BUG=https://bugs.chromium.org/p/webrtc/issues/detail?id=5424 NOTRY=True Review URL: https://codereview.webrtc.org/1602523004 Cr-Commit-Position: refs/heads/master@{#11337}
This commit is contained in:
parent
429c345b02
commit
d664836efa
@ -19,6 +19,10 @@
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Number of padding bytes appended to H.264 encoded image buffers.
// TODO(hbos): The FFmpeg video decoder will require up to 8 bytes, update this
// when the FFmpeg decoder is added.
const size_t EncodedImage::kBufferPaddingBytesH264 = 0;
|
||||
|
||||
bool EqualPlane(const uint8_t* data1,
|
||||
const uint8_t* data2,
|
||||
int stride,
|
||||
@ -242,4 +246,22 @@ bool VideoFrame::EqualsFrame(const VideoFrame& frame) const {
|
||||
stride(kVPlane), half_width, half_height);
|
||||
}
|
||||
|
||||
// Returns how many extra bytes must be allocated at the end of an encoded
// image buffer for the given codec. Some decoders use optimized byte readers
// that may read past the end of the payload, so the buffer has to be
// over-allocated by this amount.
size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) {
  switch (codec_type) {
    case kVideoCodecH264:
      // Currently 0; becomes non-zero once the FFmpeg-based decoder lands.
      return kBufferPaddingBytesH264;
    case kVideoCodecVP8:
    case kVideoCodecVP9:
    case kVideoCodecI420:
    case kVideoCodecRED:
    case kVideoCodecULPFEC:
    case kVideoCodecGeneric:
    case kVideoCodecUnknown:
      // No known over-reading decoders for these codecs.
      return 0;
  }
  // All enum values are handled above; reaching here means a new codec type
  // was added without updating this switch.
  RTC_NOTREACHED();
  return 0;
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
@ -50,6 +50,7 @@
|
||||
'link_settings': {
|
||||
'xcode_settings': {
|
||||
'OTHER_LDFLAGS': [
|
||||
'-framework CoreFoundation',
|
||||
'-framework CoreMedia',
|
||||
'-framework CoreVideo',
|
||||
'-framework VideoToolbox',
|
||||
|
||||
@ -41,7 +41,8 @@ VCMEncodedFrame::VCMEncodedFrame(const webrtc::EncodedImage& rhs)
|
||||
_size = 0;
|
||||
_length = 0;
|
||||
if (rhs._buffer != NULL) {
|
||||
VerifyAndAllocate(rhs._length);
|
||||
VerifyAndAllocate(rhs._length +
|
||||
EncodedImage::GetBufferPaddingBytes(_codec));
|
||||
memcpy(_buffer, rhs._buffer, rhs._length);
|
||||
}
|
||||
}
|
||||
@ -60,7 +61,8 @@ VCMEncodedFrame::VCMEncodedFrame(const VCMEncodedFrame& rhs)
|
||||
_size = 0;
|
||||
_length = 0;
|
||||
if (rhs._buffer != NULL) {
|
||||
VerifyAndAllocate(rhs._length);
|
||||
VerifyAndAllocate(rhs._length +
|
||||
EncodedImage::GetBufferPaddingBytes(_codec));
|
||||
memcpy(_buffer, rhs._buffer, rhs._length);
|
||||
_length = rhs._length;
|
||||
}
|
||||
|
||||
@ -105,7 +105,8 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
|
||||
|
||||
uint32_t requiredSizeBytes =
|
||||
Length() + packet.sizeBytes +
|
||||
(packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
|
||||
(packet.insertStartCode ? kH264StartCodeLengthBytes : 0) +
|
||||
EncodedImage::GetBufferPaddingBytes(packet.codec);
|
||||
if (requiredSizeBytes >= _size) {
|
||||
const uint8_t* prevBuffer = _buffer;
|
||||
const uint32_t increments =
|
||||
|
||||
@ -173,6 +173,12 @@ class VideoFrame {
|
||||
// TODO(pbos): Rename EncodedFrame and reformat this class' members.
|
||||
class EncodedImage {
|
||||
public:
|
||||
static const size_t kBufferPaddingBytesH264;
|
||||
|
||||
// Some decoders require encoded image buffers to be padded with a small
|
||||
// number of additional bytes (due to over-reading byte readers).
|
||||
static size_t GetBufferPaddingBytes(VideoCodecType codec_type);
|
||||
|
||||
EncodedImage() : EncodedImage(nullptr, 0, 0) {}
|
||||
EncodedImage(uint8_t* buffer, size_t length, size_t size)
|
||||
: _buffer(buffer), _length(length), _size(size) {}
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user