Don't crash in SingleNalu packetization for H.264 if there is no space in the packet

Also, pass the correct max payload data size to encoders: it now accounts for
RTP headers.

Bug: chromium:819259
Change-Id: I586924e9246218fab6072e05eca894925cfe556e
Reviewed-on: https://webrtc-review.googlesource.com/61425
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Åsa Persson <asapersson@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22460}
This commit is contained in:
Ilya Nikolaevskiy 2018-03-15 15:46:17 +01:00 committed by Commit Bot
parent 4425b055f9
commit 1d037ae704
8 changed files with 89 additions and 24 deletions

View File

@ -170,15 +170,25 @@ size_t RtpPacketizerH264::SetPayloadData(
if (!updated_sps)
input_fragments_.push_back(Fragment(buffer, length));
}
GeneratePackets();
if (!GeneratePackets()) {
// If failed to generate all the packets, discard already generated
// packets in case the caller would ignore return value and still try to
// call NextPacket().
num_packets_left_ = 0;
while (!packets_.empty()) {
packets_.pop();
}
return 0;
}
return num_packets_left_;
}
void RtpPacketizerH264::GeneratePackets() {
bool RtpPacketizerH264::GeneratePackets() {
for (size_t i = 0; i < input_fragments_.size();) {
switch (packetization_mode_) {
case H264PacketizationMode::SingleNalUnit:
PacketizeSingleNalu(i);
if (!PacketizeSingleNalu(i))
return false;
++i;
break;
case H264PacketizationMode::NonInterleaved:
@ -197,6 +207,7 @@ void RtpPacketizerH264::GeneratePackets() {
break;
}
}
return true;
}
void RtpPacketizerH264::PacketizeFuA(size_t fragment_index) {
@ -290,19 +301,25 @@ size_t RtpPacketizerH264::PacketizeStapA(size_t fragment_index) {
return fragment_index;
}
void RtpPacketizerH264::PacketizeSingleNalu(size_t fragment_index) {
bool RtpPacketizerH264::PacketizeSingleNalu(size_t fragment_index) {
// Add a single NALU to the queue, no aggregation.
size_t payload_size_left = max_payload_len_;
if (fragment_index + 1 == input_fragments_.size())
payload_size_left -= last_packet_reduction_len_;
const Fragment* fragment = &input_fragments_[fragment_index];
RTC_CHECK_GE(payload_size_left, fragment->length)
<< "Payload size left " << payload_size_left << ", fragment length "
<< fragment->length << ", packetization mode " << packetization_mode_;
if (payload_size_left < fragment->length) {
RTC_LOG(LS_ERROR) << "Failed to fit a fragment to packet in SingleNalu "
"packetization mode. Payload size left "
<< payload_size_left << ", fragment length "
<< fragment->length << ", packet capacity "
<< max_payload_len_;
return false;
}
RTC_CHECK_GT(fragment->length, 0u);
packets_.push(PacketUnit(*fragment, true /* first */, true /* last */,
false /* aggregated */, fragment->buffer[0]));
++num_packets_left_;
return true;
}
bool RtpPacketizerH264::NextPacket(RtpPacketToSend* rtp_packet) {

View File

@ -80,10 +80,10 @@ class RtpPacketizerH264 : public RtpPacketizer {
uint8_t header;
};
void GeneratePackets();
bool GeneratePackets();
void PacketizeFuA(size_t fragment_index);
size_t PacketizeStapA(size_t fragment_index);
void PacketizeSingleNalu(size_t fragment_index);
bool PacketizeSingleNalu(size_t fragment_index);
void NextAggregatePacket(RtpPacketToSend* rtp_packet, bool last);
void NextFragmentPacket(RtpPacketToSend* rtp_packet);

View File

@ -468,9 +468,7 @@ TEST(RtpPacketizerH264Test, TestFUABig) {
sizeof(kExpectedPayloadSizes) / sizeof(size_t)));
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
TEST(RtpPacketizerH264DeathTest, SendOverlongDataInPacketizationMode0) {
TEST(RtpPacketizerH264Test, SendOverlongDataInPacketizationMode0) {
const size_t kFrameSize = kMaxPayloadSize + 1;
uint8_t frame[kFrameSize] = {0};
for (size_t i = 0; i < kFrameSize; ++i)
@ -484,12 +482,9 @@ TEST(RtpPacketizerH264DeathTest, SendOverlongDataInPacketizationMode0) {
std::unique_ptr<RtpPacketizer> packetizer(CreateH264Packetizer(
H264PacketizationMode::SingleNalUnit, kMaxPayloadSize, 0));
EXPECT_DEATH(packetizer->SetPayloadData(frame, kFrameSize, &fragmentation),
"payload_size");
EXPECT_EQ(0u, packetizer->SetPayloadData(frame, kFrameSize, &fragmentation));
}
#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
namespace {
const uint8_t kStartSequence[] = {0x00, 0x00, 0x00, 0x01};
const uint8_t kOriginalSps[] = {kSps, 0x00, 0x00, 0x03, 0x03,

View File

@ -54,13 +54,24 @@ constexpr RtpExtensionSize CreateExtensionSize() {
}
// Size info for header extensions that might be used in padding or FEC packets.
constexpr RtpExtensionSize kExtensionSizes[] = {
constexpr RtpExtensionSize kFecOrPaddingExtensionSizes[] = {
CreateExtensionSize<AbsoluteSendTime>(),
CreateExtensionSize<TransmissionOffset>(),
CreateExtensionSize<TransportSequenceNumber>(),
CreateExtensionSize<PlayoutDelayLimits>(),
};
// Size info for header extensions that might be used in video packets.
constexpr RtpExtensionSize kVideoExtensionSizes[] = {
CreateExtensionSize<AbsoluteSendTime>(),
CreateExtensionSize<TransmissionOffset>(),
CreateExtensionSize<TransportSequenceNumber>(),
CreateExtensionSize<PlayoutDelayLimits>(),
CreateExtensionSize<VideoOrientation>(),
CreateExtensionSize<VideoContentTypeExtension>(),
CreateExtensionSize<VideoTimingExtension>(),
};
const char* FrameTypeToString(FrameType frame_type) {
switch (frame_type) {
case kEmptyFrame:
@ -177,7 +188,13 @@ RTPSender::~RTPSender() {
}
rtc::ArrayView<const RtpExtensionSize> RTPSender::FecExtensionSizes() {
return rtc::MakeArrayView(kExtensionSizes, arraysize(kExtensionSizes));
return rtc::MakeArrayView(kFecOrPaddingExtensionSizes,
arraysize(kFecOrPaddingExtensionSizes));
}
rtc::ArrayView<const RtpExtensionSize> RTPSender::VideoExtensionSizes() {
return rtc::MakeArrayView(kVideoExtensionSizes,
arraysize(kVideoExtensionSizes));
}
uint16_t RTPSender::ActualSendBitrateKbit() const {
@ -1029,8 +1046,8 @@ size_t RTPSender::RtpHeaderLength() const {
rtc::CritScope lock(&send_critsect_);
size_t rtp_header_length = kRtpHeaderLength;
rtp_header_length += sizeof(uint32_t) * csrcs_.size();
rtp_header_length +=
rtp_header_extension_map_.GetTotalLengthInBytes(kExtensionSizes);
rtp_header_length += rtp_header_extension_map_.GetTotalLengthInBytes(
kFecOrPaddingExtensionSizes);
return rtp_header_length;
}

View File

@ -151,6 +151,9 @@ class RTPSender {
// Size info for header extensions used by FEC packets.
static rtc::ArrayView<const RtpExtensionSize> FecExtensionSizes();
// Size info for header extensions used by video packets.
static rtc::ArrayView<const RtpExtensionSize> VideoExtensionSizes();
// Create empty packet, fills ssrc, csrcs and reserve place for header
// extensions RtpSender updates before sending.
std::unique_ptr<RtpPacketToSend> AllocatePacket() const;

View File

@ -373,7 +373,6 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
video_header ? GetTemporalId(*video_header) : kNoTemporalIdx;
StorageType storage = GetStorageType(temporal_id, retransmission_settings,
expected_retransmission_time_ms);
size_t num_packets =
packetizer->SetPayloadData(payload_data, payload_size, fragmentation);

View File

@ -465,6 +465,8 @@ SEncParamExt H264EncoderImpl::CreateEncoderParams() const {
SM_SIZELIMITED_SLICE;
encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint =
static_cast<unsigned int>(max_payload_size_);
RTC_LOG(INFO) << "Encoder is configured with NALU constraint: "
<< max_payload_size_ << " bytes";
break;
case H264PacketizationMode::NonInterleaved:
// When uiSliceMode = SM_FIXEDSLCNUM_SLICE, uiSliceNum = 0 means auto

View File

@ -255,6 +255,37 @@ CpuOveruseOptions GetCpuOveruseOptions(const VideoSendStream::Config& config) {
return options;
}
size_t CalculateMaxHeaderSize(const VideoSendStream::Config::Rtp& config) {
size_t header_size = kRtpHeaderSize;
size_t extensions_size = 0;
size_t fec_extensions_size = 0;
if (config.extensions.size() > 0) {
RtpHeaderExtensionMap extensions_map(config.extensions);
extensions_size =
extensions_map.GetTotalLengthInBytes(RTPSender::VideoExtensionSizes());
fec_extensions_size =
extensions_map.GetTotalLengthInBytes(RTPSender::FecExtensionSizes());
}
header_size += extensions_size;
if (config.flexfec.payload_type >= 0) {
// All FEC extensions again plus maximum FlexFec overhead.
header_size += fec_extensions_size + 32;
} else {
if (config.ulpfec.ulpfec_payload_type >= 0) {
// Header with all the FEC extensions will be repeated plus maximum
// UlpFec overhead.
header_size += fec_extensions_size + 18;
}
if (config.ulpfec.red_payload_type >= 0) {
header_size += 1; // RED header.
}
}
// Additional room for Rtx.
if (config.rtx.payload_type >= 0)
header_size += kRtxHeaderSize;
return header_size;
}
} // namespace
namespace internal {
@ -585,9 +616,10 @@ void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
// ReconfigureVideoEncoder from the network thread.
// RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(content_type_ == config.content_type);
video_stream_encoder_->ConfigureEncoder(std::move(config),
config_.rtp.max_packet_size,
config_.rtp.nack.rtp_history_ms > 0);
video_stream_encoder_->ConfigureEncoder(
std::move(config),
config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp),
config_.rtp.nack.rtp_history_ms > 0);
}
VideoSendStream::Stats VideoSendStream::GetStats() {