From ba050a6d6d60690141f2dd23c824857e690d1991 Mon Sep 17 00:00:00 2001 From: sprang Date: Fri, 18 Aug 2017 02:51:12 -0700 Subject: [PATCH] Reland of Add a flags field to video timing extension. (patchset #1 id:1 of https://codereview.webrtc.org/2995953002/ ) Reason for revert: Create a reland CL to add the fix to. Original issue's description: > Revert of Add a flags field to video timing extension. (patchset #15 id:280001 of https://codereview.webrtc.org/3000753002/ ) > > Reason for revert: > Speculative revert for breaking remoting_unittests in fyi bots. > https://build.chromium.org/p/chromium.webrtc.fyi/waterfall?builder=Win7%20Tester > > Original issue's description: > > Add a flags field to video timing extension. > > > > The rtp header extension for video timing should have an additional > > field for signaling metadata, such as what triggered the extension for > > this particular frame. This will allow separating frames selected because > > of outlier sizes from regular frames, for more accurate stats. > > > > This implementation is backwards compatible in that it can read video > > timing extensions without the new flag field, but it always sends with > > it included. > > > > BUG=webrtc:7594 > > > > Review-Url: https://codereview.webrtc.org/3000753002 > > Cr-Commit-Position: refs/heads/master@{#19353} > > Committed: https://chromium.googlesource.com/external/webrtc/+/cf5d485e147f7d7b3081692f101e496ce9e1d257 > > TBR=danilchap@webrtc.org,kthelgason@webrtc.org,stefan@webrtc.org,sprang@webrtc.org > # Skipping CQ checks because original CL landed less than 1 days ago. > NOPRESUBMIT=true > NOTREECHECKS=true > NOTRY=true > BUG=webrtc:7594 > > Review-Url: https://codereview.webrtc.org/2995953002 > Cr-Commit-Position: refs/heads/master@{#19360} > Committed: https://chromium.googlesource.com/external/webrtc/+/f0f7378b059501bb2bc5d006bf0f43546e47328f TBR=danilchap@webrtc.org,kthelgason@webrtc.org,stefan@webrtc.org,emircan@google.com # Not skipping CQ checks because original CL landed more than 1 days ago.
BUG=webrtc:7594 Review-Url: https://codereview.webrtc.org/2996153002 Cr-Commit-Position: refs/heads/master@{#19405} --- webrtc/api/video/video_timing.cc | 32 +++++++-- webrtc/api/video/video_timing.h | 39 ++++++++-- webrtc/common_video/include/video_frame.h | 2 +- webrtc/common_video/video_frame.cc | 1 - .../rtp_rtcp/source/rtp_header_extensions.cc | 71 ++++++++++++------- .../rtp_rtcp/source/rtp_header_extensions.h | 2 +- .../rtp_rtcp/source/rtp_packet_to_send.h | 8 +-- .../rtp_rtcp/source/rtp_packet_unittest.cc | 69 +++++++++++++++++- .../rtp_rtcp/source/rtp_receiver_video.cc | 3 +- .../rtp_rtcp/source/rtp_sender_unittest.cc | 7 +- .../rtp_rtcp/source/rtp_sender_video.cc | 2 +- .../codecs/h264/h264_encoder_impl.cc | 2 +- .../video_coding/codecs/vp8/vp8_impl.cc | 2 +- .../video_coding/codecs/vp9/vp9_impl.cc | 2 +- webrtc/modules/video_coding/encoded_frame.cc | 2 +- webrtc/modules/video_coding/frame_buffer.cc | 6 +- webrtc/modules/video_coding/frame_object.cc | 8 +-- .../modules/video_coding/generic_decoder.cc | 3 +- .../modules/video_coding/generic_encoder.cc | 13 ++-- .../video_coding/generic_encoder_unittest.cc | 28 +++++++- .../src/jni/androidmediaencoder_jni.cc | 2 +- .../Classes/PeerConnection/RTCEncodedImage.mm | 6 +- .../VideoToolbox/RTCVideoEncoderH264.mm | 2 +- .../Framework/Headers/WebRTC/RTCVideoCodec.h | 2 +- webrtc/video/payload_router.cc | 6 +- webrtc/video/rtp_video_stream_receiver.cc | 1 - 26 files changed, 238 insertions(+), 83 deletions(-) diff --git a/webrtc/api/video/video_timing.cc b/webrtc/api/video/video_timing.cc index a0a3f4d10f..0d22f904bf 100644 --- a/webrtc/api/video/video_timing.cc +++ b/webrtc/api/video/video_timing.cc @@ -27,7 +27,8 @@ TimingFrameInfo::TimingFrameInfo() receive_finish_ms(-1), decode_start_ms(-1), decode_finish_ms(-1), - render_time_ms(-1) {} + render_time_ms(-1), + flags(TimingFrameFlags::kDefault) {} int64_t TimingFrameInfo::EndToEndDelay() const { return capture_time_ms >= 0 ? 
decode_finish_ms - capture_time_ms : -1; @@ -38,14 +39,31 @@ bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const { return other_delay == -1 || EndToEndDelay() > other_delay; } +bool TimingFrameInfo::IsOutlier() const { + return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredBySize); +} + +bool TimingFrameInfo::IsTimerTriggered() const { + return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredByTimer); +} + +bool TimingFrameInfo::IsInvalid() const { + return flags == TimingFrameFlags::kInvalid; +} + std::string TimingFrameInfo::ToString() const { std::stringstream out; - out << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms - << ',' << encode_finish_ms << ',' << packetization_finish_ms << ',' - << pacer_exit_ms << ',' << network_timestamp_ms << ',' - << network2_timestamp_ms << ',' << receive_start_ms << ',' - << receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms - << ',' << render_time_ms; + if (IsInvalid()) { + out << "[Invalid]"; + } else { + out << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms + << ',' << encode_finish_ms << ',' << packetization_finish_ms << ',' + << pacer_exit_ms << ',' << network_timestamp_ms << ',' + << network2_timestamp_ms << ',' << receive_start_ms << ',' + << receive_finish_ms << ',' << decode_start_ms << ',' + << decode_finish_ms << ',' << render_time_ms << ", outlier_triggered " + << IsOutlier() << ", timer_triggered " << IsTimerTriggered(); + } return out.str(); } diff --git a/webrtc/api/video/video_timing.h b/webrtc/api/video/video_timing.h index 44991dfbeb..f5134636ba 100644 --- a/webrtc/api/video/video_timing.h +++ b/webrtc/api/video/video_timing.h @@ -13,6 +13,7 @@ #include <stdint.h> +#include <limits> #include <string> #include "webrtc/rtc_base/checks.h" @@ -20,15 +21,25 @@ namespace webrtc { +enum TimingFrameFlags : uint8_t { + kDefault = 0, // No flags set (used by old protocol) + kTriggeredByTimer = 1 << 0, // Frame marked for tracing by periodic timer. + kTriggeredBySize = 1 << 1, // Frame marked for tracing due to size. + kInvalid = std::numeric_limits<uint8_t>::max() // Invalid, ignore! +}; + // Video timing timestamps in ms counted from capture_time_ms of a frame. // This structure represents data sent in video-timing RTP header extension. struct VideoSendTiming { - static const uint8_t kEncodeStartDeltaIdx = 0; - static const uint8_t kEncodeFinishDeltaIdx = 1; - static const uint8_t kPacketizationFinishDeltaIdx = 2; - static const uint8_t kPacerExitDeltaIdx = 3; - static const uint8_t kNetworkTimestampDeltaIdx = 4; - static const uint8_t kNetwork2TimestampDeltaIdx = 5; + // Offsets of the fields in the RTP header extension, counting from the first + // byte after the one-byte header. + static constexpr uint8_t kFlagsOffset = 0; + static constexpr uint8_t kEncodeStartDeltaOffset = 1; + static constexpr uint8_t kEncodeFinishDeltaOffset = 3; + static constexpr uint8_t kPacketizationFinishDeltaOffset = 5; + static constexpr uint8_t kPacerExitDeltaOffset = 7; + static constexpr uint8_t kNetworkTimestampDeltaOffset = 9; + static constexpr uint8_t kNetwork2TimestampDeltaOffset = 11; // Returns |time_ms - base_ms| capped at max 16-bit value. // Used to fill this data structure as per @@ -45,7 +56,7 @@ struct VideoSendTiming { uint16_t pacer_exit_delta_ms; uint16_t network_timstamp_delta_ms; uint16_t network2_timstamp_delta_ms; - bool is_timing_frame; + uint8_t flags; }; // Used to report precise timings of 'timing frames'.
Contains all important @@ -64,6 +75,18 @@ struct TimingFrameInfo { // preferred. bool IsLongerThan(const TimingFrameInfo& other) const; + // Returns true if flags are set to indicate this frame was marked for tracing + // due to the size being outside some limit. + bool IsOutlier() const; + + // Returns true if flags are set to indicate this frame was marked for tracing + // due to cyclic timer. + bool IsTimerTriggered() const; + + // Returns true if the timing data is marked as invalid, in which case it + // should be ignored. + bool IsInvalid() const; + std::string ToString() const; uint32_t rtp_timestamp; // Identifier of a frame. @@ -84,6 +107,8 @@ struct TimingFrameInfo { int64_t decode_start_ms; // Decode start time. int64_t decode_finish_ms; // Decode completion time. int64_t render_time_ms; // Proposed render time to ensure smooth playback. + + uint8_t flags; // Flags indicating validity and/or why tracing was triggered. }; } // namespace webrtc diff --git a/webrtc/common_video/include/video_frame.h b/webrtc/common_video/include/video_frame.h index a5f0e52da0..4f8ed08f9b 100644 --- a/webrtc/common_video/include/video_frame.h +++ b/webrtc/common_video/include/video_frame.h @@ -66,7 +66,7 @@ class EncodedImage { // Timing information should be updatable on const instances. mutable struct Timing { - bool is_timing_frame = false; + uint8_t flags = TimingFrameFlags::kInvalid; int64_t encode_start_ms = 0; int64_t encode_finish_ms = 0; int64_t packetization_finish_ms = 0; diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc index e01aa01e18..8678700e9a 100644 --- a/webrtc/common_video/video_frame.cc +++ b/webrtc/common_video/video_frame.cc @@ -49,7 +49,6 @@ EncodedImage::EncodedImage(uint8_t* buffer, size_t length, size_t size) void EncodedImage::SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms) const { - timing_.is_timing_frame = true; timing_.encode_start_ms = encode_start_ms; timing_.encode_finish_ms = encode_finish_ms; } diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc index 5e172c4f66..28685b4640 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -248,18 +248,24 @@ bool VideoContentTypeExtension::Write(uint8_t* data, // Video Timing. // 6 timestamps in milliseconds counted from capture time stored in rtp header: // encode start/finish, packetization complete, pacer exit and reserved for -// modification by the network modification. +// modification by the network. |flags| is a bitmask and has the +// following allowed values: +// 0 = Valid data, but no flags available (backwards compatibility) +// 1 = Frame marked as timing frame due to cyclic timer. +// 2 = Frame marked as timing frame due to size being outside limit. +// 255 = Invalid. The whole timing frame extension should be ignored.
+// // 0 1 2 3 // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | ID | len=11| encode start ms delta | encode finish | +// | ID | len=12| flags | encode start ms delta | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | ms delta | packetizer finish ms delta | pacer exit | +// | encode finish ms delta | packetizer finish ms delta | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | ms delta | network timestamp ms delta | network2 time-| +// | pacer exit ms delta | network timestamp ms delta | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | stamp ms delta| -// +-+-+-+-+-+-+-+-+ +// | network2 timestamp ms delta | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr RTPExtensionType VideoTimingExtension::kId; constexpr uint8_t VideoTimingExtension::kValueSizeBytes; @@ -268,47 +274,62 @@ constexpr const char VideoTimingExtension::kUri[]; bool VideoTimingExtension::Parse(rtc::ArrayView<const uint8_t> data, VideoSendTiming* timing) { RTC_DCHECK(timing); - if (data.size() != kValueSizeBytes) - return false; - timing->encode_start_delta_ms = - ByteReader<uint16_t>::ReadBigEndian(data.data()); + // TODO(sprang): Deprecate support for old wire format. + ptrdiff_t off = 0; + switch (data.size()) { + case kValueSizeBytes - 1: + timing->flags = 0; + off = 1; // Old wire format without the flags field. + break; + case kValueSizeBytes: + timing->flags = ByteReader<uint8_t>::ReadBigEndian(data.data()); + break; + default: + return false; + } + + timing->encode_start_delta_ms = ByteReader<uint16_t>::ReadBigEndian( + data.data() + VideoSendTiming::kEncodeStartDeltaOffset - off); timing->encode_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian( - data.data() + 2 * VideoSendTiming::kEncodeFinishDeltaIdx); + data.data() + VideoSendTiming::kEncodeFinishDeltaOffset - off); timing->packetization_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian( - data.data() + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx); + data.data() + VideoSendTiming::kPacketizationFinishDeltaOffset - off); timing->pacer_exit_delta_ms = ByteReader<uint16_t>::ReadBigEndian( - data.data() + 2 * VideoSendTiming::kPacerExitDeltaIdx); + data.data() + VideoSendTiming::kPacerExitDeltaOffset - off); timing->network_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian( - data.data() + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx); + data.data() + VideoSendTiming::kNetworkTimestampDeltaOffset - off); timing->network2_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian( - data.data() + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx); - timing->is_timing_frame = true; + data.data() + VideoSendTiming::kNetwork2TimestampDeltaOffset - off); return true; } bool VideoTimingExtension::Write(uint8_t* data, const VideoSendTiming& timing) { - ByteWriter<uint16_t>::WriteBigEndian(data, timing.encode_start_delta_ms); + ByteWriter<uint8_t>::WriteBigEndian(data + VideoSendTiming::kFlagsOffset, + timing.flags); ByteWriter<uint16_t>::WriteBigEndian( - data + 2 * VideoSendTiming::kEncodeFinishDeltaIdx, + data + VideoSendTiming::kEncodeStartDeltaOffset, + timing.encode_start_delta_ms); + ByteWriter<uint16_t>::WriteBigEndian( + data + VideoSendTiming::kEncodeFinishDeltaOffset, timing.encode_finish_delta_ms); ByteWriter<uint16_t>::WriteBigEndian( - data + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx, + data + VideoSendTiming::kPacketizationFinishDeltaOffset, timing.packetization_finish_delta_ms); ByteWriter<uint16_t>::WriteBigEndian( - data + 2 * VideoSendTiming::kPacerExitDeltaIdx, + data + 
VideoSendTiming::kPacerExitDeltaOffset, timing.pacer_exit_delta_ms); ByteWriter<uint16_t>::WriteBigEndian( - data + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx, 0); // reserved + data + VideoSendTiming::kNetworkTimestampDeltaOffset, 0); // reserved ByteWriter<uint16_t>::WriteBigEndian( - data + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx, 0); // reserved + data + VideoSendTiming::kNetwork2TimestampDeltaOffset, 0); // reserved return true; } bool VideoTimingExtension::Write(uint8_t* data, uint16_t time_delta_ms, - uint8_t idx) { - RTC_DCHECK_LT(idx, 6); - ByteWriter<uint16_t>::WriteBigEndian(data + 2 * idx, time_delta_ms); + uint8_t offset) { + RTC_DCHECK_LT(offset, kValueSizeBytes - sizeof(uint16_t)); + ByteWriter<uint16_t>::WriteBigEndian(data + offset, time_delta_ms); return true; } diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h index 9dabca28b9..dc63140271 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h +++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h @@ -130,7 +130,7 @@ class VideoContentTypeExtension { class VideoTimingExtension { public: static constexpr RTPExtensionType kId = kRtpExtensionVideoTiming; - static constexpr uint8_t kValueSizeBytes = 12; + static constexpr uint8_t kValueSizeBytes = 13; static constexpr const char kUri[] = "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"; diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h index 5d5b31b39b..d557ecf102 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h +++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h @@ -33,25 +33,25 @@ class RtpPacketToSend : public rtp::Packet { void set_packetization_finish_time_ms(int64_t time) { SetExtension<VideoTimingExtension>( VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), - VideoSendTiming::kPacketizationFinishDeltaIdx); + VideoSendTiming::kPacketizationFinishDeltaOffset); } void set_pacer_exit_time_ms(int64_t time) { SetExtension<VideoTimingExtension>( VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), - VideoSendTiming::kPacerExitDeltaIdx); + VideoSendTiming::kPacerExitDeltaOffset); } void set_network_time_ms(int64_t time) { SetExtension<VideoTimingExtension>( VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), - VideoSendTiming::kNetworkTimestampDeltaIdx); + VideoSendTiming::kNetworkTimestampDeltaOffset); } void set_network2_time_ms(int64_t time) { SetExtension<VideoTimingExtension>( VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), - VideoSendTiming::kNetwork2TimestampDeltaIdx); + VideoSendTiming::kNetwork2TimestampDeltaOffset); } private: diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc index 1075d9ea35..776db53385 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc +++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc @@ -32,6 +32,7 @@ constexpr uint8_t kTransmissionOffsetExtensionId = 1; constexpr uint8_t kAudioLevelExtensionId = 9; constexpr uint8_t kRtpStreamIdExtensionId = 0xa; constexpr uint8_t kRtpMidExtensionId = 0xb; +constexpr uint8_t kVideoTimingExtensionId = 0xc; constexpr int32_t kTimeOffset = 0x56ce; constexpr bool kVoiceActive = true; constexpr uint8_t kAudioLevel = 0x5a; @@ -97,8 +98,19 @@ constexpr uint8_t kPacketWithInvalidExtension[] = { (kTransmissionOffsetExtensionId << 4) | 6, // (6+1)-byte extension, but 'e', 'x', 't', // Transmission Offset 'd', 'a', 't', 'a', // expected to be 3-bytes. 
- 'p', 'a', 'y', 'l', 'o', 'a', 'd' -}; + 'p', 'a', 'y', 'l', 'o', 'a', 'd'}; + +constexpr uint8_t kPacketWithLegacyTimingExtension[] = { + 0x90, kPayloadType, kSeqNumFirstByte, kSeqNumSecondByte, + 0x65, 0x43, 0x12, 0x78, // kTimestamp. + 0x12, 0x34, 0x56, 0x78, // kSsrc. + 0xbe, 0xde, 0x00, 0x04, // Extension block of size 4 x 32bit words. + (kVideoTimingExtensionId << 4) + | VideoTimingExtension::kValueSizeBytes - 2, // Old format without flags. + 0x00, 0x01, 0x00, + 0x02, 0x00, 0x03, 0x00, + 0x04, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00}; // clang-format on } // namespace @@ -498,4 +510,57 @@ TEST(RtpPacketTest, RawExtensionFunctionsAcceptZeroIdAndReturnFalse) { EXPECT_THAT(packet.AllocateRawExtension(kInvalidId, 3), IsEmpty()); } +TEST(RtpPacketTest, CreateAndParseTimingFrameExtension) { + // Create a packet with video frame timing extension populated. + RtpPacketToSend::ExtensionManager send_extensions; + send_extensions.Register(kRtpExtensionVideoTiming, kVideoTimingExtensionId); + RtpPacketToSend send_packet(&send_extensions); + send_packet.SetPayloadType(kPayloadType); + send_packet.SetSequenceNumber(kSeqNum); + send_packet.SetTimestamp(kTimestamp); + send_packet.SetSsrc(kSsrc); + + VideoSendTiming timing; + timing.encode_start_delta_ms = 1; + timing.encode_finish_delta_ms = 2; + timing.packetization_finish_delta_ms = 3; + timing.pacer_exit_delta_ms = 4; + timing.flags = + TimingFrameFlags::kTriggeredByTimer | TimingFrameFlags::kTriggeredBySize; + + send_packet.SetExtension<VideoTimingExtension>(timing); + + // Serialize the packet and then parse it again. + RtpPacketReceived::ExtensionManager extensions; + extensions.Register<VideoTimingExtension>(kVideoTimingExtensionId); + RtpPacketReceived receive_packet(&extensions); + EXPECT_TRUE(receive_packet.Parse(send_packet.Buffer())); + + VideoSendTiming received_timing; + EXPECT_TRUE( + receive_packet.GetExtension<VideoTimingExtension>(&received_timing)); + + // Only check first and last timestamp (covered by other tests) plus flags. + EXPECT_EQ(received_timing.encode_start_delta_ms, + timing.encode_start_delta_ms); + EXPECT_EQ(received_timing.pacer_exit_delta_ms, timing.pacer_exit_delta_ms); + EXPECT_EQ(received_timing.flags, timing.flags); +} + +TEST(RtpPacketTest, ParseLegacyTimingFrameExtension) { + // Parse a raw packet carrying the old (flag-less) timing extension format. + RtpPacketReceived::ExtensionManager extensions; + extensions.Register<VideoTimingExtension>(kVideoTimingExtensionId); + RtpPacketReceived packet(&extensions); + EXPECT_TRUE(packet.Parse(kPacketWithLegacyTimingExtension, + sizeof(kPacketWithLegacyTimingExtension))); + VideoSendTiming received_timing; + EXPECT_TRUE(packet.GetExtension<VideoTimingExtension>(&received_timing)); + + // Check first and last timestamp are still OK. Flags should now be 0. 
+ EXPECT_EQ(received_timing.encode_start_delta_ms, 1); + EXPECT_EQ(received_timing.pacer_exit_delta_ms, 4); + EXPECT_EQ(received_timing.flags, 0); +} + } // namespace webrtc diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc index 845e7ac496..106f056075 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc +++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc @@ -91,7 +91,7 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header, rtp_header->type = parsed_payload.type; rtp_header->type.Video.rotation = kVideoRotation_0; rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED; - rtp_header->type.Video.video_timing.is_timing_frame = false; + rtp_header->type.Video.video_timing.flags = TimingFrameFlags::kInvalid; // Retrieve the video rotation information. if (rtp_header->header.extension.hasVideoRotation) { @@ -107,7 +107,6 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header, if (rtp_header->header.extension.has_video_timing) { rtp_header->type.Video.video_timing = rtp_header->header.extension.video_timing; - rtp_header->type.Video.video_timing.is_timing_frame = true; } rtp_header->type.Video.playout_delay = diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc index 3e6e4529d8..14bbff781c 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -11,6 +11,7 @@ #include #include +#include "webrtc/api/video/video_timing.h" #include "webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h" #include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h" #include "webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h" @@ -994,7 +995,7 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) { 0, 1500)); RTPVideoHeader video_header; memset(&video_header, 0, sizeof(RTPVideoHeader)); - video_header.video_timing.is_timing_frame = true; + video_header.video_timing.flags = TimingFrameFlags::kTriggeredByTimer; EXPECT_TRUE(rtp_sender_->SendOutgoingData( kVideoFrameKey, kPayloadType, kTimestamp, kCaptureTimeMs, kPayloadData, sizeof(kPayloadData), nullptr, &video_header, nullptr)); @@ -1019,7 +1020,7 @@ TEST_P(RtpSenderTest, NoFlexfecForTimingFrames) { EXPECT_CALL(mock_paced_sender_, InsertPacket(RtpPacketSender::kLowPriority, kMediaSsrc, kSeqNum + 1, _, _, false)); - video_header.video_timing.is_timing_frame = false; + video_header.video_timing.flags = TimingFrameFlags::kInvalid; EXPECT_TRUE(rtp_sender_->SendOutgoingData( kVideoFrameKey, kPayloadType, kTimestamp + 1, kCaptureTimeMs + 1, kPayloadData, sizeof(kPayloadData), nullptr, &video_header, nullptr)); @@ -1571,7 +1572,7 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { const int64_t kCaptureTimestamp = fake_clock_.TimeInMilliseconds(); RTPVideoHeader hdr = {0}; - hdr.video_timing.is_timing_frame = true; + hdr.video_timing.flags = TimingFrameFlags::kTriggeredByTimer; hdr.video_timing.encode_start_delta_ms = kEncodeStartDeltaMs; hdr.video_timing.encode_finish_delta_ms = kEncodeFinishDeltaMs; diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index d12936c143..38450742be 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -332,7 +332,7 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type, last_packet->SetExtension<VideoContentTypeExtension>( 
video_header->content_type); } - if (video_header->video_timing.is_timing_frame) { + if (video_header->video_timing.flags != TimingFrameFlags::kInvalid) { last_packet->SetExtension<VideoTimingExtension>( video_header->video_timing); } diff --git a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc index e84dffd4c6..f5a1910984 100644 --- a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -370,7 +370,7 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame, encoded_image_.content_type_ = (mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED; - encoded_image_.timing_.is_timing_frame = false; + encoded_image_.timing_.flags = TimingFrameFlags::kInvalid; encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType); // Split encoded image up into fragments. This also updates |encoded_image_|. diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc index 5933073a3f..3bf85d57e1 100644 --- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc +++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc @@ -928,7 +928,7 @@ int VP8EncoderImpl::GetEncodedPartitions( encoded_images_[encoder_idx].content_type_ = (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED; - encoded_images_[encoder_idx].timing_.is_timing_frame = false; + encoded_images_[encoder_idx].timing_.flags = TimingFrameFlags::kInvalid; int qp = -1; vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp); diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc index 0d4334e360..dc8bba22e0 100644 --- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc +++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc @@ -710,7 +710,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { : VideoContentType::UNSPECIFIED; encoded_image_._encodedHeight = raw_->d_h; encoded_image_._encodedWidth = raw_->d_w; - encoded_image_.timing_.is_timing_frame = false; + encoded_image_.timing_.flags = TimingFrameFlags::kInvalid; int qp = -1; vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp); encoded_image_.qp_ = qp; diff --git a/webrtc/modules/video_coding/encoded_frame.cc b/webrtc/modules/video_coding/encoded_frame.cc index ec7f560755..bd407c733f 100644 --- a/webrtc/modules/video_coding/encoded_frame.cc +++ b/webrtc/modules/video_coding/encoded_frame.cc @@ -88,7 +88,7 @@ void VCMEncodedFrame::Reset() { _codec = kVideoCodecUnknown; rotation_ = kVideoRotation_0; content_type_ = VideoContentType::UNSPECIFIED; - timing_.is_timing_frame = false; + timing_.flags = TimingFrameFlags::kInvalid; _rotation_set = false; } diff --git a/webrtc/modules/video_coding/frame_buffer.cc b/webrtc/modules/video_coding/frame_buffer.cc index f67210f976..41c7fc8825 100644 --- a/webrtc/modules/video_coding/frame_buffer.cc +++ b/webrtc/modules/video_coding/frame_buffer.cc @@ -164,8 +164,7 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket( rotation_ = packet.video_header.rotation; _rotation_set = true; content_type_ = packet.video_header.content_type; - if (packet.video_header.video_timing.is_timing_frame) { - timing_.is_timing_frame = true; + if (packet.video_header.video_timing.flags != TimingFrameFlags::kInvalid) { timing_.encode_start_ms = ntp_time_ms_ + 
packet.video_header.video_timing.encode_start_delta_ms; timing_.encode_finish_ms = @@ -182,9 +181,8 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket( timing_.network2_timestamp_ms = ntp_time_ms_ + packet.video_header.video_timing.network2_timstamp_delta_ms; - } else { - timing_.is_timing_frame = false; } + timing_.flags = packet.video_header.video_timing.flags; } if (packet.is_first_packet_in_frame) { diff --git a/webrtc/modules/video_coding/frame_object.cc b/webrtc/modules/video_coding/frame_object.cc index 1d858fcc0e..86bcffb092 100644 --- a/webrtc/modules/video_coding/frame_object.cc +++ b/webrtc/modules/video_coding/frame_object.cc @@ -32,6 +32,7 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer, : packet_buffer_(packet_buffer), first_seq_num_(first_seq_num), last_seq_num_(last_seq_num), + timestamp_(0), received_time_(received_time), times_nacked_(times_nacked) { VCMPacket* first_packet = packet_buffer_->GetPacket(first_seq_num); @@ -113,10 +114,10 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer, rotation_ = last_packet->video_header.rotation; _rotation_set = true; content_type_ = last_packet->video_header.content_type; - if (last_packet->video_header.video_timing.is_timing_frame) { + if (last_packet->video_header.video_timing.flags != + TimingFrameFlags::kInvalid) { // ntp_time_ms_ may be -1 if not estimated yet. This is not a problem, // as this will be dealt with at the time of reporting. - timing_.is_timing_frame = true; timing_.encode_start_ms = ntp_time_ms_ + last_packet->video_header.video_timing.encode_start_delta_ms; @@ -138,9 +139,8 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer, timing_.receive_start_ms = first_packet->receive_time_ms; timing_.receive_finish_ms = last_packet->receive_time_ms; - } else { - timing_.is_timing_frame = false; } + timing_.flags = last_packet->video_header.video_timing.flags; } RtpFrameObject::~RtpFrameObject() { diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc index cc14794e09..3b2a620e12 100644 --- a/webrtc/modules/video_coding/generic_decoder.cc +++ b/webrtc/modules/video_coding/generic_decoder.cc @@ -92,7 +92,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, frameInfo->renderTimeMs); // Report timing information. - if (frameInfo->timing.is_timing_frame) { + if (frameInfo->timing.flags != TimingFrameFlags::kInvalid) { int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_; // Convert remote timestamps to local time from ntp timestamps. 
frameInfo->timing.encode_start_ms -= ntp_offset_; @@ -137,6 +137,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, timing_frame_info.decode_finish_ms = now_ms; timing_frame_info.render_time_ms = frameInfo->renderTimeMs; timing_frame_info.rtp_timestamp = decodedImage.timestamp(); + timing_frame_info.flags = frameInfo->timing.flags; _timing->SetTimingFrameInfo(timing_frame_info); } diff --git a/webrtc/modules/video_coding/generic_encoder.cc b/webrtc/modules/video_coding/generic_encoder.cc index 78eeef288a..cdb244da96 100644 --- a/webrtc/modules/video_coding/generic_encoder.cc +++ b/webrtc/modules/video_coding/generic_encoder.cc @@ -231,7 +231,7 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage( rtc::Optional<size_t> outlier_frame_size; rtc::Optional<int64_t> encode_start_ms; - bool is_timing_frame = false; + uint8_t timing_flags = TimingFrameFlags::kInvalid; { rtc::CritScope crit(&timing_params_lock_); @@ -274,14 +274,16 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage( if (last_timing_frame_time_ms_ == -1 || timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms || timing_frame_delay_ms == 0) { - is_timing_frame = true; + timing_flags = TimingFrameFlags::kTriggeredByTimer; last_timing_frame_time_ms_ = encoded_image.capture_time_ms_; } // Outliers trigger timing frames, but do not affect scheduled timing // frames. if (outlier_frame_size && encoded_image._length >= *outlier_frame_size) { - is_timing_frame = true; + if (timing_flags == TimingFrameFlags::kInvalid) + timing_flags = 0; + timing_flags |= TimingFrameFlags::kTriggeredBySize; } } @@ -290,8 +292,11 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage( // drift relative to rtc::TimeMillis(). We can't use it for Timing frames, // because, to be sent on the network, the capture time is required to be less // than all the other timestamps. - if (is_timing_frame && encode_start_ms) { + if (timing_flags != TimingFrameFlags::kInvalid && encode_start_ms) { encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis()); + encoded_image.timing_.flags = timing_flags; + } else { + encoded_image.timing_.flags = TimingFrameFlags::kInvalid; } Result result = post_encode_callback_->OnEncodedImage( diff --git a/webrtc/modules/video_coding/generic_encoder_unittest.cc b/webrtc/modules/video_coding/generic_encoder_unittest.cc index eec5b98bba..2a5aeff85d 100644 --- a/webrtc/modules/video_coding/generic_encoder_unittest.cc +++ b/webrtc/modules/video_coding/generic_encoder_unittest.cc @@ -31,7 +31,8 @@ class FakeEncodedImageCallback : public EncodedImageCallback { Result OnEncodedImage(const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info, const RTPFragmentationHeader* fragmentation) override { - last_frame_was_timing_ = encoded_image.timing_.is_timing_frame; + last_frame_was_timing_ = + encoded_image.timing_.flags != TimingFrameFlags::kInvalid; return Result(Result::OK); }; @@ -164,5 +165,30 @@ TEST(TestVCMEncodedFrameCallback, MarksOutliers) { } } +TEST(TestVCMEncodedFrameCallback, NoTimingFrameIfNoEncodeStartTime) { + EncodedImage image; + CodecSpecificInfo codec_specific; + int64_t timestamp = 1; + image._length = 500; + image.capture_time_ms_ = timestamp; + codec_specific.codecType = kVideoCodecGeneric; + codec_specific.codecSpecific.generic.simulcast_idx = 0; + FakeEncodedImageCallback sink; + VCMEncodedFrameCallback callback(&sink, nullptr); + VideoCodec::TimingFrameTriggerThresholds thresholds; + thresholds.delay_ms = 1; // Make all frames timing frames. 
+ callback.SetTimingFramesThresholds(thresholds); + + // Verify a single frame works with encode start time set. + callback.OnEncodeStarted(timestamp, 0); + callback.OnEncodedImage(image, &codec_specific, nullptr); + EXPECT_TRUE(sink.WasTimingFrame()); + + // New frame, now skip OnEncodeStarted. Should not result in timing frame. + image.capture_time_ms_ = ++timestamp; + callback.OnEncodedImage(image, &codec_specific, nullptr); + EXPECT_FALSE(sink.WasTimingFrame()); +} + } // namespace test } // namespace webrtc diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc index 8181e3dfba..efbf2c23f2 100644 --- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc +++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc @@ -1162,7 +1162,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { (codec_mode_ == webrtc::VideoCodecMode::kScreensharing) ? webrtc::VideoContentType::SCREENSHARE : webrtc::VideoContentType::UNSPECIFIED; - image->timing_.is_timing_frame = false; + image->timing_.flags = webrtc::TimingFrameFlags::kInvalid; image->_frameType = (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); image->_completeFrame = true; diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm index ac57844b97..3c39acb38e 100644 --- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm +++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm @@ -20,7 +20,7 @@ @synthesize timeStamp = _timeStamp; @synthesize captureTimeMs = _captureTimeMs; @synthesize ntpTimeMs = _ntpTimeMs; -@synthesize isTimingFrame = _isTimingFrame; +@synthesize flags = _flags; @synthesize encodeStartMs = _encodeStartMs; @synthesize encodeFinishMs = _encodeFinishMs; @synthesize frameType = _frameType; @@ -40,7 +40,7 @@ _timeStamp = encodedImage._timeStamp; _captureTimeMs = encodedImage.capture_time_ms_; _ntpTimeMs = encodedImage.ntp_time_ms_; - _isTimingFrame = encodedImage.timing_.is_timing_frame; + _flags = encodedImage.timing_.flags; _encodeStartMs = encodedImage.timing_.encode_start_ms; _encodeFinishMs = encodedImage.timing_.encode_finish_ms; _frameType = (RTCFrameType)encodedImage._frameType; @@ -64,7 +64,7 @@ encodedImage._timeStamp = _timeStamp; encodedImage.capture_time_ms_ = _captureTimeMs; encodedImage.ntp_time_ms_ = _ntpTimeMs; - encodedImage.timing_.is_timing_frame = _isTimingFrame; + encodedImage.timing_.flags = _flags; encodedImage.timing_.encode_start_ms = _encodeStartMs; encodedImage.timing_.encode_finish_ms = _encodeFinishMs; encodedImage._frameType = webrtc::FrameType(_frameType); diff --git a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm index 4091886dd4..7ac8c18237 100644 --- a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm +++ b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm @@ -657,7 +657,7 @@ CFStringRef ExtractProfile(const cricket::VideoCodec &codec) { frame.rotation = rotation; frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? 
RTCVideoContentTypeScreenshare : RTCVideoContentTypeUnspecified; - frame.isTimingFrame = NO; + frame.flags = webrtc::TimingFrameFlags::kInvalid; int qp; _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size()); diff --git a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h index 7f678b9759..c18fa783cf 100644 --- a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h +++ b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h @@ -40,7 +40,7 @@ RTC_EXPORT @property(nonatomic, assign) uint32_t timeStamp; @property(nonatomic, assign) long captureTimeMs; @property(nonatomic, assign) long ntpTimeMs; -@property(nonatomic, assign) BOOL isTimingFrame; +@property(nonatomic, assign) uint8_t flags; @property(nonatomic, assign) long encodeStartMs; @property(nonatomic, assign) long encodeFinishMs; @property(nonatomic, assign) RTCFrameType frameType; diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc index 7957451c2b..eabde400aa 100644 --- a/webrtc/video/payload_router.cc +++ b/webrtc/video/payload_router.cc @@ -130,7 +130,7 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage( CopyCodecSpecific(codec_specific_info, &rtp_video_header); rtp_video_header.rotation = encoded_image.rotation_; rtp_video_header.content_type = encoded_image.content_type_; - if (encoded_image.timing_.is_timing_frame) { + if (encoded_image.timing_.flags != TimingFrameFlags::kInvalid) { rtp_video_header.video_timing.encode_start_delta_ms = VideoSendTiming::GetDeltaCappedMs( encoded_image.capture_time_ms_, @@ -143,10 +143,8 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage( rtp_video_header.video_timing.pacer_exit_delta_ms = 0; rtp_video_header.video_timing.network_timstamp_delta_ms = 0; rtp_video_header.video_timing.network2_timstamp_delta_ms = 0; - rtp_video_header.video_timing.is_timing_frame = true; - } else { - rtp_video_header.video_timing.is_timing_frame = false; } + rtp_video_header.video_timing.flags = encoded_image.timing_.flags; rtp_video_header.playout_delay = encoded_image.playout_delay_; int stream_index = rtp_video_header.simulcastIdx; diff --git a/webrtc/video/rtp_video_stream_receiver.cc b/webrtc/video/rtp_video_stream_receiver.cc index d4464ac21d..6064e69715 100644 --- a/webrtc/video/rtp_video_stream_receiver.cc +++ b/webrtc/video/rtp_video_stream_receiver.cc @@ -560,7 +560,6 @@ void RtpVideoStreamReceiver::NotifyReceiverOfFecPacket( rtp_header.type.Video.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false}; if (header.extension.has_video_timing) { rtp_header.type.Video.video_timing = header.extension.video_timing; - rtp_header.type.Video.video_timing.is_timing_frame = true; } rtp_header.type.Video.playout_delay = header.extension.playout_delay;
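Note on the wire format (editor's illustration, not part of the CL): the backward-compatibility rule in VideoTimingExtension::Parse is simply that a 12-byte payload is the old wire format (no flags byte, the six deltas start at offset 0, flags are treated as 0), while a 13-byte payload starts with the flags byte followed by the six big-endian 16-bit millisecond deltas. The standalone C++ sketch below restates that rule outside the WebRTC tree; it does not use webrtc::ByteReader or any other WebRTC API, and every name in it (namespace example, LegacyAwareParse, ParsedTiming, ReadU16BigEndian, kNewSizeBytes) is hypothetical and made up for this illustration. Only the flag values (0 = no flags, 1 = timer, 2 = size, 255 = invalid) come from the patch itself.

// Illustrative sketch only -- not the WebRTC implementation. It mirrors the
// parsing rule this patch adds: 12 bytes => legacy format without flags,
// 13 bytes => a flags byte followed by six big-endian uint16 ms deltas.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

namespace example {  // Hypothetical namespace, not part of WebRTC.

constexpr std::size_t kNewSizeBytes = 13;  // Matches kValueSizeBytes = 13 above.
constexpr uint8_t kTriggeredByTimer = 1;   // Bit 0, per the extension comment.
constexpr uint8_t kTriggeredBySize = 2;    // Bit 1.
constexpr uint8_t kInvalid = 255;          // Whole extension should be ignored.

struct ParsedTiming {
  uint8_t flags = kInvalid;
  uint16_t deltas[6] = {0};  // encode start/finish, packetization, pacer, net, net2.
};

inline uint16_t ReadU16BigEndian(const uint8_t* p) {
  return static_cast<uint16_t>((p[0] << 8) | p[1]);
}

// Returns false for any payload size other than 12 (legacy) or 13 (new) bytes.
bool LegacyAwareParse(const std::vector<uint8_t>& payload, ParsedTiming* out) {
  std::size_t offset = 0;
  if (payload.size() == kNewSizeBytes) {
    out->flags = payload[0];  // New format: the flags byte comes first.
    offset = 1;
  } else if (payload.size() == kNewSizeBytes - 1) {
    out->flags = 0;  // Legacy format: valid data, but no flags available.
  } else {
    return false;
  }
  for (int i = 0; i < 6; ++i)
    out->deltas[i] = ReadU16BigEndian(payload.data() + offset + 2 * i);
  return true;
}

}  // namespace example

int main() {
  // 13-byte payload: flags = timer | size, encode start delta = 1 ms, rest 0.
  std::vector<uint8_t> new_format = {
      example::kTriggeredByTimer | example::kTriggeredBySize,
      0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
  // The same deltas in the 12-byte legacy layout: just drop the flags byte.
  std::vector<uint8_t> legacy(new_format.begin() + 1, new_format.end());

  example::ParsedTiming a, b;
  example::LegacyAwareParse(new_format, &a);
  example::LegacyAwareParse(legacy, &b);
  std::printf("new: flags=%d delta0=%d, legacy: flags=%d delta0=%d\n",
              a.flags, a.deltas[0], b.flags, b.deltas[0]);
  return 0;
}

Built as a plain C++11 program this prints "new: flags=3 delta0=1, legacy: flags=0 delta0=1", which is the same behavior the CreateAndParseTimingFrameExtension and ParseLegacyTimingFrameExtension tests above assert against the real parser.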