From 2edc6845ac12a7216aeb84d9ad9da8260e2bb434 Mon Sep 17 00:00:00 2001 From: ilnik Date: Thu, 6 Jul 2017 03:06:50 -0700 Subject: [PATCH] Report timing frames info in GetStats. Some frames are already marked as 'timing frames' via video-timing RTP header extension. Timestamps along full WebRTC pipeline are gathered for these frames. This CL implements reporting of these timestamps for a single timing frame since the last GetStats(). The frame with the longest end-to-end delay between two consecutive GetStats calls is reported. The purpose of this timing information is not to provide a realtime statistics but to provide debugging information as it will help identify problematic places in video pipeline for outliers (frames which took longest to process). BUG=webrtc:7594 Review-Url: https://codereview.webrtc.org/2946413002 Cr-Commit-Position: refs/heads/master@{#18909} --- webrtc/api/BUILD.gn | 1 + webrtc/api/statstypes.cc | 2 + webrtc/api/statstypes.h | 1 + webrtc/api/video/video_timing.cc | 52 ++++++++++++++++++ webrtc/api/video/video_timing.h | 47 ++++++++++++++-- webrtc/common_types.h | 3 +- webrtc/media/base/mediachannel.h | 8 ++- webrtc/media/engine/fakewebrtccall.cc | 5 ++ webrtc/media/engine/fakewebrtccall.h | 2 + webrtc/media/engine/webrtcvideoengine.cc | 2 + webrtc/modules/include/module_common_types.h | 2 +- .../rtp_rtcp/source/rtp_header_extensions.cc | 25 ++++----- .../rtp_rtcp/source/rtp_header_extensions.h | 7 +-- .../rtp_rtcp/source/rtp_packet_to_send.h | 16 +++--- .../rtp_rtcp/source/rtp_sender_unittest.cc | 4 +- webrtc/modules/video_coding/frame_buffer2.cc | 10 +++- webrtc/modules/video_coding/frame_buffer2.h | 2 + .../video_coding/frame_buffer2_unittest.cc | 1 + .../modules/video_coding/generic_decoder.cc | 54 ++++++++++++++----- .../include/video_coding_defines.h | 2 + webrtc/modules/video_coding/timing.cc | 41 ++++++++------ webrtc/modules/video_coding/timing.h | 4 ++ webrtc/pc/statscollector.cc | 4 ++ webrtc/test/fuzzers/rtp_packet_fuzzer.cc | 2 +- webrtc/video/end_to_end_tests.cc | 53 ++++++++++++++++++ webrtc/video/payload_router.cc | 10 ++-- webrtc/video/receive_statistics_proxy.cc | 22 ++++++++ webrtc/video/receive_statistics_proxy.h | 5 ++ .../receive_statistics_proxy_unittest.cc | 42 +++++++++++++++ webrtc/video/video_receive_stream.cc | 5 ++ webrtc/video/video_receive_stream.h | 2 + webrtc/video/video_stream_decoder.cc | 3 ++ webrtc/video/video_stream_decoder.h | 2 + webrtc/video_receive_stream.h | 2 + 34 files changed, 376 insertions(+), 67 deletions(-) create mode 100644 webrtc/api/video/video_timing.cc diff --git a/webrtc/api/BUILD.gn b/webrtc/api/BUILD.gn index 40741bced5..249411b1c2 100644 --- a/webrtc/api/BUILD.gn +++ b/webrtc/api/BUILD.gn @@ -173,6 +173,7 @@ rtc_source_set("video_frame_api") { "video/video_frame_buffer.cc", "video/video_frame_buffer.h", "video/video_rotation.h", + "video/video_timing.cc", "video/video_timing.h", ] diff --git a/webrtc/api/statstypes.cc b/webrtc/api/statstypes.cc index b7459fa1fa..f6f26899e9 100644 --- a/webrtc/api/statstypes.cc +++ b/webrtc/api/statstypes.cc @@ -598,6 +598,8 @@ const char* StatsReport::Value::display_name() const { return "googTransportType"; case kStatsValueNameTrackId: return "googTrackId"; + case kStatsValueNameTimingFrameInfo: + return "googTimingFrameInfo"; case kStatsValueNameTypingNoiseState: return "googTypingNoiseState"; case kStatsValueNameWritable: diff --git a/webrtc/api/statstypes.h b/webrtc/api/statstypes.h index 8ef79d9669..6fe6f37a6a 100644 --- a/webrtc/api/statstypes.h +++ 
b/webrtc/api/statstypes.h @@ -210,6 +210,7 @@ class StatsReport { kStatsValueNameSrtpCipher, kStatsValueNameTargetDelayMs, kStatsValueNameTargetEncBitrate, + kStatsValueNameTimingFrameInfo, // Result of |TimingFrameInfo::ToString| kStatsValueNameTrackId, kStatsValueNameTransmitBitrate, kStatsValueNameTransportType, diff --git a/webrtc/api/video/video_timing.cc b/webrtc/api/video/video_timing.cc new file mode 100644 index 0000000000..a0a3f4d10f --- /dev/null +++ b/webrtc/api/video/video_timing.cc @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/api/video/video_timing.h" + +#include + +namespace webrtc { + +TimingFrameInfo::TimingFrameInfo() + : rtp_timestamp(0), + capture_time_ms(-1), + encode_start_ms(-1), + encode_finish_ms(-1), + packetization_finish_ms(-1), + pacer_exit_ms(-1), + network_timestamp_ms(-1), + network2_timestamp_ms(-1), + receive_start_ms(-1), + receive_finish_ms(-1), + decode_start_ms(-1), + decode_finish_ms(-1), + render_time_ms(-1) {} + +int64_t TimingFrameInfo::EndToEndDelay() const { + return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1; +} + +bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const { + int64_t other_delay = other.EndToEndDelay(); + return other_delay == -1 || EndToEndDelay() > other_delay; +} + +std::string TimingFrameInfo::ToString() const { + std::stringstream out; + out << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms + << ',' << encode_finish_ms << ',' << packetization_finish_ms << ',' + << pacer_exit_ms << ',' << network_timestamp_ms << ',' + << network2_timestamp_ms << ',' << receive_start_ms << ',' + << receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms + << ',' << render_time_ms; + return out.str(); +} + +} // namespace webrtc diff --git a/webrtc/api/video/video_timing.h b/webrtc/api/video/video_timing.h index a44a8ef68d..05414de34a 100644 --- a/webrtc/api/video/video_timing.h +++ b/webrtc/api/video/video_timing.h @@ -12,14 +12,17 @@ #define WEBRTC_API_VIDEO_VIDEO_TIMING_H_ #include -#include + +#include + #include "webrtc/base/checks.h" #include "webrtc/base/safe_conversions.h" namespace webrtc { -// Video timing timstamps in ms counted from capture_time_ms of a frame. -struct VideoTiming { +// Video timing timestamps in ms counted from capture_time_ms of a frame. +// This structure represents data sent in video-timing RTP header extension. +struct VideoSendTiming { static const uint8_t kEncodeStartDeltaIdx = 0; static const uint8_t kEncodeFinishDeltaIdx = 1; static const uint8_t kPacketizationFinishDeltaIdx = 2; @@ -45,6 +48,44 @@ struct VideoTiming { bool is_timing_frame; }; +// Used to report precise timings of a 'timing frames'. Contains all important +// timestamps for a lifetime of that specific frame. Reported as a string via +// GetStats(). Only frame which took the longest between two GetStats calls is +// reported. +struct TimingFrameInfo { + TimingFrameInfo(); + + // Returns end-to-end delay of a frame, if sender and receiver timestamps are + // synchronized, -1 otherwise. 
+ int64_t EndToEndDelay() const; + + // Returns true if current frame took longer to process than |other| frame. + // If other frame's clocks are not synchronized, current frame is always + // preferred. + bool IsLongerThan(const TimingFrameInfo& other) const; + + std::string ToString() const; + + uint32_t rtp_timestamp; // Identifier of a frame. + // All timestamps below are in local monotonous clock of a receiver. + // If sender clock is not yet estimated, sender timestamps + // (capture_time_ms ... pacer_exit_ms) are negative values, still + // relatively correct. + int64_t capture_time_ms; // Captrue time of a frame. + int64_t encode_start_ms; // Encode start time. + int64_t encode_finish_ms; // Encode completion time. + int64_t packetization_finish_ms; // Time when frame was passed to pacer. + int64_t pacer_exit_ms; // Time when last packet was pushed out of pacer. + // Two in-network RTP processor timestamps: meaning is application specific. + int64_t network_timestamp_ms; + int64_t network2_timestamp_ms; + int64_t receive_start_ms; // First received packet time. + int64_t receive_finish_ms; // Last received packet time. + int64_t decode_start_ms; // Decode start time. + int64_t decode_finish_ms; // Decode completion time. + int64_t render_time_ms; // Proposed render time to insure smooth playback. +}; + } // namespace webrtc #endif // WEBRTC_API_VIDEO_VIDEO_TIMING_H_ diff --git a/webrtc/common_types.h b/webrtc/common_types.h index 7b62dbca09..bf7566c3cc 100644 --- a/webrtc/common_types.h +++ b/webrtc/common_types.h @@ -13,7 +13,6 @@ #include #include - #include #include #include @@ -782,7 +781,7 @@ struct RTPHeaderExtension { VideoContentType videoContentType; bool has_video_timing; - VideoTiming video_timing; + VideoSendTiming video_timing; PlayoutDelay playout_delay = {-1, -1}; diff --git a/webrtc/media/base/mediachannel.h b/webrtc/media/base/mediachannel.h index 816dfd18fc..1bea916f30 100644 --- a/webrtc/media/base/mediachannel.h +++ b/webrtc/media/base/mediachannel.h @@ -17,6 +17,7 @@ #include "webrtc/api/rtpparameters.h" #include "webrtc/api/rtpreceiverinterface.h" +#include "webrtc/api/video/video_timing.h" #include "webrtc/base/basictypes.h" #include "webrtc/base/buffer.h" #include "webrtc/base/copyonwritebuffer.h" @@ -747,8 +748,7 @@ struct VideoReceiverInfo : public MediaReceiverInfo { render_delay_ms(0), target_delay_ms(0), current_delay_ms(0), - capture_start_ntp_time_ms(-1) { - } + capture_start_ntp_time_ms(-1) {} std::vector ssrc_groups; // TODO(hbos): Move this to |VideoMediaInfo::receive_codecs|? @@ -793,6 +793,10 @@ struct VideoReceiverInfo : public MediaReceiverInfo { // Estimated capture start time in NTP time in ms. int64_t capture_start_ntp_time_ms; + + // Timing frame info: all important timestamps for a full lifetime of a + // single 'timing frame'. 
+ rtc::Optional timing_frame_info; }; struct DataSenderInfo : public MediaSenderInfo { diff --git a/webrtc/media/engine/fakewebrtccall.cc b/webrtc/media/engine/fakewebrtccall.cc index ec8f6b8f16..0f26a51c28 100644 --- a/webrtc/media/engine/fakewebrtccall.cc +++ b/webrtc/media/engine/fakewebrtccall.cc @@ -309,6 +309,11 @@ webrtc::VideoReceiveStream::Stats FakeVideoReceiveStream::GetStats() const { return stats_; } +rtc::Optional +FakeVideoReceiveStream::GetAndResetTimingFrameInfo() { + return rtc::Optional(); +} + void FakeVideoReceiveStream::Start() { receiving_ = true; } diff --git a/webrtc/media/engine/fakewebrtccall.h b/webrtc/media/engine/fakewebrtccall.h index 34db106c43..e0e0f7a7ce 100644 --- a/webrtc/media/engine/fakewebrtccall.h +++ b/webrtc/media/engine/fakewebrtccall.h @@ -205,6 +205,8 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { webrtc::VideoReceiveStream::Stats GetStats() const override; + rtc::Optional GetAndResetTimingFrameInfo() override; + webrtc::VideoReceiveStream::Config config_; bool receiving_; webrtc::VideoReceiveStream::Stats stats_; diff --git a/webrtc/media/engine/webrtcvideoengine.cc b/webrtc/media/engine/webrtcvideoengine.cc index e592740bb9..2eab3ceaa7 100644 --- a/webrtc/media/engine/webrtcvideoengine.cc +++ b/webrtc/media/engine/webrtcvideoengine.cc @@ -2483,6 +2483,8 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.plis_sent = stats.rtcp_packet_type_counts.pli_packets; info.nacks_sent = stats.rtcp_packet_type_counts.nack_packets; + info.timing_frame_info = stream_->GetAndResetTimingFrameInfo(); + if (log_stats) LOG(LS_INFO) << stats.ToString(rtc::TimeMillis()); diff --git a/webrtc/modules/include/module_common_types.h b/webrtc/modules/include/module_common_types.h index b719226d24..abf8b94697 100644 --- a/webrtc/modules/include/module_common_types.h +++ b/webrtc/modules/include/module_common_types.h @@ -61,7 +61,7 @@ struct RTPVideoHeader { VideoContentType content_type; - VideoTiming video_timing; + VideoSendTiming video_timing; bool is_first_packet_in_frame; uint8_t simulcastIdx; // Index if the simulcast encoder creating diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc index f630bafe28..c3fc9dc5dc 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -266,40 +266,41 @@ constexpr uint8_t VideoTimingExtension::kValueSizeBytes; constexpr const char* VideoTimingExtension::kUri; bool VideoTimingExtension::Parse(rtc::ArrayView data, - VideoTiming* timing) { + VideoSendTiming* timing) { RTC_DCHECK(timing); if (data.size() != kValueSizeBytes) return false; timing->encode_start_delta_ms = ByteReader::ReadBigEndian(data.data()); timing->encode_finish_delta_ms = ByteReader::ReadBigEndian( - data.data() + 2 * VideoTiming::kEncodeFinishDeltaIdx); + data.data() + 2 * VideoSendTiming::kEncodeFinishDeltaIdx); timing->packetization_finish_delta_ms = ByteReader::ReadBigEndian( - data.data() + 2 * VideoTiming::kPacketizationFinishDeltaIdx); + data.data() + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx); timing->pacer_exit_delta_ms = ByteReader::ReadBigEndian( - data.data() + 2 * VideoTiming::kPacerExitDeltaIdx); + data.data() + 2 * VideoSendTiming::kPacerExitDeltaIdx); timing->network_timstamp_delta_ms = ByteReader::ReadBigEndian( - data.data() + 2 * VideoTiming::kNetworkTimestampDeltaIdx); + data.data() + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx); 
timing->network2_timstamp_delta_ms = ByteReader::ReadBigEndian( - data.data() + 2 * VideoTiming::kNetwork2TimestampDeltaIdx); + data.data() + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx); timing->is_timing_frame = true; return true; } -bool VideoTimingExtension::Write(uint8_t* data, const VideoTiming& timing) { +bool VideoTimingExtension::Write(uint8_t* data, const VideoSendTiming& timing) { ByteWriter::WriteBigEndian(data, timing.encode_start_delta_ms); ByteWriter::WriteBigEndian( - data + 2 * VideoTiming::kEncodeFinishDeltaIdx, + data + 2 * VideoSendTiming::kEncodeFinishDeltaIdx, timing.encode_finish_delta_ms); ByteWriter::WriteBigEndian( - data + 2 * VideoTiming::kPacketizationFinishDeltaIdx, + data + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx, timing.packetization_finish_delta_ms); ByteWriter::WriteBigEndian( - data + 2 * VideoTiming::kPacerExitDeltaIdx, timing.pacer_exit_delta_ms); + data + 2 * VideoSendTiming::kPacerExitDeltaIdx, + timing.pacer_exit_delta_ms); ByteWriter::WriteBigEndian( - data + 2 * VideoTiming::kNetworkTimestampDeltaIdx, 0); // reserved + data + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx, 0); // reserved ByteWriter::WriteBigEndian( - data + 2 * VideoTiming::kNetwork2TimestampDeltaIdx, 0); // reserved + data + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx, 0); // reserved return true; } diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h index c0a4bca913..c637a844e6 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h +++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h @@ -134,9 +134,10 @@ class VideoTimingExtension { static constexpr const char* kUri = "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"; - static bool Parse(rtc::ArrayView data, VideoTiming* timing); - static size_t ValueSize(const VideoTiming&) { return kValueSizeBytes; } - static bool Write(uint8_t* data, const VideoTiming& timing); + static bool Parse(rtc::ArrayView data, + VideoSendTiming* timing); + static size_t ValueSize(const VideoSendTiming&) { return kValueSizeBytes; } + static bool Write(uint8_t* data, const VideoSendTiming& timing); static size_t ValueSize(uint16_t time_delta_ms, uint8_t idx) { return kValueSizeBytes; diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h index 55bad2d502..5d5b31b39b 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h +++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h @@ -32,26 +32,26 @@ class RtpPacketToSend : public rtp::Packet { void set_packetization_finish_time_ms(int64_t time) { SetExtension( - VideoTiming::GetDeltaCappedMs(capture_time_ms_, time), - VideoTiming::kPacketizationFinishDeltaIdx); + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::kPacketizationFinishDeltaIdx); } void set_pacer_exit_time_ms(int64_t time) { SetExtension( - VideoTiming::GetDeltaCappedMs(capture_time_ms_, time), - VideoTiming::kPacerExitDeltaIdx); + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::kPacerExitDeltaIdx); } void set_network_time_ms(int64_t time) { SetExtension( - VideoTiming::GetDeltaCappedMs(capture_time_ms_, time), - VideoTiming::kNetworkTimestampDeltaIdx); + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::kNetworkTimestampDeltaIdx); } void set_network2_time_ms(int64_t time) { SetExtension( - VideoTiming::GetDeltaCappedMs(capture_time_ms_, time), - 
VideoTiming::kNetwork2TimestampDeltaIdx); + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::kNetwork2TimestampDeltaIdx); } private: diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc index bcfa650c02..89c1d1de56 100644 --- a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -473,7 +473,7 @@ TEST_P(RtpSenderTestWithoutPacer, WritesTimestampToTimingExtension) { packet->SetMarker(true); packet->SetTimestamp(kTimestamp); packet->set_capture_time_ms(capture_time_ms); - const VideoTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true}; + const VideoSendTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true}; packet->SetExtension(kVideoTiming); EXPECT_TRUE(rtp_sender_->AssignSequenceNumber(packet.get())); size_t packet_size = packet->size(); @@ -1578,7 +1578,7 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { rtp_sender_video_->SendVideo(kRtpVideoGeneric, kVideoFrameKey, kPayload, kTimestamp, kCaptureTimestamp, kFrame, sizeof(kFrame), nullptr, &hdr); - VideoTiming timing; + VideoSendTiming timing; EXPECT_TRUE(transport_.last_sent_packet().GetExtension( &timing)); EXPECT_EQ(kPacketizationTimeMs, timing.packetization_finish_delta_ms); diff --git a/webrtc/modules/video_coding/frame_buffer2.cc b/webrtc/modules/video_coding/frame_buffer2.cc index 1114e7cc0a..2beffc7f08 100644 --- a/webrtc/modules/video_coding/frame_buffer2.cc +++ b/webrtc/modules/video_coding/frame_buffer2.cc @@ -149,6 +149,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame( } UpdateJitterDelay(); + UpdateTimingFrameInfo(); PropagateDecodability(next_frame_it_->second); // Sanity check for RTP timestamp monotonicity. 
@@ -534,8 +535,15 @@ void FrameBuffer::UpdateJitterDelay() { } } +void FrameBuffer::UpdateTimingFrameInfo() { + TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo"); + rtc::Optional info = timing_->GetTimingFrameInfo(); + if (info) + stats_callback_->OnTimingFrameInfoUpdated(*info); +} + void FrameBuffer::ClearFramesAndHistory() { - TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay"); + TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory"); frames_.clear(); last_decoded_frame_it_ = frames_.end(); last_continuous_frame_it_ = frames_.end(); diff --git a/webrtc/modules/video_coding/frame_buffer2.h b/webrtc/modules/video_coding/frame_buffer2.h index ffeb2aa7ca..25ba0a9189 100644 --- a/webrtc/modules/video_coding/frame_buffer2.h +++ b/webrtc/modules/video_coding/frame_buffer2.h @@ -153,6 +153,8 @@ class FrameBuffer { void UpdateJitterDelay() EXCLUSIVE_LOCKS_REQUIRED(crit_); + void UpdateTimingFrameInfo() EXCLUSIVE_LOCKS_REQUIRED(crit_); + void ClearFramesAndHistory() EXCLUSIVE_LOCKS_REQUIRED(crit_); bool HasBadRenderTiming(const FrameObject& frame, int64_t now_ms) diff --git a/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/webrtc/modules/video_coding/frame_buffer2_unittest.cc index 58b3f7a42b..9d15302cec 100644 --- a/webrtc/modules/video_coding/frame_buffer2_unittest.cc +++ b/webrtc/modules/video_coding/frame_buffer2_unittest.cc @@ -116,6 +116,7 @@ class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback { int jitter_buffer_ms, int min_playout_delay_ms, int render_delay_ms)); + MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info)); }; class TestFrameBuffer2 : public ::testing::Test { diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc index 42ee8b600c..80b08b5478 100644 --- a/webrtc/modules/video_coding/generic_decoder.cc +++ b/webrtc/modules/video_coding/generic_decoder.cc @@ -10,6 +10,8 @@ #include "webrtc/modules/video_coding/generic_decoder.h" +#include + #include "webrtc/base/checks.h" #include "webrtc/base/logging.h" #include "webrtc/base/timeutils.h" @@ -91,6 +93,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, // Report timing information. if (frameInfo->timing.is_timing_frame) { + int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_; // Convert remote timestamps to local time from ntp timestamps. frameInfo->timing.encode_start_ms -= ntp_offset_; frameInfo->timing.encode_finish_ms -= ntp_offset_; @@ -98,19 +101,44 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, frameInfo->timing.pacer_exit_ms -= ntp_offset_; frameInfo->timing.network_timestamp_ms -= ntp_offset_; frameInfo->timing.network2_timestamp_ms -= ntp_offset_; - // TODO(ilnik): Report timing information here. - // Capture time: decodedImage.ntp_time_ms() - ntp_offset - // Encode start: frameInfo->timing.encode_start_ms - // Encode finish: frameInfo->timing.encode_finish_ms - // Packetization done: frameInfo->timing.packetization_finish_ms - // Pacer exit: frameInfo->timing.pacer_exit_ms - // Network timestamp: frameInfo->timing.network_timestamp_ms - // Network2 timestamp: frameInfo->timing.network2_timestamp_ms - // Receive start: frameInfo->timing.receive_start_ms - // Receive finish: frameInfo->timing.receive_finish_ms - // Decode start: frameInfo->decodeStartTimeMs - // Decode finish: now_ms - // Render time: frameInfo->renderTimeMs + + int64_t sender_delta_ms = 0; + if (decodedImage.ntp_time_ms() < 0) { + // Sender clock is not estimated yet. 
Make sure that sender times are all + // negative to indicate that. Yet they still should be relatively correct. + sender_delta_ms = + std::max({capture_time_ms, frameInfo->timing.encode_start_ms, + frameInfo->timing.encode_finish_ms, + frameInfo->timing.packetization_finish_ms, + frameInfo->timing.pacer_exit_ms, + frameInfo->timing.network_timestamp_ms, + frameInfo->timing.network2_timestamp_ms}) + + 1; + } + + TimingFrameInfo timing_frame_info; + + timing_frame_info.capture_time_ms = capture_time_ms - sender_delta_ms; + timing_frame_info.encode_start_ms = + frameInfo->timing.encode_start_ms - sender_delta_ms; + timing_frame_info.encode_finish_ms = + frameInfo->timing.encode_finish_ms - sender_delta_ms; + timing_frame_info.packetization_finish_ms = + frameInfo->timing.packetization_finish_ms - sender_delta_ms; + timing_frame_info.pacer_exit_ms = + frameInfo->timing.pacer_exit_ms - sender_delta_ms; + timing_frame_info.network_timestamp_ms = + frameInfo->timing.network_timestamp_ms - sender_delta_ms; + timing_frame_info.network2_timestamp_ms = + frameInfo->timing.network2_timestamp_ms - sender_delta_ms; + timing_frame_info.receive_start_ms = frameInfo->timing.receive_start_ms; + timing_frame_info.receive_finish_ms = frameInfo->timing.receive_finish_ms; + timing_frame_info.decode_start_ms = frameInfo->decodeStartTimeMs; + timing_frame_info.decode_finish_ms = now_ms; + timing_frame_info.render_time_ms = frameInfo->renderTimeMs; + timing_frame_info.rtp_timestamp = decodedImage.timestamp(); + + _timing->SetTimingFrameInfo(timing_frame_info); } decodedImage.set_timestamp_us( diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h index 6ed472df5e..19d38138ea 100644 --- a/webrtc/modules/video_coding/include/video_coding_defines.h +++ b/webrtc/modules/video_coding/include/video_coding_defines.h @@ -109,6 +109,8 @@ class VCMReceiveStatisticsCallback { int min_playout_delay_ms, int render_delay_ms) = 0; + virtual void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) = 0; + protected: virtual ~VCMReceiveStatisticsCallback() {} }; diff --git a/webrtc/modules/video_coding/timing.cc b/webrtc/modules/video_coding/timing.cc index 21f4a3dc71..340f2c187f 100644 --- a/webrtc/modules/video_coding/timing.cc +++ b/webrtc/modules/video_coding/timing.cc @@ -21,21 +21,22 @@ namespace webrtc { VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing) - : clock_(clock), - master_(false), - ts_extrapolator_(), - codec_timer_(new VCMCodecTimer()), - render_delay_ms_(kDefaultRenderDelayMs), - min_playout_delay_ms_(0), - max_playout_delay_ms_(10000), - jitter_delay_ms_(0), - current_delay_ms_(0), - last_decode_ms_(0), - prev_frame_timestamp_(0), - num_decoded_frames_(0), - num_delayed_decoded_frames_(0), - first_decoded_frame_ms_(-1), - sum_missed_render_deadline_ms_(0) { + : clock_(clock), + master_(false), + ts_extrapolator_(), + codec_timer_(new VCMCodecTimer()), + render_delay_ms_(kDefaultRenderDelayMs), + min_playout_delay_ms_(0), + max_playout_delay_ms_(10000), + jitter_delay_ms_(0), + current_delay_ms_(0), + last_decode_ms_(0), + prev_frame_timestamp_(0), + timing_frame_info_(), + num_decoded_frames_(0), + num_delayed_decoded_frames_(0), + first_decoded_frame_ms_(-1), + sum_missed_render_deadline_ms_(0) { if (master_timing == NULL) { master_ = true; ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds()); @@ -304,4 +305,14 @@ bool VCMTiming::GetTimings(int* decode_ms, return (num_decoded_frames_ > 0); } +void 
VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) { + rtc::CritScope cs(&crit_sect_); + timing_frame_info_.emplace(info); +} + +rtc::Optional VCMTiming::GetTimingFrameInfo() { + rtc::CritScope cs(&crit_sect_); + return timing_frame_info_; +} + } // namespace webrtc diff --git a/webrtc/modules/video_coding/timing.h b/webrtc/modules/video_coding/timing.h index bfd6778c12..82252bf3fe 100644 --- a/webrtc/modules/video_coding/timing.h +++ b/webrtc/modules/video_coding/timing.h @@ -102,6 +102,9 @@ class VCMTiming { int* min_playout_delay_ms, int* render_delay_ms) const; + void SetTimingFrameInfo(const TimingFrameInfo& info); + rtc::Optional GetTimingFrameInfo(); + enum { kDefaultRenderDelayMs = 10 }; enum { kDelayMaxChangeMsPerS = 100 }; @@ -131,6 +134,7 @@ class VCMTiming { int current_delay_ms_ GUARDED_BY(crit_sect_); int last_decode_ms_ GUARDED_BY(crit_sect_); uint32_t prev_frame_timestamp_ GUARDED_BY(crit_sect_); + rtc::Optional timing_frame_info_ GUARDED_BY(crit_sect_); // Statistics. size_t num_decoded_frames_ GUARDED_BY(crit_sect_); diff --git a/webrtc/pc/statscollector.cc b/webrtc/pc/statscollector.cc index c932583159..27cb6c3083 100644 --- a/webrtc/pc/statscollector.cc +++ b/webrtc/pc/statscollector.cc @@ -249,6 +249,10 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) { for (const auto& i : ints) report->AddInt(i.name, i.value); report->AddString(StatsReport::kStatsValueNameMediaType, "video"); + if (info.timing_frame_info) { + report->AddString(StatsReport::kStatsValueNameTimingFrameInfo, + info.timing_frame_info->ToString()); + } } void ExtractStats(const cricket::VideoSenderInfo& info, StatsReport* report) { diff --git a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc index db41e1a9ba..fc53216520 100644 --- a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc +++ b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc @@ -91,7 +91,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { packet.GetExtension(&content_type); break; case kRtpExtensionVideoTiming: - VideoTiming timing; + VideoSendTiming timing; packet.GetExtension(&timing); break; case kRtpExtensionRtpStreamId: { diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc index f876621fd3..b76a57db26 100644 --- a/webrtc/video/end_to_end_tests.cc +++ b/webrtc/video/end_to_end_tests.cc @@ -3442,6 +3442,59 @@ TEST_F(EndToEndTest, GetStats) { RunBaseTest(&test); } +TEST_F(EndToEndTest, GetTimingFrameInfoReportsTimingFrames) { + static const int kExtensionId = 5; + + class StatsObserver : public test::EndToEndTest { + public: + StatsObserver() : EndToEndTest(kLongTimeoutMs) {} + + private: + std::string CompoundKey(const char* name, uint32_t ssrc) { + std::ostringstream oss; + oss << name << "_" << ssrc; + return oss.str(); + } + + void ModifyVideoConfigs( + VideoSendStream::Config* send_config, + std::vector* receive_configs, + VideoEncoderConfig* encoder_config) override { + send_config->rtp.extensions.clear(); + send_config->rtp.extensions.push_back( + RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId)); + for (size_t i = 0; i < receive_configs->size(); ++i) { + (*receive_configs)[i].rtp.extensions.clear(); + (*receive_configs)[i].rtp.extensions.push_back( + RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId)); + } + } + + void OnVideoStreamsCreated( + VideoSendStream* send_stream, + const std::vector& receive_streams) override { + receive_streams_ = receive_streams; + } + + void PerformTest() override { + // No frames reported 
initially. + for (size_t i = 0; i < receive_streams_.size(); ++i) { + EXPECT_FALSE(receive_streams_[i]->GetAndResetTimingFrameInfo()); + } + // Wait for at least one timing frame to be sent with 100ms grace period. + SleepMs(kDefaultTimingFramesDelayMs + 100); + // Check that timing frames are reported for each stream. + for (size_t i = 0; i < receive_streams_.size(); ++i) { + EXPECT_TRUE(receive_streams_[i]->GetAndResetTimingFrameInfo()); + } + } + + std::vector receive_streams_; + } test; + + RunBaseTest(&test); +} + class RtcpXrObserver : public test::EndToEndTest { public: RtcpXrObserver(bool enable_rrtr, bool enable_target_bitrate) diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc index e0e95ae056..58fcfd668f 100644 --- a/webrtc/video/payload_router.cc +++ b/webrtc/video/payload_router.cc @@ -132,11 +132,13 @@ EncodedImageCallback::Result PayloadRouter::OnEncodedImage( rtp_video_header.content_type = encoded_image.content_type_; if (encoded_image.timing_.is_timing_frame) { rtp_video_header.video_timing.encode_start_delta_ms = - VideoTiming::GetDeltaCappedMs(encoded_image.capture_time_ms_, - encoded_image.timing_.encode_start_ms); + VideoSendTiming::GetDeltaCappedMs( + encoded_image.capture_time_ms_, + encoded_image.timing_.encode_start_ms); rtp_video_header.video_timing.encode_finish_delta_ms = - VideoTiming::GetDeltaCappedMs(encoded_image.capture_time_ms_, - encoded_image.timing_.encode_finish_ms); + VideoSendTiming::GetDeltaCappedMs( + encoded_image.capture_time_ms_, + encoded_image.timing_.encode_finish_ms); rtp_video_header.video_timing.packetization_finish_delta_ms = 0; rtp_video_header.video_timing.pacer_exit_delta_ms = 0; rtp_video_header.video_timing.network_timstamp_delta_ms = 0; diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc index 2af19125c2..62d0b1c292 100644 --- a/webrtc/video/receive_statistics_proxy.cc +++ b/webrtc/video/receive_statistics_proxy.cc @@ -406,6 +406,17 @@ VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { return stats_; } +rtc::Optional +ReceiveStatisticsProxy::GetAndResetTimingFrameInfo() { + rtc::CritScope lock(&crit_); + rtc::Optional info = timing_frame_info_; + // Reset reported value to empty, so it will be always + // overwritten in |OnTimingFrameInfoUpdated|, thus allowing to store new + // value instead of reported one. + timing_frame_info_.reset(); + return info; +} + void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) { rtc::CritScope lock(&crit_); stats_.current_payload_type = payload_type; @@ -464,6 +475,17 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( "ssrc", stats_.ssrc); } +void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( + const TimingFrameInfo& info) { + rtc::CritScope lock(&crit_); + // Only the frame which was processed the longest since the last + // GetStats() call is reported. Therefore, only single 'longest' frame is + // stored. 
+ if (!timing_frame_info_ || info.IsLongerThan(*timing_frame_info_)) { + timing_frame_info_.emplace(info); + } +} + void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) { diff --git a/webrtc/video/receive_statistics_proxy.h b/webrtc/video/receive_statistics_proxy.h index 626b56dcea..1cfeca6513 100644 --- a/webrtc/video/receive_statistics_proxy.h +++ b/webrtc/video/receive_statistics_proxy.h @@ -46,6 +46,8 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, VideoReceiveStream::Stats GetStats() const; + rtc::Optional GetAndResetTimingFrameInfo(); + void OnDecodedFrame(rtc::Optional qp, VideoContentType content_type); void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz); void OnRenderedFrame(const VideoFrame& frame); @@ -69,6 +71,8 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, int min_playout_delay_ms, int render_delay_ms) override; + void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override; + // Overrides RtcpStatisticsCallback. void StatisticsUpdated(const webrtc::RtcpStatistics& statistics, uint32_t ssrc) override; @@ -157,6 +161,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, mutable std::map frame_window_ GUARDED_BY(&crit_); VideoContentType last_content_type_ GUARDED_BY(&crit_); rtc::Optional last_decoded_frame_time_ms_; + rtc::Optional timing_frame_info_ GUARDED_BY(&crit_); }; } // namespace webrtc diff --git a/webrtc/video/receive_statistics_proxy_unittest.cc b/webrtc/video/receive_statistics_proxy_unittest.cc index e219542768..2c4d330195 100644 --- a/webrtc/video/receive_statistics_proxy_unittest.cc +++ b/webrtc/video/receive_statistics_proxy_unittest.cc @@ -247,6 +247,48 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsNoCNameForUnknownSsrc) { EXPECT_STREQ("", statistics_proxy_->GetStats().c_name.c_str()); } +TEST_F(ReceiveStatisticsProxyTest, + GetTimingFrameInfoReportsLongestTimingFrame) { + const int64_t kShortEndToEndDelay = 10; + const int64_t kMedEndToEndDelay = 20; + const int64_t kLongEndToEndDelay = 100; + const uint32_t kExpectedRtpTimestamp = 2; + TimingFrameInfo info; + rtc::Optional result; + info.rtp_timestamp = kExpectedRtpTimestamp - 1; + info.capture_time_ms = 0; + info.decode_finish_ms = kShortEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + info.rtp_timestamp = + kExpectedRtpTimestamp; // this frame should be reported in the end. 
+ info.capture_time_ms = 0; + info.decode_finish_ms = kLongEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + info.rtp_timestamp = kExpectedRtpTimestamp + 1; + info.capture_time_ms = 0; + info.decode_finish_ms = kMedEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + result = statistics_proxy_->GetAndResetTimingFrameInfo(); + EXPECT_TRUE(result); + EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp); +} + +TEST_F(ReceiveStatisticsProxyTest, GetTimingFrameInfoTimingFramesReportedOnce) { + const int64_t kShortEndToEndDelay = 10; + const uint32_t kExpectedRtpTimestamp = 2; + TimingFrameInfo info; + rtc::Optional result; + info.rtp_timestamp = kExpectedRtpTimestamp; + info.capture_time_ms = 0; + info.decode_finish_ms = kShortEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + result = statistics_proxy_->GetAndResetTimingFrameInfo(); + EXPECT_TRUE(result); + EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp); + result = statistics_proxy_->GetAndResetTimingFrameInfo(); + EXPECT_FALSE(result); +} + TEST_F(ReceiveStatisticsProxyTest, LifetimeHistogramIsUpdated) { const int64_t kTimeSec = 3; fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000); diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc index acc497b616..f21cf720a6 100644 --- a/webrtc/video/video_receive_stream.cc +++ b/webrtc/video/video_receive_stream.cc @@ -344,6 +344,11 @@ VideoReceiveStream::Stats VideoReceiveStream::GetStats() const { return stats_proxy_.GetStats(); } +rtc::Optional +VideoReceiveStream::GetAndResetTimingFrameInfo() { + return stats_proxy_.GetAndResetTimingFrameInfo(); +} + void VideoReceiveStream::EnableEncodedFrameRecording(rtc::PlatformFile file, size_t byte_limit) { { diff --git a/webrtc/video/video_receive_stream.h b/webrtc/video/video_receive_stream.h index a32573de97..6e2b20a95d 100644 --- a/webrtc/video/video_receive_stream.h +++ b/webrtc/video/video_receive_stream.h @@ -72,6 +72,8 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, webrtc::VideoReceiveStream::Stats GetStats() const override; + rtc::Optional GetAndResetTimingFrameInfo() override; + // Takes ownership of the file, is responsible for closing it later. // Calling this method will close and finalize any current log. // Giving rtc::kInvalidPlatformFileValue disables logging. 
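
For reference, the receive-side bookkeeping introduced by this patch (ReceiveStatisticsProxy::OnTimingFrameInfoUpdated keeps only the frame with the longest end-to-end delay, and GetAndResetTimingFrameInfo returns it once and clears it) can be modelled with the minimal standalone sketch below. FrameTimings, TimingFrameTracker and MakeFrame are names local to this sketch, not WebRTC types; only the selection rule mirrors the patch.

// Minimal model of the longest-timing-frame bookkeeping; illustration only.
// The real logic lives in ReceiveStatisticsProxy and is lock-protected.
#include <cstdint>
#include <iostream>

struct FrameTimings {
  uint32_t rtp_timestamp = 0;
  int64_t capture_time_ms = -1;  // -1 means sender clock not synchronized.
  int64_t decode_finish_ms = -1;

  int64_t EndToEndDelay() const {
    return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
  }
  // A stored frame whose delay is unknown (-1) is always replaced.
  bool IsLongerThan(const FrameTimings& other) const {
    const int64_t other_delay = other.EndToEndDelay();
    return other_delay == -1 || EndToEndDelay() > other_delay;
  }
};

FrameTimings MakeFrame(uint32_t rtp, int64_t capture_ms, int64_t decode_ms) {
  FrameTimings f;
  f.rtp_timestamp = rtp;
  f.capture_time_ms = capture_ms;
  f.decode_finish_ms = decode_ms;
  return f;
}

class TimingFrameTracker {
 public:
  // Called for every decoded timing frame; keeps only the slowest one.
  void OnTimingFrame(const FrameTimings& info) {
    if (!has_info_ || info.IsLongerThan(info_)) {
      info_ = info;
      has_info_ = true;
    }
  }
  // Called from GetStats(): returns the stored frame and clears it, so each
  // stats interval reports its own worst-case frame.
  bool GetAndReset(FrameTimings* out) {
    if (!has_info_) return false;
    *out = info_;
    has_info_ = false;
    return true;
  }

 private:
  bool has_info_ = false;
  FrameTimings info_;
};

int main() {
  TimingFrameTracker tracker;
  tracker.OnTimingFrame(MakeFrame(1, 0, 10));   // 10 ms end-to-end.
  tracker.OnTimingFrame(MakeFrame(2, 0, 100));  // 100 ms, the outlier.
  tracker.OnTimingFrame(MakeFrame(3, 0, 20));   // 20 ms.
  FrameTimings longest;
  if (tracker.GetAndReset(&longest))
    std::cout << "reported rtp_timestamp=" << longest.rtp_timestamp << "\n";
  // A second call reports nothing until a new timing frame arrives.
  std::cout << "second call has data: " << tracker.GetAndReset(&longest) << "\n";
}
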
diff --git a/webrtc/video/video_stream_decoder.cc b/webrtc/video/video_stream_decoder.cc index a7688ce13c..126d6e5e57 100644 --- a/webrtc/video/video_stream_decoder.cc +++ b/webrtc/video/video_stream_decoder.cc @@ -119,6 +119,9 @@ void VideoStreamDecoder::OnFrameBufferTimingsUpdated(int decode_ms, int min_playout_delay_ms, int render_delay_ms) {} +void VideoStreamDecoder::OnTimingFrameInfoUpdated(const TimingFrameInfo& info) { +} + void VideoStreamDecoder::OnCompleteFrame(bool is_keyframe, size_t size_bytes) {} void VideoStreamDecoder::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { diff --git a/webrtc/video/video_stream_decoder.h b/webrtc/video/video_stream_decoder.h index b670b12659..3903fb559c 100644 --- a/webrtc/video/video_stream_decoder.h +++ b/webrtc/video/video_stream_decoder.h @@ -78,6 +78,8 @@ class VideoStreamDecoder : public VCMReceiveCallback, int min_playout_delay_ms, int render_delay_ms) override; + void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override; + void RegisterReceiveStatisticsProxy( ReceiveStatisticsProxy* receive_statistics_proxy); diff --git a/webrtc/video_receive_stream.h b/webrtc/video_receive_stream.h index b3a98c617a..66dd78b241 100644 --- a/webrtc/video_receive_stream.h +++ b/webrtc/video_receive_stream.h @@ -207,6 +207,8 @@ class VideoReceiveStream { // TODO(pbos): Add info on currently-received codec to Stats. virtual Stats GetStats() const = 0; + virtual rtc::Optional GetAndResetTimingFrameInfo() = 0; + // Takes ownership of the file, is responsible for closing it later. // Calling this method will close and finalize any current log. // Giving rtc::kInvalidPlatformFileValue disables logging.
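
One subtle piece of the patch is the sender-clock fallback in generic_decoder.cc: when decodedImage.ntp_time_ms() is negative the sender clock offset is not estimated yet, so all sender-side timestamps are shifted by the largest of them plus one. That forces them all negative (so EndToEndDelay() reports -1) while preserving their relative spacing for debugging. A minimal sketch of that arithmetic, with made-up values; only the shift itself mirrors the patch:

// Sketch of the sender-clock fallback; illustration only.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

int main() {
  // Sender-side timestamps already mapped to the receiver clock domain
  // (capture, encode start/finish, packetization, pacer exit, network,
  // network2); the values are made up.
  std::vector<int64_t> sender_ms = {1000, 1002, 1010, 1012, 1015, -1, -1};
  const bool sender_clock_estimated = false;  // ntp_time_ms() < 0 in the patch.
  int64_t sender_delta_ms = 0;
  if (!sender_clock_estimated) {
    sender_delta_ms =
        *std::max_element(sender_ms.begin(), sender_ms.end()) + 1;
  }
  for (int64_t& t : sender_ms)
    t -= sender_delta_ms;
  // All sender timestamps are now negative, signalling "unsynchronized",
  // but relative deltas (e.g. encode duration 1010 - 1002 = 8 ms) survive.
  for (int64_t t : sender_ms)
    std::cout << t << ' ';
  std::cout << '\n';
}
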
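On the consuming side, the new googTimingFrameInfo stat arrives as the comma-separated string produced by TimingFrameInfo::ToString() above. Below is a hedged sketch of turning that string back into fields and recomputing the end-to-end delay; ParseTimingFrameInfo and the sample value are illustrative, not part of WebRTC.

// Sketch: decoding the googTimingFrameInfo string reported via GetStats().
// The 13 comma-separated values follow TimingFrameInfo::ToString():
//   rtp_timestamp, capture_time_ms, encode_start_ms, encode_finish_ms,
//   packetization_finish_ms, pacer_exit_ms, network_timestamp_ms,
//   network2_timestamp_ms, receive_start_ms, receive_finish_ms,
//   decode_start_ms, decode_finish_ms, render_time_ms.
#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical helper for this sketch only; assumes well-formed input
// (std::stoll throws on non-numeric tokens).
bool ParseTimingFrameInfo(const std::string& value,
                          std::vector<int64_t>* fields) {
  fields->clear();
  std::stringstream in(value);
  std::string token;
  while (std::getline(in, token, ','))
    fields->push_back(std::stoll(token));
  return fields->size() == 13;  // Malformed string otherwise.
}

int main() {
  // Example value in the format emitted by TimingFrameInfo::ToString();
  // the numbers themselves are made up.
  const std::string stat =
      "90000,1000,1002,1010,1012,1015,-1,-1,1050,1060,1061,1070,1090";
  std::vector<int64_t> f;
  if (!ParseTimingFrameInfo(stat, &f)) {
    std::cerr << "unexpected googTimingFrameInfo format\n";
    return 1;
  }
  const int64_t capture_time_ms = f[1];
  const int64_t decode_finish_ms = f[11];
  // Mirrors TimingFrameInfo::EndToEndDelay(): a negative capture time means
  // sender and receiver clocks were not synchronized for this frame.
  const int64_t end_to_end_ms =
      capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
  std::cout << "rtp_timestamp=" << f[0]
            << " end_to_end_ms=" << end_to_end_ms << "\n";
}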