From 74fc574cbc14be50ebee6d69794a46264a4f86ab Mon Sep 17 00:00:00 2001 From: Tommi Date: Mon, 27 Apr 2020 10:43:06 +0200 Subject: [PATCH] Fork a few VideoReceiveStream related classes. We'll need to deprecate the previous classes due to being used externally as an API. Bug: webrtc:11489 Change-Id: I64de29c8adae304d0b7628e24dd0abc5be6387ba Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173960 Reviewed-by: Mirko Bonadei Commit-Queue: Tommi Cr-Commit-Position: refs/heads/master@{#31136} --- video/BUILD.gn | 9 + video/receive_statistics_proxy2.cc | 943 +++++++++ video/receive_statistics_proxy2.h | 208 ++ video/receive_statistics_proxy2_unittest.cc | 1836 +++++++++++++++++ video/rtp_video_stream_receiver.cc | 42 +- video/rtp_video_stream_receiver.h | 22 + ...ver_frame_transformer_delegate_unittest.cc | 1 + video/rtp_video_stream_receiver_unittest.cc | 4 +- video/video_quality_observer2.cc | 288 +++ video/video_quality_observer2.h | 101 + video/video_receive_stream.cc | 1 + video/video_receive_stream2.cc | 795 +++++++ video/video_receive_stream2.h | 238 +++ video/video_stream_decoder2.cc | 69 + video/video_stream_decoder2.h | 60 + 15 files changed, 4611 insertions(+), 6 deletions(-) create mode 100644 video/receive_statistics_proxy2.cc create mode 100644 video/receive_statistics_proxy2.h create mode 100644 video/receive_statistics_proxy2_unittest.cc create mode 100644 video/video_quality_observer2.cc create mode 100644 video/video_quality_observer2.h create mode 100644 video/video_receive_stream2.cc create mode 100644 video/video_receive_stream2.h create mode 100644 video/video_stream_decoder2.cc create mode 100644 video/video_stream_decoder2.h diff --git a/video/BUILD.gn b/video/BUILD.gn index f8ad66b452..28647df14b 100644 --- a/video/BUILD.gn +++ b/video/BUILD.gn @@ -22,6 +22,8 @@ rtc_library("video") { "quality_threshold.h", "receive_statistics_proxy.cc", "receive_statistics_proxy.h", + "receive_statistics_proxy2.cc", + "receive_statistics_proxy2.h", 
"report_block_stats.cc", "report_block_stats.h", "rtp_streams_synchronizer.cc", @@ -42,14 +44,20 @@ rtc_library("video") { "transport_adapter.h", "video_quality_observer.cc", "video_quality_observer.h", + "video_quality_observer2.cc", + "video_quality_observer2.h", "video_receive_stream.cc", "video_receive_stream.h", + "video_receive_stream2.cc", + "video_receive_stream2.h", "video_send_stream.cc", "video_send_stream.h", "video_send_stream_impl.cc", "video_send_stream_impl.h", "video_stream_decoder.cc", "video_stream_decoder.h", + "video_stream_decoder2.cc", + "video_stream_decoder2.h", ] deps = [ @@ -507,6 +515,7 @@ if (rtc_include_tests) { "quality_limitation_reason_tracker_unittest.cc", "quality_scaling_tests.cc", "quality_threshold_unittest.cc", + "receive_statistics_proxy2_unittest.cc", "receive_statistics_proxy_unittest.cc", "report_block_stats_unittest.cc", "rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc", diff --git a/video/receive_statistics_proxy2.cc b/video/receive_statistics_proxy2.cc new file mode 100644 index 0000000000..50b1ea05ea --- /dev/null +++ b/video/receive_statistics_proxy2.cc @@ -0,0 +1,943 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/receive_statistics_proxy2.h" + +#include +#include +#include + +#include "modules/video_coding/include/video_codec_interface.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace internal { +namespace { +// Periodic time interval for processing samples for |freq_offset_counter_|. +const int64_t kFreqOffsetProcessIntervalMs = 40000; + +// Configuration for bad call detection. +const int kBadCallMinRequiredSamples = 10; +const int kMinSampleLengthMs = 990; +const int kNumMeasurements = 10; +const int kNumMeasurementsVariance = kNumMeasurements * 1.5; +const float kBadFraction = 0.8f; +// For fps: +// Low means low enough to be bad, high means high enough to be good +const int kLowFpsThreshold = 12; +const int kHighFpsThreshold = 14; +// For qp and fps variance: +// Low means low enough to be good, high means high enough to be bad +const int kLowQpThresholdVp8 = 60; +const int kHighQpThresholdVp8 = 70; +const int kLowVarianceThreshold = 1; +const int kHighVarianceThreshold = 2; + +// Some metrics are reported as a maximum over this period. +// This should be synchronized with a typical getStats polling interval in +// the clients. +const int kMovingMaxWindowMs = 1000; + +// How large window we use to calculate the framerate/bitrate. +const int kRateStatisticsWindowSizeMs = 1000; + +// Some sane ballpark estimate for maximum common value of inter-frame delay. +// Values below that will be stored explicitly in the array, +// values above - in the map. 
+const int kMaxCommonInterframeDelayMs = 500; + +const char* UmaPrefixForContentType(VideoContentType content_type) { + if (videocontenttypehelpers::IsScreenshare(content_type)) + return "WebRTC.Video.Screenshare"; + return "WebRTC.Video"; +} + +std::string UmaSuffixForContentType(VideoContentType content_type) { + char ss_buf[1024]; + rtc::SimpleStringBuilder ss(ss_buf); + int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type); + if (simulcast_id > 0) { + ss << ".S" << simulcast_id - 1; + } + int experiment_id = videocontenttypehelpers::GetExperimentId(content_type); + if (experiment_id > 0) { + ss << ".ExperimentGroup" << experiment_id - 1; + } + return ss.str(); +} + +} // namespace + +ReceiveStatisticsProxy::ReceiveStatisticsProxy( + const VideoReceiveStream::Config* config, + Clock* clock) + : clock_(clock), + config_(*config), + start_ms_(clock->TimeInMilliseconds()), + enable_decode_time_histograms_( + !field_trial::IsEnabled("WebRTC-DecodeTimeHistogramsKillSwitch")), + last_sample_time_(clock->TimeInMilliseconds()), + fps_threshold_(kLowFpsThreshold, + kHighFpsThreshold, + kBadFraction, + kNumMeasurements), + qp_threshold_(kLowQpThresholdVp8, + kHighQpThresholdVp8, + kBadFraction, + kNumMeasurements), + variance_threshold_(kLowVarianceThreshold, + kHighVarianceThreshold, + kBadFraction, + kNumMeasurementsVariance), + num_bad_states_(0), + num_certain_states_(0), + // 1000ms window, scale 1000 for ms to s. 
+ decode_fps_estimator_(1000, 1000), + renders_fps_estimator_(1000, 1000), + render_fps_tracker_(100, 10u), + render_pixel_tracker_(100, 10u), + video_quality_observer_( + new VideoQualityObserver(VideoContentType::UNSPECIFIED)), + interframe_delay_max_moving_(kMovingMaxWindowMs), + freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), + avg_rtt_ms_(0), + last_content_type_(VideoContentType::UNSPECIFIED), + last_codec_type_(kVideoCodecVP8), + num_delayed_frames_rendered_(0), + sum_missed_render_deadline_ms_(0), + timing_frame_info_counter_(kMovingMaxWindowMs) { + decode_thread_.Detach(); + network_thread_.Detach(); + stats_.ssrc = config_.rtp.remote_ssrc; +} + +void ReceiveStatisticsProxy::UpdateHistograms( + absl::optional fraction_lost, + const StreamDataCounters& rtp_stats, + const StreamDataCounters* rtx_stats) { + // Not actually running on the decoder thread, but must be called after + // DecoderThreadStopped, which detaches the thread checker. It is therefore + // safe to access |qp_counters_|, which were updated on the decode thread + // earlier. 
+ RTC_DCHECK_RUN_ON(&decode_thread_); + + rtc::CritScope lock(&crit_); + + char log_stream_buf[8 * 1024]; + rtc::SimpleStringBuilder log_stream(log_stream_buf); + int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; + if (stats_.frame_counts.key_frames > 0 || + stats_.frame_counts.delta_frames > 0) { + RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds", + stream_duration_sec); + log_stream << "WebRTC.Video.ReceiveStreamLifetimeInSeconds " + << stream_duration_sec << '\n'; + } + + log_stream << "Frames decoded " << stats_.frames_decoded << '\n'; + + if (num_unique_frames_) { + int num_dropped_frames = *num_unique_frames_ - stats_.frames_decoded; + RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.Receiver", + num_dropped_frames); + log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames + << '\n'; + } + + if (fraction_lost && stream_duration_sec >= metrics::kMinRunTimeInSeconds) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", + *fraction_lost); + log_stream << "WebRTC.Video.ReceivedPacketsLostInPercent " << *fraction_lost + << '\n'; + } + + if (first_decoded_frame_time_ms_) { + const int64_t elapsed_ms = + (clock_->TimeInMilliseconds() - *first_decoded_frame_time_ms_); + if (elapsed_ms >= + metrics::kMinRunTimeInSeconds * rtc::kNumMillisecsPerSec) { + int decoded_fps = static_cast( + (stats_.frames_decoded * 1000.0f / elapsed_ms) + 0.5f); + RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond", + decoded_fps); + log_stream << "WebRTC.Video.DecodedFramesPerSecond " << decoded_fps + << '\n'; + + const uint32_t frames_rendered = stats_.frames_rendered; + if (frames_rendered > 0) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DelayedFramesToRenderer", + static_cast(num_delayed_frames_rendered_ * + 100 / frames_rendered)); + if (num_delayed_frames_rendered_ > 0) { + RTC_HISTOGRAM_COUNTS_1000( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs", + 
static_cast(sum_missed_render_deadline_ms_ / + num_delayed_frames_rendered_)); + } + } + } + } + + const int kMinRequiredSamples = 200; + int samples = static_cast(render_fps_tracker_.TotalSampleCount()); + if (samples >= kMinRequiredSamples) { + int rendered_fps = round(render_fps_tracker_.ComputeTotalRate()); + RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", + rendered_fps); + log_stream << "WebRTC.Video.RenderFramesPerSecond " << rendered_fps << '\n'; + RTC_HISTOGRAM_COUNTS_100000( + "WebRTC.Video.RenderSqrtPixelsPerSecond", + round(render_pixel_tracker_.ComputeTotalRate())); + } + + absl::optional sync_offset_ms = + sync_offset_counter_.Avg(kMinRequiredSamples); + if (sync_offset_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", + *sync_offset_ms); + log_stream << "WebRTC.Video.AVSyncOffsetInMs " << *sync_offset_ms << '\n'; + } + AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); + if (freq_offset_stats.num_samples > 0) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", + freq_offset_stats.average); + log_stream << "WebRTC.Video.RtpToNtpFreqOffsetInKhz " + << freq_offset_stats.ToString() << '\n'; + } + + int num_total_frames = + stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames; + if (num_total_frames >= kMinRequiredSamples) { + int num_key_frames = stats_.frame_counts.key_frames; + int key_frames_permille = + (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; + RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille", + key_frames_permille); + log_stream << "WebRTC.Video.KeyFramesReceivedInPermille " + << key_frames_permille << '\n'; + } + + absl::optional qp = qp_counters_.vp8.Avg(kMinRequiredSamples); + if (qp) { + RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *qp); + log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *qp << '\n'; + } + absl::optional decode_ms = decode_time_counter_.Avg(kMinRequiredSamples); + if (decode_ms) { + 
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", *decode_ms); + log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n'; + } + absl::optional jb_delay_ms = + jitter_buffer_delay_counter_.Avg(kMinRequiredSamples); + if (jb_delay_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs", + *jb_delay_ms); + log_stream << "WebRTC.Video.JitterBufferDelayInMs " << *jb_delay_ms << '\n'; + } + + absl::optional target_delay_ms = + target_delay_counter_.Avg(kMinRequiredSamples); + if (target_delay_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs", + *target_delay_ms); + log_stream << "WebRTC.Video.TargetDelayInMs " << *target_delay_ms << '\n'; + } + absl::optional current_delay_ms = + current_delay_counter_.Avg(kMinRequiredSamples); + if (current_delay_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", + *current_delay_ms); + log_stream << "WebRTC.Video.CurrentDelayInMs " << *current_delay_ms << '\n'; + } + absl::optional delay_ms = delay_counter_.Avg(kMinRequiredSamples); + if (delay_ms) + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", *delay_ms); + + // Aggregate content_specific_stats_ by removing experiment or simulcast + // information; + std::map aggregated_stats; + for (const auto& it : content_specific_stats_) { + // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes). + VideoContentType content_type = it.first; + if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) { + // Aggregate on experiment id. + videocontenttypehelpers::SetExperimentId(&content_type, 0); + aggregated_stats[content_type].Add(it.second); + } + // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes). + content_type = it.first; + if (videocontenttypehelpers::GetExperimentId(content_type) > 0) { + // Aggregate on simulcast id. 
+ videocontenttypehelpers::SetSimulcastId(&content_type, 0); + aggregated_stats[content_type].Add(it.second); + } + // Calculate aggregated metrics (no suffixes. Aggregated on everything). + content_type = it.first; + videocontenttypehelpers::SetSimulcastId(&content_type, 0); + videocontenttypehelpers::SetExperimentId(&content_type, 0); + aggregated_stats[content_type].Add(it.second); + } + + for (const auto& it : aggregated_stats) { + // For the metric Foo we report the following slices: + // WebRTC.Video.Foo, + // WebRTC.Video.Screenshare.Foo, + // WebRTC.Video.Foo.S[0-3], + // WebRTC.Video.Foo.ExperimentGroup[0-7], + // WebRTC.Video.Screenshare.Foo.S[0-3], + // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7]. + auto content_type = it.first; + auto stats = it.second; + std::string uma_prefix = UmaPrefixForContentType(content_type); + std::string uma_suffix = UmaSuffixForContentType(content_type); + // Metrics can be sliced on either simulcast id or experiment id but not + // both. + RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 || + videocontenttypehelpers::GetSimulcastId(content_type) == 0); + + absl::optional e2e_delay_ms = + stats.e2e_delay_counter.Avg(kMinRequiredSamples); + if (e2e_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms); + log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " " + << *e2e_delay_ms << '\n'; + } + absl::optional e2e_delay_max_ms = stats.e2e_delay_counter.Max(); + if (e2e_delay_max_ms && e2e_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_100000( + uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms); + log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " " + << *e2e_delay_max_ms << '\n'; + } + absl::optional interframe_delay_ms = + stats.interframe_delay_counter.Avg(kMinRequiredSamples); + if (interframe_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".InterframeDelayInMs" + 
uma_suffix, + *interframe_delay_ms); + log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " " + << *interframe_delay_ms << '\n'; + } + absl::optional interframe_delay_max_ms = + stats.interframe_delay_counter.Max(); + if (interframe_delay_max_ms && interframe_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix, + *interframe_delay_max_ms); + log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " " + << *interframe_delay_max_ms << '\n'; + } + + absl::optional interframe_delay_95p_ms = + stats.interframe_delay_percentiles.GetPercentile(0.95f); + if (interframe_delay_95p_ms && interframe_delay_ms != -1) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix, + *interframe_delay_95p_ms); + log_stream << uma_prefix << ".InterframeDelay95PercentileInMs" + << uma_suffix << " " << *interframe_delay_95p_ms << '\n'; + } + + absl::optional width = stats.received_width.Avg(kMinRequiredSamples); + if (width) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width); + log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " " + << *width << '\n'; + } + + absl::optional height = stats.received_height.Avg(kMinRequiredSamples); + if (height) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height); + log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " " + << *height << '\n'; + } + + if (content_type != VideoContentType::UNSPECIFIED) { + // Don't report these 3 metrics unsliced, as more precise variants + // are reported separately in this method. 
+ float flow_duration_sec = stats.flow_duration_ms / 1000.0; + if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) { + int media_bitrate_kbps = static_cast(stats.total_media_bytes * 8 / + flow_duration_sec / 1000); + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix, + media_bitrate_kbps); + log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix + << " " << media_bitrate_kbps << '\n'; + } + + int num_total_frames = + stats.frame_counts.key_frames + stats.frame_counts.delta_frames; + if (num_total_frames >= kMinRequiredSamples) { + int num_key_frames = stats.frame_counts.key_frames; + int key_frames_permille = + (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; + RTC_HISTOGRAM_COUNTS_SPARSE_1000( + uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix, + key_frames_permille); + log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix + << " " << key_frames_permille << '\n'; + } + + absl::optional qp = stats.qp_counter.Avg(kMinRequiredSamples); + if (qp) { + RTC_HISTOGRAM_COUNTS_SPARSE_200( + uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp); + log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " " + << *qp << '\n'; + } + } + } + + StreamDataCounters rtp_rtx_stats = rtp_stats; + if (rtx_stats) + rtp_rtx_stats.Add(*rtx_stats); + int64_t elapsed_sec = + rtp_rtx_stats.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / + 1000; + if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.BitrateReceivedInKbps", + static_cast(rtp_rtx_stats.transmitted.TotalBytes() * 8 / + elapsed_sec / 1000)); + int media_bitrate_kbs = static_cast(rtp_stats.MediaPayloadBytes() * 8 / + elapsed_sec / 1000); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps", + media_bitrate_kbs); + log_stream << "WebRTC.Video.MediaBitrateReceivedInKbps " + << media_bitrate_kbs << '\n'; + RTC_HISTOGRAM_COUNTS_10000( + 
"WebRTC.Video.PaddingBitrateReceivedInKbps", + static_cast(rtp_rtx_stats.transmitted.padding_bytes * 8 / + elapsed_sec / 1000)); + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.RetransmittedBitrateReceivedInKbps", + static_cast(rtp_rtx_stats.retransmitted.TotalBytes() * 8 / + elapsed_sec / 1000)); + if (rtx_stats) { + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.RtxBitrateReceivedInKbps", + static_cast(rtx_stats->transmitted.TotalBytes() * 8 / + elapsed_sec / 1000)); + } + const RtcpPacketTypeCounter& counters = stats_.rtcp_packet_type_counts; + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute", + counters.nack_packets * 60 / elapsed_sec); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute", + counters.fir_packets * 60 / elapsed_sec); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute", + counters.pli_packets * 60 / elapsed_sec); + if (counters.nack_requests > 0) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent", + counters.UniqueNackRequestsInPercent()); + } + } + + if (num_certain_states_ >= kBadCallMinRequiredSamples) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Any", + 100 * num_bad_states_ / num_certain_states_); + } + absl::optional fps_fraction = + fps_threshold_.FractionHigh(kBadCallMinRequiredSamples); + if (fps_fraction) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRate", + static_cast(100 * (1 - *fps_fraction))); + } + absl::optional variance_fraction = + variance_threshold_.FractionHigh(kBadCallMinRequiredSamples); + if (variance_fraction) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRateVariance", + static_cast(100 * *variance_fraction)); + } + absl::optional qp_fraction = + qp_threshold_.FractionHigh(kBadCallMinRequiredSamples); + if (qp_fraction) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Qp", + static_cast(100 * *qp_fraction)); + } + + RTC_LOG(LS_INFO) << log_stream.str(); + video_quality_observer_->UpdateHistograms(); +} + +void 
ReceiveStatisticsProxy::QualitySample() { + int64_t now = clock_->TimeInMilliseconds(); + if (last_sample_time_ + kMinSampleLengthMs > now) + return; + + double fps = + render_fps_tracker_.ComputeRateForInterval(now - last_sample_time_); + absl::optional qp = qp_sample_.Avg(1); + + bool prev_fps_bad = !fps_threshold_.IsHigh().value_or(true); + bool prev_qp_bad = qp_threshold_.IsHigh().value_or(false); + bool prev_variance_bad = variance_threshold_.IsHigh().value_or(false); + bool prev_any_bad = prev_fps_bad || prev_qp_bad || prev_variance_bad; + + fps_threshold_.AddMeasurement(static_cast(fps)); + if (qp) + qp_threshold_.AddMeasurement(*qp); + absl::optional fps_variance_opt = fps_threshold_.CalculateVariance(); + double fps_variance = fps_variance_opt.value_or(0); + if (fps_variance_opt) { + variance_threshold_.AddMeasurement(static_cast(fps_variance)); + } + + bool fps_bad = !fps_threshold_.IsHigh().value_or(true); + bool qp_bad = qp_threshold_.IsHigh().value_or(false); + bool variance_bad = variance_threshold_.IsHigh().value_or(false); + bool any_bad = fps_bad || qp_bad || variance_bad; + + if (!prev_any_bad && any_bad) { + RTC_LOG(LS_INFO) << "Bad call (any) start: " << now; + } else if (prev_any_bad && !any_bad) { + RTC_LOG(LS_INFO) << "Bad call (any) end: " << now; + } + + if (!prev_fps_bad && fps_bad) { + RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now; + } else if (prev_fps_bad && !fps_bad) { + RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now; + } + + if (!prev_qp_bad && qp_bad) { + RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now; + } else if (prev_qp_bad && !qp_bad) { + RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now; + } + + if (!prev_variance_bad && variance_bad) { + RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now; + } else if (prev_variance_bad && !variance_bad) { + RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now; + } + + RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: " << (now - last_sample_time_) + << " fps: " << fps << " 
fps_bad: " << fps_bad + << " qp: " << qp.value_or(-1) << " qp_bad: " << qp_bad + << " variance_bad: " << variance_bad + << " fps_variance: " << fps_variance; + + last_sample_time_ = now; + qp_sample_.Reset(); + + if (fps_threshold_.IsHigh() || variance_threshold_.IsHigh() || + qp_threshold_.IsHigh()) { + if (any_bad) + ++num_bad_states_; + ++num_certain_states_; + } +} + +void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const { + int64_t old_frames_ms = now_ms - kRateStatisticsWindowSizeMs; + while (!frame_window_.empty() && + frame_window_.begin()->first < old_frames_ms) { + frame_window_.erase(frame_window_.begin()); + } + + size_t framerate = + (frame_window_.size() * 1000 + 500) / kRateStatisticsWindowSizeMs; + stats_.network_frame_rate = static_cast(framerate); +} + +void ReceiveStatisticsProxy::UpdateDecodeTimeHistograms( + int width, + int height, + int decode_time_ms) const { + bool is_4k = (width == 3840 || width == 4096) && height == 2160; + bool is_hd = width == 1920 && height == 1080; + // Only update histograms for 4k/HD and VP9/H264. + if ((is_4k || is_hd) && (last_codec_type_ == kVideoCodecVP9 || + last_codec_type_ == kVideoCodecH264)) { + const std::string kDecodeTimeUmaPrefix = + "WebRTC.Video.DecodeTimePerFrameInMs."; + + // Each histogram needs its own line for it to not be reused in the wrong + // way when the format changes. 
+ if (last_codec_type_ == kVideoCodecVP9) { + bool is_sw_decoder = + stats_.decoder_implementation_name.compare(0, 6, "libvpx") == 0; + if (is_4k) { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Hw", + decode_time_ms); + } else { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Hw", + decode_time_ms); + } + } else { + bool is_sw_decoder = + stats_.decoder_implementation_name.compare(0, 6, "FFmpeg") == 0; + if (is_4k) { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Hw", + decode_time_ms); + + } else { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Hw", + decode_time_ms); + } + } + } +} + +absl::optional +ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs( + int64_t now_ms) const { + if (!last_estimated_playout_ntp_timestamp_ms_ || + !last_estimated_playout_time_ms_) { + return absl::nullopt; + } + int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_; + return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms; +} + +VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { + rtc::CritScope lock(&crit_); + // Get current frame rates here, as only updating them on new frames prevents + // us from ever correctly displaying frame rate of 0. 
+ int64_t now_ms = clock_->TimeInMilliseconds(); + UpdateFramerate(now_ms); + stats_.render_frame_rate = renders_fps_estimator_.Rate(now_ms).value_or(0); + stats_.decode_frame_rate = decode_fps_estimator_.Rate(now_ms).value_or(0); + stats_.interframe_delay_max_ms = + interframe_delay_max_moving_.Max(now_ms).value_or(-1); + stats_.freeze_count = video_quality_observer_->NumFreezes(); + stats_.pause_count = video_quality_observer_->NumPauses(); + stats_.total_freezes_duration_ms = + video_quality_observer_->TotalFreezesDurationMs(); + stats_.total_pauses_duration_ms = + video_quality_observer_->TotalPausesDurationMs(); + stats_.total_frames_duration_ms = + video_quality_observer_->TotalFramesDurationMs(); + stats_.sum_squared_frame_durations = + video_quality_observer_->SumSquaredFrameDurationsSec(); + stats_.content_type = last_content_type_; + stats_.timing_frame_info = timing_frame_info_counter_.Max(now_ms); + stats_.jitter_buffer_delay_seconds = + static_cast(current_delay_counter_.Sum(1).value_or(0)) / + rtc::kNumMillisecsPerSec; + stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples(); + stats_.estimated_playout_ntp_timestamp_ms = + GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms); + return stats_; +} + +void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) { + rtc::CritScope lock(&crit_); + stats_.current_payload_type = payload_type; +} + +void ReceiveStatisticsProxy::OnDecoderImplementationName( + const char* implementation_name) { + rtc::CritScope lock(&crit_); + stats_.decoder_implementation_name = implementation_name; +} + +void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( + int max_decode_ms, + int current_delay_ms, + int target_delay_ms, + int jitter_buffer_ms, + int min_playout_delay_ms, + int render_delay_ms) { + rtc::CritScope lock(&crit_); + stats_.max_decode_ms = max_decode_ms; + stats_.current_delay_ms = current_delay_ms; + stats_.target_delay_ms = target_delay_ms; + stats_.jitter_buffer_ms = 
jitter_buffer_ms; + stats_.min_playout_delay_ms = min_playout_delay_ms; + stats_.render_delay_ms = render_delay_ms; + jitter_buffer_delay_counter_.Add(jitter_buffer_ms); + target_delay_counter_.Add(target_delay_ms); + current_delay_counter_.Add(current_delay_ms); + // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time + + // render delay). + delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2); +} + +void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { + rtc::CritScope lock(&crit_); + num_unique_frames_.emplace(num_unique_frames); +} + +void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( + const TimingFrameInfo& info) { + rtc::CritScope lock(&crit_); + if (info.flags != VideoSendTiming::kInvalid) { + int64_t now_ms = clock_->TimeInMilliseconds(); + timing_frame_info_counter_.Add(info, now_ms); + } + + // Measure initial decoding latency between the first frame arriving and the + // first frame being decoded. + if (!first_frame_received_time_ms_.has_value()) { + first_frame_received_time_ms_ = info.receive_finish_ms; + } + if (stats_.first_frame_received_to_decoded_ms == -1 && + first_decoded_frame_time_ms_) { + stats_.first_frame_received_to_decoded_ms = + *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_; + } +} + +void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( + uint32_t ssrc, + const RtcpPacketTypeCounter& packet_counter) { + rtc::CritScope lock(&crit_); + if (stats_.ssrc != ssrc) + return; + stats_.rtcp_packet_type_counts = packet_counter; +} + +void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) { + rtc::CritScope lock(&crit_); + // TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we + // receive stats from one of them. 
+ if (stats_.ssrc != ssrc) + return; + stats_.c_name = std::string(cname); +} + +void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type) { + rtc::CritScope lock(&crit_); + + uint64_t now_ms = clock_->TimeInMilliseconds(); + + if (videocontenttypehelpers::IsScreenshare(content_type) != + videocontenttypehelpers::IsScreenshare(last_content_type_)) { + // Reset the quality observer if content type is switched. But first report + // stats for the previous part of the call. + video_quality_observer_->UpdateHistograms(); + video_quality_observer_.reset(new VideoQualityObserver(content_type)); + } + + video_quality_observer_->OnDecodedFrame(frame, qp, last_codec_type_); + + ContentSpecificStats* content_specific_stats = + &content_specific_stats_[content_type]; + ++stats_.frames_decoded; + if (qp) { + if (!stats_.qp_sum) { + if (stats_.frames_decoded != 1) { + RTC_LOG(LS_WARNING) + << "Frames decoded was not 1 when first qp value was received."; + } + stats_.qp_sum = 0; + } + *stats_.qp_sum += *qp; + content_specific_stats->qp_counter.Add(*qp); + } else if (stats_.qp_sum) { + RTC_LOG(LS_WARNING) + << "QP sum was already set and no QP was given for a frame."; + stats_.qp_sum.reset(); + } + decode_time_counter_.Add(decode_time_ms); + stats_.decode_ms = decode_time_ms; + stats_.total_decode_time_ms += decode_time_ms; + if (enable_decode_time_histograms_) { + UpdateDecodeTimeHistograms(frame.width(), frame.height(), decode_time_ms); + } + + last_content_type_ = content_type; + decode_fps_estimator_.Update(1, now_ms); + if (last_decoded_frame_time_ms_) { + int64_t interframe_delay_ms = now_ms - *last_decoded_frame_time_ms_; + RTC_DCHECK_GE(interframe_delay_ms, 0); + double interframe_delay = interframe_delay_ms / 1000.0; + stats_.total_inter_frame_delay += interframe_delay; + stats_.total_squared_inter_frame_delay += + interframe_delay * interframe_delay; + 
interframe_delay_max_moving_.Add(interframe_delay_ms, now_ms); + content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms); + content_specific_stats->interframe_delay_percentiles.Add( + interframe_delay_ms); + content_specific_stats->flow_duration_ms += interframe_delay_ms; + } + if (stats_.frames_decoded == 1) { + first_decoded_frame_time_ms_.emplace(now_ms); + } + last_decoded_frame_time_ms_.emplace(now_ms); +} + +void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { + int width = frame.width(); + int height = frame.height(); + RTC_DCHECK_GT(width, 0); + RTC_DCHECK_GT(height, 0); + int64_t now_ms = clock_->TimeInMilliseconds(); + rtc::CritScope lock(&crit_); + + video_quality_observer_->OnRenderedFrame(frame, now_ms); + + ContentSpecificStats* content_specific_stats = + &content_specific_stats_[last_content_type_]; + renders_fps_estimator_.Update(1, now_ms); + ++stats_.frames_rendered; + stats_.width = width; + stats_.height = height; + render_fps_tracker_.AddSamples(1); + render_pixel_tracker_.AddSamples(sqrt(width * height)); + content_specific_stats->received_width.Add(width); + content_specific_stats->received_height.Add(height); + + // Consider taking stats_.render_delay_ms into account. 
+ const int64_t time_until_rendering_ms = frame.render_time_ms() - now_ms; + if (time_until_rendering_ms < 0) { + sum_missed_render_deadline_ms_ += -time_until_rendering_ms; + ++num_delayed_frames_rendered_; + } + + if (frame.ntp_time_ms() > 0) { + int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); + if (delay_ms >= 0) { + content_specific_stats->e2e_delay_counter.Add(delay_ms); + } + } + QualitySample(); +} + +void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, + int64_t sync_offset_ms, + double estimated_freq_khz) { + rtc::CritScope lock(&crit_); + sync_offset_counter_.Add(std::abs(sync_offset_ms)); + stats_.sync_offset_ms = sync_offset_ms; + last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms; + last_estimated_playout_time_ms_ = clock_->TimeInMilliseconds(); + + const double kMaxFreqKhz = 10000.0; + int offset_khz = kMaxFreqKhz; + // Should not be zero or negative. If so, report max. + if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0) + offset_khz = static_cast(std::fabs(estimated_freq_khz - 90.0) + 0.5); + + freq_offset_counter_.Add(offset_khz); +} + +void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, + size_t size_bytes, + VideoContentType content_type) { + rtc::CritScope lock(&crit_); + if (is_keyframe) { + ++stats_.frame_counts.key_frames; + } else { + ++stats_.frame_counts.delta_frames; + } + + // Content type extension is set only for keyframes and should be propagated + // for all the following delta frames. Here we may receive frames out of order + // and miscategorise some delta frames near the layer switch. + // This may slightly offset calculated bitrate and keyframes permille metrics. + VideoContentType propagated_content_type = + is_keyframe ? 
content_type : last_content_type_; + + ContentSpecificStats* content_specific_stats = + &content_specific_stats_[propagated_content_type]; + + content_specific_stats->total_media_bytes += size_bytes; + if (is_keyframe) { + ++content_specific_stats->frame_counts.key_frames; + } else { + ++content_specific_stats->frame_counts.delta_frames; + } + + int64_t now_ms = clock_->TimeInMilliseconds(); + frame_window_.insert(std::make_pair(now_ms, size_bytes)); + UpdateFramerate(now_ms); +} + +void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { + rtc::CritScope lock(&crit_); + stats_.frames_dropped += frames_dropped; +} + +void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { + RTC_DCHECK_RUN_ON(&decode_thread_); + rtc::CritScope lock(&crit_); + last_codec_type_ = codec_type; + if (last_codec_type_ == kVideoCodecVP8 && qp != -1) { + qp_counters_.vp8.Add(qp); + qp_sample_.Add(qp); + } +} + +void ReceiveStatisticsProxy::OnStreamInactive() { + // TODO(sprang): Figure out any other state that should be reset. + + rtc::CritScope lock(&crit_); + // Don't report inter-frame delay if stream was paused. 
+ last_decoded_frame_time_ms_.reset(); + video_quality_observer_->OnStreamInactive(); +} + +void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, + int64_t max_rtt_ms) { + rtc::CritScope lock(&crit_); + avg_rtt_ms_ = avg_rtt_ms; +} + +void ReceiveStatisticsProxy::DecoderThreadStarting() { + RTC_DCHECK_RUN_ON(&main_thread_); +} + +void ReceiveStatisticsProxy::DecoderThreadStopped() { + RTC_DCHECK_RUN_ON(&main_thread_); + decode_thread_.Detach(); +} + +ReceiveStatisticsProxy::ContentSpecificStats::ContentSpecificStats() + : interframe_delay_percentiles(kMaxCommonInterframeDelayMs) {} + +ReceiveStatisticsProxy::ContentSpecificStats::~ContentSpecificStats() = default; + +void ReceiveStatisticsProxy::ContentSpecificStats::Add( + const ContentSpecificStats& other) { + e2e_delay_counter.Add(other.e2e_delay_counter); + interframe_delay_counter.Add(other.interframe_delay_counter); + flow_duration_ms += other.flow_duration_ms; + total_media_bytes += other.total_media_bytes; + received_height.Add(other.received_height); + received_width.Add(other.received_width); + qp_counter.Add(other.qp_counter); + frame_counts.key_frames += other.frame_counts.key_frames; + frame_counts.delta_frames += other.frame_counts.delta_frames; + interframe_delay_percentiles.Add(other.interframe_delay_percentiles); +} + +} // namespace internal +} // namespace webrtc diff --git a/video/receive_statistics_proxy2.h b/video/receive_statistics_proxy2.h new file mode 100644 index 0000000000..788bd617c4 --- /dev/null +++ b/video/receive_statistics_proxy2.h @@ -0,0 +1,208 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_RECEIVE_STATISTICS_PROXY2_H_ +#define VIDEO_RECEIVE_STATISTICS_PROXY2_H_ + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "call/video_receive_stream.h" +#include "modules/include/module_common_types.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "rtc_base/critical_section.h" +#include "rtc_base/numerics/histogram_percentile_counter.h" +#include "rtc_base/numerics/moving_max_counter.h" +#include "rtc_base/numerics/sample_counter.h" +#include "rtc_base/rate_statistics.h" +#include "rtc_base/rate_tracker.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/thread_checker.h" +#include "video/quality_threshold.h" +#include "video/stats_counter.h" +#include "video/video_quality_observer2.h" + +namespace webrtc { + +class Clock; +struct CodecSpecificInfo; + +namespace internal { + +class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, + public RtcpCnameCallback, + public RtcpPacketTypeCounterObserver, + public CallStatsObserver { + public: + ReceiveStatisticsProxy(const VideoReceiveStream::Config* config, + Clock* clock); + ~ReceiveStatisticsProxy() = default; + + VideoReceiveStream::Stats GetStats() const; + + void OnDecodedFrame(const VideoFrame& frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type); + void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, + int64_t sync_offset_ms, + double estimated_freq_khz); + void OnRenderedFrame(const VideoFrame& frame); + void OnIncomingPayloadType(int payload_type); + void OnDecoderImplementationName(const char* implementation_name); + + void OnPreDecode(VideoCodecType codec_type, int qp); + + void OnUniqueFramesCounted(int num_unique_frames); + + // Indicates video stream has been paused (no incoming packets). + void OnStreamInactive(); + + // Overrides VCMReceiveStatisticsCallback. 
+ void OnCompleteFrame(bool is_keyframe, + size_t size_bytes, + VideoContentType content_type) override; + void OnDroppedFrames(uint32_t frames_dropped) override; + void OnFrameBufferTimingsUpdated(int max_decode_ms, + int current_delay_ms, + int target_delay_ms, + int jitter_buffer_ms, + int min_playout_delay_ms, + int render_delay_ms) override; + + void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override; + + // Overrides RtcpCnameCallback. + void OnCname(uint32_t ssrc, absl::string_view cname) override; + + // Overrides RtcpPacketTypeCounterObserver. + void RtcpPacketTypesCounterUpdated( + uint32_t ssrc, + const RtcpPacketTypeCounter& packet_counter) override; + + // Implements CallStatsObserver. + void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; + + // Notification methods that are used to check our internal state and validate + // threading assumptions. These are called by VideoReceiveStream. + void DecoderThreadStarting(); + void DecoderThreadStopped(); + + // Produce histograms. Must be called after DecoderThreadStopped(), typically + // at the end of the call. + void UpdateHistograms(absl::optional fraction_lost, + const StreamDataCounters& rtp_stats, + const StreamDataCounters* rtx_stats); + + private: + struct QpCounters { + rtc::SampleCounter vp8; + }; + + struct ContentSpecificStats { + ContentSpecificStats(); + ~ContentSpecificStats(); + + void Add(const ContentSpecificStats& other); + + rtc::SampleCounter e2e_delay_counter; + rtc::SampleCounter interframe_delay_counter; + int64_t flow_duration_ms = 0; + int64_t total_media_bytes = 0; + rtc::SampleCounter received_width; + rtc::SampleCounter received_height; + rtc::SampleCounter qp_counter; + FrameCounts frame_counts; + rtc::HistogramPercentileCounter interframe_delay_percentiles; + }; + + void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + + // Removes info about old frames and then updates the framerate. 
+ void UpdateFramerate(int64_t now_ms) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + + void UpdateDecodeTimeHistograms(int width, + int height, + int decode_time_ms) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + + absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( + int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + + Clock* const clock_; + // Ownership of this object lies with the owner of the ReceiveStatisticsProxy + // instance. Lifetime is guaranteed to outlive |this|. + // TODO(tommi): In practice the config_ reference is only used for accessing + // config_.rtp.ulpfec.ulpfec_payload_type. Instead of holding a pointer back, + // we could just store the value of ulpfec_payload_type and change the + // ReceiveStatisticsProxy() ctor to accept a const& of Config (since we'll + // then no longer store a pointer to the object). + const VideoReceiveStream::Config& config_; + const int64_t start_ms_; + const bool enable_decode_time_histograms_; + + rtc::CriticalSection crit_; + int64_t last_sample_time_ RTC_GUARDED_BY(crit_); + QualityThreshold fps_threshold_ RTC_GUARDED_BY(crit_); + QualityThreshold qp_threshold_ RTC_GUARDED_BY(crit_); + QualityThreshold variance_threshold_ RTC_GUARDED_BY(crit_); + rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(crit_); + int num_bad_states_ RTC_GUARDED_BY(crit_); + int num_certain_states_ RTC_GUARDED_BY(crit_); + // Note: The |stats_.rtp_stats| member is not used or populated by this class. 
+ mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(crit_); + RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(crit_); + RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(crit_); + rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(crit_); + rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(crit_); + rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(crit_); + rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(crit_); + rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(crit_); + rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(crit_); + rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(crit_); + rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(crit_); + std::unique_ptr video_quality_observer_ + RTC_GUARDED_BY(crit_); + mutable rtc::MovingMaxCounter interframe_delay_max_moving_ + RTC_GUARDED_BY(crit_); + std::map content_specific_stats_ + RTC_GUARDED_BY(crit_); + MaxCounter freq_offset_counter_ RTC_GUARDED_BY(crit_); + QpCounters qp_counters_ RTC_GUARDED_BY(decode_thread_); + int64_t avg_rtt_ms_ RTC_GUARDED_BY(crit_); + mutable std::map frame_window_ RTC_GUARDED_BY(&crit_); + VideoContentType last_content_type_ RTC_GUARDED_BY(&crit_); + VideoCodecType last_codec_type_ RTC_GUARDED_BY(&crit_); + absl::optional first_frame_received_time_ms_ RTC_GUARDED_BY(&crit_); + absl::optional first_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_); + absl::optional last_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_); + size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&crit_); + int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(&crit_); + // Mutable because calling Max() on MovingMaxCounter is not const. Yet it is + // called from const GetStats(). 
+ mutable rtc::MovingMaxCounter timing_frame_info_counter_ + RTC_GUARDED_BY(&crit_); + absl::optional num_unique_frames_ RTC_GUARDED_BY(crit_); + absl::optional last_estimated_playout_ntp_timestamp_ms_ + RTC_GUARDED_BY(&crit_); + absl::optional last_estimated_playout_time_ms_ + RTC_GUARDED_BY(&crit_); + rtc::ThreadChecker decode_thread_; + rtc::ThreadChecker network_thread_; + rtc::ThreadChecker main_thread_; +}; + +} // namespace internal +} // namespace webrtc +#endif // VIDEO_RECEIVE_STATISTICS_PROXY2_H_ diff --git a/video/receive_statistics_proxy2_unittest.cc b/video/receive_statistics_proxy2_unittest.cc new file mode 100644 index 0000000000..5574d44a0f --- /dev/null +++ b/video/receive_statistics_proxy2_unittest.cc @@ -0,0 +1,1836 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/receive_statistics_proxy2.h" + +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "system_wrappers/include/metrics.h" +#include "test/field_trial.h" +#include "test/gtest.h" + +namespace webrtc { +namespace internal { +namespace { +const int64_t kFreqOffsetProcessIntervalInMs = 40000; +const uint32_t kLocalSsrc = 123; +const uint32_t kRemoteSsrc = 456; +const int kMinRequiredSamples = 200; +const int kWidth = 1280; +const int kHeight = 720; +} // namespace + +// TODO(sakal): ReceiveStatisticsProxy is lacking unittesting. 
+class ReceiveStatisticsProxy2Test : public ::testing::Test { + public: + ReceiveStatisticsProxy2Test() : fake_clock_(1234), config_(GetTestConfig()) {} + virtual ~ReceiveStatisticsProxy2Test() {} + + protected: + virtual void SetUp() { + metrics::Reset(); + statistics_proxy_.reset(new ReceiveStatisticsProxy(&config_, &fake_clock_)); + } + + VideoReceiveStream::Config GetTestConfig() { + VideoReceiveStream::Config config(nullptr); + config.rtp.local_ssrc = kLocalSsrc; + config.rtp.remote_ssrc = kRemoteSsrc; + return config; + } + + VideoFrame CreateFrame(int width, int height) { + return CreateVideoFrame(width, height, 0); + } + + VideoFrame CreateFrameWithRenderTimeMs(int64_t render_time_ms) { + return CreateVideoFrame(kWidth, kHeight, render_time_ms); + } + + VideoFrame CreateVideoFrame(int width, int height, int64_t render_time_ms) { + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(I420Buffer::Create(width, height)) + .set_timestamp_rtp(0) + .set_timestamp_ms(render_time_ms) + .set_rotation(kVideoRotation_0) + .build(); + frame.set_ntp_time_ms(fake_clock_.CurrentNtpInMilliseconds()); + return frame; + } + + SimulatedClock fake_clock_; + const VideoReceiveStream::Config config_; + std::unique_ptr statistics_proxy_; +}; + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameIncreasesFramesDecoded) { + EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (uint32_t i = 1; i <= 3; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded); + } +} + +TEST_F(ReceiveStatisticsProxy2Test, DecodedFpsIsReported) { + const int kFps = 20; + const int kRequiredSamples = metrics::kMinRunTimeInSeconds * kFps; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (int i = 0; i < kRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + 
VideoContentType::UNSPECIFIED); + fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); + } + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DecodedFramesPerSecond", kFps)); +} + +TEST_F(ReceiveStatisticsProxy2Test, DecodedFpsIsNotReportedForTooFewSamples) { + const int kFps = 20; + const int kRequiredSamples = metrics::kMinRunTimeInSeconds * kFps; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (int i = 0; i < kRequiredSamples - 1; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); + } + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + OnDecodedFrameWithQpDoesNotResetFramesDecodedOrTotalDecodeTime) { + EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + unsigned int expected_total_decode_time_ms = 0; + unsigned int expected_frames_decoded = 0; + for (uint32_t i = 1; i <= 3; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 1, + VideoContentType::UNSPECIFIED); + expected_total_decode_time_ms += 1; + ++expected_frames_decoded; + EXPECT_EQ(expected_frames_decoded, + statistics_proxy_->GetStats().frames_decoded); + EXPECT_EQ(expected_total_decode_time_ms, + statistics_proxy_->GetStats().total_decode_time_ms); + } + statistics_proxy_->OnDecodedFrame(frame, 1u, 3, + VideoContentType::UNSPECIFIED); + ++expected_frames_decoded; + expected_total_decode_time_ms += 3; + EXPECT_EQ(expected_frames_decoded, + statistics_proxy_->GetStats().frames_decoded); + EXPECT_EQ(expected_total_decode_time_ms, + 
statistics_proxy_->GetStats().total_decode_time_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameIncreasesQpSum) { + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(3u, statistics_proxy_->GetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, 127u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(130u, statistics_proxy_->GetStats().qp_sum); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameIncreasesTotalDecodeTime) { + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, 3u, 4, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(4u, statistics_proxy_->GetStats().total_decode_time_ms); + statistics_proxy_->OnDecodedFrame(frame, 127u, 7, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(11u, statistics_proxy_->GetStats().total_decode_time_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsContentType) { + const std::string kRealtimeString("realtime"); + const std::string kScreenshareString("screen"); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + EXPECT_EQ(kRealtimeString, videocontenttypehelpers::ToString( + statistics_proxy_->GetStats().content_type)); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::SCREENSHARE); + EXPECT_EQ(kScreenshareString, + videocontenttypehelpers::ToString( + statistics_proxy_->GetStats().content_type)); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kRealtimeString, videocontenttypehelpers::ToString( + statistics_proxy_->GetStats().content_type)); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsMaxTotalInterFrameDelay) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + const TimeDelta kInterFrameDelay1 = TimeDelta::Millis(100); + const TimeDelta 
kInterFrameDelay2 = TimeDelta::Millis(200); + const TimeDelta kInterFrameDelay3 = TimeDelta::Millis(300); + double expected_total_inter_frame_delay = 0; + double expected_total_squared_inter_frame_delay = 0; + EXPECT_EQ(expected_total_inter_frame_delay, + statistics_proxy_->GetStats().total_inter_frame_delay); + EXPECT_EQ(expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); + + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + statistics_proxy_->GetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ( + expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); + + fake_clock_.AdvanceTime(kInterFrameDelay1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + expected_total_inter_frame_delay += kInterFrameDelay1.seconds(); + expected_total_squared_inter_frame_delay += + pow(kInterFrameDelay1.seconds(), 2.0); + EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + statistics_proxy_->GetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ( + expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); + + fake_clock_.AdvanceTime(kInterFrameDelay2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + expected_total_inter_frame_delay += kInterFrameDelay2.seconds(); + expected_total_squared_inter_frame_delay += + pow(kInterFrameDelay2.seconds(), 2.0); + EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + statistics_proxy_->GetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ( + expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); + + fake_clock_.AdvanceTime(kInterFrameDelay3); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + 
expected_total_inter_frame_delay += kInterFrameDelay3.seconds(); + expected_total_squared_inter_frame_delay += + pow(kInterFrameDelay3.seconds(), 2.0); + EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + statistics_proxy_->GetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ( + expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsMaxInterframeDelay) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + const int64_t kInterframeDelayMs1 = 100; + const int64_t kInterframeDelayMs2 = 200; + const int64_t kInterframeDelayMs3 = 100; + EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kInterframeDelayMs1, + statistics_proxy_->GetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kInterframeDelayMs2, + statistics_proxy_->GetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs3); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + // kInterframeDelayMs3 is smaller than kInterframeDelayMs2. 
+ EXPECT_EQ(kInterframeDelayMs2, + statistics_proxy_->GetStats().interframe_delay_max_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportInterframeDelayInWindow) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + const int64_t kInterframeDelayMs1 = 900; + const int64_t kInterframeDelayMs2 = 750; + const int64_t kInterframeDelayMs3 = 700; + EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kInterframeDelayMs1, + statistics_proxy_->GetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + // Still first delay is the maximum + EXPECT_EQ(kInterframeDelayMs1, + statistics_proxy_->GetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs3); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + // Now the first sample is out of the window, so the second is the maximum. + EXPECT_EQ(kInterframeDelayMs2, + statistics_proxy_->GetStats().interframe_delay_max_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsFreezeMetrics) { + const int64_t kFreezeDurationMs = 1000; + + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(0u, stats.freeze_count); + EXPECT_FALSE(stats.total_freezes_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (size_t i = 0; i < VideoQualityObserver::kMinFrameSamplesToDetectFreeze; + ++i) { + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(frame); + } + + // Freeze. 
+ fake_clock_.AdvanceTimeMilliseconds(kFreezeDurationMs); + statistics_proxy_->OnRenderedFrame(frame); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(1u, stats.freeze_count); + EXPECT_EQ(kFreezeDurationMs, stats.total_freezes_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsPauseMetrics) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.pause_count); + ASSERT_EQ(0u, stats.total_pauses_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnRenderedFrame(frame); + + // Pause. + fake_clock_.AdvanceTimeMilliseconds(5432); + statistics_proxy_->OnStreamInactive(); + statistics_proxy_->OnRenderedFrame(frame); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(1u, stats.pause_count); + EXPECT_EQ(5432u, stats.total_pauses_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, PauseBeforeFirstAndAfterLastFrameIgnored) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.pause_count); + ASSERT_EQ(0u, stats.total_pauses_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + // Pause -> Frame -> Pause + fake_clock_.AdvanceTimeMilliseconds(5000); + statistics_proxy_->OnStreamInactive(); + statistics_proxy_->OnRenderedFrame(frame); + + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(frame); + + fake_clock_.AdvanceTimeMilliseconds(5000); + statistics_proxy_->OnStreamInactive(); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(0u, stats.pause_count); + EXPECT_EQ(0u, stats.total_pauses_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsFramesDuration) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.total_frames_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + // Emulate delay before first frame is rendered. 
This is needed to ensure + // that frame duration only covers time since first frame is rendered and + // not the total time. + fake_clock_.AdvanceTimeMilliseconds(5432); + + for (int i = 0; i <= 10; ++i) { + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(frame); + } + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(10 * 30u, stats.total_frames_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsSumSquaredFrameDurations) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.sum_squared_frame_durations); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (int i = 0; i <= 10; ++i) { + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(frame); + } + + stats = statistics_proxy_->GetStats(); + const double kExpectedSumSquaredFrameDurationsSecs = + 10 * (30 / 1000.0 * 30 / 1000.0); + EXPECT_EQ(kExpectedSumSquaredFrameDurationsSecs, + stats.sum_squared_frame_durations); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameWithoutQpQpSumWontExist) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameWithoutQpResetsQpSum) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(3u, statistics_proxy_->GetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnRenderedFrameIncreasesFramesRendered) { + EXPECT_EQ(0u, 
statistics_proxy_->GetStats().frames_rendered); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (uint32_t i = 1; i <= 3; ++i) { + statistics_proxy_->OnRenderedFrame(frame); + EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered); + } +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsSsrc) { + EXPECT_EQ(kRemoteSsrc, statistics_proxy_->GetStats().ssrc); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsIncomingPayloadType) { + const int kPayloadType = 111; + statistics_proxy_->OnIncomingPayloadType(kPayloadType); + EXPECT_EQ(kPayloadType, statistics_proxy_->GetStats().current_payload_type); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsDecoderImplementationName) { + const char* kName = "decoderName"; + statistics_proxy_->OnDecoderImplementationName(kName); + EXPECT_STREQ( + kName, statistics_proxy_->GetStats().decoder_implementation_name.c_str()); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsOnCompleteFrame) { + const int kFrameSizeBytes = 1000; + statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(1, stats.network_frame_rate); + EXPECT_EQ(1, stats.frame_counts.key_frames); + EXPECT_EQ(0, stats.frame_counts.delta_frames); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsOnDroppedFrame) { + unsigned int dropped_frames = 0; + for (int i = 0; i < 10; ++i) { + statistics_proxy_->OnDroppedFrames(i); + dropped_frames += i; + } + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(dropped_frames, stats.frames_dropped); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsDecodeTimingStats) { + const int kMaxDecodeMs = 2; + const int kCurrentDelayMs = 3; + const int kTargetDelayMs = 4; + const int kJitterBufferMs = 5; + const int kMinPlayoutDelayMs = 6; + const int kRenderDelayMs = 7; + const int64_t kRttMs = 8; + statistics_proxy_->OnRttUpdate(kRttMs, 0); + 
statistics_proxy_->OnFrameBufferTimingsUpdated( + kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs, + kMinPlayoutDelayMs, kRenderDelayMs); + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(kMaxDecodeMs, stats.max_decode_ms); + EXPECT_EQ(kCurrentDelayMs, stats.current_delay_ms); + EXPECT_EQ(kTargetDelayMs, stats.target_delay_ms); + EXPECT_EQ(kJitterBufferMs, stats.jitter_buffer_ms); + EXPECT_EQ(kMinPlayoutDelayMs, stats.min_playout_delay_ms); + EXPECT_EQ(kRenderDelayMs, stats.render_delay_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsRtcpPacketTypeCounts) { + const uint32_t kFirPackets = 33; + const uint32_t kPliPackets = 44; + const uint32_t kNackPackets = 55; + RtcpPacketTypeCounter counter; + counter.fir_packets = kFirPackets; + counter.pli_packets = kPliPackets; + counter.nack_packets = kNackPackets; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(kFirPackets, stats.rtcp_packet_type_counts.fir_packets); + EXPECT_EQ(kPliPackets, stats.rtcp_packet_type_counts.pli_packets); + EXPECT_EQ(kNackPackets, stats.rtcp_packet_type_counts.nack_packets); +} + +TEST_F(ReceiveStatisticsProxy2Test, + GetStatsReportsNoRtcpPacketTypeCountsForUnknownSsrc) { + RtcpPacketTypeCounter counter; + counter.fir_packets = 33; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc + 1, counter); + EXPECT_EQ(0u, + statistics_proxy_->GetStats().rtcp_packet_type_counts.fir_packets); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsFrameCounts) { + const int kKeyFrames = 3; + const int kDeltaFrames = 22; + for (int i = 0; i < kKeyFrames; i++) { + statistics_proxy_->OnCompleteFrame(true, 0, VideoContentType::UNSPECIFIED); + } + for (int i = 0; i < kDeltaFrames; i++) { + statistics_proxy_->OnCompleteFrame(false, 0, VideoContentType::UNSPECIFIED); + } + + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + 
EXPECT_EQ(kKeyFrames, stats.frame_counts.key_frames); + EXPECT_EQ(kDeltaFrames, stats.frame_counts.delta_frames); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsCName) { + const char* kName = "cName"; + statistics_proxy_->OnCname(kRemoteSsrc, kName); + EXPECT_STREQ(kName, statistics_proxy_->GetStats().c_name.c_str()); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsNoCNameForUnknownSsrc) { + const char* kName = "cName"; + statistics_proxy_->OnCname(kRemoteSsrc + 1, kName); + EXPECT_STREQ("", statistics_proxy_->GetStats().c_name.c_str()); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsLongestTimingFrameInfo) { + const int64_t kShortEndToEndDelay = 10; + const int64_t kMedEndToEndDelay = 20; + const int64_t kLongEndToEndDelay = 100; + const uint32_t kExpectedRtpTimestamp = 2; + TimingFrameInfo info; + absl::optional result; + info.rtp_timestamp = kExpectedRtpTimestamp - 1; + info.capture_time_ms = 0; + info.decode_finish_ms = kShortEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + info.rtp_timestamp = + kExpectedRtpTimestamp; // this frame should be reported in the end. 
+ info.capture_time_ms = 0; + info.decode_finish_ms = kLongEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + info.rtp_timestamp = kExpectedRtpTimestamp + 1; + info.capture_time_ms = 0; + info.decode_finish_ms = kMedEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + result = statistics_proxy_->GetStats().timing_frame_info; + EXPECT_TRUE(result); + EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp); +} + +TEST_F(ReceiveStatisticsProxy2Test, RespectsReportingIntervalForTimingFrames) { + TimingFrameInfo info; + const int64_t kShortEndToEndDelay = 10; + const uint32_t kExpectedRtpTimestamp = 2; + const int64_t kShortDelayMs = 1000; + const int64_t kLongDelayMs = 10000; + absl::optional result; + info.rtp_timestamp = kExpectedRtpTimestamp; + info.capture_time_ms = 0; + info.decode_finish_ms = kShortEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + fake_clock_.AdvanceTimeMilliseconds(kShortDelayMs); + result = statistics_proxy_->GetStats().timing_frame_info; + EXPECT_TRUE(result); + EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp); + fake_clock_.AdvanceTimeMilliseconds(kLongDelayMs); + result = statistics_proxy_->GetStats().timing_frame_info; + EXPECT_FALSE(result); +} + +TEST_F(ReceiveStatisticsProxy2Test, LifetimeHistogramIsUpdated) { + const int64_t kTimeSec = 3; + fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000); + // Need at least one frame to report stream lifetime. 
+ statistics_proxy_->OnCompleteFrame(true, 1000, VideoContentType::UNSPECIFIED); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceiveStreamLifetimeInSeconds", + kTimeSec)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + LifetimeHistogramNotReportedForEmptyStreams) { + const int64_t kTimeSec = 3; + fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000); + // No frames received. + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds")); +} + +TEST_F(ReceiveStatisticsProxy2Test, BadCallHistogramsAreUpdated) { + // Based on the tuning parameters this will produce 7 uncertain states, + // then 10 certainly bad states. There has to be 10 certain states before + // any histograms are recorded. + const int kNumBadSamples = 17; + // We only count one sample per second. 
+ const int kBadFameIntervalMs = 1100; + + StreamDataCounters counters; + counters.first_packet_time_ms = fake_clock_.TimeInMilliseconds(); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kNumBadSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kBadFameIntervalMs); + statistics_proxy_->OnRenderedFrame(frame); + } + statistics_proxy_->UpdateHistograms(absl::nullopt, counters, nullptr); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.BadCall.Any")); + EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.BadCall.Any", 100)); + + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.BadCall.FrameRate")); + EXPECT_METRIC_EQ(1, + metrics::NumEvents("WebRTC.Video.BadCall.FrameRate", 100)); + + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.BadCall.FrameRateVariance")); + + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.BadCall.Qp")); +} + +TEST_F(ReceiveStatisticsProxy2Test, PacketLossHistogramIsUpdated) { + statistics_proxy_->UpdateHistograms(10, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.ReceivedPacketsLostInPercent")); + + // Restart + SetUp(); + + // Min run time has passed. 
+ fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000); + statistics_proxy_->UpdateHistograms(10, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.ReceivedPacketsLostInPercent")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceivedPacketsLostInPercent", 10)); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsPlayoutTimestamp) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + EXPECT_EQ(kVideoNtpMs, + statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); + fake_clock_.AdvanceTimeMilliseconds(13); + EXPECT_EQ(kVideoNtpMs + 13, + statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); + fake_clock_.AdvanceTimeMilliseconds(5); + EXPECT_EQ(kVideoNtpMs + 13 + 5, + statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsAvSyncOffset) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + EXPECT_EQ(std::numeric_limits::max(), + statistics_proxy_->GetStats().sync_offset_ms); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + EXPECT_EQ(kSyncOffsetMs, statistics_proxy_->GetStats().sync_offset_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, AvSyncOffsetHistogramIsUpdated) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs")); + EXPECT_METRIC_EQ( + 1, 
metrics::NumEvents("WebRTC.Video.AVSyncOffsetInMs", kSyncOffsetMs)); +} + +TEST_F(ReceiveStatisticsProxy2Test, RtpToNtpFrequencyOffsetHistogramIsUpdated) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz + 2.2); + fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs); + // Process interval passed, max diff: 2. + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz + 1.1); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz - 4.2); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz - 0.9); + fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs); + // Process interval passed, max diff: 4. + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + // Average reported: (2 + 4) / 2 = 3. 
+ EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.RtpToNtpFreqOffsetInKhz")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.RtpToNtpFreqOffsetInKhz", 3)); +} + +TEST_F(ReceiveStatisticsProxy2Test, Vp8QpHistogramIsUpdated) { + const int kQp = 22; + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnPreDecode(kVideoCodecVP8, kQp); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); + EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.Decoded.Vp8.Qp", kQp)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + Vp8QpHistogramIsNotUpdatedForTooFewSamples) { + const int kQp = 22; + + for (int i = 0; i < kMinRequiredSamples - 1; ++i) + statistics_proxy_->OnPreDecode(kVideoCodecVP8, kQp); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); +} + +TEST_F(ReceiveStatisticsProxy2Test, Vp8QpHistogramIsNotUpdatedIfNoQpValue) { + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnPreDecode(kVideoCodecVP8, -1); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + KeyFrameHistogramNotUpdatedForTooFewSamples) { + const bool kIsKeyFrame = false; + const int kFrameSizeBytes = 1000; + + for (int i = 0; i < kMinRequiredSamples - 1; ++i) + statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + EXPECT_EQ(0, statistics_proxy_->GetStats().frame_counts.key_frames); + EXPECT_EQ(kMinRequiredSamples - 1, + statistics_proxy_->GetStats().frame_counts.delta_frames); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 0, 
metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + KeyFrameHistogramUpdatedForMinRequiredSamples) { + const bool kIsKeyFrame = false; + const int kFrameSizeBytes = 1000; + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + EXPECT_EQ(0, statistics_proxy_->GetStats().frame_counts.key_frames); + EXPECT_EQ(kMinRequiredSamples, + statistics_proxy_->GetStats().frame_counts.delta_frames); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.KeyFramesReceivedInPermille", 0)); +} + +TEST_F(ReceiveStatisticsProxy2Test, KeyFrameHistogramIsUpdated) { + const int kFrameSizeBytes = 1000; + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnCompleteFrame(false, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + EXPECT_EQ(kMinRequiredSamples, + statistics_proxy_->GetStats().frame_counts.key_frames); + EXPECT_EQ(kMinRequiredSamples, + statistics_proxy_->GetStats().frame_counts.delta_frames); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.KeyFramesReceivedInPermille", 500)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + TimingHistogramsNotUpdatedForTooFewSamples) { + const int kMaxDecodeMs = 2; + const int kCurrentDelayMs = 3; + const int kTargetDelayMs = 4; + const int kJitterBufferMs = 5; + const int kMinPlayoutDelayMs = 6; + const int kRenderDelayMs = 7; + + for (int i = 0; i < 
kMinRequiredSamples - 1; ++i) { + statistics_proxy_->OnFrameBufferTimingsUpdated( + kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs, + kMinPlayoutDelayMs, kRenderDelayMs); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.DecodeTimeInMs")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.TargetDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, TimingHistogramsAreUpdated) { + const int kMaxDecodeMs = 2; + const int kCurrentDelayMs = 3; + const int kTargetDelayMs = 4; + const int kJitterBufferMs = 5; + const int kMinPlayoutDelayMs = 6; + const int kRenderDelayMs = 7; + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnFrameBufferTimingsUpdated( + kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs, + kMinPlayoutDelayMs, kRenderDelayMs); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.TargetDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs")); + + EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.JitterBufferDelayInMs", + kJitterBufferMs)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.TargetDelayInMs", kTargetDelayMs)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.CurrentDelayInMs", kCurrentDelayMs)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.OnewayDelayInMs", kTargetDelayMs)); +} + +TEST_F(ReceiveStatisticsProxy2Test, 
DoesNotReportStaleFramerates) { + const int kDefaultFps = 30; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kDefaultFps; ++i) { + // Since OnRenderedFrame is never called the fps in each sample will be 0, + // i.e. bad + frame.set_ntp_time_ms(fake_clock_.CurrentNtpInMilliseconds()); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + statistics_proxy_->OnRenderedFrame(frame); + fake_clock_.AdvanceTimeMilliseconds(1000 / kDefaultFps); + } + + EXPECT_EQ(kDefaultFps, statistics_proxy_->GetStats().decode_frame_rate); + EXPECT_EQ(kDefaultFps, statistics_proxy_->GetStats().render_frame_rate); + + // FPS trackers in stats proxy have a 1000ms sliding window. + fake_clock_.AdvanceTimeMilliseconds(1000); + EXPECT_EQ(0, statistics_proxy_->GetStats().decode_frame_rate); + EXPECT_EQ(0, statistics_proxy_->GetStats().render_frame_rate); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsReceivedFrameStats) { + EXPECT_EQ(0, statistics_proxy_->GetStats().width); + EXPECT_EQ(0, statistics_proxy_->GetStats().height); + EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered); + + statistics_proxy_->OnRenderedFrame(CreateFrame(kWidth, kHeight)); + + EXPECT_EQ(kWidth, statistics_proxy_->GetStats().width); + EXPECT_EQ(kHeight, statistics_proxy_->GetStats().height); + EXPECT_EQ(1u, statistics_proxy_->GetStats().frames_rendered); +} + +TEST_F(ReceiveStatisticsProxy2Test, + ReceivedFrameHistogramsAreNotUpdatedForTooFewSamples) { + for (int i = 0; i < kMinRequiredSamples - 1; ++i) + statistics_proxy_->OnRenderedFrame(CreateFrame(kWidth, kHeight)); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.ReceivedHeightInPixels")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.RenderFramesPerSecond")); + 
EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond")); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReceivedFrameHistogramsAreUpdated) { + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnRenderedFrame(CreateFrame(kWidth, kHeight)); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.ReceivedHeightInPixels")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.RenderFramesPerSecond")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceivedWidthInPixels", kWidth)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceivedHeightInPixels", kHeight)); +} + +TEST_F(ReceiveStatisticsProxy2Test, ZeroDelayReportedIfFrameNotDelayed) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Frame not delayed, delayed frames to render: 0%. + const int64_t kNowMs = fake_clock_.TimeInMilliseconds(); + statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs)); + + // Min run time has passed. 
+ fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer", 0)); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + DelayedFrameHistogramsAreNotUpdatedIfMinRuntimeHasNotPassed) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Frame not delayed, delayed frames to render: 0%. + const int64_t kNowMs = fake_clock_.TimeInMilliseconds(); + statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs)); + + // Min run time has not passed. + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000) - + 1); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + DelayedFramesHistogramsAreNotUpdatedIfNoRenderedFrames) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Min run time has passed. No rendered frames. 
+ fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, DelayReportedIfFrameIsDelayed) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Frame delayed 1 ms, delayed frames to render: 100%. + const int64_t kNowMs = fake_clock_.TimeInMilliseconds(); + statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs - 1)); + + // Min run time has passed. + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer", 100)); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs", + 1)); +} + +TEST_F(ReceiveStatisticsProxy2Test, AverageDelayOfDelayedFramesIsReported) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Two frames delayed (6 ms, 10 ms), delayed frames to render: 50%. 
+ const int64_t kNowMs = fake_clock_.TimeInMilliseconds(); + statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs - 10)); + statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs - 6)); + statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs)); + statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs + 1)); + + // Min run time has passed. + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer", 50)); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs", + 8)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + RtcpHistogramsNotUpdatedIfMinRuntimeHasNotPassed) { + StreamDataCounters data_counters; + data_counters.first_packet_time_ms = fake_clock_.TimeInMilliseconds(); + + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000) - + 1); + + RtcpPacketTypeCounter counter; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); + + statistics_proxy_->UpdateHistograms(absl::nullopt, data_counters, nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.FirPacketsSentPerMinute")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.PliPacketsSentPerMinute")); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.NackPacketsSentPerMinute")); +} + +TEST_F(ReceiveStatisticsProxy2Test, RtcpHistogramsAreUpdated) { + StreamDataCounters data_counters; + data_counters.first_packet_time_ms = fake_clock_.TimeInMilliseconds(); + fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000); + + const uint32_t kFirPackets = 100; + const 
uint32_t kPliPackets = 200; + const uint32_t kNackPackets = 300; + + RtcpPacketTypeCounter counter; + counter.fir_packets = kFirPackets; + counter.pli_packets = kPliPackets; + counter.nack_packets = kNackPackets; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); + + statistics_proxy_->UpdateHistograms(absl::nullopt, data_counters, nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.FirPacketsSentPerMinute")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.PliPacketsSentPerMinute")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.NackPacketsSentPerMinute")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.FirPacketsSentPerMinute", + kFirPackets * 60 / metrics::kMinRunTimeInSeconds)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.PliPacketsSentPerMinute", + kPliPackets * 60 / metrics::kMinRunTimeInSeconds)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.NackPacketsSentPerMinute", + kNackPackets * 60 / metrics::kMinRunTimeInSeconds)); +} + +class ReceiveStatisticsProxy2TestWithFreezeDuration + : public ReceiveStatisticsProxy2Test, + public ::testing::WithParamInterface< + std::tuple> { + protected: + const uint32_t frame_duration_ms_ = {std::get<0>(GetParam())}; + const uint32_t freeze_duration_ms_ = {std::get<1>(GetParam())}; + const uint32_t expected_freeze_count_ = {std::get<2>(GetParam())}; +}; + +// It is a freeze if: +// frame_duration_ms >= max(3 * avg_frame_duration, avg_frame_duration + 150) +// where avg_frame_duration is average duration of last 30 frames including +// the current one. 
+//
+// Condition 1: 3 * avg_frame_duration > avg_frame_duration + 150
+const auto kFreezeDetectionCond1Freeze = std::make_tuple(150, 483, 1);
+const auto kFreezeDetectionCond1NotFreeze = std::make_tuple(150, 482, 0);
+// Condition 2: 3 * avg_frame_duration < avg_frame_duration + 150
+const auto kFreezeDetectionCond2Freeze = std::make_tuple(30, 185, 1);
+const auto kFreezeDetectionCond2NotFreeze = std::make_tuple(30, 184, 0);
+
+INSTANTIATE_TEST_SUITE_P(_,
+                         ReceiveStatisticsProxy2TestWithFreezeDuration,
+                         ::testing::Values(kFreezeDetectionCond1Freeze,
+                                           kFreezeDetectionCond1NotFreeze,
+                                           kFreezeDetectionCond2Freeze,
+                                           kFreezeDetectionCond2NotFreeze));
+
+TEST_P(ReceiveStatisticsProxy2TestWithFreezeDuration, FreezeDetection) {
+  VideoReceiveStream::Stats stats = statistics_proxy_->GetStats();
+  EXPECT_EQ(0u, stats.freeze_count);
+  webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight);
+
+  // Add a very long frame. This is needed to verify that average frame
+  // duration, which is supposed to be calculated as mean of durations of
+  // last 30 frames, is calculated correctly.
+ statistics_proxy_->OnRenderedFrame(frame); + fake_clock_.AdvanceTimeMilliseconds(2000); + + for (size_t i = 0; + i <= VideoQualityObserver::kAvgInterframeDelaysWindowSizeFrames; ++i) { + fake_clock_.AdvanceTimeMilliseconds(frame_duration_ms_); + statistics_proxy_->OnRenderedFrame(frame); + } + + fake_clock_.AdvanceTimeMilliseconds(freeze_duration_ms_); + statistics_proxy_->OnRenderedFrame(frame); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(stats.freeze_count, expected_freeze_count_); +} + +class ReceiveStatisticsProxy2TestWithContent + : public ReceiveStatisticsProxy2Test, + public ::testing::WithParamInterface { + protected: + const webrtc::VideoContentType content_type_{GetParam()}; +}; + +INSTANTIATE_TEST_SUITE_P(ContentTypes, + ReceiveStatisticsProxy2TestWithContent, + ::testing::Values(VideoContentType::UNSPECIFIED, + VideoContentType::SCREENSHARE)); + +TEST_P(ReceiveStatisticsProxy2TestWithContent, InterFrameDelaysAreReported) { + const int kInterFrameDelayMs = 33; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // One extra with double the interval. 
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedInterFrame = + (kInterFrameDelayMs * (kMinRequiredSamples - 1) + + kInterFrameDelayMs * 2) / + kMinRequiredSamples; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedInterFrame, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs * 2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + } else { + EXPECT_METRIC_EQ(kExpectedInterFrame, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs * 2, + metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + InterFrameDelaysPercentilesAreReported) { + const int kInterFrameDelayMs = 33; + const int kLastFivePercentsSamples = kMinRequiredSamples * 5 / 100; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples - kLastFivePercentsSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + } + // Last 5% of intervals are double in size. + for (int i = 0; i < kLastFivePercentsSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(2 * kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + } + // Final sample is outlier and 10 times as big. 
+ fake_clock_.AdvanceTimeMilliseconds(10 * kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedInterFrame = kInterFrameDelayMs * 2; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedInterFrame, + metrics::MinSample( + "WebRTC.Video.Screenshare.InterframeDelay95PercentileInMs")); + } else { + EXPECT_METRIC_EQ( + kExpectedInterFrame, + metrics::MinSample("WebRTC.Video.InterframeDelay95PercentileInMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + MaxInterFrameDelayOnlyWithValidAverage) { + const int kInterFrameDelayMs = 33; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + // |kMinRequiredSamples| samples, and thereby intervals, is required. That + // means we're one frame short of having a valid data set. 
+ statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + MaxInterFrameDelayOnlyWithPause) { + const int kInterFrameDelayMs = 33; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + // At this state, we should have a valid inter-frame delay. + // Indicate stream paused and make a large jump in time. + statistics_proxy_->OnStreamInactive(); + fake_clock_.AdvanceTimeMilliseconds(5000); + + // Insert two more frames. The interval during the pause should be disregarded + // in the stats. 
+ statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + } else { + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, FreezesAreReported) { + const int kInterFrameDelayMs = 33; + const int kFreezeDelayMs = 200; + const int kCallDurationMs = + kMinRequiredSamples * kInterFrameDelayMs + kFreezeDelayMs; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Add extra freeze. 
+ fake_clock_.AdvanceTimeMilliseconds(kFreezeDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedTimeBetweenFreezes = + kInterFrameDelayMs * (kMinRequiredSamples - 1); + const int kExpectedNumberFreezesPerMinute = 60 * 1000 / kCallDurationMs; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kFreezeDelayMs + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.Screenshare.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ(kExpectedTimeBetweenFreezes, + metrics::MinSample( + "WebRTC.Video.Screenshare.MeanTimeBetweenFreezesMs")); + EXPECT_METRIC_EQ( + kExpectedNumberFreezesPerMinute, + metrics::MinSample("WebRTC.Video.Screenshare.NumberFreezesPerMinute")); + } else { + EXPECT_METRIC_EQ(kFreezeDelayMs + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ( + kExpectedTimeBetweenFreezes, + metrics::MinSample("WebRTC.Video.MeanTimeBetweenFreezesMs")); + EXPECT_METRIC_EQ(kExpectedNumberFreezesPerMinute, + metrics::MinSample("WebRTC.Video.NumberFreezesPerMinute")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, HarmonicFrameRateIsReported) { + const int kFrameDurationMs = 33; + const int kFreezeDurationMs = 200; + const int kPauseDurationMs = 10000; + const int kCallDurationMs = kMinRequiredSamples * kFrameDurationMs + + kFreezeDurationMs + kPauseDurationMs; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kFrameDurationMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + } + + // Freezes and pauses should be included into harmonic frame rate. + // Add freeze. 
+ fake_clock_.AdvanceTimeMilliseconds(kFreezeDurationMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + + // Add pause. + fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs); + statistics_proxy_->OnStreamInactive(); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + double kSumSquaredFrameDurationSecs = + (kMinRequiredSamples - 1) * + (kFrameDurationMs / 1000.0 * kFrameDurationMs / 1000.0); + kSumSquaredFrameDurationSecs += + kFreezeDurationMs / 1000.0 * kFreezeDurationMs / 1000.0; + kSumSquaredFrameDurationSecs += + kPauseDurationMs / 1000.0 * kPauseDurationMs / 1000.0; + const int kExpectedHarmonicFrameRateFps = + std::round(kCallDurationMs / (1000 * kSumSquaredFrameDurationSecs)); + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedHarmonicFrameRateFps, + metrics::MinSample("WebRTC.Video.Screenshare.HarmonicFrameRate")); + } else { + EXPECT_METRIC_EQ(kExpectedHarmonicFrameRateFps, + metrics::MinSample("WebRTC.Video.HarmonicFrameRate")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, PausesAreIgnored) { + const int kInterFrameDelayMs = 33; + const int kPauseDurationMs = 10000; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Add a pause. + fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs); + statistics_proxy_->OnStreamInactive(); + + // Second playback interval with triple the length. 
+ for (int i = 0; i <= kMinRequiredSamples * 3; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + // Average of two playback intervals. + const int kExpectedTimeBetweenFreezes = + kInterFrameDelayMs * kMinRequiredSamples * 2; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ(-1, metrics::MinSample( + "WebRTC.Video.Screenshare.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ(kExpectedTimeBetweenFreezes, + metrics::MinSample( + "WebRTC.Video.Screenshare.MeanTimeBetweenFreezesMs")); + } else { + EXPECT_METRIC_EQ(-1, + metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ( + kExpectedTimeBetweenFreezes, + metrics::MinSample("WebRTC.Video.MeanTimeBetweenFreezesMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, ManyPausesAtTheBeginning) { + const int kInterFrameDelayMs = 33; + const int kPauseDurationMs = 10000; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + statistics_proxy_->OnStreamInactive(); + fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs); + + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + // No freezes should be detected, as all long inter-frame delays were pauses. 
+ if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ(-1, metrics::MinSample( + "WebRTC.Video.Screenshare.MeanFreezeDurationMs")); + } else { + EXPECT_METRIC_EQ(-1, + metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, TimeInHdReported) { + const int kInterFrameDelayMs = 20; + webrtc::VideoFrame frame_hd = CreateFrame(1280, 720); + webrtc::VideoFrame frame_sd = CreateFrame(640, 360); + + // HD frames. + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame_hd, absl::nullopt, 0, + content_type_); + statistics_proxy_->OnRenderedFrame(frame_hd); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // SD frames. + for (int i = 0; i < 2 * kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame_sd, absl::nullopt, 0, + content_type_); + statistics_proxy_->OnRenderedFrame(frame_sd); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Extra last frame. + statistics_proxy_->OnRenderedFrame(frame_sd); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedTimeInHdPercents = 33; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedTimeInHdPercents, + metrics::MinSample("WebRTC.Video.Screenshare.TimeInHdPercentage")); + } else { + EXPECT_METRIC_EQ(kExpectedTimeInHdPercents, + metrics::MinSample("WebRTC.Video.TimeInHdPercentage")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, TimeInBlockyVideoReported) { + const int kInterFrameDelayMs = 20; + const int kHighQp = 80; + const int kLowQp = 30; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + // High quality frames. 
+ for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, kLowQp, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Blocky frames. + for (int i = 0; i < 2 * kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, kHighQp, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Extra last frame. + statistics_proxy_->OnDecodedFrame(frame, kHighQp, 0, content_type_); + statistics_proxy_->OnRenderedFrame(frame); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedTimeInHdPercents = 66; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedTimeInHdPercents, + metrics::MinSample( + "WebRTC.Video.Screenshare.TimeInBlockyVideoPercentage")); + } else { + EXPECT_METRIC_EQ( + kExpectedTimeInHdPercents, + metrics::MinSample("WebRTC.Video.TimeInBlockyVideoPercentage")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, DownscalesReported) { + const int kInterFrameDelayMs = 2000; // To ensure long enough call duration. + + webrtc::VideoFrame frame_hd = CreateFrame(1280, 720); + webrtc::VideoFrame frame_sd = CreateFrame(640, 360); + webrtc::VideoFrame frame_ld = CreateFrame(320, 180); + + // Call once to pass content type. + statistics_proxy_->OnDecodedFrame(frame_hd, absl::nullopt, 0, content_type_); + + statistics_proxy_->OnRenderedFrame(frame_hd); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + // Downscale. + statistics_proxy_->OnRenderedFrame(frame_sd); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + // Downscale. 
+ statistics_proxy_->OnRenderedFrame(frame_ld); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedDownscales = 30; // 2 per 4 seconds = 30 per minute. + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedDownscales, + metrics::MinSample( + "WebRTC.Video.Screenshare.NumberResolutionDownswitchesPerMinute")); + } else { + EXPECT_METRIC_EQ(kExpectedDownscales, + metrics::MinSample( + "WebRTC.Video.NumberResolutionDownswitchesPerMinute")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, DecodeTimeReported) { + const int kInterFrameDelayMs = 20; + const int kLowQp = 30; + const int kDecodeMs = 7; + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, kLowQp, kDecodeMs, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DecodeTimeInMs", kDecodeMs)); +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + StatsAreSlicedOnSimulcastAndExperiment) { + const uint8_t experiment_id = 1; + webrtc::VideoContentType content_type = content_type_; + videocontenttypehelpers::SetExperimentId(&content_type, experiment_id); + const int kInterFrameDelayMs1 = 30; + const int kInterFrameDelayMs2 = 50; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + videocontenttypehelpers::SetSimulcastId(&content_type, 1); + for (int i = 0; i <= kMinRequiredSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type); + } + + videocontenttypehelpers::SetSimulcastId(&content_type, 2); + for (int i = 0; i <= kMinRequiredSamples; ++i) { + 
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type); + } + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + + if (videocontenttypehelpers::IsScreenshare(content_type)) { + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S0")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S1")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayMaxInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs1, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ( + (kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + (kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample( + "WebRTC.Video.Screenshare.InterframeDelayInMs.ExperimentGroup0")); + } else { + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + 1, 
metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S0")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S1")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ(kInterFrameDelayMs1, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ(kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ((kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ((kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample( + "WebRTC.Video.InterframeDelayInMs.ExperimentGroup0")); + } +} + +class DecodeTimeHistogramsKillswitch { + public: + explicit DecodeTimeHistogramsKillswitch(bool disable_histograms) + : field_trial_(disable_histograms + ? 
"WebRTC-DecodeTimeHistogramsKillSwitch/Enabled/"
+                         : "") {}
+
+ private:
+  webrtc::test::ScopedFieldTrials field_trial_;
+};
+
+class ReceiveStatisticsProxy2TestWithDecodeTimeHistograms
+    : public DecodeTimeHistogramsKillswitch,
+      public ::testing::WithParamInterface<
+          std::tuple<bool, int, int, int, VideoCodecType, std::string>>,
+      public ReceiveStatisticsProxy2Test {
+ public:
+  ReceiveStatisticsProxy2TestWithDecodeTimeHistograms()
+      : DecodeTimeHistogramsKillswitch(std::get<0>(GetParam())) {}
+
+ protected:
+  const std::string kUmaPrefix = "WebRTC.Video.DecodeTimePerFrameInMs.";
+  const int expected_number_of_samples_ = {std::get<1>(GetParam())};
+  const int width_ = {std::get<2>(GetParam())};
+  const int height_ = {std::get<3>(GetParam())};
+  const VideoCodecType codec_type_ = {std::get<4>(GetParam())};
+  const std::string implementation_name_ = {std::get<5>(GetParam())};
+  const std::string uma_histogram_name_ =
+      kUmaPrefix + (codec_type_ == kVideoCodecVP9 ? "Vp9." : "H264.") +
+      (height_ == 2160 ? "4k." : "Hd.") +
+      (implementation_name_.compare("ExternalDecoder") == 0 ? 
"Hw" : "Sw"); +}; + +TEST_P(ReceiveStatisticsProxy2TestWithDecodeTimeHistograms, + DecodeTimeHistogramsUpdated) { + constexpr int kNumberOfFrames = 10; + constexpr int kDecodeTimeMs = 7; + constexpr int kFrameDurationMs = 1000 / 60; + + webrtc::VideoFrame frame = CreateFrame(width_, height_); + + statistics_proxy_->OnDecoderImplementationName(implementation_name_.c_str()); + statistics_proxy_->OnPreDecode(codec_type_, /*qp=*/0); + + for (int i = 0; i < kNumberOfFrames; ++i) { + statistics_proxy_->OnDecodedFrame(frame, /*qp=*/absl::nullopt, + kDecodeTimeMs, + VideoContentType::UNSPECIFIED); + fake_clock_.AdvanceTimeMilliseconds(kFrameDurationMs); + } + + EXPECT_METRIC_EQ(expected_number_of_samples_, + metrics::NumSamples(uma_histogram_name_)); + EXPECT_METRIC_EQ(expected_number_of_samples_, + metrics::NumEvents(uma_histogram_name_, kDecodeTimeMs)); +} + +const auto kVp94kHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecVP9, + /*implementation=*/"ExternalDecoder"); +const auto kVp94kSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecVP9, + /*implementation=*/"libvpx"); +const auto kVp9HdHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"ExternalDecoder"); +const auto kVp9HdSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"libvpx"); +const auto kH2644kHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecH264, + /*implementation=*/"ExternalDecoder"); +const auto kH2644kSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecH264, + 
/*implementation=*/"FFmpeg"); +const auto kH264HdHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecH264, + /*implementation=*/"ExternalDecoder"); +const auto kH264HdSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecH264, + /*implementation=*/"FFmpeg"); + +INSTANTIATE_TEST_SUITE_P(AllHistogramsPopulated, + ReceiveStatisticsProxy2TestWithDecodeTimeHistograms, + ::testing::Values(kVp94kHw, + kVp94kSw, + kVp9HdHw, + kVp9HdSw, + kH2644kHw, + kH2644kSw, + kH264HdHw, + kH264HdSw)); + +const auto kKillswitchDisabled = + std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"libvpx"); +const auto kKillswitchEnabled = + std::make_tuple(/*killswitch=*/true, + /*expected_number_of_samples=*/0, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"libvpx"); + +INSTANTIATE_TEST_SUITE_P(KillswitchEffective, + ReceiveStatisticsProxy2TestWithDecodeTimeHistograms, + ::testing::Values(kKillswitchDisabled, + kKillswitchEnabled)); + +} // namespace internal +} // namespace webrtc diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc index 3f5186bcf3..1ae5c5e93d 100644 --- a/video/rtp_video_stream_receiver.cc +++ b/video/rtp_video_stream_receiver.cc @@ -84,7 +84,8 @@ std::unique_ptr CreateRtpRtcpModule( ReceiveStatistics* receive_statistics, Transport* outgoing_transport, RtcpRttStats* rtt_stats, - ReceiveStatisticsProxy* rtcp_statistics_observer, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, uint32_t local_ssrc) { RtpRtcp::Configuration configuration; configuration.clock = clock; @@ -93,8 +94,9 @@ std::unique_ptr CreateRtpRtcpModule( configuration.receive_statistics = receive_statistics; 
configuration.outgoing_transport = outgoing_transport; configuration.rtt_stats = rtt_stats; - configuration.rtcp_packet_type_counter_observer = rtcp_statistics_observer; - configuration.rtcp_cname_callback = rtcp_statistics_observer; + configuration.rtcp_packet_type_counter_observer = + rtcp_packet_type_counter_observer; + configuration.rtcp_cname_callback = rtcp_cname_callback; configuration.local_media_ssrc = local_ssrc; std::unique_ptr rtp_rtcp = RtpRtcp::Create(configuration); @@ -184,6 +186,7 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() { } } +// DEPRECATED RtpVideoStreamReceiver::RtpVideoStreamReceiver( Clock* clock, Transport* transport, @@ -198,6 +201,36 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( video_coding::OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer) + : RtpVideoStreamReceiver(clock, + transport, + rtt_stats, + packet_router, + config, + rtp_receive_statistics, + receive_stats_proxy, + receive_stats_proxy, + process_thread, + nack_sender, + keyframe_request_sender, + complete_frame_callback, + frame_decryptor, + frame_transformer) {} + +RtpVideoStreamReceiver::RtpVideoStreamReceiver( + Clock* clock, + Transport* transport, + RtcpRttStats* rtt_stats, + PacketRouter* packet_router, + const VideoReceiveStream::Config* config, + ReceiveStatistics* rtp_receive_statistics, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + ProcessThread* process_thread, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender, + video_coding::OnCompleteFrameCallback* complete_frame_callback, + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer) : clock_(clock), config_(*config), packet_router_(packet_router), @@ -214,7 +247,8 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( rtp_receive_statistics_, transport, rtt_stats, - receive_stats_proxy, + 
rtcp_packet_type_counter_observer, + rtcp_cname_callback, config_.rtp.local_ssrc)), complete_frame_callback_(complete_frame_callback), keyframe_request_sender_(keyframe_request_sender), diff --git a/video/rtp_video_stream_receiver.h b/video/rtp_video_stream_receiver.h index ba617fd02b..3e07df926c 100644 --- a/video/rtp_video_stream_receiver.h +++ b/video/rtp_video_stream_receiver.h @@ -70,6 +70,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, public OnDecryptedFrameCallback, public OnDecryptionStatusChangeCallback { public: + // DEPRECATED due to dependency on ReceiveStatisticsProxy. RtpVideoStreamReceiver( Clock* clock, Transport* transport, @@ -89,6 +90,27 @@ class RtpVideoStreamReceiver : public LossNotificationSender, video_coding::OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer); + + RtpVideoStreamReceiver( + Clock* clock, + Transport* transport, + RtcpRttStats* rtt_stats, + // The packet router is optional; if provided, the RtpRtcp module for this + // stream is registered as a candidate for sending REMB and transport + // feedback. + PacketRouter* packet_router, + const VideoReceiveStream::Config* config, + ReceiveStatistics* rtp_receive_statistics, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + ProcessThread* process_thread, + NackSender* nack_sender, + // The KeyFrameRequestSender is optional; if not provided, key frame + // requests are sent via the internal RtpRtcp module. 
+ KeyFrameRequestSender* keyframe_request_sender, + video_coding::OnCompleteFrameCallback* complete_frame_callback, + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer); ~RtpVideoStreamReceiver() override; void AddReceiveCodec(const VideoCodec& video_codec, diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc index 5626d83d39..3b507dc8da 100644 --- a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc +++ b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc @@ -93,6 +93,7 @@ class TestRtpVideoStreamReceiver : public TestRtpVideoStreamReceiverInitializer, &test_config_, test_rtp_receive_statistics_.get(), nullptr, + nullptr, test_process_thread_.get(), &fake_nack_sender_, nullptr, diff --git a/video/rtp_video_stream_receiver_unittest.cc b/video/rtp_video_stream_receiver_unittest.cc index 512f4d94c5..40602f7754 100644 --- a/video/rtp_video_stream_receiver_unittest.cc +++ b/video/rtp_video_stream_receiver_unittest.cc @@ -167,7 +167,7 @@ class RtpVideoStreamReceiverTest : public ::testing::Test { ReceiveStatistics::Create(Clock::GetRealTimeClock()); rtp_video_stream_receiver_ = std::make_unique( Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_, - rtp_receive_statistics_.get(), nullptr, process_thread_.get(), + rtp_receive_statistics_.get(), nullptr, nullptr, process_thread_.get(), &mock_nack_sender_, &mock_key_frame_request_sender_, &mock_on_complete_frame_callback_, nullptr, nullptr); VideoCodec codec; @@ -1139,7 +1139,7 @@ TEST_F(RtpVideoStreamReceiverTest, TransformFrame) { RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc)); auto receiver = std::make_unique( Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_, - rtp_receive_statistics_.get(), nullptr, process_thread_.get(), + rtp_receive_statistics_.get(), nullptr, nullptr, 
process_thread_.get(), &mock_nack_sender_, nullptr, &mock_on_complete_frame_callback_, nullptr, mock_frame_transformer); VideoCodec video_codec; diff --git a/video/video_quality_observer2.cc b/video/video_quality_observer2.cc new file mode 100644 index 0000000000..5528815890 --- /dev/null +++ b/video/video_quality_observer2.cc @@ -0,0 +1,288 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/video_quality_observer2.h" + +#include +#include +#include +#include + +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace internal { +const uint32_t VideoQualityObserver::kMinFrameSamplesToDetectFreeze = 5; +const uint32_t VideoQualityObserver::kMinIncreaseForFreezeMs = 150; +const uint32_t VideoQualityObserver::kAvgInterframeDelaysWindowSizeFrames = 30; + +namespace { +constexpr int kMinVideoDurationMs = 3000; +constexpr int kMinRequiredSamples = 1; +constexpr int kPixelsInHighResolution = + 960 * 540; // CPU-adapted HD still counts. +constexpr int kPixelsInMediumResolution = 640 * 360; +constexpr int kBlockyQpThresholdVp8 = 70; +constexpr int kBlockyQpThresholdVp9 = 180; +constexpr int kMaxNumCachedBlockyFrames = 100; +// TODO(ilnik): Add H264/HEVC thresholds. 
+} // namespace + +VideoQualityObserver::VideoQualityObserver(VideoContentType content_type) + : last_frame_rendered_ms_(-1), + num_frames_rendered_(0), + first_frame_rendered_ms_(-1), + last_frame_pixels_(0), + is_last_frame_blocky_(false), + last_unfreeze_time_ms_(0), + render_interframe_delays_(kAvgInterframeDelaysWindowSizeFrames), + sum_squared_interframe_delays_secs_(0.0), + time_in_resolution_ms_(3, 0), + current_resolution_(Resolution::Low), + num_resolution_downgrades_(0), + time_in_blocky_video_ms_(0), + content_type_(content_type), + is_paused_(false) {} + +void VideoQualityObserver::UpdateHistograms() { + // Don't report anything on an empty video stream. + if (num_frames_rendered_ == 0) { + return; + } + + char log_stream_buf[2 * 1024]; + rtc::SimpleStringBuilder log_stream(log_stream_buf); + + if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) { + smooth_playback_durations_.Add(last_frame_rendered_ms_ - + last_unfreeze_time_ms_); + } + + std::string uma_prefix = videocontenttypehelpers::IsScreenshare(content_type_) + ? 
"WebRTC.Video.Screenshare" + : "WebRTC.Video"; + + auto mean_time_between_freezes = + smooth_playback_durations_.Avg(kMinRequiredSamples); + if (mean_time_between_freezes) { + RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanTimeBetweenFreezesMs", + *mean_time_between_freezes); + log_stream << uma_prefix << ".MeanTimeBetweenFreezesMs " + << *mean_time_between_freezes << "\n"; + } + auto avg_freeze_length = freezes_durations_.Avg(kMinRequiredSamples); + if (avg_freeze_length) { + RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanFreezeDurationMs", + *avg_freeze_length); + log_stream << uma_prefix << ".MeanFreezeDurationMs " << *avg_freeze_length + << "\n"; + } + + int64_t video_duration_ms = + last_frame_rendered_ms_ - first_frame_rendered_ms_; + + if (video_duration_ms >= kMinVideoDurationMs) { + int time_spent_in_hd_percentage = static_cast( + time_in_resolution_ms_[Resolution::High] * 100 / video_duration_ms); + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInHdPercentage", + time_spent_in_hd_percentage); + log_stream << uma_prefix << ".TimeInHdPercentage " + << time_spent_in_hd_percentage << "\n"; + + int time_with_blocky_video_percentage = + static_cast(time_in_blocky_video_ms_ * 100 / video_duration_ms); + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInBlockyVideoPercentage", + time_with_blocky_video_percentage); + log_stream << uma_prefix << ".TimeInBlockyVideoPercentage " + << time_with_blocky_video_percentage << "\n"; + + int num_resolution_downgrades_per_minute = + num_resolution_downgrades_ * 60000 / video_duration_ms; + RTC_HISTOGRAM_COUNTS_SPARSE_100( + uma_prefix + ".NumberResolutionDownswitchesPerMinute", + num_resolution_downgrades_per_minute); + log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute " + << num_resolution_downgrades_per_minute << "\n"; + + int num_freezes_per_minute = + freezes_durations_.NumSamples() * 60000 / video_duration_ms; + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + 
".NumberFreezesPerMinute", + num_freezes_per_minute); + log_stream << uma_prefix << ".NumberFreezesPerMinute " + << num_freezes_per_minute << "\n"; + + if (sum_squared_interframe_delays_secs_ > 0.0) { + int harmonic_framerate_fps = std::round( + video_duration_ms / (1000 * sum_squared_interframe_delays_secs_)); + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".HarmonicFrameRate", + harmonic_framerate_fps); + log_stream << uma_prefix << ".HarmonicFrameRate " + << harmonic_framerate_fps << "\n"; + } + } + RTC_LOG(LS_INFO) << log_stream.str(); +} + +void VideoQualityObserver::OnRenderedFrame(const VideoFrame& frame, + int64_t now_ms) { + RTC_DCHECK_LE(last_frame_rendered_ms_, now_ms); + RTC_DCHECK_LE(last_unfreeze_time_ms_, now_ms); + + if (num_frames_rendered_ == 0) { + first_frame_rendered_ms_ = last_unfreeze_time_ms_ = now_ms; + } + + auto blocky_frame_it = blocky_frames_.find(frame.timestamp()); + + if (num_frames_rendered_ > 0) { + // Process inter-frame delay. + const int64_t interframe_delay_ms = now_ms - last_frame_rendered_ms_; + const double interframe_delays_secs = interframe_delay_ms / 1000.0; + + // Sum of squared inter frame intervals is used to calculate the harmonic + // frame rate metric. The metric aims to reflect overall experience related + // to smoothness of video playback and includes both freezes and pauses. 
+ sum_squared_interframe_delays_secs_ += + interframe_delays_secs * interframe_delays_secs; + + if (!is_paused_) { + render_interframe_delays_.AddSample(interframe_delay_ms); + + bool was_freeze = false; + if (render_interframe_delays_.Size() >= kMinFrameSamplesToDetectFreeze) { + const absl::optional avg_interframe_delay = + render_interframe_delays_.GetAverageRoundedDown(); + RTC_DCHECK(avg_interframe_delay); + was_freeze = interframe_delay_ms >= + std::max(3 * *avg_interframe_delay, + *avg_interframe_delay + kMinIncreaseForFreezeMs); + } + + if (was_freeze) { + freezes_durations_.Add(interframe_delay_ms); + smooth_playback_durations_.Add(last_frame_rendered_ms_ - + last_unfreeze_time_ms_); + last_unfreeze_time_ms_ = now_ms; + } else { + // Count spatial metrics if there were no freeze. + time_in_resolution_ms_[current_resolution_] += interframe_delay_ms; + + if (is_last_frame_blocky_) { + time_in_blocky_video_ms_ += interframe_delay_ms; + } + } + } + } + + if (is_paused_) { + // If the stream was paused since the previous frame, do not count the + // pause toward smooth playback. Explicitly count the part before it and + // start the new smooth playback interval from this frame. 
+ is_paused_ = false; + if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) { + smooth_playback_durations_.Add(last_frame_rendered_ms_ - + last_unfreeze_time_ms_); + } + last_unfreeze_time_ms_ = now_ms; + + if (num_frames_rendered_ > 0) { + pauses_durations_.Add(now_ms - last_frame_rendered_ms_); + } + } + + int64_t pixels = frame.width() * frame.height(); + if (pixels >= kPixelsInHighResolution) { + current_resolution_ = Resolution::High; + } else if (pixels >= kPixelsInMediumResolution) { + current_resolution_ = Resolution::Medium; + } else { + current_resolution_ = Resolution::Low; + } + + if (pixels < last_frame_pixels_) { + ++num_resolution_downgrades_; + } + + last_frame_pixels_ = pixels; + last_frame_rendered_ms_ = now_ms; + + is_last_frame_blocky_ = blocky_frame_it != blocky_frames_.end(); + if (is_last_frame_blocky_) { + blocky_frames_.erase(blocky_frames_.begin(), ++blocky_frame_it); + } + + ++num_frames_rendered_; +} + +void VideoQualityObserver::OnDecodedFrame(const VideoFrame& frame, + absl::optional qp, + VideoCodecType codec) { + if (qp) { + absl::optional qp_blocky_threshold; + // TODO(ilnik): add other codec types when we have QP for them. + switch (codec) { + case kVideoCodecVP8: + qp_blocky_threshold = kBlockyQpThresholdVp8; + break; + case kVideoCodecVP9: + qp_blocky_threshold = kBlockyQpThresholdVp9; + break; + default: + qp_blocky_threshold = absl::nullopt; + } + + RTC_DCHECK(blocky_frames_.find(frame.timestamp()) == blocky_frames_.end()); + + if (qp_blocky_threshold && *qp > *qp_blocky_threshold) { + // Cache blocky frame. Its duration will be calculated in render callback. 
+ if (blocky_frames_.size() > kMaxNumCachedBlockyFrames) { + RTC_LOG(LS_WARNING) << "Overflow of blocky frames cache."; + blocky_frames_.erase( + blocky_frames_.begin(), + std::next(blocky_frames_.begin(), kMaxNumCachedBlockyFrames / 2)); + } + + blocky_frames_.insert(frame.timestamp()); + } + } +} + +void VideoQualityObserver::OnStreamInactive() { + is_paused_ = true; +} + +uint32_t VideoQualityObserver::NumFreezes() const { + return freezes_durations_.NumSamples(); +} + +uint32_t VideoQualityObserver::NumPauses() const { + return pauses_durations_.NumSamples(); +} + +uint32_t VideoQualityObserver::TotalFreezesDurationMs() const { + return freezes_durations_.Sum(kMinRequiredSamples).value_or(0); +} + +uint32_t VideoQualityObserver::TotalPausesDurationMs() const { + return pauses_durations_.Sum(kMinRequiredSamples).value_or(0); +} + +uint32_t VideoQualityObserver::TotalFramesDurationMs() const { + return last_frame_rendered_ms_ - first_frame_rendered_ms_; +} + +double VideoQualityObserver::SumSquaredFrameDurationsSec() const { + return sum_squared_interframe_delays_secs_; +} + +} // namespace internal +} // namespace webrtc diff --git a/video/video_quality_observer2.h b/video/video_quality_observer2.h new file mode 100644 index 0000000000..af71937e43 --- /dev/null +++ b/video/video_quality_observer2.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_QUALITY_OBSERVER2_H_ +#define VIDEO_VIDEO_QUALITY_OBSERVER2_H_ + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame.h" +#include "rtc_base/numerics/moving_average.h" +#include "rtc_base/numerics/sample_counter.h" + +namespace webrtc { +namespace internal { + +// Calculates spatial and temporal quality metrics and reports them to UMA +// stats. +class VideoQualityObserver { + public: + // Use either VideoQualityObserver::kBlockyQpThresholdVp8 or + // VideoQualityObserver::kBlockyQpThresholdVp9. + explicit VideoQualityObserver(VideoContentType content_type); + ~VideoQualityObserver() = default; + + void OnDecodedFrame(const VideoFrame& frame, + absl::optional qp, + VideoCodecType codec); + + void OnRenderedFrame(const VideoFrame& frame, int64_t now_ms); + + void OnStreamInactive(); + + uint32_t NumFreezes() const; + uint32_t NumPauses() const; + uint32_t TotalFreezesDurationMs() const; + uint32_t TotalPausesDurationMs() const; + uint32_t TotalFramesDurationMs() const; + double SumSquaredFrameDurationsSec() const; + + void UpdateHistograms(); + + static const uint32_t kMinFrameSamplesToDetectFreeze; + static const uint32_t kMinIncreaseForFreezeMs; + static const uint32_t kAvgInterframeDelaysWindowSizeFrames; + + private: + enum Resolution { + Low = 0, + Medium = 1, + High = 2, + }; + + int64_t last_frame_rendered_ms_; + int64_t num_frames_rendered_; + int64_t first_frame_rendered_ms_; + int64_t last_frame_pixels_; + bool is_last_frame_blocky_; + // Decoded timestamp of the last delayed frame. + int64_t last_unfreeze_time_ms_; + rtc::MovingAverage render_interframe_delays_; + double sum_squared_interframe_delays_secs_; + // An inter-frame delay is counted as a freeze if it's significantly longer + // than average inter-frame delay. 
+ rtc::SampleCounter freezes_durations_; + rtc::SampleCounter pauses_durations_; + // Time between freezes. + rtc::SampleCounter smooth_playback_durations_; + // Counters for time spent in different resolutions. Time between each two + // Consecutive frames is counted to bin corresponding to the first frame + // resolution. + std::vector time_in_resolution_ms_; + // Resolution of the last decoded frame. Resolution enum is used as an index. + Resolution current_resolution_; + int num_resolution_downgrades_; + // Similar to resolution, time spent in high-QP video. + int64_t time_in_blocky_video_ms_; + // Content type of the last decoded frame. + VideoContentType content_type_; + bool is_paused_; + + // Set of decoded frames with high QP value. + std::set blocky_frames_; +}; + +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_VIDEO_QUALITY_OBSERVER2_H_ diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc index b2b96db9bf..f1b3fc7b5b 100644 --- a/video/video_receive_stream.cc +++ b/video/video_receive_stream.cc @@ -211,6 +211,7 @@ VideoReceiveStream::VideoReceiveStream( &config_, rtp_receive_statistics_.get(), &stats_proxy_, + &stats_proxy_, process_thread_, this, // NackSender nullptr, // Use default KeyFrameRequestSender diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc new file mode 100644 index 0000000000..899d9d5e65 --- /dev/null +++ b/video/video_receive_stream2.cc @@ -0,0 +1,795 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/video_receive_stream2.h" + +#include +#include + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/video/encoded_image.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "call/rtp_stream_receiver_controller_interface.h" +#include "call/rtx_receive_stream.h" +#include "common_video/include/incoming_video_stream.h" +#include "media/base/h264_profile_level_id.h" +#include "modules/utility/include/process_thread.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/timing.h" +#include "modules/video_coding/utility/vp8_header_parser.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/keyframe_interval_settings.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/thread_registry.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" +#include "video/call_stats.h" +#include "video/frame_dumping_decoder.h" +#include "video/receive_statistics_proxy.h" + +namespace webrtc { + +namespace internal { +constexpr int VideoReceiveStream2::kMaxWaitForKeyFrameMs; + +namespace { + +using video_coding::EncodedFrame; +using ReturnReason = video_coding::FrameBuffer::ReturnReason; + +constexpr int kMinBaseMinimumDelayMs = 0; +constexpr int kMaxBaseMinimumDelayMs = 10000; + +constexpr int kMaxWaitForFrameMs = 3000; + +// Concrete instance of RecordableEncodedFrame wrapping 
needed content +// from video_coding::EncodedFrame. +class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { + public: + explicit WebRtcRecordableEncodedFrame(const EncodedFrame& frame) + : buffer_(frame.GetEncodedData()), + render_time_ms_(frame.RenderTime()), + codec_(frame.CodecSpecific()->codecType), + is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey), + resolution_{frame.EncodedImage()._encodedWidth, + frame.EncodedImage()._encodedHeight} { + if (frame.ColorSpace()) { + color_space_ = *frame.ColorSpace(); + } + } + + // VideoEncodedSinkInterface::FrameBuffer + rtc::scoped_refptr encoded_buffer() + const override { + return buffer_; + } + + absl::optional color_space() const override { + return color_space_; + } + + VideoCodecType codec() const override { return codec_; } + + bool is_key_frame() const override { return is_key_frame_; } + + EncodedResolution resolution() const override { return resolution_; } + + Timestamp render_time() const override { + return Timestamp::Millis(render_time_ms_); + } + + private: + rtc::scoped_refptr buffer_; + int64_t render_time_ms_; + VideoCodecType codec_; + bool is_key_frame_; + EncodedResolution resolution_; + absl::optional color_space_; +}; + +VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { + VideoCodec codec; + memset(&codec, 0, sizeof(codec)); + + codec.plType = decoder.payload_type; + codec.codecType = PayloadStringToCodecType(decoder.video_format.name); + + if (codec.codecType == kVideoCodecVP8) { + *(codec.VP8()) = VideoEncoder::GetDefaultVp8Settings(); + } else if (codec.codecType == kVideoCodecVP9) { + *(codec.VP9()) = VideoEncoder::GetDefaultVp9Settings(); + } else if (codec.codecType == kVideoCodecH264) { + *(codec.H264()) = VideoEncoder::GetDefaultH264Settings(); + } else if (codec.codecType == kVideoCodecMultiplex) { + VideoReceiveStream::Decoder associated_decoder = decoder; + associated_decoder.video_format = + 
SdpVideoFormat(CodecTypeToPayloadString(kVideoCodecVP9)); + VideoCodec associated_codec = CreateDecoderVideoCodec(associated_decoder); + associated_codec.codecType = kVideoCodecMultiplex; + return associated_codec; + } + + codec.width = 320; + codec.height = 180; + const int kDefaultStartBitrate = 300; + codec.startBitrate = codec.minBitrate = codec.maxBitrate = + kDefaultStartBitrate; + + return codec; +} + +// Video decoder class to be used for unknown codecs. Doesn't support decoding +// but logs messages to LS_ERROR. +class NullVideoDecoder : public webrtc::VideoDecoder { + public: + int32_t InitDecode(const webrtc::VideoCodec* codec_settings, + int32_t number_of_cores) override { + RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t Decode(const webrtc::EncodedImage& input_image, + bool missing_frames, + int64_t render_time_ms) override { + RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t RegisterDecodeCompleteCallback( + webrtc::DecodedImageCallback* callback) override { + RTC_LOG(LS_ERROR) + << "Can't register decode complete callback on NullVideoDecoder."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; } + + const char* ImplementationName() const override { return "NullVideoDecoder"; } +}; + +// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. +// Maximum time between frames before resetting the FrameBuffer to avoid RTP +// timestamps wraparound to affect FrameBuffer. +constexpr int kInactiveStreamThresholdMs = 600000; // 10 minutes. 
+ +} // namespace + +VideoReceiveStream2::VideoReceiveStream2( + TaskQueueFactory* task_queue_factory, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock, + VCMTiming* timing) + : task_queue_factory_(task_queue_factory), + transport_adapter_(config.rtcp_send_transport), + config_(std::move(config)), + num_cpu_cores_(num_cpu_cores), + process_thread_(process_thread), + clock_(clock), + call_stats_(call_stats), + source_tracker_(clock_), + stats_proxy_(&config_, clock_), + rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), + timing_(timing), + video_receiver_(clock_, timing_.get()), + rtp_video_stream_receiver_(clock_, + &transport_adapter_, + call_stats, + packet_router, + &config_, + rtp_receive_statistics_.get(), + &stats_proxy_, + &stats_proxy_, + process_thread_, + this, // NackSender + nullptr, // Use default KeyFrameRequestSender + this, // OnCompleteFrameCallback + config_.frame_decryptor, + config_.frame_transformer), + rtp_stream_sync_(this), + max_wait_for_keyframe_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() + .MaxWaitForKeyframeMs() + .value_or(kMaxWaitForKeyFrameMs)), + max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() + .MaxWaitForFrameMs() + .value_or(kMaxWaitForFrameMs)), + decode_queue_(task_queue_factory_->CreateTaskQueue( + "DecodingQueue", + TaskQueueFactory::Priority::HIGH)) { + RTC_LOG(LS_INFO) << "VideoReceiveStream2: " << config_.ToString(); + + RTC_DCHECK(config_.renderer); + RTC_DCHECK(process_thread_); + RTC_DCHECK(call_stats_); + + module_process_sequence_checker_.Detach(); + network_sequence_checker_.Detach(); + + RTC_DCHECK(!config_.decoders.empty()); + std::set decoder_payload_types; + for (const Decoder& decoder : config_.decoders) { + RTC_CHECK(decoder.decoder_factory); + 
RTC_CHECK(decoder_payload_types.find(decoder.payload_type) == + decoder_payload_types.end()) + << "Duplicate payload type (" << decoder.payload_type + << ") for different decoders."; + decoder_payload_types.insert(decoder.payload_type); + } + + timing_->set_render_delay(config_.render_delay_ms); + + frame_buffer_.reset( + new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_)); + + process_thread_->RegisterModule(&rtp_stream_sync_, RTC_FROM_HERE); + // Register with RtpStreamReceiverController. + media_receiver_ = receiver_controller->CreateReceiver( + config_.rtp.remote_ssrc, &rtp_video_stream_receiver_); + if (config_.rtp.rtx_ssrc) { + rtx_receive_stream_ = std::make_unique( + &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types, + config_.rtp.remote_ssrc, rtp_receive_statistics_.get()); + rtx_receiver_ = receiver_controller->CreateReceiver( + config_.rtp.rtx_ssrc, rtx_receive_stream_.get()); + } else { + rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc, + true); + } +} + +VideoReceiveStream2::VideoReceiveStream2( + TaskQueueFactory* task_queue_factory, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock) + : VideoReceiveStream2(task_queue_factory, + receiver_controller, + num_cpu_cores, + packet_router, + std::move(config), + process_thread, + call_stats, + clock, + new VCMTiming(clock)) {} + +VideoReceiveStream2::~VideoReceiveStream2() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RTC_LOG(LS_INFO) << "~VideoReceiveStream2: " << config_.ToString(); + Stop(); + process_thread_->DeRegisterModule(&rtp_stream_sync_); +} + +void VideoReceiveStream2::SignalNetworkState(NetworkState state) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_video_stream_receiver_.SignalNetworkState(state); +} + +bool VideoReceiveStream2::DeliverRtcp(const 
uint8_t* packet, size_t length) { + return rtp_video_stream_receiver_.DeliverRtcp(packet, length); +} + +void VideoReceiveStream2::SetSync(Syncable* audio_syncable) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_stream_sync_.ConfigureSync(audio_syncable); +} + +void VideoReceiveStream2::Start() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + if (decoder_running_) { + return; + } + + const bool protected_by_fec = config_.rtp.protected_by_flexfec || + rtp_video_stream_receiver_.IsUlpfecEnabled(); + + frame_buffer_->Start(); + + if (rtp_video_stream_receiver_.IsRetransmissionsEnabled() && + protected_by_fec) { + frame_buffer_->SetProtectionMode(kProtectionNackFEC); + } + + transport_adapter_.Enable(); + rtc::VideoSinkInterface* renderer = nullptr; + if (config_.enable_prerenderer_smoothing) { + incoming_video_stream_.reset(new IncomingVideoStream( + task_queue_factory_, config_.render_delay_ms, this)); + renderer = incoming_video_stream_.get(); + } else { + renderer = this; + } + + for (const Decoder& decoder : config_.decoders) { + std::unique_ptr video_decoder = + decoder.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format, + config_.stream_id); + // If we still have no valid decoder, we have to create a "Null" decoder + // that ignores all calls. The reason we can get into this state is that the + // old decoder factory interface doesn't have a way to query supported + // codecs. + if (!video_decoder) { + video_decoder = std::make_unique(); + } + + std::string decoded_output_file = + field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory"); + // Because '/' can't be used inside a field trial parameter, we use ';' + // instead. + // This is only relevant to WebRTC-DecoderDataDumpDirectory + // field trial. ';' is chosen arbitrary. Even though it's a legal character + // in some file systems, we can sacrifice ability to use it in the path to + // dumped video, since it's developers-only feature for debugging. 
+ absl::c_replace(decoded_output_file, ';', '/'); + if (!decoded_output_file.empty()) { + char filename_buffer[256]; + rtc::SimpleStringBuilder ssb(filename_buffer); + ssb << decoded_output_file << "/webrtc_receive_stream_" + << this->config_.rtp.remote_ssrc << "-" << rtc::TimeMicros() + << ".ivf"; + video_decoder = CreateFrameDumpingDecoderWrapper( + std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); + } + + video_decoders_.push_back(std::move(video_decoder)); + + video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(), + decoder.payload_type); + VideoCodec codec = CreateDecoderVideoCodec(decoder); + + const bool raw_payload = + config_.rtp.raw_payload_types.count(codec.plType) > 0; + rtp_video_stream_receiver_.AddReceiveCodec( + codec, decoder.video_format.parameters, raw_payload); + RTC_CHECK_EQ(VCM_OK, video_receiver_.RegisterReceiveCodec( + &codec, num_cpu_cores_, false)); + } + + RTC_DCHECK(renderer != nullptr); + video_stream_decoder_.reset( + new VideoStreamDecoder(&video_receiver_, &stats_proxy_, renderer)); + + // Make sure we register as a stats observer *after* we've prepared the + // |video_stream_decoder_|. + call_stats_->RegisterStatsObserver(this); + + // Start decoding on task queue. 
+ video_receiver_.DecoderThreadStarting(); + stats_proxy_.DecoderThreadStarting(); + decode_queue_.PostTask([this] { + RTC_DCHECK_RUN_ON(&decode_queue_); + decoder_stopped_ = false; + StartNextDecode(); + }); + decoder_running_ = true; + rtp_video_stream_receiver_.StartReceive(); +} + +void VideoReceiveStream2::Stop() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_video_stream_receiver_.StopReceive(); + + stats_proxy_.OnUniqueFramesCounted( + rtp_video_stream_receiver_.GetUniqueFramesSeen()); + + decode_queue_.PostTask([this] { frame_buffer_->Stop(); }); + + call_stats_->DeregisterStatsObserver(this); + + if (decoder_running_) { + rtc::Event done; + decode_queue_.PostTask([this, &done] { + RTC_DCHECK_RUN_ON(&decode_queue_); + decoder_stopped_ = true; + done.Set(); + }); + done.Wait(rtc::Event::kForever); + + decoder_running_ = false; + video_receiver_.DecoderThreadStopped(); + stats_proxy_.DecoderThreadStopped(); + // Deregister external decoders so they are no longer running during + // destruction. This effectively stops the VCM since the decoder thread is + // stopped, the VCM is deregistered and no asynchronous decoder threads are + // running. 
+ for (const Decoder& decoder : config_.decoders) + video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type); + + UpdateHistograms(); + } + + video_stream_decoder_.reset(); + incoming_video_stream_.reset(); + transport_adapter_.Disable(); +} + +VideoReceiveStream::Stats VideoReceiveStream2::GetStats() const { + VideoReceiveStream::Stats stats = stats_proxy_.GetStats(); + stats.total_bitrate_bps = 0; + StreamStatistician* statistician = + rtp_receive_statistics_->GetStatistician(stats.ssrc); + if (statistician) { + stats.rtp_stats = statistician->GetStats(); + stats.total_bitrate_bps = statistician->BitrateReceived(); + } + if (config_.rtp.rtx_ssrc) { + StreamStatistician* rtx_statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + if (rtx_statistician) + stats.total_bitrate_bps += rtx_statistician->BitrateReceived(); + } + return stats; +} + +void VideoReceiveStream2::UpdateHistograms() { + absl::optional fraction_lost; + StreamDataCounters rtp_stats; + StreamStatistician* statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.remote_ssrc); + if (statistician) { + fraction_lost = statistician->GetFractionLostInPercent(); + rtp_stats = statistician->GetReceiveStreamDataCounters(); + } + if (config_.rtp.rtx_ssrc) { + StreamStatistician* rtx_statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + if (rtx_statistician) { + StreamDataCounters rtx_stats = + rtx_statistician->GetReceiveStreamDataCounters(); + stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, &rtx_stats); + return; + } + } + stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr); +} + +void VideoReceiveStream2::AddSecondarySink(RtpPacketSinkInterface* sink) { + rtp_video_stream_receiver_.AddSecondarySink(sink); +} + +void VideoReceiveStream2::RemoveSecondarySink( + const RtpPacketSinkInterface* sink) { + rtp_video_stream_receiver_.RemoveSecondarySink(sink); +} + +bool 
VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) { + return false; + } + + rtc::CritScope cs(&playout_delay_lock_); + base_minimum_playout_delay_ms_ = delay_ms; + UpdatePlayoutDelays(); + return true; +} + +int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + rtc::CritScope cs(&playout_delay_lock_); + return base_minimum_playout_delay_ms_; +} + +// TODO(tommi): This method grabs a lock 6 times. +void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { + int64_t video_playout_ntp_ms; + int64_t sync_offset_ms; + double estimated_freq_khz; + // TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the + // function itself, another in GetChannel() and a third in + // GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function + // succeeds most of the time, which leads to grabbing a fourth lock. + if (rtp_stream_sync_.GetStreamSyncOffsetInMs( + video_frame.timestamp(), video_frame.render_time_ms(), + &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) { + // TODO(tommi): OnSyncOffsetUpdated grabs a lock. + stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms, + estimated_freq_khz); + } + source_tracker_.OnFrameDelivered(video_frame.packet_infos()); + + config_.renderer->OnFrame(video_frame); + + // TODO(tommi): OnRenderFrame grabs a lock too. 
+ stats_proxy_.OnRenderedFrame(video_frame); +} + +void VideoReceiveStream2::SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) { + rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor)); +} + +void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer( + std::move(frame_transformer)); +} + +void VideoReceiveStream2::SendNack( + const std::vector& sequence_numbers, + bool buffering_allowed) { + RTC_DCHECK(buffering_allowed); + rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers); +} + +void VideoReceiveStream2::RequestKeyFrame(int64_t timestamp_ms) { + rtp_video_stream_receiver_.RequestKeyFrame(); + last_keyframe_request_ms_ = timestamp_ms; +} + +void VideoReceiveStream2::OnCompleteFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&network_sequence_checker_); + // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. 
+ int64_t time_now_ms = clock_->TimeInMilliseconds(); + if (last_complete_frame_time_ms_ > 0 && + time_now_ms - last_complete_frame_time_ms_ > kInactiveStreamThresholdMs) { + frame_buffer_->Clear(); + } + last_complete_frame_time_ms_ = time_now_ms; + + const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; + if (playout_delay.min_ms >= 0) { + rtc::CritScope cs(&playout_delay_lock_); + frame_minimum_playout_delay_ms_ = playout_delay.min_ms; + UpdatePlayoutDelays(); + } + + if (playout_delay.max_ms >= 0) { + rtc::CritScope cs(&playout_delay_lock_); + frame_maximum_playout_delay_ms_ = playout_delay.max_ms; + UpdatePlayoutDelays(); + } + + int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame)); + if (last_continuous_pid != -1) + rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid); +} + +void VideoReceiveStream2::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { + RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); + frame_buffer_->UpdateRtt(max_rtt_ms); + rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms); +} + +uint32_t VideoReceiveStream2::id() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + return config_.rtp.remote_ssrc; +} + +absl::optional VideoReceiveStream2::GetInfo() const { + RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); + absl::optional info = + rtp_video_stream_receiver_.GetSyncInfo(); + + if (!info) + return absl::nullopt; + + info->current_delay_ms = timing_->TargetVideoDelay(); + return info; +} + +bool VideoReceiveStream2::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, + int64_t* time_ms) const { + RTC_NOTREACHED(); + return 0; +} + +void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs( + int64_t ntp_timestamp_ms, + int64_t time_ms) { + RTC_NOTREACHED(); +} + +void VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) { + RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); + rtc::CritScope cs(&playout_delay_lock_); + syncable_minimum_playout_delay_ms_ = delay_ms; + 
UpdatePlayoutDelays(); +} + +int64_t VideoReceiveStream2::GetWaitMs() const { + return keyframe_required_ ? max_wait_for_keyframe_ms_ + : max_wait_for_frame_ms_; +} + +void VideoReceiveStream2::StartNextDecode() { + TRACE_EVENT0("webrtc", "VideoReceiveStream2::StartNextDecode"); + frame_buffer_->NextFrame( + GetWaitMs(), keyframe_required_, &decode_queue_, + /* encoded frame handler */ + [this](std::unique_ptr frame, ReturnReason res) { + RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout); + RTC_DCHECK_EQ(frame != nullptr, res == ReturnReason::kFrameFound); + decode_queue_.PostTask([this, frame = std::move(frame)]() mutable { + RTC_DCHECK_RUN_ON(&decode_queue_); + if (decoder_stopped_) + return; + if (frame) { + HandleEncodedFrame(std::move(frame)); + } else { + HandleFrameBufferTimeout(); + } + StartNextDecode(); + }); + }); +} + +void VideoReceiveStream2::HandleEncodedFrame( + std::unique_ptr frame) { + int64_t now_ms = clock_->TimeInMilliseconds(); + + // Current OnPreDecode only cares about QP for VP8. 
+ int qp = -1; + if (frame->CodecSpecific()->codecType == kVideoCodecVP8) { + if (!vp8::GetQp(frame->data(), frame->size(), &qp)) { + RTC_LOG(LS_WARNING) << "Failed to extract QP from VP8 video frame"; + } + } + stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp); + HandleKeyFrameGeneration(frame->FrameType() == VideoFrameType::kVideoFrameKey, + now_ms); + int decode_result = video_receiver_.Decode(frame.get()); + if (decode_result == WEBRTC_VIDEO_CODEC_OK || + decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) { + keyframe_required_ = false; + frame_decoded_ = true; + rtp_video_stream_receiver_.FrameDecoded(frame->id.picture_id); + + if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) + RequestKeyFrame(now_ms); + } else if (!frame_decoded_ || !keyframe_required_ || + (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ < now_ms)) { + keyframe_required_ = true; + // TODO(philipel): Remove this keyframe request when downstream project + // has been fixed. + RequestKeyFrame(now_ms); + } + + if (encoded_frame_buffer_function_) { + frame->Retain(); + encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame)); + } +} + +void VideoReceiveStream2::HandleKeyFrameGeneration( + bool received_frame_is_keyframe, + int64_t now_ms) { + // Repeat sending keyframe requests if we've requested a keyframe. + if (!keyframe_generation_requested_) { + return; + } + if (received_frame_is_keyframe) { + keyframe_generation_requested_ = false; + } else if (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ <= now_ms) { + if (!IsReceivingKeyFrame(now_ms)) { + RequestKeyFrame(now_ms); + } + } else { + // It hasn't been long enough since the last keyframe request, do nothing. 
+ } +} + +void VideoReceiveStream2::HandleFrameBufferTimeout() { + int64_t now_ms = clock_->TimeInMilliseconds(); + absl::optional last_packet_ms = + rtp_video_stream_receiver_.LastReceivedPacketMs(); + + // To avoid spamming keyframe requests for a stream that is not active we + // check if we have received a packet within the last 5 seconds. + bool stream_is_active = last_packet_ms && now_ms - *last_packet_ms < 5000; + if (!stream_is_active) + stats_proxy_.OnStreamInactive(); + + if (stream_is_active && !IsReceivingKeyFrame(now_ms) && + (!config_.crypto_options.sframe.require_frame_encryption || + rtp_video_stream_receiver_.IsDecryptable())) { + RTC_LOG(LS_WARNING) << "No decodable frame in " << GetWaitMs() + << " ms, requesting keyframe."; + RequestKeyFrame(now_ms); + } +} + +bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const { + absl::optional last_keyframe_packet_ms = + rtp_video_stream_receiver_.LastReceivedKeyframePacketMs(); + + // If we recently have been receiving packets belonging to a keyframe then + // we assume a keyframe is currently being received. 
+ bool receiving_keyframe = + last_keyframe_packet_ms && + timestamp_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms_; + return receiving_keyframe; +} + +void VideoReceiveStream2::UpdatePlayoutDelays() const { + const int minimum_delay_ms = + std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_, + syncable_minimum_playout_delay_ms_}); + if (minimum_delay_ms >= 0) { + timing_->set_min_playout_delay(minimum_delay_ms); + } + + const int maximum_delay_ms = frame_maximum_playout_delay_ms_; + if (maximum_delay_ms >= 0) { + timing_->set_max_playout_delay(maximum_delay_ms); + } +} + +std::vector VideoReceiveStream2::GetSources() const { + return source_tracker_.GetSources(); +} + +VideoReceiveStream2::RecordingState +VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, + bool generate_key_frame) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtc::Event event; + RecordingState old_state; + decode_queue_.PostTask([this, &event, &old_state, generate_key_frame, + state = std::move(state)] { + RTC_DCHECK_RUN_ON(&decode_queue_); + // Save old state. + old_state.callback = std::move(encoded_frame_buffer_function_); + old_state.keyframe_needed = keyframe_generation_requested_; + old_state.last_keyframe_request_ms = last_keyframe_request_ms_; + + // Set new state. 
+ encoded_frame_buffer_function_ = std::move(state.callback); + if (generate_key_frame) { + RequestKeyFrame(clock_->TimeInMilliseconds()); + keyframe_generation_requested_ = true; + } else { + keyframe_generation_requested_ = state.keyframe_needed; + last_keyframe_request_ms_ = state.last_keyframe_request_ms.value_or(0); + } + event.Set(); + }); + event.Wait(rtc::Event::kForever); + return old_state; +} + +void VideoReceiveStream2::GenerateKeyFrame() { + decode_queue_.PostTask([this]() { + RTC_DCHECK_RUN_ON(&decode_queue_); + RequestKeyFrame(clock_->TimeInMilliseconds()); + keyframe_generation_requested_ = true; + }); +} + +} // namespace internal +} // namespace webrtc diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h new file mode 100644 index 0000000000..7fb940431a --- /dev/null +++ b/video/video_receive_stream2.h @@ -0,0 +1,238 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_RECEIVE_STREAM2_H_ +#define VIDEO_VIDEO_RECEIVE_STREAM2_H_ + +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/transport/media/media_transport_interface.h" +#include "api/video/recordable_encoded_frame.h" +#include "call/rtp_packet_sink_interface.h" +#include "call/syncable.h" +#include "call/video_receive_stream.h" +#include "modules/rtp_rtcp/include/flexfec_receiver.h" +#include "modules/rtp_rtcp/source/source_tracker.h" +#include "modules/video_coding/frame_buffer2.h" +#include "modules/video_coding/video_receiver2.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_queue.h" +#include "system_wrappers/include/clock.h" +#include "video/receive_statistics_proxy2.h" +#include "video/rtp_streams_synchronizer.h" +#include "video/rtp_video_stream_receiver.h" +#include "video/transport_adapter.h" +#include "video/video_stream_decoder2.h" + +namespace webrtc { + +class CallStats; +class ProcessThread; +class RTPFragmentationHeader; +class RtpStreamReceiverInterface; +class RtpStreamReceiverControllerInterface; +class RtxReceiveStream; +class VCMTiming; + +namespace internal { + +class VideoReceiveStream2 : public webrtc::VideoReceiveStream, + public rtc::VideoSinkInterface, + public NackSender, + public video_coding::OnCompleteFrameCallback, + public Syncable, + public CallStatsObserver { + public: + // The default number of milliseconds to pass before re-requesting a key frame + // to be sent. 
+ static constexpr int kMaxWaitForKeyFrameMs = 200; + + VideoReceiveStream2(TaskQueueFactory* task_queue_factory, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock, + VCMTiming* timing); + VideoReceiveStream2(TaskQueueFactory* task_queue_factory, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock); + ~VideoReceiveStream2() override; + + const Config& config() const { return config_; } + + void SignalNetworkState(NetworkState state); + bool DeliverRtcp(const uint8_t* packet, size_t length); + + void SetSync(Syncable* audio_syncable); + + // Implements webrtc::VideoReceiveStream. + void Start() override; + void Stop() override; + + webrtc::VideoReceiveStream::Stats GetStats() const override; + + void AddSecondarySink(RtpPacketSinkInterface* sink) override; + void RemoveSecondarySink(const RtpPacketSinkInterface* sink) override; + + // SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called + // from webrtc/api level and requested by user code. For e.g. blink/js layer + // in Chromium. + bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override; + int GetBaseMinimumPlayoutDelayMs() const override; + + void SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) override; + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) override; + + // Implements rtc::VideoSinkInterface. + void OnFrame(const VideoFrame& video_frame) override; + + // Implements NackSender. + // For this particular override of the interface, + // only (buffering_allowed == true) is acceptable. 
+ void SendNack(const std::vector& sequence_numbers, + bool buffering_allowed) override; + + // Implements video_coding::OnCompleteFrameCallback. + void OnCompleteFrame( + std::unique_ptr frame) override; + + // Implements CallStatsObserver::OnRttUpdate + void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; + + // Implements Syncable. + uint32_t id() const override; + absl::optional GetInfo() const override; + bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, + int64_t* time_ms) const override; + void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, + int64_t time_ms) override; + + // SetMinimumPlayoutDelay is only called by A/V sync. + void SetMinimumPlayoutDelay(int delay_ms) override; + + std::vector GetSources() const override; + + RecordingState SetAndGetRecordingState(RecordingState state, + bool generate_key_frame) override; + void GenerateKeyFrame() override; + + private: + int64_t GetWaitMs() const; + void StartNextDecode() RTC_RUN_ON(decode_queue_); + void HandleEncodedFrame(std::unique_ptr frame) + RTC_RUN_ON(decode_queue_); + void HandleFrameBufferTimeout() RTC_RUN_ON(decode_queue_); + void UpdatePlayoutDelays() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(playout_delay_lock_); + void RequestKeyFrame(int64_t timestamp_ms) RTC_RUN_ON(decode_queue_); + void HandleKeyFrameGeneration(bool received_frame_is_keyframe, int64_t now_ms) + RTC_RUN_ON(decode_queue_); + bool IsReceivingKeyFrame(int64_t timestamp_ms) const + RTC_RUN_ON(decode_queue_); + + void UpdateHistograms(); + + SequenceChecker worker_sequence_checker_; + SequenceChecker module_process_sequence_checker_; + SequenceChecker network_sequence_checker_; + + TaskQueueFactory* const task_queue_factory_; + + TransportAdapter transport_adapter_; + const VideoReceiveStream::Config config_; + const int num_cpu_cores_; + ProcessThread* const process_thread_; + Clock* const clock_; + + CallStats* const call_stats_; + + bool decoder_running_ RTC_GUARDED_BY(worker_sequence_checker_) = 
false; + bool decoder_stopped_ RTC_GUARDED_BY(decode_queue_) = true; + + SourceTracker source_tracker_; + ReceiveStatisticsProxy stats_proxy_; + // Shared by media and rtx stream receivers, since the latter has no RtpRtcp + // module of its own. + const std::unique_ptr rtp_receive_statistics_; + + std::unique_ptr timing_; // Jitter buffer experiment. + VideoReceiver2 video_receiver_; + std::unique_ptr> incoming_video_stream_; + RtpVideoStreamReceiver rtp_video_stream_receiver_; + std::unique_ptr video_stream_decoder_; + RtpStreamsSynchronizer rtp_stream_sync_; + + // TODO(nisse, philipel): Creation and ownership of video encoders should be + // moved to the new VideoStreamDecoder. + std::vector> video_decoders_; + + // Members for the new jitter buffer experiment. + std::unique_ptr frame_buffer_; + + std::unique_ptr media_receiver_; + std::unique_ptr rtx_receive_stream_; + std::unique_ptr rtx_receiver_; + + // Whenever we are in an undecodable state (stream has just started or due to + // a decoding error) we require a keyframe to restart the stream. + bool keyframe_required_ = true; + + // If we have successfully decoded any frame. + bool frame_decoded_ = false; + + int64_t last_keyframe_request_ms_ = 0; + int64_t last_complete_frame_time_ms_ = 0; + + // Keyframe request intervals are configurable through field trials. + const int max_wait_for_keyframe_ms_; + const int max_wait_for_frame_ms_; + + rtc::CriticalSection playout_delay_lock_; + + // All of them tries to change current min_playout_delay on |timing_| but + // source of the change request is different in each case. Among them the + // biggest delay is used. -1 means use default value from the |timing_|. + // + // Minimum delay as decided by the RTP playout delay extension. + int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1; + // Minimum delay as decided by the setLatency function in "webrtc/api". 
+ int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1; + // Minimum delay as decided by the A/V synchronization feature. + int syncable_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = + -1; + + // Maximum delay as decided by the RTP playout delay extension. + int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1; + + // Function that is triggered with encoded frames, if not empty. + std::function + encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_); + // Set to true while we're requesting keyframes but not yet received one. + bool keyframe_generation_requested_ RTC_GUARDED_BY(decode_queue_) = false; + + // Defined last so they are destroyed before all other members. + rtc::TaskQueue decode_queue_; +}; +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_VIDEO_RECEIVE_STREAM2_H_ diff --git a/video/video_stream_decoder2.cc b/video/video_stream_decoder2.cc new file mode 100644 index 0000000000..a73bb649ea --- /dev/null +++ b/video/video_stream_decoder2.cc @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/video_stream_decoder2.h" + +#include "modules/video_coding/video_receiver2.h" +#include "rtc_base/checks.h" +#include "video/receive_statistics_proxy2.h" + +namespace webrtc { +namespace internal { + +VideoStreamDecoder::VideoStreamDecoder( + VideoReceiver2* video_receiver, + ReceiveStatisticsProxy* receive_statistics_proxy, + rtc::VideoSinkInterface* incoming_video_stream) + : video_receiver_(video_receiver), + receive_stats_callback_(receive_statistics_proxy), + incoming_video_stream_(incoming_video_stream) { + RTC_DCHECK(video_receiver_); + + video_receiver_->RegisterReceiveCallback(this); +} + +VideoStreamDecoder::~VideoStreamDecoder() { + // Note: There's an assumption at this point that the decoder thread is + // *not* running. If it was, then there could be a race for each of these + // callbacks. + + // Unset all the callback pointers that we set in the ctor. + video_receiver_->RegisterReceiveCallback(nullptr); +} + +// Do not acquire the lock of |video_receiver_| in this function. Decode +// callback won't necessarily be called from the decoding thread. The decoding +// thread may have held the lock when calling VideoDecoder::Decode, Reset, or +// Release. Acquiring the same lock in the path of decode callback can deadlock. 
+int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type) { + receive_stats_callback_->OnDecodedFrame(video_frame, qp, decode_time_ms, + content_type); + incoming_video_stream_->OnFrame(video_frame); + return 0; +} + +void VideoStreamDecoder::OnDroppedFrames(uint32_t frames_dropped) { + receive_stats_callback_->OnDroppedFrames(frames_dropped); +} + +void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) { + receive_stats_callback_->OnIncomingPayloadType(payload_type); +} + +void VideoStreamDecoder::OnDecoderImplementationName( + const char* implementation_name) { + receive_stats_callback_->OnDecoderImplementationName(implementation_name); +} + +} // namespace internal +} // namespace webrtc diff --git a/video/video_stream_decoder2.h b/video/video_stream_decoder2.h new file mode 100644 index 0000000000..04f98bc044 --- /dev/null +++ b/video/video_stream_decoder2.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_STREAM_DECODER2_H_ +#define VIDEO_VIDEO_STREAM_DECODER2_H_ + +#include +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_sink_interface.h" +#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "rtc_base/critical_section.h" +#include "rtc_base/platform_thread.h" + +namespace webrtc { + +class VideoReceiver2; + +namespace internal { + +class ReceiveStatisticsProxy; + +class VideoStreamDecoder : public VCMReceiveCallback { + public: + VideoStreamDecoder( + VideoReceiver2* video_receiver, + ReceiveStatisticsProxy* receive_statistics_proxy, + rtc::VideoSinkInterface* incoming_video_stream); + ~VideoStreamDecoder() override; + + // Implements VCMReceiveCallback. + int32_t FrameToRender(VideoFrame& video_frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type) override; + void OnDroppedFrames(uint32_t frames_dropped) override; + void OnIncomingPayloadType(int payload_type) override; + void OnDecoderImplementationName(const char* implementation_name) override; + + private: + VideoReceiver2* const video_receiver_; + ReceiveStatisticsProxy* const receive_stats_callback_; + rtc::VideoSinkInterface* const incoming_video_stream_; +}; + +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_VIDEO_STREAM_DECODER2_H_