Fork a few VideoReceiveStream related classes.
We'll need to deprecate the previous classes due to being used externally as an API. Bug: webrtc:11489 Change-Id: I64de29c8adae304d0b7628e24dd0abc5be6387ba Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173960 Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org> Commit-Queue: Tommi <tommi@webrtc.org> Cr-Commit-Position: refs/heads/master@{#31136}
This commit is contained in:
parent
b261118156
commit
74fc574cbc
@ -22,6 +22,8 @@ rtc_library("video") {
|
||||
"quality_threshold.h",
|
||||
"receive_statistics_proxy.cc",
|
||||
"receive_statistics_proxy.h",
|
||||
"receive_statistics_proxy2.cc",
|
||||
"receive_statistics_proxy2.h",
|
||||
"report_block_stats.cc",
|
||||
"report_block_stats.h",
|
||||
"rtp_streams_synchronizer.cc",
|
||||
@ -42,14 +44,20 @@ rtc_library("video") {
|
||||
"transport_adapter.h",
|
||||
"video_quality_observer.cc",
|
||||
"video_quality_observer.h",
|
||||
"video_quality_observer2.cc",
|
||||
"video_quality_observer2.h",
|
||||
"video_receive_stream.cc",
|
||||
"video_receive_stream.h",
|
||||
"video_receive_stream2.cc",
|
||||
"video_receive_stream2.h",
|
||||
"video_send_stream.cc",
|
||||
"video_send_stream.h",
|
||||
"video_send_stream_impl.cc",
|
||||
"video_send_stream_impl.h",
|
||||
"video_stream_decoder.cc",
|
||||
"video_stream_decoder.h",
|
||||
"video_stream_decoder2.cc",
|
||||
"video_stream_decoder2.h",
|
||||
]
|
||||
|
||||
deps = [
|
||||
@ -507,6 +515,7 @@ if (rtc_include_tests) {
|
||||
"quality_limitation_reason_tracker_unittest.cc",
|
||||
"quality_scaling_tests.cc",
|
||||
"quality_threshold_unittest.cc",
|
||||
"receive_statistics_proxy2_unittest.cc",
|
||||
"receive_statistics_proxy_unittest.cc",
|
||||
"report_block_stats_unittest.cc",
|
||||
"rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc",
|
||||
|
||||
943
video/receive_statistics_proxy2.cc
Normal file
943
video/receive_statistics_proxy2.cc
Normal file
@ -0,0 +1,943 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/receive_statistics_proxy2.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <cmath>
|
||||
#include <utility>
|
||||
|
||||
#include "modules/video_coding/include/video_codec_interface.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
#include "system_wrappers/include/clock.h"
|
||||
#include "system_wrappers/include/field_trial.h"
|
||||
#include "system_wrappers/include/metrics.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace internal {
|
||||
namespace {
// Periodic time interval for processing samples for |freq_offset_counter_|.
const int64_t kFreqOffsetProcessIntervalMs = 40000;

// Configuration for bad call detection.
const int kBadCallMinRequiredSamples = 10;
const int kMinSampleLengthMs = 990;
const int kNumMeasurements = 10;
const int kNumMeasurementsVariance = kNumMeasurements * 1.5;
const float kBadFraction = 0.8f;
// For fps:
// Low means low enough to be bad, high means high enough to be good
const int kLowFpsThreshold = 12;
const int kHighFpsThreshold = 14;
// For qp and fps variance:
// Low means low enough to be good, high means high enough to be bad
const int kLowQpThresholdVp8 = 60;
const int kHighQpThresholdVp8 = 70;
const int kLowVarianceThreshold = 1;
const int kHighVarianceThreshold = 2;

// Some metrics are reported as a maximum over this period.
// This should be synchronized with a typical getStats polling interval in
// the clients.
const int kMovingMaxWindowMs = 1000;

// How large window we use to calculate the framerate/bitrate.
const int kRateStatisticsWindowSizeMs = 1000;

// Some sane ballpark estimate for maximum common value of inter-frame delay.
// Values below that will be stored explicitly in the array,
// values above - in the map.
const int kMaxCommonInterframeDelayMs = 500;

// Returns the UMA histogram prefix for |content_type|: screenshare streams
// are reported under the dedicated "WebRTC.Video.Screenshare" namespace,
// everything else under "WebRTC.Video".
const char* UmaPrefixForContentType(VideoContentType content_type) {
  if (videocontenttypehelpers::IsScreenshare(content_type))
    return "WebRTC.Video.Screenshare";
  return "WebRTC.Video";
}

// Returns the UMA histogram suffix encoding the simulcast layer (".S<id-1>")
// and/or the experiment group (".ExperimentGroup<id-1>") carried by
// |content_type|; empty string when neither is set.
std::string UmaSuffixForContentType(VideoContentType content_type) {
  char ss_buf[1024];
  rtc::SimpleStringBuilder ss(ss_buf);
  int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type);
  if (simulcast_id > 0) {
    ss << ".S" << simulcast_id - 1;
  }
  int experiment_id = videocontenttypehelpers::GetExperimentId(content_type);
  if (experiment_id > 0) {
    ss << ".ExperimentGroup" << experiment_id - 1;
  }
  return ss.str();
}

}  // namespace
|
||||
|
||||
// Constructs the stats proxy for the stream described by |config|; |config|
// and |clock| must outlive this object. The thread checkers are detached here
// so they attach to whichever threads first use them.
ReceiveStatisticsProxy::ReceiveStatisticsProxy(
    const VideoReceiveStream::Config* config,
    Clock* clock)
    : clock_(clock),
      config_(*config),
      start_ms_(clock->TimeInMilliseconds()),
      // Decode-time histograms can be disabled via field trial kill switch.
      enable_decode_time_histograms_(
          !field_trial::IsEnabled("WebRTC-DecodeTimeHistogramsKillSwitch")),
      last_sample_time_(clock->TimeInMilliseconds()),
      fps_threshold_(kLowFpsThreshold,
                     kHighFpsThreshold,
                     kBadFraction,
                     kNumMeasurements),
      qp_threshold_(kLowQpThresholdVp8,
                    kHighQpThresholdVp8,
                    kBadFraction,
                    kNumMeasurements),
      variance_threshold_(kLowVarianceThreshold,
                          kHighVarianceThreshold,
                          kBadFraction,
                          kNumMeasurementsVariance),
      num_bad_states_(0),
      num_certain_states_(0),
      // 1000ms window, scale 1000 for ms to s.
      decode_fps_estimator_(1000, 1000),
      renders_fps_estimator_(1000, 1000),
      render_fps_tracker_(100, 10u),
      render_pixel_tracker_(100, 10u),
      video_quality_observer_(
          new VideoQualityObserver(VideoContentType::UNSPECIFIED)),
      interframe_delay_max_moving_(kMovingMaxWindowMs),
      freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
      avg_rtt_ms_(0),
      last_content_type_(VideoContentType::UNSPECIFIED),
      last_codec_type_(kVideoCodecVP8),
      num_delayed_frames_rendered_(0),
      sum_missed_render_deadline_ms_(0),
      timing_frame_info_counter_(kMovingMaxWindowMs) {
  decode_thread_.Detach();
  network_thread_.Detach();
  stats_.ssrc = config_.rtp.remote_ssrc;
}
|
||||
|
||||
// Reports the stream's end-of-call UMA histograms and logs a summary of the
// collected stats. |fraction_lost| is the cumulative receive-side loss in
// percent (unset when unknown), |rtp_stats| the primary stream's counters and
// |rtx_stats| the optional RTX stream's counters.
void ReceiveStatisticsProxy::UpdateHistograms(
    absl::optional<int> fraction_lost,
    const StreamDataCounters& rtp_stats,
    const StreamDataCounters* rtx_stats) {
  // Not actually running on the decoder thread, but must be called after
  // DecoderThreadStopped, which detaches the thread checker. It is therefore
  // safe to access |qp_counters_|, which were updated on the decode thread
  // earlier.
  RTC_DCHECK_RUN_ON(&decode_thread_);

  rtc::CritScope lock(&crit_);

  char log_stream_buf[8 * 1024];
  rtc::SimpleStringBuilder log_stream(log_stream_buf);
  int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
  if (stats_.frame_counts.key_frames > 0 ||
      stats_.frame_counts.delta_frames > 0) {
    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds",
                                stream_duration_sec);
    log_stream << "WebRTC.Video.ReceiveStreamLifetimeInSeconds "
               << stream_duration_sec << '\n';
  }

  log_stream << "Frames decoded " << stats_.frames_decoded << '\n';

  if (num_unique_frames_) {
    int num_dropped_frames = *num_unique_frames_ - stats_.frames_decoded;
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.Receiver",
                              num_dropped_frames);
    log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames
               << '\n';
  }

  if (fraction_lost && stream_duration_sec >= metrics::kMinRunTimeInSeconds) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent",
                             *fraction_lost);
    log_stream << "WebRTC.Video.ReceivedPacketsLostInPercent " << *fraction_lost
               << '\n';
  }

  if (first_decoded_frame_time_ms_) {
    const int64_t elapsed_ms =
        (clock_->TimeInMilliseconds() - *first_decoded_frame_time_ms_);
    if (elapsed_ms >=
        metrics::kMinRunTimeInSeconds * rtc::kNumMillisecsPerSec) {
      int decoded_fps = static_cast<int>(
          (stats_.frames_decoded * 1000.0f / elapsed_ms) + 0.5f);
      RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond",
                               decoded_fps);
      log_stream << "WebRTC.Video.DecodedFramesPerSecond " << decoded_fps
                 << '\n';

      const uint32_t frames_rendered = stats_.frames_rendered;
      if (frames_rendered > 0) {
        RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DelayedFramesToRenderer",
                                 static_cast<int>(num_delayed_frames_rendered_ *
                                                  100 / frames_rendered));
        if (num_delayed_frames_rendered_ > 0) {
          RTC_HISTOGRAM_COUNTS_1000(
              "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs",
              static_cast<int>(sum_missed_render_deadline_ms_ /
                               num_delayed_frames_rendered_));
        }
      }
    }
  }

  const int kMinRequiredSamples = 200;
  int samples = static_cast<int>(render_fps_tracker_.TotalSampleCount());
  if (samples >= kMinRequiredSamples) {
    int rendered_fps = round(render_fps_tracker_.ComputeTotalRate());
    RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond",
                             rendered_fps);
    log_stream << "WebRTC.Video.RenderFramesPerSecond " << rendered_fps << '\n';
    RTC_HISTOGRAM_COUNTS_100000(
        "WebRTC.Video.RenderSqrtPixelsPerSecond",
        round(render_pixel_tracker_.ComputeTotalRate()));
  }

  absl::optional<int> sync_offset_ms =
      sync_offset_counter_.Avg(kMinRequiredSamples);
  if (sync_offset_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs",
                               *sync_offset_ms);
    log_stream << "WebRTC.Video.AVSyncOffsetInMs " << *sync_offset_ms << '\n';
  }
  AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats();
  if (freq_offset_stats.num_samples > 0) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz",
                               freq_offset_stats.average);
    log_stream << "WebRTC.Video.RtpToNtpFreqOffsetInKhz "
               << freq_offset_stats.ToString() << '\n';
  }

  int num_total_frames =
      stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames;
  if (num_total_frames >= kMinRequiredSamples) {
    int num_key_frames = stats_.frame_counts.key_frames;
    // Rounded permille of key frames among all received frames.
    int key_frames_permille =
        (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
                              key_frames_permille);
    log_stream << "WebRTC.Video.KeyFramesReceivedInPermille "
               << key_frames_permille << '\n';
  }

  absl::optional<int> qp = qp_counters_.vp8.Avg(kMinRequiredSamples);
  if (qp) {
    RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *qp);
    log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *qp << '\n';
  }
  absl::optional<int> decode_ms = decode_time_counter_.Avg(kMinRequiredSamples);
  if (decode_ms) {
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", *decode_ms);
    log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n';
  }
  absl::optional<int> jb_delay_ms =
      jitter_buffer_delay_counter_.Avg(kMinRequiredSamples);
  if (jb_delay_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
                               *jb_delay_ms);
    log_stream << "WebRTC.Video.JitterBufferDelayInMs " << *jb_delay_ms << '\n';
  }

  absl::optional<int> target_delay_ms =
      target_delay_counter_.Avg(kMinRequiredSamples);
  if (target_delay_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs",
                               *target_delay_ms);
    log_stream << "WebRTC.Video.TargetDelayInMs " << *target_delay_ms << '\n';
  }
  absl::optional<int> current_delay_ms =
      current_delay_counter_.Avg(kMinRequiredSamples);
  if (current_delay_ms) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs",
                               *current_delay_ms);
    log_stream << "WebRTC.Video.CurrentDelayInMs " << *current_delay_ms << '\n';
  }
  absl::optional<int> delay_ms = delay_counter_.Avg(kMinRequiredSamples);
  if (delay_ms)
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", *delay_ms);

  // Aggregate content_specific_stats_ by removing experiment or simulcast
  // information;
  std::map<VideoContentType, ContentSpecificStats> aggregated_stats;
  for (const auto& it : content_specific_stats_) {
    // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes).
    VideoContentType content_type = it.first;
    if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) {
      // Aggregate on experiment id.
      videocontenttypehelpers::SetExperimentId(&content_type, 0);
      aggregated_stats[content_type].Add(it.second);
    }
    // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes).
    content_type = it.first;
    if (videocontenttypehelpers::GetExperimentId(content_type) > 0) {
      // Aggregate on simulcast id.
      videocontenttypehelpers::SetSimulcastId(&content_type, 0);
      aggregated_stats[content_type].Add(it.second);
    }
    // Calculate aggregated metrics (no suffixes. Aggregated on everything).
    content_type = it.first;
    videocontenttypehelpers::SetSimulcastId(&content_type, 0);
    videocontenttypehelpers::SetExperimentId(&content_type, 0);
    aggregated_stats[content_type].Add(it.second);
  }

  for (const auto& it : aggregated_stats) {
    // For the metric Foo we report the following slices:
    // WebRTC.Video.Foo,
    // WebRTC.Video.Screenshare.Foo,
    // WebRTC.Video.Foo.S[0-3],
    // WebRTC.Video.Foo.ExperimentGroup[0-7],
    // WebRTC.Video.Screenshare.Foo.S[0-3],
    // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7].
    auto content_type = it.first;
    auto stats = it.second;
    std::string uma_prefix = UmaPrefixForContentType(content_type);
    std::string uma_suffix = UmaSuffixForContentType(content_type);
    // Metrics can be sliced on either simulcast id or experiment id but not
    // both.
    RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 ||
               videocontenttypehelpers::GetSimulcastId(content_type) == 0);

    absl::optional<int> e2e_delay_ms =
        stats.e2e_delay_counter.Avg(kMinRequiredSamples);
    if (e2e_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms);
      log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
                 << *e2e_delay_ms << '\n';
    }
    absl::optional<int> e2e_delay_max_ms = stats.e2e_delay_counter.Max();
    if (e2e_delay_max_ms && e2e_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_100000(
          uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms);
      log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
                 << *e2e_delay_max_ms << '\n';
    }
    absl::optional<int> interframe_delay_ms =
        stats.interframe_delay_counter.Avg(kMinRequiredSamples);
    if (interframe_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".InterframeDelayInMs" + uma_suffix,
          *interframe_delay_ms);
      log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
                 << *interframe_delay_ms << '\n';
    }
    absl::optional<int> interframe_delay_max_ms =
        stats.interframe_delay_counter.Max();
    if (interframe_delay_max_ms && interframe_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
          *interframe_delay_max_ms);
      log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " "
                 << *interframe_delay_max_ms << '\n';
    }

    absl::optional<uint32_t> interframe_delay_95p_ms =
        stats.interframe_delay_percentiles.GetPercentile(0.95f);
    // Gate on |interframe_delay_ms| having a value, consistent with the
    // InterframeDelayMaxInMs slice above. The previous check
    // |interframe_delay_ms != -1| compared an absl::optional against a
    // sentinel, which is true even when the optional is empty, so the
    // percentile was reported without the minimum-sample requirement.
    if (interframe_delay_95p_ms && interframe_delay_ms) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix,
          *interframe_delay_95p_ms);
      log_stream << uma_prefix << ".InterframeDelay95PercentileInMs"
                 << uma_suffix << " " << *interframe_delay_95p_ms << '\n';
    }

    absl::optional<int> width = stats.received_width.Avg(kMinRequiredSamples);
    if (width) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width);
      log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " "
                 << *width << '\n';
    }

    absl::optional<int> height = stats.received_height.Avg(kMinRequiredSamples);
    if (height) {
      RTC_HISTOGRAM_COUNTS_SPARSE_10000(
          uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height);
      log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " "
                 << *height << '\n';
    }

    if (content_type != VideoContentType::UNSPECIFIED) {
      // Don't report these 3 metrics unsliced, as more precise variants
      // are reported separately in this method.
      float flow_duration_sec = stats.flow_duration_ms / 1000.0;
      if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) {
        int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 /
                                                  flow_duration_sec / 1000);
        RTC_HISTOGRAM_COUNTS_SPARSE_10000(
            uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix,
            media_bitrate_kbps);
        log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix
                   << " " << media_bitrate_kbps << '\n';
      }

      int num_total_frames =
          stats.frame_counts.key_frames + stats.frame_counts.delta_frames;
      if (num_total_frames >= kMinRequiredSamples) {
        int num_key_frames = stats.frame_counts.key_frames;
        int key_frames_permille =
            (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
        RTC_HISTOGRAM_COUNTS_SPARSE_1000(
            uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix,
            key_frames_permille);
        log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix
                   << " " << key_frames_permille << '\n';
      }

      absl::optional<int> qp = stats.qp_counter.Avg(kMinRequiredSamples);
      if (qp) {
        RTC_HISTOGRAM_COUNTS_SPARSE_200(
            uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp);
        log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " "
                   << *qp << '\n';
      }
    }
  }

  StreamDataCounters rtp_rtx_stats = rtp_stats;
  if (rtx_stats)
    rtp_rtx_stats.Add(*rtx_stats);
  int64_t elapsed_sec =
      rtp_rtx_stats.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) /
      1000;
  if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.BitrateReceivedInKbps",
        static_cast<int>(rtp_rtx_stats.transmitted.TotalBytes() * 8 /
                         elapsed_sec / 1000));
    int media_bitrate_kbs = static_cast<int>(rtp_stats.MediaPayloadBytes() * 8 /
                                             elapsed_sec / 1000);
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps",
                               media_bitrate_kbs);
    log_stream << "WebRTC.Video.MediaBitrateReceivedInKbps "
               << media_bitrate_kbs << '\n';
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.PaddingBitrateReceivedInKbps",
        static_cast<int>(rtp_rtx_stats.transmitted.padding_bytes * 8 /
                         elapsed_sec / 1000));
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.RetransmittedBitrateReceivedInKbps",
        static_cast<int>(rtp_rtx_stats.retransmitted.TotalBytes() * 8 /
                         elapsed_sec / 1000));
    if (rtx_stats) {
      RTC_HISTOGRAM_COUNTS_10000(
          "WebRTC.Video.RtxBitrateReceivedInKbps",
          static_cast<int>(rtx_stats->transmitted.TotalBytes() * 8 /
                           elapsed_sec / 1000));
    }
    const RtcpPacketTypeCounter& counters = stats_.rtcp_packet_type_counts;
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute",
                               counters.nack_packets * 60 / elapsed_sec);
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute",
                               counters.fir_packets * 60 / elapsed_sec);
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute",
                               counters.pli_packets * 60 / elapsed_sec);
    if (counters.nack_requests > 0) {
      RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent",
                               counters.UniqueNackRequestsInPercent());
    }
  }

  if (num_certain_states_ >= kBadCallMinRequiredSamples) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Any",
                             100 * num_bad_states_ / num_certain_states_);
  }
  absl::optional<double> fps_fraction =
      fps_threshold_.FractionHigh(kBadCallMinRequiredSamples);
  if (fps_fraction) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRate",
                             static_cast<int>(100 * (1 - *fps_fraction)));
  }
  absl::optional<double> variance_fraction =
      variance_threshold_.FractionHigh(kBadCallMinRequiredSamples);
  if (variance_fraction) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRateVariance",
                             static_cast<int>(100 * *variance_fraction));
  }
  absl::optional<double> qp_fraction =
      qp_threshold_.FractionHigh(kBadCallMinRequiredSamples);
  if (qp_fraction) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Qp",
                             static_cast<int>(100 * *qp_fraction));
  }

  RTC_LOG(LS_INFO) << log_stream.str();
  video_quality_observer_->UpdateHistograms();
}
|
||||
|
||||
// Takes a periodic "bad call" quality sample: measures rendered fps, decoded
// QP and fps variance since the last sample, feeds them into the threshold
// trackers, and logs good/bad state transitions. Samples taken less than
// kMinSampleLengthMs after the previous one are ignored.
void ReceiveStatisticsProxy::QualitySample() {
  int64_t now = clock_->TimeInMilliseconds();
  if (last_sample_time_ + kMinSampleLengthMs > now)
    return;

  double fps =
      render_fps_tracker_.ComputeRateForInterval(now - last_sample_time_);
  absl::optional<int> qp = qp_sample_.Avg(1);

  // Remember the previous classification so transitions can be logged below.
  // IsHigh() returns nullopt until enough measurements exist; by default fps
  // counts as "not bad" and qp/variance count as "not bad" too.
  bool prev_fps_bad = !fps_threshold_.IsHigh().value_or(true);
  bool prev_qp_bad = qp_threshold_.IsHigh().value_or(false);
  bool prev_variance_bad = variance_threshold_.IsHigh().value_or(false);
  bool prev_any_bad = prev_fps_bad || prev_qp_bad || prev_variance_bad;

  fps_threshold_.AddMeasurement(static_cast<int>(fps));
  if (qp)
    qp_threshold_.AddMeasurement(*qp);
  absl::optional<double> fps_variance_opt = fps_threshold_.CalculateVariance();
  double fps_variance = fps_variance_opt.value_or(0);
  if (fps_variance_opt) {
    variance_threshold_.AddMeasurement(static_cast<int>(fps_variance));
  }

  // Re-classify after folding in the new measurements.
  bool fps_bad = !fps_threshold_.IsHigh().value_or(true);
  bool qp_bad = qp_threshold_.IsHigh().value_or(false);
  bool variance_bad = variance_threshold_.IsHigh().value_or(false);
  bool any_bad = fps_bad || qp_bad || variance_bad;

  if (!prev_any_bad && any_bad) {
    RTC_LOG(LS_INFO) << "Bad call (any) start: " << now;
  } else if (prev_any_bad && !any_bad) {
    RTC_LOG(LS_INFO) << "Bad call (any) end: " << now;
  }

  if (!prev_fps_bad && fps_bad) {
    RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now;
  } else if (prev_fps_bad && !fps_bad) {
    RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now;
  }

  if (!prev_qp_bad && qp_bad) {
    RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now;
  } else if (prev_qp_bad && !qp_bad) {
    RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now;
  }

  if (!prev_variance_bad && variance_bad) {
    RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now;
  } else if (prev_variance_bad && !variance_bad) {
    RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now;
  }

  RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: " << (now - last_sample_time_)
                      << " fps: " << fps << " fps_bad: " << fps_bad
                      << " qp: " << qp.value_or(-1) << " qp_bad: " << qp_bad
                      << " variance_bad: " << variance_bad
                      << " fps_variance: " << fps_variance;

  last_sample_time_ = now;
  qp_sample_.Reset();

  // Only count this sample towards the BadCall.Any ratio once at least one
  // tracker has enough data to classify (IsHigh() holds a value).
  if (fps_threshold_.IsHigh() || variance_threshold_.IsHigh() ||
      qp_threshold_.IsHigh()) {
    if (any_bad)
      ++num_bad_states_;
    ++num_certain_states_;
  }
}
|
||||
|
||||
void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const {
|
||||
int64_t old_frames_ms = now_ms - kRateStatisticsWindowSizeMs;
|
||||
while (!frame_window_.empty() &&
|
||||
frame_window_.begin()->first < old_frames_ms) {
|
||||
frame_window_.erase(frame_window_.begin());
|
||||
}
|
||||
|
||||
size_t framerate =
|
||||
(frame_window_.size() * 1000 + 500) / kRateStatisticsWindowSizeMs;
|
||||
stats_.network_frame_rate = static_cast<int>(framerate);
|
||||
}
|
||||
|
||||
// Records per-frame decode time UMA histograms, sliced by resolution
// (HD/4k), codec (VP9/H264) and software vs hardware decoder. Frames at any
// other resolution or decoded with any other codec are ignored.
void ReceiveStatisticsProxy::UpdateDecodeTimeHistograms(
    int width,
    int height,
    int decode_time_ms) const {
  bool is_4k = (width == 3840 || width == 4096) && height == 2160;
  bool is_hd = width == 1920 && height == 1080;
  // Only update histograms for 4k/HD and VP9/H264.
  if ((is_4k || is_hd) && (last_codec_type_ == kVideoCodecVP9 ||
                           last_codec_type_ == kVideoCodecH264)) {
    const std::string kDecodeTimeUmaPrefix =
        "WebRTC.Video.DecodeTimePerFrameInMs.";

    // Each histogram needs its own line for it to not be reused in the wrong
    // way when the format changes.
    if (last_codec_type_ == kVideoCodecVP9) {
      // A "libvpx" name prefix identifies the software VP9 decoder.
      bool is_sw_decoder =
          stats_.decoder_implementation_name.compare(0, 6, "libvpx") == 0;
      if (is_4k) {
        if (is_sw_decoder)
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Sw",
                                    decode_time_ms);
        else
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Hw",
                                    decode_time_ms);
      } else {
        if (is_sw_decoder)
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Sw",
                                    decode_time_ms);
        else
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Hw",
                                    decode_time_ms);
      }
    } else {
      // An "FFmpeg" name prefix identifies the software H264 decoder.
      bool is_sw_decoder =
          stats_.decoder_implementation_name.compare(0, 6, "FFmpeg") == 0;
      if (is_4k) {
        if (is_sw_decoder)
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Sw",
                                    decode_time_ms);
        else
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Hw",
                                    decode_time_ms);

      } else {
        if (is_sw_decoder)
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Sw",
                                    decode_time_ms);
        else
          RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Hw",
                                    decode_time_ms);
      }
    }
  }
}
|
||||
|
||||
// Extrapolates the estimated playout NTP timestamp to |now_ms| by adding the
// wall-clock time elapsed since the last estimate was taken. Returns nullopt
// when no estimate has been recorded yet.
absl::optional<int64_t>
ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs(
    int64_t now_ms) const {
  // Both the last NTP estimate and the local time it was taken at are needed
  // to extrapolate.
  if (last_estimated_playout_ntp_timestamp_ms_ &&
      last_estimated_playout_time_ms_) {
    const int64_t time_since_estimate_ms =
        now_ms - *last_estimated_playout_time_ms_;
    return *last_estimated_playout_ntp_timestamp_ms_ + time_since_estimate_ms;
  }
  return absl::nullopt;
}
|
||||
|
||||
// Returns a snapshot of the current receive-stream stats. Rates and moving
// maxima are refreshed from the current time here rather than only on new
// frames, so a stalled stream correctly reports a frame rate of 0.
VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
  rtc::CritScope lock(&crit_);
  // Get current frame rates here, as only updating them on new frames prevents
  // us from ever correctly displaying frame rate of 0.
  int64_t now_ms = clock_->TimeInMilliseconds();
  UpdateFramerate(now_ms);
  stats_.render_frame_rate = renders_fps_estimator_.Rate(now_ms).value_or(0);
  stats_.decode_frame_rate = decode_fps_estimator_.Rate(now_ms).value_or(0);
  stats_.interframe_delay_max_ms =
      interframe_delay_max_moving_.Max(now_ms).value_or(-1);
  stats_.freeze_count = video_quality_observer_->NumFreezes();
  stats_.pause_count = video_quality_observer_->NumPauses();
  stats_.total_freezes_duration_ms =
      video_quality_observer_->TotalFreezesDurationMs();
  stats_.total_pauses_duration_ms =
      video_quality_observer_->TotalPausesDurationMs();
  stats_.total_frames_duration_ms =
      video_quality_observer_->TotalFramesDurationMs();
  stats_.sum_squared_frame_durations =
      video_quality_observer_->SumSquaredFrameDurationsSec();
  stats_.content_type = last_content_type_;
  stats_.timing_frame_info = timing_frame_info_counter_.Max(now_ms);
  // Sum of current-delay samples, converted from ms to seconds.
  stats_.jitter_buffer_delay_seconds =
      static_cast<double>(current_delay_counter_.Sum(1).value_or(0)) /
      rtc::kNumMillisecsPerSec;
  stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples();
  stats_.estimated_playout_ntp_timestamp_ms =
      GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms);
  return stats_;
}
|
||||
|
||||
// Records the payload type of the most recently received media.
void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
  rtc::CritScope lock(&crit_);
  stats_.current_payload_type = payload_type;
}
|
||||
|
||||
// Records the name of the decoder implementation currently in use; read e.g.
// by UpdateDecodeTimeHistograms to distinguish software from hardware
// decoders.
void ReceiveStatisticsProxy::OnDecoderImplementationName(
    const char* implementation_name) {
  rtc::CritScope lock(&crit_);
  stats_.decoder_implementation_name = implementation_name;
}
|
||||
|
||||
// Stores the latest frame-buffer timing values (all in milliseconds) on
// |stats_| and feeds the delay counters used for the end-of-call histograms.
void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
    int max_decode_ms,
    int current_delay_ms,
    int target_delay_ms,
    int jitter_buffer_ms,
    int min_playout_delay_ms,
    int render_delay_ms) {
  rtc::CritScope lock(&crit_);
  stats_.max_decode_ms = max_decode_ms;
  stats_.current_delay_ms = current_delay_ms;
  stats_.target_delay_ms = target_delay_ms;
  stats_.jitter_buffer_ms = jitter_buffer_ms;
  stats_.min_playout_delay_ms = min_playout_delay_ms;
  stats_.render_delay_ms = render_delay_ms;
  jitter_buffer_delay_counter_.Add(jitter_buffer_ms);
  target_delay_counter_.Add(target_delay_ms);
  current_delay_counter_.Add(current_delay_ms);
  // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time +
  // render delay).
  delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2);
}
|
||||
|
||||
// Records the total number of unique frames seen so far; consumed in
// UpdateHistograms to derive the receiver-side dropped-frame count.
void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) {
  rtc::CritScope lock(&crit_);
  num_unique_frames_ = num_unique_frames;
}
|
||||
|
||||
// Records timing info for a frame: valid reports feed the moving-max
// |timing_frame_info_counter_|, and the very first report is used to measure
// the latency between the first frame arriving and the first frame decoded.
void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
    const TimingFrameInfo& info) {
  rtc::CritScope lock(&crit_);
  if (info.flags != VideoSendTiming::kInvalid) {
    int64_t now_ms = clock_->TimeInMilliseconds();
    timing_frame_info_counter_.Add(info, now_ms);
  }

  // Measure initial decoding latency between the first frame arriving and the
  // first frame being decoded.
  if (!first_frame_received_time_ms_.has_value()) {
    first_frame_received_time_ms_ = info.receive_finish_ms;
  }
  // Compute the latency only once, and only after a frame has been decoded.
  if (stats_.first_frame_received_to_decoded_ms == -1 &&
      first_decoded_frame_time_ms_) {
    stats_.first_frame_received_to_decoded_ms =
        *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_;
  }
}
|
||||
|
||||
void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
|
||||
uint32_t ssrc,
|
||||
const RtcpPacketTypeCounter& packet_counter) {
|
||||
rtc::CritScope lock(&crit_);
|
||||
if (stats_.ssrc != ssrc)
|
||||
return;
|
||||
stats_.rtcp_packet_type_counts = packet_counter;
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) {
|
||||
rtc::CritScope lock(&crit_);
|
||||
// TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we
|
||||
// receive stats from one of them.
|
||||
if (stats_.ssrc != ssrc)
|
||||
return;
|
||||
stats_.c_name = std::string(cname);
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame,
|
||||
absl::optional<uint8_t> qp,
|
||||
int32_t decode_time_ms,
|
||||
VideoContentType content_type) {
|
||||
rtc::CritScope lock(&crit_);
|
||||
|
||||
uint64_t now_ms = clock_->TimeInMilliseconds();
|
||||
|
||||
if (videocontenttypehelpers::IsScreenshare(content_type) !=
|
||||
videocontenttypehelpers::IsScreenshare(last_content_type_)) {
|
||||
// Reset the quality observer if content type is switched. But first report
|
||||
// stats for the previous part of the call.
|
||||
video_quality_observer_->UpdateHistograms();
|
||||
video_quality_observer_.reset(new VideoQualityObserver(content_type));
|
||||
}
|
||||
|
||||
video_quality_observer_->OnDecodedFrame(frame, qp, last_codec_type_);
|
||||
|
||||
ContentSpecificStats* content_specific_stats =
|
||||
&content_specific_stats_[content_type];
|
||||
++stats_.frames_decoded;
|
||||
if (qp) {
|
||||
if (!stats_.qp_sum) {
|
||||
if (stats_.frames_decoded != 1) {
|
||||
RTC_LOG(LS_WARNING)
|
||||
<< "Frames decoded was not 1 when first qp value was received.";
|
||||
}
|
||||
stats_.qp_sum = 0;
|
||||
}
|
||||
*stats_.qp_sum += *qp;
|
||||
content_specific_stats->qp_counter.Add(*qp);
|
||||
} else if (stats_.qp_sum) {
|
||||
RTC_LOG(LS_WARNING)
|
||||
<< "QP sum was already set and no QP was given for a frame.";
|
||||
stats_.qp_sum.reset();
|
||||
}
|
||||
decode_time_counter_.Add(decode_time_ms);
|
||||
stats_.decode_ms = decode_time_ms;
|
||||
stats_.total_decode_time_ms += decode_time_ms;
|
||||
if (enable_decode_time_histograms_) {
|
||||
UpdateDecodeTimeHistograms(frame.width(), frame.height(), decode_time_ms);
|
||||
}
|
||||
|
||||
last_content_type_ = content_type;
|
||||
decode_fps_estimator_.Update(1, now_ms);
|
||||
if (last_decoded_frame_time_ms_) {
|
||||
int64_t interframe_delay_ms = now_ms - *last_decoded_frame_time_ms_;
|
||||
RTC_DCHECK_GE(interframe_delay_ms, 0);
|
||||
double interframe_delay = interframe_delay_ms / 1000.0;
|
||||
stats_.total_inter_frame_delay += interframe_delay;
|
||||
stats_.total_squared_inter_frame_delay +=
|
||||
interframe_delay * interframe_delay;
|
||||
interframe_delay_max_moving_.Add(interframe_delay_ms, now_ms);
|
||||
content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms);
|
||||
content_specific_stats->interframe_delay_percentiles.Add(
|
||||
interframe_delay_ms);
|
||||
content_specific_stats->flow_duration_ms += interframe_delay_ms;
|
||||
}
|
||||
if (stats_.frames_decoded == 1) {
|
||||
first_decoded_frame_time_ms_.emplace(now_ms);
|
||||
}
|
||||
last_decoded_frame_time_ms_.emplace(now_ms);
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
|
||||
int width = frame.width();
|
||||
int height = frame.height();
|
||||
RTC_DCHECK_GT(width, 0);
|
||||
RTC_DCHECK_GT(height, 0);
|
||||
int64_t now_ms = clock_->TimeInMilliseconds();
|
||||
rtc::CritScope lock(&crit_);
|
||||
|
||||
video_quality_observer_->OnRenderedFrame(frame, now_ms);
|
||||
|
||||
ContentSpecificStats* content_specific_stats =
|
||||
&content_specific_stats_[last_content_type_];
|
||||
renders_fps_estimator_.Update(1, now_ms);
|
||||
++stats_.frames_rendered;
|
||||
stats_.width = width;
|
||||
stats_.height = height;
|
||||
render_fps_tracker_.AddSamples(1);
|
||||
render_pixel_tracker_.AddSamples(sqrt(width * height));
|
||||
content_specific_stats->received_width.Add(width);
|
||||
content_specific_stats->received_height.Add(height);
|
||||
|
||||
// Consider taking stats_.render_delay_ms into account.
|
||||
const int64_t time_until_rendering_ms = frame.render_time_ms() - now_ms;
|
||||
if (time_until_rendering_ms < 0) {
|
||||
sum_missed_render_deadline_ms_ += -time_until_rendering_ms;
|
||||
++num_delayed_frames_rendered_;
|
||||
}
|
||||
|
||||
if (frame.ntp_time_ms() > 0) {
|
||||
int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
|
||||
if (delay_ms >= 0) {
|
||||
content_specific_stats->e2e_delay_counter.Add(delay_ms);
|
||||
}
|
||||
}
|
||||
QualitySample();
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
|
||||
int64_t sync_offset_ms,
|
||||
double estimated_freq_khz) {
|
||||
rtc::CritScope lock(&crit_);
|
||||
sync_offset_counter_.Add(std::abs(sync_offset_ms));
|
||||
stats_.sync_offset_ms = sync_offset_ms;
|
||||
last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
|
||||
last_estimated_playout_time_ms_ = clock_->TimeInMilliseconds();
|
||||
|
||||
const double kMaxFreqKhz = 10000.0;
|
||||
int offset_khz = kMaxFreqKhz;
|
||||
// Should not be zero or negative. If so, report max.
|
||||
if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0)
|
||||
offset_khz = static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5);
|
||||
|
||||
freq_offset_counter_.Add(offset_khz);
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
|
||||
size_t size_bytes,
|
||||
VideoContentType content_type) {
|
||||
rtc::CritScope lock(&crit_);
|
||||
if (is_keyframe) {
|
||||
++stats_.frame_counts.key_frames;
|
||||
} else {
|
||||
++stats_.frame_counts.delta_frames;
|
||||
}
|
||||
|
||||
// Content type extension is set only for keyframes and should be propagated
|
||||
// for all the following delta frames. Here we may receive frames out of order
|
||||
// and miscategorise some delta frames near the layer switch.
|
||||
// This may slightly offset calculated bitrate and keyframes permille metrics.
|
||||
VideoContentType propagated_content_type =
|
||||
is_keyframe ? content_type : last_content_type_;
|
||||
|
||||
ContentSpecificStats* content_specific_stats =
|
||||
&content_specific_stats_[propagated_content_type];
|
||||
|
||||
content_specific_stats->total_media_bytes += size_bytes;
|
||||
if (is_keyframe) {
|
||||
++content_specific_stats->frame_counts.key_frames;
|
||||
} else {
|
||||
++content_specific_stats->frame_counts.delta_frames;
|
||||
}
|
||||
|
||||
int64_t now_ms = clock_->TimeInMilliseconds();
|
||||
frame_window_.insert(std::make_pair(now_ms, size_bytes));
|
||||
UpdateFramerate(now_ms);
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) {
|
||||
rtc::CritScope lock(&crit_);
|
||||
stats_.frames_dropped += frames_dropped;
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) {
|
||||
RTC_DCHECK_RUN_ON(&decode_thread_);
|
||||
rtc::CritScope lock(&crit_);
|
||||
last_codec_type_ = codec_type;
|
||||
if (last_codec_type_ == kVideoCodecVP8 && qp != -1) {
|
||||
qp_counters_.vp8.Add(qp);
|
||||
qp_sample_.Add(qp);
|
||||
}
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnStreamInactive() {
|
||||
// TODO(sprang): Figure out any other state that should be reset.
|
||||
|
||||
rtc::CritScope lock(&crit_);
|
||||
// Don't report inter-frame delay if stream was paused.
|
||||
last_decoded_frame_time_ms_.reset();
|
||||
video_quality_observer_->OnStreamInactive();
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
|
||||
int64_t max_rtt_ms) {
|
||||
rtc::CritScope lock(&crit_);
|
||||
avg_rtt_ms_ = avg_rtt_ms;
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::DecoderThreadStarting() {
|
||||
RTC_DCHECK_RUN_ON(&main_thread_);
|
||||
}
|
||||
|
||||
void ReceiveStatisticsProxy::DecoderThreadStopped() {
|
||||
RTC_DCHECK_RUN_ON(&main_thread_);
|
||||
decode_thread_.Detach();
|
||||
}
|
||||
|
||||
ReceiveStatisticsProxy::ContentSpecificStats::ContentSpecificStats()
|
||||
: interframe_delay_percentiles(kMaxCommonInterframeDelayMs) {}
|
||||
|
||||
ReceiveStatisticsProxy::ContentSpecificStats::~ContentSpecificStats() = default;
|
||||
|
||||
void ReceiveStatisticsProxy::ContentSpecificStats::Add(
|
||||
const ContentSpecificStats& other) {
|
||||
e2e_delay_counter.Add(other.e2e_delay_counter);
|
||||
interframe_delay_counter.Add(other.interframe_delay_counter);
|
||||
flow_duration_ms += other.flow_duration_ms;
|
||||
total_media_bytes += other.total_media_bytes;
|
||||
received_height.Add(other.received_height);
|
||||
received_width.Add(other.received_width);
|
||||
qp_counter.Add(other.qp_counter);
|
||||
frame_counts.key_frames += other.frame_counts.key_frames;
|
||||
frame_counts.delta_frames += other.frame_counts.delta_frames;
|
||||
interframe_delay_percentiles.Add(other.interframe_delay_percentiles);
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
208
video/receive_statistics_proxy2.h
Normal file
208
video/receive_statistics_proxy2.h
Normal file
@ -0,0 +1,208 @@
|
||||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_RECEIVE_STATISTICS_PROXY2_H_
|
||||
#define VIDEO_RECEIVE_STATISTICS_PROXY2_H_
|
||||
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "call/video_receive_stream.h"
|
||||
#include "modules/include/module_common_types.h"
|
||||
#include "modules/video_coding/include/video_coding_defines.h"
|
||||
#include "rtc_base/critical_section.h"
|
||||
#include "rtc_base/numerics/histogram_percentile_counter.h"
|
||||
#include "rtc_base/numerics/moving_max_counter.h"
|
||||
#include "rtc_base/numerics/sample_counter.h"
|
||||
#include "rtc_base/rate_statistics.h"
|
||||
#include "rtc_base/rate_tracker.h"
|
||||
#include "rtc_base/thread_annotations.h"
|
||||
#include "rtc_base/thread_checker.h"
|
||||
#include "video/quality_threshold.h"
|
||||
#include "video/stats_counter.h"
|
||||
#include "video/video_quality_observer2.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class Clock;
|
||||
struct CodecSpecificInfo;
|
||||
|
||||
namespace internal {
|
||||
|
||||
class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
|
||||
public RtcpCnameCallback,
|
||||
public RtcpPacketTypeCounterObserver,
|
||||
public CallStatsObserver {
|
||||
public:
|
||||
ReceiveStatisticsProxy(const VideoReceiveStream::Config* config,
|
||||
Clock* clock);
|
||||
~ReceiveStatisticsProxy() = default;
|
||||
|
||||
VideoReceiveStream::Stats GetStats() const;
|
||||
|
||||
void OnDecodedFrame(const VideoFrame& frame,
|
||||
absl::optional<uint8_t> qp,
|
||||
int32_t decode_time_ms,
|
||||
VideoContentType content_type);
|
||||
void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
|
||||
int64_t sync_offset_ms,
|
||||
double estimated_freq_khz);
|
||||
void OnRenderedFrame(const VideoFrame& frame);
|
||||
void OnIncomingPayloadType(int payload_type);
|
||||
void OnDecoderImplementationName(const char* implementation_name);
|
||||
|
||||
void OnPreDecode(VideoCodecType codec_type, int qp);
|
||||
|
||||
void OnUniqueFramesCounted(int num_unique_frames);
|
||||
|
||||
// Indicates video stream has been paused (no incoming packets).
|
||||
void OnStreamInactive();
|
||||
|
||||
// Overrides VCMReceiveStatisticsCallback.
|
||||
void OnCompleteFrame(bool is_keyframe,
|
||||
size_t size_bytes,
|
||||
VideoContentType content_type) override;
|
||||
void OnDroppedFrames(uint32_t frames_dropped) override;
|
||||
void OnFrameBufferTimingsUpdated(int max_decode_ms,
|
||||
int current_delay_ms,
|
||||
int target_delay_ms,
|
||||
int jitter_buffer_ms,
|
||||
int min_playout_delay_ms,
|
||||
int render_delay_ms) override;
|
||||
|
||||
void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override;
|
||||
|
||||
// Overrides RtcpCnameCallback.
|
||||
void OnCname(uint32_t ssrc, absl::string_view cname) override;
|
||||
|
||||
// Overrides RtcpPacketTypeCounterObserver.
|
||||
void RtcpPacketTypesCounterUpdated(
|
||||
uint32_t ssrc,
|
||||
const RtcpPacketTypeCounter& packet_counter) override;
|
||||
|
||||
// Implements CallStatsObserver.
|
||||
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;
|
||||
|
||||
// Notification methods that are used to check our internal state and validate
|
||||
// threading assumptions. These are called by VideoReceiveStream.
|
||||
void DecoderThreadStarting();
|
||||
void DecoderThreadStopped();
|
||||
|
||||
// Produce histograms. Must be called after DecoderThreadStopped(), typically
|
||||
// at the end of the call.
|
||||
void UpdateHistograms(absl::optional<int> fraction_lost,
|
||||
const StreamDataCounters& rtp_stats,
|
||||
const StreamDataCounters* rtx_stats);
|
||||
|
||||
private:
|
||||
struct QpCounters {
|
||||
rtc::SampleCounter vp8;
|
||||
};
|
||||
|
||||
struct ContentSpecificStats {
|
||||
ContentSpecificStats();
|
||||
~ContentSpecificStats();
|
||||
|
||||
void Add(const ContentSpecificStats& other);
|
||||
|
||||
rtc::SampleCounter e2e_delay_counter;
|
||||
rtc::SampleCounter interframe_delay_counter;
|
||||
int64_t flow_duration_ms = 0;
|
||||
int64_t total_media_bytes = 0;
|
||||
rtc::SampleCounter received_width;
|
||||
rtc::SampleCounter received_height;
|
||||
rtc::SampleCounter qp_counter;
|
||||
FrameCounts frame_counts;
|
||||
rtc::HistogramPercentileCounter interframe_delay_percentiles;
|
||||
};
|
||||
|
||||
void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
|
||||
|
||||
// Removes info about old frames and then updates the framerate.
|
||||
void UpdateFramerate(int64_t now_ms) const
|
||||
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
|
||||
|
||||
void UpdateDecodeTimeHistograms(int width,
|
||||
int height,
|
||||
int decode_time_ms) const
|
||||
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
|
||||
|
||||
absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
|
||||
int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
|
||||
|
||||
Clock* const clock_;
|
||||
// Ownership of this object lies with the owner of the ReceiveStatisticsProxy
|
||||
// instance. Lifetime is guaranteed to outlive |this|.
|
||||
// TODO(tommi): In practice the config_ reference is only used for accessing
|
||||
// config_.rtp.ulpfec.ulpfec_payload_type. Instead of holding a pointer back,
|
||||
// we could just store the value of ulpfec_payload_type and change the
|
||||
// ReceiveStatisticsProxy() ctor to accept a const& of Config (since we'll
|
||||
// then no longer store a pointer to the object).
|
||||
const VideoReceiveStream::Config& config_;
|
||||
const int64_t start_ms_;
|
||||
const bool enable_decode_time_histograms_;
|
||||
|
||||
rtc::CriticalSection crit_;
|
||||
int64_t last_sample_time_ RTC_GUARDED_BY(crit_);
|
||||
QualityThreshold fps_threshold_ RTC_GUARDED_BY(crit_);
|
||||
QualityThreshold qp_threshold_ RTC_GUARDED_BY(crit_);
|
||||
QualityThreshold variance_threshold_ RTC_GUARDED_BY(crit_);
|
||||
rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(crit_);
|
||||
int num_bad_states_ RTC_GUARDED_BY(crit_);
|
||||
int num_certain_states_ RTC_GUARDED_BY(crit_);
|
||||
// Note: The |stats_.rtp_stats| member is not used or populated by this class.
|
||||
mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(crit_);
|
||||
RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(crit_);
|
||||
RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(crit_);
|
||||
rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(crit_);
|
||||
rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(crit_);
|
||||
rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(crit_);
|
||||
rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(crit_);
|
||||
rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(crit_);
|
||||
rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(crit_);
|
||||
rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(crit_);
|
||||
rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(crit_);
|
||||
std::unique_ptr<VideoQualityObserver> video_quality_observer_
|
||||
RTC_GUARDED_BY(crit_);
|
||||
mutable rtc::MovingMaxCounter<int> interframe_delay_max_moving_
|
||||
RTC_GUARDED_BY(crit_);
|
||||
std::map<VideoContentType, ContentSpecificStats> content_specific_stats_
|
||||
RTC_GUARDED_BY(crit_);
|
||||
MaxCounter freq_offset_counter_ RTC_GUARDED_BY(crit_);
|
||||
QpCounters qp_counters_ RTC_GUARDED_BY(decode_thread_);
|
||||
int64_t avg_rtt_ms_ RTC_GUARDED_BY(crit_);
|
||||
mutable std::map<int64_t, size_t> frame_window_ RTC_GUARDED_BY(&crit_);
|
||||
VideoContentType last_content_type_ RTC_GUARDED_BY(&crit_);
|
||||
VideoCodecType last_codec_type_ RTC_GUARDED_BY(&crit_);
|
||||
absl::optional<int64_t> first_frame_received_time_ms_ RTC_GUARDED_BY(&crit_);
|
||||
absl::optional<int64_t> first_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
|
||||
absl::optional<int64_t> last_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
|
||||
size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&crit_);
|
||||
int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(&crit_);
|
||||
// Mutable because calling Max() on MovingMaxCounter is not const. Yet it is
|
||||
// called from const GetStats().
|
||||
mutable rtc::MovingMaxCounter<TimingFrameInfo> timing_frame_info_counter_
|
||||
RTC_GUARDED_BY(&crit_);
|
||||
absl::optional<int> num_unique_frames_ RTC_GUARDED_BY(crit_);
|
||||
absl::optional<int64_t> last_estimated_playout_ntp_timestamp_ms_
|
||||
RTC_GUARDED_BY(&crit_);
|
||||
absl::optional<int64_t> last_estimated_playout_time_ms_
|
||||
RTC_GUARDED_BY(&crit_);
|
||||
rtc::ThreadChecker decode_thread_;
|
||||
rtc::ThreadChecker network_thread_;
|
||||
rtc::ThreadChecker main_thread_;
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
#endif // VIDEO_RECEIVE_STATISTICS_PROXY2_H_
|
||||
1836
video/receive_statistics_proxy2_unittest.cc
Normal file
1836
video/receive_statistics_proxy2_unittest.cc
Normal file
File diff suppressed because it is too large
Load Diff
@ -84,7 +84,8 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
|
||||
ReceiveStatistics* receive_statistics,
|
||||
Transport* outgoing_transport,
|
||||
RtcpRttStats* rtt_stats,
|
||||
ReceiveStatisticsProxy* rtcp_statistics_observer,
|
||||
RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
|
||||
RtcpCnameCallback* rtcp_cname_callback,
|
||||
uint32_t local_ssrc) {
|
||||
RtpRtcp::Configuration configuration;
|
||||
configuration.clock = clock;
|
||||
@ -93,8 +94,9 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
|
||||
configuration.receive_statistics = receive_statistics;
|
||||
configuration.outgoing_transport = outgoing_transport;
|
||||
configuration.rtt_stats = rtt_stats;
|
||||
configuration.rtcp_packet_type_counter_observer = rtcp_statistics_observer;
|
||||
configuration.rtcp_cname_callback = rtcp_statistics_observer;
|
||||
configuration.rtcp_packet_type_counter_observer =
|
||||
rtcp_packet_type_counter_observer;
|
||||
configuration.rtcp_cname_callback = rtcp_cname_callback;
|
||||
configuration.local_media_ssrc = local_ssrc;
|
||||
|
||||
std::unique_ptr<RtpRtcp> rtp_rtcp = RtpRtcp::Create(configuration);
|
||||
@ -184,6 +186,7 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() {
|
||||
}
|
||||
}
|
||||
|
||||
// DEPRECATED
|
||||
RtpVideoStreamReceiver::RtpVideoStreamReceiver(
|
||||
Clock* clock,
|
||||
Transport* transport,
|
||||
@ -198,6 +201,36 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver(
|
||||
video_coding::OnCompleteFrameCallback* complete_frame_callback,
|
||||
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
|
||||
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
|
||||
: RtpVideoStreamReceiver(clock,
|
||||
transport,
|
||||
rtt_stats,
|
||||
packet_router,
|
||||
config,
|
||||
rtp_receive_statistics,
|
||||
receive_stats_proxy,
|
||||
receive_stats_proxy,
|
||||
process_thread,
|
||||
nack_sender,
|
||||
keyframe_request_sender,
|
||||
complete_frame_callback,
|
||||
frame_decryptor,
|
||||
frame_transformer) {}
|
||||
|
||||
RtpVideoStreamReceiver::RtpVideoStreamReceiver(
|
||||
Clock* clock,
|
||||
Transport* transport,
|
||||
RtcpRttStats* rtt_stats,
|
||||
PacketRouter* packet_router,
|
||||
const VideoReceiveStream::Config* config,
|
||||
ReceiveStatistics* rtp_receive_statistics,
|
||||
RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
|
||||
RtcpCnameCallback* rtcp_cname_callback,
|
||||
ProcessThread* process_thread,
|
||||
NackSender* nack_sender,
|
||||
KeyFrameRequestSender* keyframe_request_sender,
|
||||
video_coding::OnCompleteFrameCallback* complete_frame_callback,
|
||||
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
|
||||
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
|
||||
: clock_(clock),
|
||||
config_(*config),
|
||||
packet_router_(packet_router),
|
||||
@ -214,7 +247,8 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver(
|
||||
rtp_receive_statistics_,
|
||||
transport,
|
||||
rtt_stats,
|
||||
receive_stats_proxy,
|
||||
rtcp_packet_type_counter_observer,
|
||||
rtcp_cname_callback,
|
||||
config_.rtp.local_ssrc)),
|
||||
complete_frame_callback_(complete_frame_callback),
|
||||
keyframe_request_sender_(keyframe_request_sender),
|
||||
|
||||
@ -70,6 +70,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
|
||||
public OnDecryptedFrameCallback,
|
||||
public OnDecryptionStatusChangeCallback {
|
||||
public:
|
||||
// DEPRECATED due to dependency on ReceiveStatisticsProxy.
|
||||
RtpVideoStreamReceiver(
|
||||
Clock* clock,
|
||||
Transport* transport,
|
||||
@ -89,6 +90,27 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
|
||||
video_coding::OnCompleteFrameCallback* complete_frame_callback,
|
||||
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
|
||||
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
|
||||
|
||||
RtpVideoStreamReceiver(
|
||||
Clock* clock,
|
||||
Transport* transport,
|
||||
RtcpRttStats* rtt_stats,
|
||||
// The packet router is optional; if provided, the RtpRtcp module for this
|
||||
// stream is registered as a candidate for sending REMB and transport
|
||||
// feedback.
|
||||
PacketRouter* packet_router,
|
||||
const VideoReceiveStream::Config* config,
|
||||
ReceiveStatistics* rtp_receive_statistics,
|
||||
RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
|
||||
RtcpCnameCallback* rtcp_cname_callback,
|
||||
ProcessThread* process_thread,
|
||||
NackSender* nack_sender,
|
||||
// The KeyFrameRequestSender is optional; if not provided, key frame
|
||||
// requests are sent via the internal RtpRtcp module.
|
||||
KeyFrameRequestSender* keyframe_request_sender,
|
||||
video_coding::OnCompleteFrameCallback* complete_frame_callback,
|
||||
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
|
||||
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
|
||||
~RtpVideoStreamReceiver() override;
|
||||
|
||||
void AddReceiveCodec(const VideoCodec& video_codec,
|
||||
|
||||
@ -93,6 +93,7 @@ class TestRtpVideoStreamReceiver : public TestRtpVideoStreamReceiverInitializer,
|
||||
&test_config_,
|
||||
test_rtp_receive_statistics_.get(),
|
||||
nullptr,
|
||||
nullptr,
|
||||
test_process_thread_.get(),
|
||||
&fake_nack_sender_,
|
||||
nullptr,
|
||||
|
||||
@ -167,7 +167,7 @@ class RtpVideoStreamReceiverTest : public ::testing::Test {
|
||||
ReceiveStatistics::Create(Clock::GetRealTimeClock());
|
||||
rtp_video_stream_receiver_ = std::make_unique<RtpVideoStreamReceiver>(
|
||||
Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_,
|
||||
rtp_receive_statistics_.get(), nullptr, process_thread_.get(),
|
||||
rtp_receive_statistics_.get(), nullptr, nullptr, process_thread_.get(),
|
||||
&mock_nack_sender_, &mock_key_frame_request_sender_,
|
||||
&mock_on_complete_frame_callback_, nullptr, nullptr);
|
||||
VideoCodec codec;
|
||||
@ -1139,7 +1139,7 @@ TEST_F(RtpVideoStreamReceiverTest, TransformFrame) {
|
||||
RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
|
||||
auto receiver = std::make_unique<RtpVideoStreamReceiver>(
|
||||
Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_,
|
||||
rtp_receive_statistics_.get(), nullptr, process_thread_.get(),
|
||||
rtp_receive_statistics_.get(), nullptr, nullptr, process_thread_.get(),
|
||||
&mock_nack_sender_, nullptr, &mock_on_complete_frame_callback_, nullptr,
|
||||
mock_frame_transformer);
|
||||
VideoCodec video_codec;
|
||||
|
||||
288
video/video_quality_observer2.cc
Normal file
288
video/video_quality_observer2.cc
Normal file
@ -0,0 +1,288 @@
|
||||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/video_quality_observer2.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <cmath>
|
||||
#include <cstdint>
|
||||
#include <string>
|
||||
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
#include "system_wrappers/include/metrics.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace internal {
|
||||
const uint32_t VideoQualityObserver::kMinFrameSamplesToDetectFreeze = 5;
|
||||
const uint32_t VideoQualityObserver::kMinIncreaseForFreezeMs = 150;
|
||||
const uint32_t VideoQualityObserver::kAvgInterframeDelaysWindowSizeFrames = 30;
|
||||
|
||||
namespace {
|
||||
constexpr int kMinVideoDurationMs = 3000;
|
||||
constexpr int kMinRequiredSamples = 1;
|
||||
constexpr int kPixelsInHighResolution =
|
||||
960 * 540; // CPU-adapted HD still counts.
|
||||
constexpr int kPixelsInMediumResolution = 640 * 360;
|
||||
constexpr int kBlockyQpThresholdVp8 = 70;
|
||||
constexpr int kBlockyQpThresholdVp9 = 180;
|
||||
constexpr int kMaxNumCachedBlockyFrames = 100;
|
||||
// TODO(ilnik): Add H264/HEVC thresholds.
|
||||
} // namespace
|
||||
|
||||
VideoQualityObserver::VideoQualityObserver(VideoContentType content_type)
|
||||
: last_frame_rendered_ms_(-1),
|
||||
num_frames_rendered_(0),
|
||||
first_frame_rendered_ms_(-1),
|
||||
last_frame_pixels_(0),
|
||||
is_last_frame_blocky_(false),
|
||||
last_unfreeze_time_ms_(0),
|
||||
render_interframe_delays_(kAvgInterframeDelaysWindowSizeFrames),
|
||||
sum_squared_interframe_delays_secs_(0.0),
|
||||
time_in_resolution_ms_(3, 0),
|
||||
current_resolution_(Resolution::Low),
|
||||
num_resolution_downgrades_(0),
|
||||
time_in_blocky_video_ms_(0),
|
||||
content_type_(content_type),
|
||||
is_paused_(false) {}
|
||||
|
||||
void VideoQualityObserver::UpdateHistograms() {
|
||||
// Don't report anything on an empty video stream.
|
||||
if (num_frames_rendered_ == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
char log_stream_buf[2 * 1024];
|
||||
rtc::SimpleStringBuilder log_stream(log_stream_buf);
|
||||
|
||||
if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) {
|
||||
smooth_playback_durations_.Add(last_frame_rendered_ms_ -
|
||||
last_unfreeze_time_ms_);
|
||||
}
|
||||
|
||||
std::string uma_prefix = videocontenttypehelpers::IsScreenshare(content_type_)
|
||||
? "WebRTC.Video.Screenshare"
|
||||
: "WebRTC.Video";
|
||||
|
||||
auto mean_time_between_freezes =
|
||||
smooth_playback_durations_.Avg(kMinRequiredSamples);
|
||||
if (mean_time_between_freezes) {
|
||||
RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanTimeBetweenFreezesMs",
|
||||
*mean_time_between_freezes);
|
||||
log_stream << uma_prefix << ".MeanTimeBetweenFreezesMs "
|
||||
<< *mean_time_between_freezes << "\n";
|
||||
}
|
||||
auto avg_freeze_length = freezes_durations_.Avg(kMinRequiredSamples);
|
||||
if (avg_freeze_length) {
|
||||
RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanFreezeDurationMs",
|
||||
*avg_freeze_length);
|
||||
log_stream << uma_prefix << ".MeanFreezeDurationMs " << *avg_freeze_length
|
||||
<< "\n";
|
||||
}
|
||||
|
||||
int64_t video_duration_ms =
|
||||
last_frame_rendered_ms_ - first_frame_rendered_ms_;
|
||||
|
||||
if (video_duration_ms >= kMinVideoDurationMs) {
|
||||
int time_spent_in_hd_percentage = static_cast<int>(
|
||||
time_in_resolution_ms_[Resolution::High] * 100 / video_duration_ms);
|
||||
RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInHdPercentage",
|
||||
time_spent_in_hd_percentage);
|
||||
log_stream << uma_prefix << ".TimeInHdPercentage "
|
||||
<< time_spent_in_hd_percentage << "\n";
|
||||
|
||||
int time_with_blocky_video_percentage =
|
||||
static_cast<int>(time_in_blocky_video_ms_ * 100 / video_duration_ms);
|
||||
RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInBlockyVideoPercentage",
|
||||
time_with_blocky_video_percentage);
|
||||
log_stream << uma_prefix << ".TimeInBlockyVideoPercentage "
|
||||
<< time_with_blocky_video_percentage << "\n";
|
||||
|
||||
int num_resolution_downgrades_per_minute =
|
||||
num_resolution_downgrades_ * 60000 / video_duration_ms;
|
||||
RTC_HISTOGRAM_COUNTS_SPARSE_100(
|
||||
uma_prefix + ".NumberResolutionDownswitchesPerMinute",
|
||||
num_resolution_downgrades_per_minute);
|
||||
log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute "
|
||||
<< num_resolution_downgrades_per_minute << "\n";
|
||||
|
||||
int num_freezes_per_minute =
|
||||
freezes_durations_.NumSamples() * 60000 / video_duration_ms;
|
||||
RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".NumberFreezesPerMinute",
|
||||
num_freezes_per_minute);
|
||||
log_stream << uma_prefix << ".NumberFreezesPerMinute "
|
||||
<< num_freezes_per_minute << "\n";
|
||||
|
||||
if (sum_squared_interframe_delays_secs_ > 0.0) {
|
||||
int harmonic_framerate_fps = std::round(
|
||||
video_duration_ms / (1000 * sum_squared_interframe_delays_secs_));
|
||||
RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".HarmonicFrameRate",
|
||||
harmonic_framerate_fps);
|
||||
log_stream << uma_prefix << ".HarmonicFrameRate "
|
||||
<< harmonic_framerate_fps << "\n";
|
||||
}
|
||||
}
|
||||
RTC_LOG(LS_INFO) << log_stream.str();
|
||||
}
|
||||
|
||||
void VideoQualityObserver::OnRenderedFrame(const VideoFrame& frame,
|
||||
int64_t now_ms) {
|
||||
RTC_DCHECK_LE(last_frame_rendered_ms_, now_ms);
|
||||
RTC_DCHECK_LE(last_unfreeze_time_ms_, now_ms);
|
||||
|
||||
if (num_frames_rendered_ == 0) {
|
||||
first_frame_rendered_ms_ = last_unfreeze_time_ms_ = now_ms;
|
||||
}
|
||||
|
||||
auto blocky_frame_it = blocky_frames_.find(frame.timestamp());
|
||||
|
||||
if (num_frames_rendered_ > 0) {
|
||||
// Process inter-frame delay.
|
||||
const int64_t interframe_delay_ms = now_ms - last_frame_rendered_ms_;
|
||||
const double interframe_delays_secs = interframe_delay_ms / 1000.0;
|
||||
|
||||
// Sum of squared inter frame intervals is used to calculate the harmonic
|
||||
// frame rate metric. The metric aims to reflect overall experience related
|
||||
// to smoothness of video playback and includes both freezes and pauses.
|
||||
sum_squared_interframe_delays_secs_ +=
|
||||
interframe_delays_secs * interframe_delays_secs;
|
||||
|
||||
if (!is_paused_) {
|
||||
render_interframe_delays_.AddSample(interframe_delay_ms);
|
||||
|
||||
bool was_freeze = false;
|
||||
if (render_interframe_delays_.Size() >= kMinFrameSamplesToDetectFreeze) {
|
||||
const absl::optional<int64_t> avg_interframe_delay =
|
||||
render_interframe_delays_.GetAverageRoundedDown();
|
||||
RTC_DCHECK(avg_interframe_delay);
|
||||
was_freeze = interframe_delay_ms >=
|
||||
std::max(3 * *avg_interframe_delay,
|
||||
*avg_interframe_delay + kMinIncreaseForFreezeMs);
|
||||
}
|
||||
|
||||
if (was_freeze) {
|
||||
freezes_durations_.Add(interframe_delay_ms);
|
||||
smooth_playback_durations_.Add(last_frame_rendered_ms_ -
|
||||
last_unfreeze_time_ms_);
|
||||
last_unfreeze_time_ms_ = now_ms;
|
||||
} else {
|
||||
// Count spatial metrics if there were no freeze.
|
||||
time_in_resolution_ms_[current_resolution_] += interframe_delay_ms;
|
||||
|
||||
if (is_last_frame_blocky_) {
|
||||
time_in_blocky_video_ms_ += interframe_delay_ms;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (is_paused_) {
|
||||
// If the stream was paused since the previous frame, do not count the
|
||||
// pause toward smooth playback. Explicitly count the part before it and
|
||||
// start the new smooth playback interval from this frame.
|
||||
is_paused_ = false;
|
||||
if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) {
|
||||
smooth_playback_durations_.Add(last_frame_rendered_ms_ -
|
||||
last_unfreeze_time_ms_);
|
||||
}
|
||||
last_unfreeze_time_ms_ = now_ms;
|
||||
|
||||
if (num_frames_rendered_ > 0) {
|
||||
pauses_durations_.Add(now_ms - last_frame_rendered_ms_);
|
||||
}
|
||||
}
|
||||
|
||||
int64_t pixels = frame.width() * frame.height();
|
||||
if (pixels >= kPixelsInHighResolution) {
|
||||
current_resolution_ = Resolution::High;
|
||||
} else if (pixels >= kPixelsInMediumResolution) {
|
||||
current_resolution_ = Resolution::Medium;
|
||||
} else {
|
||||
current_resolution_ = Resolution::Low;
|
||||
}
|
||||
|
||||
if (pixels < last_frame_pixels_) {
|
||||
++num_resolution_downgrades_;
|
||||
}
|
||||
|
||||
last_frame_pixels_ = pixels;
|
||||
last_frame_rendered_ms_ = now_ms;
|
||||
|
||||
is_last_frame_blocky_ = blocky_frame_it != blocky_frames_.end();
|
||||
if (is_last_frame_blocky_) {
|
||||
blocky_frames_.erase(blocky_frames_.begin(), ++blocky_frame_it);
|
||||
}
|
||||
|
||||
++num_frames_rendered_;
|
||||
}
|
||||
|
||||
void VideoQualityObserver::OnDecodedFrame(const VideoFrame& frame,
|
||||
absl::optional<uint8_t> qp,
|
||||
VideoCodecType codec) {
|
||||
if (qp) {
|
||||
absl::optional<int> qp_blocky_threshold;
|
||||
// TODO(ilnik): add other codec types when we have QP for them.
|
||||
switch (codec) {
|
||||
case kVideoCodecVP8:
|
||||
qp_blocky_threshold = kBlockyQpThresholdVp8;
|
||||
break;
|
||||
case kVideoCodecVP9:
|
||||
qp_blocky_threshold = kBlockyQpThresholdVp9;
|
||||
break;
|
||||
default:
|
||||
qp_blocky_threshold = absl::nullopt;
|
||||
}
|
||||
|
||||
RTC_DCHECK(blocky_frames_.find(frame.timestamp()) == blocky_frames_.end());
|
||||
|
||||
if (qp_blocky_threshold && *qp > *qp_blocky_threshold) {
|
||||
// Cache blocky frame. Its duration will be calculated in render callback.
|
||||
if (blocky_frames_.size() > kMaxNumCachedBlockyFrames) {
|
||||
RTC_LOG(LS_WARNING) << "Overflow of blocky frames cache.";
|
||||
blocky_frames_.erase(
|
||||
blocky_frames_.begin(),
|
||||
std::next(blocky_frames_.begin(), kMaxNumCachedBlockyFrames / 2));
|
||||
}
|
||||
|
||||
blocky_frames_.insert(frame.timestamp());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void VideoQualityObserver::OnStreamInactive() {
|
||||
is_paused_ = true;
|
||||
}
|
||||
|
||||
uint32_t VideoQualityObserver::NumFreezes() const {
|
||||
return freezes_durations_.NumSamples();
|
||||
}
|
||||
|
||||
uint32_t VideoQualityObserver::NumPauses() const {
|
||||
return pauses_durations_.NumSamples();
|
||||
}
|
||||
|
||||
uint32_t VideoQualityObserver::TotalFreezesDurationMs() const {
|
||||
return freezes_durations_.Sum(kMinRequiredSamples).value_or(0);
|
||||
}
|
||||
|
||||
uint32_t VideoQualityObserver::TotalPausesDurationMs() const {
|
||||
return pauses_durations_.Sum(kMinRequiredSamples).value_or(0);
|
||||
}
|
||||
|
||||
uint32_t VideoQualityObserver::TotalFramesDurationMs() const {
|
||||
return last_frame_rendered_ms_ - first_frame_rendered_ms_;
|
||||
}
|
||||
|
||||
double VideoQualityObserver::SumSquaredFrameDurationsSec() const {
|
||||
return sum_squared_interframe_delays_secs_;
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
101
video/video_quality_observer2.h
Normal file
101
video/video_quality_observer2.h
Normal file
@ -0,0 +1,101 @@
|
||||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_VIDEO_QUALITY_OBSERVER2_H_
|
||||
#define VIDEO_VIDEO_QUALITY_OBSERVER2_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <set>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/video/video_codec_type.h"
|
||||
#include "api/video/video_content_type.h"
|
||||
#include "api/video/video_frame.h"
|
||||
#include "rtc_base/numerics/moving_average.h"
|
||||
#include "rtc_base/numerics/sample_counter.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace internal {
|
||||
|
||||
// Calculates spatial and temporal quality metrics and reports them to UMA
|
||||
// stats.
|
||||
class VideoQualityObserver {
|
||||
public:
|
||||
// Use either VideoQualityObserver::kBlockyQpThresholdVp8 or
|
||||
// VideoQualityObserver::kBlockyQpThresholdVp9.
|
||||
explicit VideoQualityObserver(VideoContentType content_type);
|
||||
~VideoQualityObserver() = default;
|
||||
|
||||
void OnDecodedFrame(const VideoFrame& frame,
|
||||
absl::optional<uint8_t> qp,
|
||||
VideoCodecType codec);
|
||||
|
||||
void OnRenderedFrame(const VideoFrame& frame, int64_t now_ms);
|
||||
|
||||
void OnStreamInactive();
|
||||
|
||||
uint32_t NumFreezes() const;
|
||||
uint32_t NumPauses() const;
|
||||
uint32_t TotalFreezesDurationMs() const;
|
||||
uint32_t TotalPausesDurationMs() const;
|
||||
uint32_t TotalFramesDurationMs() const;
|
||||
double SumSquaredFrameDurationsSec() const;
|
||||
|
||||
void UpdateHistograms();
|
||||
|
||||
static const uint32_t kMinFrameSamplesToDetectFreeze;
|
||||
static const uint32_t kMinIncreaseForFreezeMs;
|
||||
static const uint32_t kAvgInterframeDelaysWindowSizeFrames;
|
||||
|
||||
private:
|
||||
enum Resolution {
|
||||
Low = 0,
|
||||
Medium = 1,
|
||||
High = 2,
|
||||
};
|
||||
|
||||
int64_t last_frame_rendered_ms_;
|
||||
int64_t num_frames_rendered_;
|
||||
int64_t first_frame_rendered_ms_;
|
||||
int64_t last_frame_pixels_;
|
||||
bool is_last_frame_blocky_;
|
||||
// Decoded timestamp of the last delayed frame.
|
||||
int64_t last_unfreeze_time_ms_;
|
||||
rtc::MovingAverage render_interframe_delays_;
|
||||
double sum_squared_interframe_delays_secs_;
|
||||
// An inter-frame delay is counted as a freeze if it's significantly longer
|
||||
// than average inter-frame delay.
|
||||
rtc::SampleCounter freezes_durations_;
|
||||
rtc::SampleCounter pauses_durations_;
|
||||
// Time between freezes.
|
||||
rtc::SampleCounter smooth_playback_durations_;
|
||||
// Counters for time spent in different resolutions. Time between each two
|
||||
// Consecutive frames is counted to bin corresponding to the first frame
|
||||
// resolution.
|
||||
std::vector<int64_t> time_in_resolution_ms_;
|
||||
// Resolution of the last decoded frame. Resolution enum is used as an index.
|
||||
Resolution current_resolution_;
|
||||
int num_resolution_downgrades_;
|
||||
// Similar to resolution, time spent in high-QP video.
|
||||
int64_t time_in_blocky_video_ms_;
|
||||
// Content type of the last decoded frame.
|
||||
VideoContentType content_type_;
|
||||
bool is_paused_;
|
||||
|
||||
// Set of decoded frames with high QP value.
|
||||
std::set<int64_t> blocky_frames_;
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_VIDEO_QUALITY_OBSERVER2_H_
|
||||
@ -211,6 +211,7 @@ VideoReceiveStream::VideoReceiveStream(
|
||||
&config_,
|
||||
rtp_receive_statistics_.get(),
|
||||
&stats_proxy_,
|
||||
&stats_proxy_,
|
||||
process_thread_,
|
||||
this, // NackSender
|
||||
nullptr, // Use default KeyFrameRequestSender
|
||||
|
||||
795
video/video_receive_stream2.cc
Normal file
795
video/video_receive_stream2.cc
Normal file
@ -0,0 +1,795 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/video_receive_stream2.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <memory>
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/algorithm/container.h"
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/array_view.h"
|
||||
#include "api/crypto/frame_decryptor_interface.h"
|
||||
#include "api/video/encoded_image.h"
|
||||
#include "api/video_codecs/sdp_video_format.h"
|
||||
#include "api/video_codecs/video_codec.h"
|
||||
#include "api/video_codecs/video_decoder_factory.h"
|
||||
#include "api/video_codecs/video_encoder.h"
|
||||
#include "call/rtp_stream_receiver_controller_interface.h"
|
||||
#include "call/rtx_receive_stream.h"
|
||||
#include "common_video/include/incoming_video_stream.h"
|
||||
#include "media/base/h264_profile_level_id.h"
|
||||
#include "modules/utility/include/process_thread.h"
|
||||
#include "modules/video_coding/include/video_codec_interface.h"
|
||||
#include "modules/video_coding/include/video_coding_defines.h"
|
||||
#include "modules/video_coding/include/video_error_codes.h"
|
||||
#include "modules/video_coding/timing.h"
|
||||
#include "modules/video_coding/utility/vp8_header_parser.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/experiments/keyframe_interval_settings.h"
|
||||
#include "rtc_base/location.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
#include "rtc_base/system/thread_registry.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
#include "rtc_base/trace_event.h"
|
||||
#include "system_wrappers/include/clock.h"
|
||||
#include "system_wrappers/include/field_trial.h"
|
||||
#include "video/call_stats.h"
|
||||
#include "video/frame_dumping_decoder.h"
|
||||
#include "video/receive_statistics_proxy.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace internal {
|
||||
constexpr int VideoReceiveStream2::kMaxWaitForKeyFrameMs;
|
||||
|
||||
namespace {
|
||||
|
||||
using video_coding::EncodedFrame;
|
||||
using ReturnReason = video_coding::FrameBuffer::ReturnReason;
|
||||
|
||||
constexpr int kMinBaseMinimumDelayMs = 0;
|
||||
constexpr int kMaxBaseMinimumDelayMs = 10000;
|
||||
|
||||
constexpr int kMaxWaitForFrameMs = 3000;
|
||||
|
||||
// Concrete instance of RecordableEncodedFrame wrapping needed content
|
||||
// from video_coding::EncodedFrame.
|
||||
class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame {
|
||||
public:
|
||||
explicit WebRtcRecordableEncodedFrame(const EncodedFrame& frame)
|
||||
: buffer_(frame.GetEncodedData()),
|
||||
render_time_ms_(frame.RenderTime()),
|
||||
codec_(frame.CodecSpecific()->codecType),
|
||||
is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey),
|
||||
resolution_{frame.EncodedImage()._encodedWidth,
|
||||
frame.EncodedImage()._encodedHeight} {
|
||||
if (frame.ColorSpace()) {
|
||||
color_space_ = *frame.ColorSpace();
|
||||
}
|
||||
}
|
||||
|
||||
// VideoEncodedSinkInterface::FrameBuffer
|
||||
rtc::scoped_refptr<const EncodedImageBufferInterface> encoded_buffer()
|
||||
const override {
|
||||
return buffer_;
|
||||
}
|
||||
|
||||
absl::optional<webrtc::ColorSpace> color_space() const override {
|
||||
return color_space_;
|
||||
}
|
||||
|
||||
VideoCodecType codec() const override { return codec_; }
|
||||
|
||||
bool is_key_frame() const override { return is_key_frame_; }
|
||||
|
||||
EncodedResolution resolution() const override { return resolution_; }
|
||||
|
||||
Timestamp render_time() const override {
|
||||
return Timestamp::Millis(render_time_ms_);
|
||||
}
|
||||
|
||||
private:
|
||||
rtc::scoped_refptr<EncodedImageBufferInterface> buffer_;
|
||||
int64_t render_time_ms_;
|
||||
VideoCodecType codec_;
|
||||
bool is_key_frame_;
|
||||
EncodedResolution resolution_;
|
||||
absl::optional<webrtc::ColorSpace> color_space_;
|
||||
};
|
||||
|
||||
VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) {
|
||||
VideoCodec codec;
|
||||
memset(&codec, 0, sizeof(codec));
|
||||
|
||||
codec.plType = decoder.payload_type;
|
||||
codec.codecType = PayloadStringToCodecType(decoder.video_format.name);
|
||||
|
||||
if (codec.codecType == kVideoCodecVP8) {
|
||||
*(codec.VP8()) = VideoEncoder::GetDefaultVp8Settings();
|
||||
} else if (codec.codecType == kVideoCodecVP9) {
|
||||
*(codec.VP9()) = VideoEncoder::GetDefaultVp9Settings();
|
||||
} else if (codec.codecType == kVideoCodecH264) {
|
||||
*(codec.H264()) = VideoEncoder::GetDefaultH264Settings();
|
||||
} else if (codec.codecType == kVideoCodecMultiplex) {
|
||||
VideoReceiveStream::Decoder associated_decoder = decoder;
|
||||
associated_decoder.video_format =
|
||||
SdpVideoFormat(CodecTypeToPayloadString(kVideoCodecVP9));
|
||||
VideoCodec associated_codec = CreateDecoderVideoCodec(associated_decoder);
|
||||
associated_codec.codecType = kVideoCodecMultiplex;
|
||||
return associated_codec;
|
||||
}
|
||||
|
||||
codec.width = 320;
|
||||
codec.height = 180;
|
||||
const int kDefaultStartBitrate = 300;
|
||||
codec.startBitrate = codec.minBitrate = codec.maxBitrate =
|
||||
kDefaultStartBitrate;
|
||||
|
||||
return codec;
|
||||
}
|
||||
|
||||
// Video decoder class to be used for unknown codecs. Doesn't support decoding
|
||||
// but logs messages to LS_ERROR.
|
||||
class NullVideoDecoder : public webrtc::VideoDecoder {
|
||||
public:
|
||||
int32_t InitDecode(const webrtc::VideoCodec* codec_settings,
|
||||
int32_t number_of_cores) override {
|
||||
RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder.";
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
int32_t Decode(const webrtc::EncodedImage& input_image,
|
||||
bool missing_frames,
|
||||
int64_t render_time_ms) override {
|
||||
RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding.";
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
int32_t RegisterDecodeCompleteCallback(
|
||||
webrtc::DecodedImageCallback* callback) override {
|
||||
RTC_LOG(LS_ERROR)
|
||||
<< "Can't register decode complete callback on NullVideoDecoder.";
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; }
|
||||
|
||||
const char* ImplementationName() const override { return "NullVideoDecoder"; }
|
||||
};
|
||||
|
||||
// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround.
|
||||
// Maximum time between frames before resetting the FrameBuffer to avoid RTP
|
||||
// timestamps wraparound to affect FrameBuffer.
|
||||
constexpr int kInactiveStreamThresholdMs = 600000; // 10 minutes.
|
||||
|
||||
} // namespace
|
||||
|
||||
VideoReceiveStream2::VideoReceiveStream2(
|
||||
TaskQueueFactory* task_queue_factory,
|
||||
RtpStreamReceiverControllerInterface* receiver_controller,
|
||||
int num_cpu_cores,
|
||||
PacketRouter* packet_router,
|
||||
VideoReceiveStream::Config config,
|
||||
ProcessThread* process_thread,
|
||||
CallStats* call_stats,
|
||||
Clock* clock,
|
||||
VCMTiming* timing)
|
||||
: task_queue_factory_(task_queue_factory),
|
||||
transport_adapter_(config.rtcp_send_transport),
|
||||
config_(std::move(config)),
|
||||
num_cpu_cores_(num_cpu_cores),
|
||||
process_thread_(process_thread),
|
||||
clock_(clock),
|
||||
call_stats_(call_stats),
|
||||
source_tracker_(clock_),
|
||||
stats_proxy_(&config_, clock_),
|
||||
rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
|
||||
timing_(timing),
|
||||
video_receiver_(clock_, timing_.get()),
|
||||
rtp_video_stream_receiver_(clock_,
|
||||
&transport_adapter_,
|
||||
call_stats,
|
||||
packet_router,
|
||||
&config_,
|
||||
rtp_receive_statistics_.get(),
|
||||
&stats_proxy_,
|
||||
&stats_proxy_,
|
||||
process_thread_,
|
||||
this, // NackSender
|
||||
nullptr, // Use default KeyFrameRequestSender
|
||||
this, // OnCompleteFrameCallback
|
||||
config_.frame_decryptor,
|
||||
config_.frame_transformer),
|
||||
rtp_stream_sync_(this),
|
||||
max_wait_for_keyframe_ms_(KeyframeIntervalSettings::ParseFromFieldTrials()
|
||||
.MaxWaitForKeyframeMs()
|
||||
.value_or(kMaxWaitForKeyFrameMs)),
|
||||
max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials()
|
||||
.MaxWaitForFrameMs()
|
||||
.value_or(kMaxWaitForFrameMs)),
|
||||
decode_queue_(task_queue_factory_->CreateTaskQueue(
|
||||
"DecodingQueue",
|
||||
TaskQueueFactory::Priority::HIGH)) {
|
||||
RTC_LOG(LS_INFO) << "VideoReceiveStream2: " << config_.ToString();
|
||||
|
||||
RTC_DCHECK(config_.renderer);
|
||||
RTC_DCHECK(process_thread_);
|
||||
RTC_DCHECK(call_stats_);
|
||||
|
||||
module_process_sequence_checker_.Detach();
|
||||
network_sequence_checker_.Detach();
|
||||
|
||||
RTC_DCHECK(!config_.decoders.empty());
|
||||
std::set<int> decoder_payload_types;
|
||||
for (const Decoder& decoder : config_.decoders) {
|
||||
RTC_CHECK(decoder.decoder_factory);
|
||||
RTC_CHECK(decoder_payload_types.find(decoder.payload_type) ==
|
||||
decoder_payload_types.end())
|
||||
<< "Duplicate payload type (" << decoder.payload_type
|
||||
<< ") for different decoders.";
|
||||
decoder_payload_types.insert(decoder.payload_type);
|
||||
}
|
||||
|
||||
timing_->set_render_delay(config_.render_delay_ms);
|
||||
|
||||
frame_buffer_.reset(
|
||||
new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_));
|
||||
|
||||
process_thread_->RegisterModule(&rtp_stream_sync_, RTC_FROM_HERE);
|
||||
// Register with RtpStreamReceiverController.
|
||||
media_receiver_ = receiver_controller->CreateReceiver(
|
||||
config_.rtp.remote_ssrc, &rtp_video_stream_receiver_);
|
||||
if (config_.rtp.rtx_ssrc) {
|
||||
rtx_receive_stream_ = std::make_unique<RtxReceiveStream>(
|
||||
&rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types,
|
||||
config_.rtp.remote_ssrc, rtp_receive_statistics_.get());
|
||||
rtx_receiver_ = receiver_controller->CreateReceiver(
|
||||
config_.rtp.rtx_ssrc, rtx_receive_stream_.get());
|
||||
} else {
|
||||
rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc,
|
||||
true);
|
||||
}
|
||||
}
|
||||
|
||||
VideoReceiveStream2::VideoReceiveStream2(
|
||||
TaskQueueFactory* task_queue_factory,
|
||||
RtpStreamReceiverControllerInterface* receiver_controller,
|
||||
int num_cpu_cores,
|
||||
PacketRouter* packet_router,
|
||||
VideoReceiveStream::Config config,
|
||||
ProcessThread* process_thread,
|
||||
CallStats* call_stats,
|
||||
Clock* clock)
|
||||
: VideoReceiveStream2(task_queue_factory,
|
||||
receiver_controller,
|
||||
num_cpu_cores,
|
||||
packet_router,
|
||||
std::move(config),
|
||||
process_thread,
|
||||
call_stats,
|
||||
clock,
|
||||
new VCMTiming(clock)) {}
|
||||
|
||||
VideoReceiveStream2::~VideoReceiveStream2() {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
RTC_LOG(LS_INFO) << "~VideoReceiveStream2: " << config_.ToString();
|
||||
Stop();
|
||||
process_thread_->DeRegisterModule(&rtp_stream_sync_);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::SignalNetworkState(NetworkState state) {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
rtp_video_stream_receiver_.SignalNetworkState(state);
|
||||
}
|
||||
|
||||
bool VideoReceiveStream2::DeliverRtcp(const uint8_t* packet, size_t length) {
|
||||
return rtp_video_stream_receiver_.DeliverRtcp(packet, length);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::SetSync(Syncable* audio_syncable) {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
rtp_stream_sync_.ConfigureSync(audio_syncable);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::Start() {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
|
||||
if (decoder_running_) {
|
||||
return;
|
||||
}
|
||||
|
||||
const bool protected_by_fec = config_.rtp.protected_by_flexfec ||
|
||||
rtp_video_stream_receiver_.IsUlpfecEnabled();
|
||||
|
||||
frame_buffer_->Start();
|
||||
|
||||
if (rtp_video_stream_receiver_.IsRetransmissionsEnabled() &&
|
||||
protected_by_fec) {
|
||||
frame_buffer_->SetProtectionMode(kProtectionNackFEC);
|
||||
}
|
||||
|
||||
transport_adapter_.Enable();
|
||||
rtc::VideoSinkInterface<VideoFrame>* renderer = nullptr;
|
||||
if (config_.enable_prerenderer_smoothing) {
|
||||
incoming_video_stream_.reset(new IncomingVideoStream(
|
||||
task_queue_factory_, config_.render_delay_ms, this));
|
||||
renderer = incoming_video_stream_.get();
|
||||
} else {
|
||||
renderer = this;
|
||||
}
|
||||
|
||||
for (const Decoder& decoder : config_.decoders) {
|
||||
std::unique_ptr<VideoDecoder> video_decoder =
|
||||
decoder.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format,
|
||||
config_.stream_id);
|
||||
// If we still have no valid decoder, we have to create a "Null" decoder
|
||||
// that ignores all calls. The reason we can get into this state is that the
|
||||
// old decoder factory interface doesn't have a way to query supported
|
||||
// codecs.
|
||||
if (!video_decoder) {
|
||||
video_decoder = std::make_unique<NullVideoDecoder>();
|
||||
}
|
||||
|
||||
std::string decoded_output_file =
|
||||
field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory");
|
||||
// Because '/' can't be used inside a field trial parameter, we use ';'
|
||||
// instead.
|
||||
// This is only relevant to WebRTC-DecoderDataDumpDirectory
|
||||
// field trial. ';' is chosen arbitrary. Even though it's a legal character
|
||||
// in some file systems, we can sacrifice ability to use it in the path to
|
||||
// dumped video, since it's developers-only feature for debugging.
|
||||
absl::c_replace(decoded_output_file, ';', '/');
|
||||
if (!decoded_output_file.empty()) {
|
||||
char filename_buffer[256];
|
||||
rtc::SimpleStringBuilder ssb(filename_buffer);
|
||||
ssb << decoded_output_file << "/webrtc_receive_stream_"
|
||||
<< this->config_.rtp.remote_ssrc << "-" << rtc::TimeMicros()
|
||||
<< ".ivf";
|
||||
video_decoder = CreateFrameDumpingDecoderWrapper(
|
||||
std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str()));
|
||||
}
|
||||
|
||||
video_decoders_.push_back(std::move(video_decoder));
|
||||
|
||||
video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(),
|
||||
decoder.payload_type);
|
||||
VideoCodec codec = CreateDecoderVideoCodec(decoder);
|
||||
|
||||
const bool raw_payload =
|
||||
config_.rtp.raw_payload_types.count(codec.plType) > 0;
|
||||
rtp_video_stream_receiver_.AddReceiveCodec(
|
||||
codec, decoder.video_format.parameters, raw_payload);
|
||||
RTC_CHECK_EQ(VCM_OK, video_receiver_.RegisterReceiveCodec(
|
||||
&codec, num_cpu_cores_, false));
|
||||
}
|
||||
|
||||
RTC_DCHECK(renderer != nullptr);
|
||||
video_stream_decoder_.reset(
|
||||
new VideoStreamDecoder(&video_receiver_, &stats_proxy_, renderer));
|
||||
|
||||
// Make sure we register as a stats observer *after* we've prepared the
|
||||
// |video_stream_decoder_|.
|
||||
call_stats_->RegisterStatsObserver(this);
|
||||
|
||||
// Start decoding on task queue.
|
||||
video_receiver_.DecoderThreadStarting();
|
||||
stats_proxy_.DecoderThreadStarting();
|
||||
decode_queue_.PostTask([this] {
|
||||
RTC_DCHECK_RUN_ON(&decode_queue_);
|
||||
decoder_stopped_ = false;
|
||||
StartNextDecode();
|
||||
});
|
||||
decoder_running_ = true;
|
||||
rtp_video_stream_receiver_.StartReceive();
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::Stop() {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
rtp_video_stream_receiver_.StopReceive();
|
||||
|
||||
stats_proxy_.OnUniqueFramesCounted(
|
||||
rtp_video_stream_receiver_.GetUniqueFramesSeen());
|
||||
|
||||
decode_queue_.PostTask([this] { frame_buffer_->Stop(); });
|
||||
|
||||
call_stats_->DeregisterStatsObserver(this);
|
||||
|
||||
if (decoder_running_) {
|
||||
rtc::Event done;
|
||||
decode_queue_.PostTask([this, &done] {
|
||||
RTC_DCHECK_RUN_ON(&decode_queue_);
|
||||
decoder_stopped_ = true;
|
||||
done.Set();
|
||||
});
|
||||
done.Wait(rtc::Event::kForever);
|
||||
|
||||
decoder_running_ = false;
|
||||
video_receiver_.DecoderThreadStopped();
|
||||
stats_proxy_.DecoderThreadStopped();
|
||||
// Deregister external decoders so they are no longer running during
|
||||
// destruction. This effectively stops the VCM since the decoder thread is
|
||||
// stopped, the VCM is deregistered and no asynchronous decoder threads are
|
||||
// running.
|
||||
for (const Decoder& decoder : config_.decoders)
|
||||
video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type);
|
||||
|
||||
UpdateHistograms();
|
||||
}
|
||||
|
||||
video_stream_decoder_.reset();
|
||||
incoming_video_stream_.reset();
|
||||
transport_adapter_.Disable();
|
||||
}
|
||||
|
||||
VideoReceiveStream::Stats VideoReceiveStream2::GetStats() const {
|
||||
VideoReceiveStream::Stats stats = stats_proxy_.GetStats();
|
||||
stats.total_bitrate_bps = 0;
|
||||
StreamStatistician* statistician =
|
||||
rtp_receive_statistics_->GetStatistician(stats.ssrc);
|
||||
if (statistician) {
|
||||
stats.rtp_stats = statistician->GetStats();
|
||||
stats.total_bitrate_bps = statistician->BitrateReceived();
|
||||
}
|
||||
if (config_.rtp.rtx_ssrc) {
|
||||
StreamStatistician* rtx_statistician =
|
||||
rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc);
|
||||
if (rtx_statistician)
|
||||
stats.total_bitrate_bps += rtx_statistician->BitrateReceived();
|
||||
}
|
||||
return stats;
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::UpdateHistograms() {
|
||||
absl::optional<int> fraction_lost;
|
||||
StreamDataCounters rtp_stats;
|
||||
StreamStatistician* statistician =
|
||||
rtp_receive_statistics_->GetStatistician(config_.rtp.remote_ssrc);
|
||||
if (statistician) {
|
||||
fraction_lost = statistician->GetFractionLostInPercent();
|
||||
rtp_stats = statistician->GetReceiveStreamDataCounters();
|
||||
}
|
||||
if (config_.rtp.rtx_ssrc) {
|
||||
StreamStatistician* rtx_statistician =
|
||||
rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc);
|
||||
if (rtx_statistician) {
|
||||
StreamDataCounters rtx_stats =
|
||||
rtx_statistician->GetReceiveStreamDataCounters();
|
||||
stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, &rtx_stats);
|
||||
return;
|
||||
}
|
||||
}
|
||||
stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::AddSecondarySink(RtpPacketSinkInterface* sink) {
|
||||
rtp_video_stream_receiver_.AddSecondarySink(sink);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::RemoveSecondarySink(
|
||||
const RtpPacketSinkInterface* sink) {
|
||||
rtp_video_stream_receiver_.RemoveSecondarySink(sink);
|
||||
}
|
||||
|
||||
bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) {
|
||||
return false;
|
||||
}
|
||||
|
||||
rtc::CritScope cs(&playout_delay_lock_);
|
||||
base_minimum_playout_delay_ms_ = delay_ms;
|
||||
UpdatePlayoutDelays();
|
||||
return true;
|
||||
}
|
||||
|
||||
int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
|
||||
rtc::CritScope cs(&playout_delay_lock_);
|
||||
return base_minimum_playout_delay_ms_;
|
||||
}
|
||||
|
||||
// TODO(tommi): This method grabs a lock 6 times.
|
||||
void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) {
|
||||
int64_t video_playout_ntp_ms;
|
||||
int64_t sync_offset_ms;
|
||||
double estimated_freq_khz;
|
||||
// TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the
|
||||
// function itself, another in GetChannel() and a third in
|
||||
// GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function
|
||||
// succeeds most of the time, which leads to grabbing a fourth lock.
|
||||
if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
|
||||
video_frame.timestamp(), video_frame.render_time_ms(),
|
||||
&video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) {
|
||||
// TODO(tommi): OnSyncOffsetUpdated grabs a lock.
|
||||
stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms,
|
||||
estimated_freq_khz);
|
||||
}
|
||||
source_tracker_.OnFrameDelivered(video_frame.packet_infos());
|
||||
|
||||
config_.renderer->OnFrame(video_frame);
|
||||
|
||||
// TODO(tommi): OnRenderFrame grabs a lock too.
|
||||
stats_proxy_.OnRenderedFrame(video_frame);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::SetFrameDecryptor(
|
||||
rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor) {
|
||||
rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor));
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer(
|
||||
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
|
||||
rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer(
|
||||
std::move(frame_transformer));
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::SendNack(
|
||||
const std::vector<uint16_t>& sequence_numbers,
|
||||
bool buffering_allowed) {
|
||||
RTC_DCHECK(buffering_allowed);
|
||||
rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::RequestKeyFrame(int64_t timestamp_ms) {
|
||||
rtp_video_stream_receiver_.RequestKeyFrame();
|
||||
last_keyframe_request_ms_ = timestamp_ms;
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::OnCompleteFrame(
|
||||
std::unique_ptr<video_coding::EncodedFrame> frame) {
|
||||
RTC_DCHECK_RUN_ON(&network_sequence_checker_);
|
||||
// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround.
|
||||
int64_t time_now_ms = clock_->TimeInMilliseconds();
|
||||
if (last_complete_frame_time_ms_ > 0 &&
|
||||
time_now_ms - last_complete_frame_time_ms_ > kInactiveStreamThresholdMs) {
|
||||
frame_buffer_->Clear();
|
||||
}
|
||||
last_complete_frame_time_ms_ = time_now_ms;
|
||||
|
||||
const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_;
|
||||
if (playout_delay.min_ms >= 0) {
|
||||
rtc::CritScope cs(&playout_delay_lock_);
|
||||
frame_minimum_playout_delay_ms_ = playout_delay.min_ms;
|
||||
UpdatePlayoutDelays();
|
||||
}
|
||||
|
||||
if (playout_delay.max_ms >= 0) {
|
||||
rtc::CritScope cs(&playout_delay_lock_);
|
||||
frame_maximum_playout_delay_ms_ = playout_delay.max_ms;
|
||||
UpdatePlayoutDelays();
|
||||
}
|
||||
|
||||
int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame));
|
||||
if (last_continuous_pid != -1)
|
||||
rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
|
||||
RTC_DCHECK_RUN_ON(&module_process_sequence_checker_);
|
||||
frame_buffer_->UpdateRtt(max_rtt_ms);
|
||||
rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms);
|
||||
}
|
||||
|
||||
uint32_t VideoReceiveStream2::id() const {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
return config_.rtp.remote_ssrc;
|
||||
}
|
||||
|
||||
absl::optional<Syncable::Info> VideoReceiveStream2::GetInfo() const {
|
||||
RTC_DCHECK_RUN_ON(&module_process_sequence_checker_);
|
||||
absl::optional<Syncable::Info> info =
|
||||
rtp_video_stream_receiver_.GetSyncInfo();
|
||||
|
||||
if (!info)
|
||||
return absl::nullopt;
|
||||
|
||||
info->current_delay_ms = timing_->TargetVideoDelay();
|
||||
return info;
|
||||
}
|
||||
|
||||
bool VideoReceiveStream2::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
|
||||
int64_t* time_ms) const {
|
||||
RTC_NOTREACHED();
|
||||
return 0;
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs(
|
||||
int64_t ntp_timestamp_ms,
|
||||
int64_t time_ms) {
|
||||
RTC_NOTREACHED();
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) {
|
||||
RTC_DCHECK_RUN_ON(&module_process_sequence_checker_);
|
||||
rtc::CritScope cs(&playout_delay_lock_);
|
||||
syncable_minimum_playout_delay_ms_ = delay_ms;
|
||||
UpdatePlayoutDelays();
|
||||
}
|
||||
|
||||
int64_t VideoReceiveStream2::GetWaitMs() const {
|
||||
return keyframe_required_ ? max_wait_for_keyframe_ms_
|
||||
: max_wait_for_frame_ms_;
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::StartNextDecode() {
|
||||
TRACE_EVENT0("webrtc", "VideoReceiveStream2::StartNextDecode");
|
||||
frame_buffer_->NextFrame(
|
||||
GetWaitMs(), keyframe_required_, &decode_queue_,
|
||||
/* encoded frame handler */
|
||||
[this](std::unique_ptr<EncodedFrame> frame, ReturnReason res) {
|
||||
RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout);
|
||||
RTC_DCHECK_EQ(frame != nullptr, res == ReturnReason::kFrameFound);
|
||||
decode_queue_.PostTask([this, frame = std::move(frame)]() mutable {
|
||||
RTC_DCHECK_RUN_ON(&decode_queue_);
|
||||
if (decoder_stopped_)
|
||||
return;
|
||||
if (frame) {
|
||||
HandleEncodedFrame(std::move(frame));
|
||||
} else {
|
||||
HandleFrameBufferTimeout();
|
||||
}
|
||||
StartNextDecode();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::HandleEncodedFrame(
|
||||
std::unique_ptr<EncodedFrame> frame) {
|
||||
int64_t now_ms = clock_->TimeInMilliseconds();
|
||||
|
||||
// Current OnPreDecode only cares about QP for VP8.
|
||||
int qp = -1;
|
||||
if (frame->CodecSpecific()->codecType == kVideoCodecVP8) {
|
||||
if (!vp8::GetQp(frame->data(), frame->size(), &qp)) {
|
||||
RTC_LOG(LS_WARNING) << "Failed to extract QP from VP8 video frame";
|
||||
}
|
||||
}
|
||||
stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp);
|
||||
HandleKeyFrameGeneration(frame->FrameType() == VideoFrameType::kVideoFrameKey,
|
||||
now_ms);
|
||||
int decode_result = video_receiver_.Decode(frame.get());
|
||||
if (decode_result == WEBRTC_VIDEO_CODEC_OK ||
|
||||
decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) {
|
||||
keyframe_required_ = false;
|
||||
frame_decoded_ = true;
|
||||
rtp_video_stream_receiver_.FrameDecoded(frame->id.picture_id);
|
||||
|
||||
if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME)
|
||||
RequestKeyFrame(now_ms);
|
||||
} else if (!frame_decoded_ || !keyframe_required_ ||
|
||||
(last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ < now_ms)) {
|
||||
keyframe_required_ = true;
|
||||
// TODO(philipel): Remove this keyframe request when downstream project
|
||||
// has been fixed.
|
||||
RequestKeyFrame(now_ms);
|
||||
}
|
||||
|
||||
if (encoded_frame_buffer_function_) {
|
||||
frame->Retain();
|
||||
encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame));
|
||||
}
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::HandleKeyFrameGeneration(
|
||||
bool received_frame_is_keyframe,
|
||||
int64_t now_ms) {
|
||||
// Repeat sending keyframe requests if we've requested a keyframe.
|
||||
if (!keyframe_generation_requested_) {
|
||||
return;
|
||||
}
|
||||
if (received_frame_is_keyframe) {
|
||||
keyframe_generation_requested_ = false;
|
||||
} else if (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ <= now_ms) {
|
||||
if (!IsReceivingKeyFrame(now_ms)) {
|
||||
RequestKeyFrame(now_ms);
|
||||
}
|
||||
} else {
|
||||
// It hasn't been long enough since the last keyframe request, do nothing.
|
||||
}
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::HandleFrameBufferTimeout() {
|
||||
int64_t now_ms = clock_->TimeInMilliseconds();
|
||||
absl::optional<int64_t> last_packet_ms =
|
||||
rtp_video_stream_receiver_.LastReceivedPacketMs();
|
||||
|
||||
// To avoid spamming keyframe requests for a stream that is not active we
|
||||
// check if we have received a packet within the last 5 seconds.
|
||||
bool stream_is_active = last_packet_ms && now_ms - *last_packet_ms < 5000;
|
||||
if (!stream_is_active)
|
||||
stats_proxy_.OnStreamInactive();
|
||||
|
||||
if (stream_is_active && !IsReceivingKeyFrame(now_ms) &&
|
||||
(!config_.crypto_options.sframe.require_frame_encryption ||
|
||||
rtp_video_stream_receiver_.IsDecryptable())) {
|
||||
RTC_LOG(LS_WARNING) << "No decodable frame in " << GetWaitMs()
|
||||
<< " ms, requesting keyframe.";
|
||||
RequestKeyFrame(now_ms);
|
||||
}
|
||||
}
|
||||
|
||||
bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const {
|
||||
absl::optional<int64_t> last_keyframe_packet_ms =
|
||||
rtp_video_stream_receiver_.LastReceivedKeyframePacketMs();
|
||||
|
||||
// If we recently have been receiving packets belonging to a keyframe then
|
||||
// we assume a keyframe is currently being received.
|
||||
bool receiving_keyframe =
|
||||
last_keyframe_packet_ms &&
|
||||
timestamp_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms_;
|
||||
return receiving_keyframe;
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::UpdatePlayoutDelays() const {
|
||||
const int minimum_delay_ms =
|
||||
std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_,
|
||||
syncable_minimum_playout_delay_ms_});
|
||||
if (minimum_delay_ms >= 0) {
|
||||
timing_->set_min_playout_delay(minimum_delay_ms);
|
||||
}
|
||||
|
||||
const int maximum_delay_ms = frame_maximum_playout_delay_ms_;
|
||||
if (maximum_delay_ms >= 0) {
|
||||
timing_->set_max_playout_delay(maximum_delay_ms);
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<webrtc::RtpSource> VideoReceiveStream2::GetSources() const {
|
||||
return source_tracker_.GetSources();
|
||||
}
|
||||
|
||||
VideoReceiveStream2::RecordingState
|
||||
VideoReceiveStream2::SetAndGetRecordingState(RecordingState state,
|
||||
bool generate_key_frame) {
|
||||
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
|
||||
rtc::Event event;
|
||||
RecordingState old_state;
|
||||
decode_queue_.PostTask([this, &event, &old_state, generate_key_frame,
|
||||
state = std::move(state)] {
|
||||
RTC_DCHECK_RUN_ON(&decode_queue_);
|
||||
// Save old state.
|
||||
old_state.callback = std::move(encoded_frame_buffer_function_);
|
||||
old_state.keyframe_needed = keyframe_generation_requested_;
|
||||
old_state.last_keyframe_request_ms = last_keyframe_request_ms_;
|
||||
|
||||
// Set new state.
|
||||
encoded_frame_buffer_function_ = std::move(state.callback);
|
||||
if (generate_key_frame) {
|
||||
RequestKeyFrame(clock_->TimeInMilliseconds());
|
||||
keyframe_generation_requested_ = true;
|
||||
} else {
|
||||
keyframe_generation_requested_ = state.keyframe_needed;
|
||||
last_keyframe_request_ms_ = state.last_keyframe_request_ms.value_or(0);
|
||||
}
|
||||
event.Set();
|
||||
});
|
||||
event.Wait(rtc::Event::kForever);
|
||||
return old_state;
|
||||
}
|
||||
|
||||
void VideoReceiveStream2::GenerateKeyFrame() {
|
||||
decode_queue_.PostTask([this]() {
|
||||
RTC_DCHECK_RUN_ON(&decode_queue_);
|
||||
RequestKeyFrame(clock_->TimeInMilliseconds());
|
||||
keyframe_generation_requested_ = true;
|
||||
});
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
238
video/video_receive_stream2.h
Normal file
238
video/video_receive_stream2.h
Normal file
@ -0,0 +1,238 @@
|
||||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_VIDEO_RECEIVE_STREAM2_H_
|
||||
#define VIDEO_VIDEO_RECEIVE_STREAM2_H_
|
||||
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
#include "api/task_queue/task_queue_factory.h"
|
||||
#include "api/transport/media/media_transport_interface.h"
|
||||
#include "api/video/recordable_encoded_frame.h"
|
||||
#include "call/rtp_packet_sink_interface.h"
|
||||
#include "call/syncable.h"
|
||||
#include "call/video_receive_stream.h"
|
||||
#include "modules/rtp_rtcp/include/flexfec_receiver.h"
|
||||
#include "modules/rtp_rtcp/source/source_tracker.h"
|
||||
#include "modules/video_coding/frame_buffer2.h"
|
||||
#include "modules/video_coding/video_receiver2.h"
|
||||
#include "rtc_base/synchronization/sequence_checker.h"
|
||||
#include "rtc_base/task_queue.h"
|
||||
#include "system_wrappers/include/clock.h"
|
||||
#include "video/receive_statistics_proxy2.h"
|
||||
#include "video/rtp_streams_synchronizer.h"
|
||||
#include "video/rtp_video_stream_receiver.h"
|
||||
#include "video/transport_adapter.h"
|
||||
#include "video/video_stream_decoder2.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class CallStats;
|
||||
class ProcessThread;
|
||||
class RTPFragmentationHeader;
|
||||
class RtpStreamReceiverInterface;
|
||||
class RtpStreamReceiverControllerInterface;
|
||||
class RtxReceiveStream;
|
||||
class VCMTiming;
|
||||
|
||||
namespace internal {
|
||||
|
||||
class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
|
||||
public rtc::VideoSinkInterface<VideoFrame>,
|
||||
public NackSender,
|
||||
public video_coding::OnCompleteFrameCallback,
|
||||
public Syncable,
|
||||
public CallStatsObserver {
|
||||
public:
|
||||
// The default number of milliseconds to pass before re-requesting a key frame
|
||||
// to be sent.
|
||||
static constexpr int kMaxWaitForKeyFrameMs = 200;
|
||||
|
||||
VideoReceiveStream2(TaskQueueFactory* task_queue_factory,
|
||||
RtpStreamReceiverControllerInterface* receiver_controller,
|
||||
int num_cpu_cores,
|
||||
PacketRouter* packet_router,
|
||||
VideoReceiveStream::Config config,
|
||||
ProcessThread* process_thread,
|
||||
CallStats* call_stats,
|
||||
Clock* clock,
|
||||
VCMTiming* timing);
|
||||
VideoReceiveStream2(TaskQueueFactory* task_queue_factory,
|
||||
RtpStreamReceiverControllerInterface* receiver_controller,
|
||||
int num_cpu_cores,
|
||||
PacketRouter* packet_router,
|
||||
VideoReceiveStream::Config config,
|
||||
ProcessThread* process_thread,
|
||||
CallStats* call_stats,
|
||||
Clock* clock);
|
||||
~VideoReceiveStream2() override;
|
||||
|
||||
const Config& config() const { return config_; }
|
||||
|
||||
void SignalNetworkState(NetworkState state);
|
||||
bool DeliverRtcp(const uint8_t* packet, size_t length);
|
||||
|
||||
void SetSync(Syncable* audio_syncable);
|
||||
|
||||
// Implements webrtc::VideoReceiveStream.
|
||||
void Start() override;
|
||||
void Stop() override;
|
||||
|
||||
webrtc::VideoReceiveStream::Stats GetStats() const override;
|
||||
|
||||
void AddSecondarySink(RtpPacketSinkInterface* sink) override;
|
||||
void RemoveSecondarySink(const RtpPacketSinkInterface* sink) override;
|
||||
|
||||
// SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called
|
||||
// from webrtc/api level and requested by user code. For e.g. blink/js layer
|
||||
// in Chromium.
|
||||
bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override;
|
||||
int GetBaseMinimumPlayoutDelayMs() const override;
|
||||
|
||||
void SetFrameDecryptor(
|
||||
rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) override;
|
||||
void SetDepacketizerToDecoderFrameTransformer(
|
||||
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) override;
|
||||
|
||||
// Implements rtc::VideoSinkInterface<VideoFrame>.
|
||||
void OnFrame(const VideoFrame& video_frame) override;
|
||||
|
||||
// Implements NackSender.
|
||||
// For this particular override of the interface,
|
||||
// only (buffering_allowed == true) is acceptable.
|
||||
void SendNack(const std::vector<uint16_t>& sequence_numbers,
|
||||
bool buffering_allowed) override;
|
||||
|
||||
// Implements video_coding::OnCompleteFrameCallback.
|
||||
void OnCompleteFrame(
|
||||
std::unique_ptr<video_coding::EncodedFrame> frame) override;
|
||||
|
||||
// Implements CallStatsObserver::OnRttUpdate
|
||||
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;
|
||||
|
||||
// Implements Syncable.
|
||||
uint32_t id() const override;
|
||||
absl::optional<Syncable::Info> GetInfo() const override;
|
||||
bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
|
||||
int64_t* time_ms) const override;
|
||||
void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
|
||||
int64_t time_ms) override;
|
||||
|
||||
// SetMinimumPlayoutDelay is only called by A/V sync.
|
||||
void SetMinimumPlayoutDelay(int delay_ms) override;
|
||||
|
||||
std::vector<webrtc::RtpSource> GetSources() const override;
|
||||
|
||||
RecordingState SetAndGetRecordingState(RecordingState state,
|
||||
bool generate_key_frame) override;
|
||||
void GenerateKeyFrame() override;
|
||||
|
||||
private:
|
||||
int64_t GetWaitMs() const;
|
||||
void StartNextDecode() RTC_RUN_ON(decode_queue_);
|
||||
void HandleEncodedFrame(std::unique_ptr<video_coding::EncodedFrame> frame)
|
||||
RTC_RUN_ON(decode_queue_);
|
||||
void HandleFrameBufferTimeout() RTC_RUN_ON(decode_queue_);
|
||||
void UpdatePlayoutDelays() const
|
||||
RTC_EXCLUSIVE_LOCKS_REQUIRED(playout_delay_lock_);
|
||||
void RequestKeyFrame(int64_t timestamp_ms) RTC_RUN_ON(decode_queue_);
|
||||
void HandleKeyFrameGeneration(bool received_frame_is_keyframe, int64_t now_ms)
|
||||
RTC_RUN_ON(decode_queue_);
|
||||
bool IsReceivingKeyFrame(int64_t timestamp_ms) const
|
||||
RTC_RUN_ON(decode_queue_);
|
||||
|
||||
void UpdateHistograms();
|
||||
|
||||
SequenceChecker worker_sequence_checker_;
|
||||
SequenceChecker module_process_sequence_checker_;
|
||||
SequenceChecker network_sequence_checker_;
|
||||
|
||||
TaskQueueFactory* const task_queue_factory_;
|
||||
|
||||
TransportAdapter transport_adapter_;
|
||||
const VideoReceiveStream::Config config_;
|
||||
const int num_cpu_cores_;
|
||||
ProcessThread* const process_thread_;
|
||||
Clock* const clock_;
|
||||
|
||||
CallStats* const call_stats_;
|
||||
|
||||
bool decoder_running_ RTC_GUARDED_BY(worker_sequence_checker_) = false;
|
||||
bool decoder_stopped_ RTC_GUARDED_BY(decode_queue_) = true;
|
||||
|
||||
SourceTracker source_tracker_;
|
||||
ReceiveStatisticsProxy stats_proxy_;
|
||||
// Shared by media and rtx stream receivers, since the latter has no RtpRtcp
|
||||
// module of its own.
|
||||
const std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
|
||||
|
||||
std::unique_ptr<VCMTiming> timing_; // Jitter buffer experiment.
|
||||
VideoReceiver2 video_receiver_;
|
||||
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> incoming_video_stream_;
|
||||
RtpVideoStreamReceiver rtp_video_stream_receiver_;
|
||||
std::unique_ptr<VideoStreamDecoder> video_stream_decoder_;
|
||||
RtpStreamsSynchronizer rtp_stream_sync_;
|
||||
|
||||
// TODO(nisse, philipel): Creation and ownership of video encoders should be
|
||||
// moved to the new VideoStreamDecoder.
|
||||
std::vector<std::unique_ptr<VideoDecoder>> video_decoders_;
|
||||
|
||||
// Members for the new jitter buffer experiment.
|
||||
std::unique_ptr<video_coding::FrameBuffer> frame_buffer_;
|
||||
|
||||
std::unique_ptr<RtpStreamReceiverInterface> media_receiver_;
|
||||
std::unique_ptr<RtxReceiveStream> rtx_receive_stream_;
|
||||
std::unique_ptr<RtpStreamReceiverInterface> rtx_receiver_;
|
||||
|
||||
// Whenever we are in an undecodable state (stream has just started or due to
|
||||
// a decoding error) we require a keyframe to restart the stream.
|
||||
bool keyframe_required_ = true;
|
||||
|
||||
// If we have successfully decoded any frame.
|
||||
bool frame_decoded_ = false;
|
||||
|
||||
int64_t last_keyframe_request_ms_ = 0;
|
||||
int64_t last_complete_frame_time_ms_ = 0;
|
||||
|
||||
// Keyframe request intervals are configurable through field trials.
|
||||
const int max_wait_for_keyframe_ms_;
|
||||
const int max_wait_for_frame_ms_;
|
||||
|
||||
rtc::CriticalSection playout_delay_lock_;
|
||||
|
||||
// All of them tries to change current min_playout_delay on |timing_| but
|
||||
// source of the change request is different in each case. Among them the
|
||||
// biggest delay is used. -1 means use default value from the |timing_|.
|
||||
//
|
||||
// Minimum delay as decided by the RTP playout delay extension.
|
||||
int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
|
||||
// Minimum delay as decided by the setLatency function in "webrtc/api".
|
||||
int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
|
||||
// Minimum delay as decided by the A/V synchronization feature.
|
||||
int syncable_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) =
|
||||
-1;
|
||||
|
||||
// Maximum delay as decided by the RTP playout delay extension.
|
||||
int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
|
||||
|
||||
// Function that is triggered with encoded frames, if not empty.
|
||||
std::function<void(const RecordableEncodedFrame&)>
|
||||
encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_);
|
||||
// Set to true while we're requesting keyframes but not yet received one.
|
||||
bool keyframe_generation_requested_ RTC_GUARDED_BY(decode_queue_) = false;
|
||||
|
||||
// Defined last so they are destroyed before all other members.
|
||||
rtc::TaskQueue decode_queue_;
|
||||
};
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_VIDEO_RECEIVE_STREAM2_H_
|
||||
69
video/video_stream_decoder2.cc
Normal file
69
video/video_stream_decoder2.cc
Normal file
@ -0,0 +1,69 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/video_stream_decoder2.h"
|
||||
|
||||
#include "modules/video_coding/video_receiver2.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "video/receive_statistics_proxy2.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace internal {
|
||||
|
||||
VideoStreamDecoder::VideoStreamDecoder(
|
||||
VideoReceiver2* video_receiver,
|
||||
ReceiveStatisticsProxy* receive_statistics_proxy,
|
||||
rtc::VideoSinkInterface<VideoFrame>* incoming_video_stream)
|
||||
: video_receiver_(video_receiver),
|
||||
receive_stats_callback_(receive_statistics_proxy),
|
||||
incoming_video_stream_(incoming_video_stream) {
|
||||
RTC_DCHECK(video_receiver_);
|
||||
|
||||
video_receiver_->RegisterReceiveCallback(this);
|
||||
}
|
||||
|
||||
VideoStreamDecoder::~VideoStreamDecoder() {
|
||||
// Note: There's an assumption at this point that the decoder thread is
|
||||
// *not* running. If it was, then there could be a race for each of these
|
||||
// callbacks.
|
||||
|
||||
// Unset all the callback pointers that we set in the ctor.
|
||||
video_receiver_->RegisterReceiveCallback(nullptr);
|
||||
}
|
||||
|
||||
// Do not acquire the lock of |video_receiver_| in this function. Decode
|
||||
// callback won't necessarily be called from the decoding thread. The decoding
|
||||
// thread may have held the lock when calling VideoDecoder::Decode, Reset, or
|
||||
// Release. Acquiring the same lock in the path of decode callback can deadlock.
|
||||
int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
|
||||
absl::optional<uint8_t> qp,
|
||||
int32_t decode_time_ms,
|
||||
VideoContentType content_type) {
|
||||
receive_stats_callback_->OnDecodedFrame(video_frame, qp, decode_time_ms,
|
||||
content_type);
|
||||
incoming_video_stream_->OnFrame(video_frame);
|
||||
return 0;
|
||||
}
|
||||
|
||||
void VideoStreamDecoder::OnDroppedFrames(uint32_t frames_dropped) {
|
||||
receive_stats_callback_->OnDroppedFrames(frames_dropped);
|
||||
}
|
||||
|
||||
void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) {
|
||||
receive_stats_callback_->OnIncomingPayloadType(payload_type);
|
||||
}
|
||||
|
||||
void VideoStreamDecoder::OnDecoderImplementationName(
|
||||
const char* implementation_name) {
|
||||
receive_stats_callback_->OnDecoderImplementationName(implementation_name);
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
60
video/video_stream_decoder2.h
Normal file
60
video/video_stream_decoder2.h
Normal file
@ -0,0 +1,60 @@
|
||||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_VIDEO_STREAM_DECODER2_H_
|
||||
#define VIDEO_VIDEO_STREAM_DECODER2_H_
|
||||
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_sink_interface.h"
|
||||
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
|
||||
#include "modules/video_coding/include/video_coding_defines.h"
|
||||
#include "rtc_base/critical_section.h"
|
||||
#include "rtc_base/platform_thread.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class VideoReceiver2;
|
||||
|
||||
namespace internal {
|
||||
|
||||
class ReceiveStatisticsProxy;
|
||||
|
||||
class VideoStreamDecoder : public VCMReceiveCallback {
|
||||
public:
|
||||
VideoStreamDecoder(
|
||||
VideoReceiver2* video_receiver,
|
||||
ReceiveStatisticsProxy* receive_statistics_proxy,
|
||||
rtc::VideoSinkInterface<VideoFrame>* incoming_video_stream);
|
||||
~VideoStreamDecoder() override;
|
||||
|
||||
// Implements VCMReceiveCallback.
|
||||
int32_t FrameToRender(VideoFrame& video_frame,
|
||||
absl::optional<uint8_t> qp,
|
||||
int32_t decode_time_ms,
|
||||
VideoContentType content_type) override;
|
||||
void OnDroppedFrames(uint32_t frames_dropped) override;
|
||||
void OnIncomingPayloadType(int payload_type) override;
|
||||
void OnDecoderImplementationName(const char* implementation_name) override;
|
||||
|
||||
private:
|
||||
VideoReceiver2* const video_receiver_;
|
||||
ReceiveStatisticsProxy* const receive_stats_callback_;
|
||||
rtc::VideoSinkInterface<VideoFrame>* const incoming_video_stream_;
|
||||
};
|
||||
|
||||
} // namespace internal
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_VIDEO_STREAM_DECODER2_H_
|
||||
Loading…
x
Reference in New Issue
Block a user