Piggybacking simulcast id and ALR experiment id into video content type extension.

Use it to slice UMA video receive stats.

BUG=8032

Review-Url: https://codereview.webrtc.org/2986893002
Cr-Commit-Position: refs/heads/master@{#19598}
This commit is contained in:
ilnik 2017-08-30 03:32:14 -07:00 committed by Commit Bot
parent 057e63a0ac
commit 6d5b4d6fe1
22 changed files with 602 additions and 164 deletions

View File

@ -168,6 +168,8 @@ rtc_source_set("video_frame_api") {
sources = [
"video/i420_buffer.cc",
"video/i420_buffer.h",
"video/video_content_type.cc",
"video/video_content_type.h",
"video/video_frame.cc",
"video/video_frame.h",
"video/video_frame_buffer.cc",

View File

@ -0,0 +1,94 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/video/video_content_type.h"
// VideoContentType stored as a single byte, which is sent over the network.
// Structure:
//
// 0 1 2 3 4 5 6 7
// +---------------+
// |r r e e e s s c|
//
// where:
// r - reserved bits.
// e - 3-bit number of an experiment group counted from 1. 0 means there's no
// experiment ongoing.
// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that
// no simulcast information is set.
// c - content type. 0 means real-time video, 1 means screenshare.
//
namespace webrtc {
namespace videocontenttypehelpers {
namespace {
// Sizes (in bits) of the fields packed into the content-type byte,
// counted from the least-significant bit: screenshare flag first,
// then simulcast id, then experiment id. The two most-significant
// bits stay reserved.
static constexpr uint8_t kScreenshareBits = 1;
static constexpr uint8_t kSimulcastBits = 2;
static constexpr uint8_t kExperimentBits = 3;
// Bit offsets of the simulcast and experiment fields.
static constexpr uint8_t kSimulcastOffset = kScreenshareBits;  // 1
static constexpr uint8_t kExperimentOffset =
    kScreenshareBits + kSimulcastBits;  // 3
// Masks selecting each field within the byte.
static constexpr uint8_t kScreenshareMask =
    (1u << kScreenshareBits) - 1;  // 0b00000001
static constexpr uint8_t kSimulcastMask =
    ((1u << kSimulcastBits) - 1) << kSimulcastOffset;  // 0b00000110
static constexpr uint8_t kExperimentMask =
    ((1u << kExperimentBits) - 1) << kExperimentOffset;  // 0b00111000
// Total number of meaningful (non-reserved) bits.
static constexpr uint8_t kUsedBits =
    kScreenshareBits + kSimulcastBits + kExperimentBits;
}  // namespace
// Stores |experiment_id| in the 3-bit experiment field of
// |content_type|. Returns false (leaving the value untouched) if the
// id does not fit in 3 bits.
bool SetExperimentId(VideoContentType* content_type,
                     uint8_t experiment_id) {
  if (experiment_id >= (1 << kExperimentBits))
    return false;
  uint8_t value = static_cast<uint8_t>(*content_type);
  value &= ~kExperimentMask;
  value |= (experiment_id << kExperimentOffset) & kExperimentMask;
  *content_type = static_cast<VideoContentType>(value);
  return true;
}
// Stores |simulcast_id| in the 2-bit simulcast field of
// |content_type|. Returns false (leaving the value untouched) if the
// id does not fit in 2 bits.
bool SetSimulcastId(VideoContentType* content_type,
                    uint8_t simulcast_id) {
  if (simulcast_id >= (1 << kSimulcastBits))
    return false;
  uint8_t value = static_cast<uint8_t>(*content_type);
  value &= ~kSimulcastMask;
  value |= (simulcast_id << kSimulcastOffset) & kSimulcastMask;
  *content_type = static_cast<VideoContentType>(value);
  return true;
}
// Extracts the 3-bit experiment field; 0 means no ongoing experiment.
uint8_t GetExperimentId(
    const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kExperimentMask) >>
         kExperimentOffset;
}
// Extracts the 2-bit simulcast field; 0 means no simulcast info set.
uint8_t GetSimulcastId(
    const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kSimulcastMask) >>
         kSimulcastOffset;
}
// The least-significant bit holds the screenshare flag.
bool IsScreenshare(
    const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kScreenshareMask) != 0;
}
// Every combination of the 6 used bits is a valid on-the-wire value;
// the two reserved top bits must be zero.
bool IsValidContentType(uint8_t value) {
  return value < (1 << kUsedBits);
}
}  // namespace videocontenttypehelpers
}  // namespace webrtc

View File

@ -18,9 +18,22 @@ namespace webrtc {
// Content type of a video frame; stored as a single byte that is also
// sent over the network. Beyond the named enumerators, helper functions
// in videocontenttypehelpers pack a simulcast id and an experiment id
// into the remaining bits, so a VideoContentType value may legitimately
// be outside the listed enumerators.
enum class VideoContentType : uint8_t {
UNSPECIFIED = 0,  // Real-time video (the default).
SCREENSHARE = 1,  // Screen-capture content.
TOTAL_CONTENT_TYPES // Must be the last value in the enum.
};
namespace videocontenttypehelpers {
bool SetExperimentId(VideoContentType* content_type,
uint8_t experiment_id);
bool SetSimulcastId(VideoContentType* content_type,
uint8_t simulcast_id);
uint8_t GetExperimentId(const VideoContentType& content_type);
uint8_t GetSimulcastId(const VideoContentType& content_type);
bool IsScreenshare(const VideoContentType& content_type);
bool IsValidContentType(uint8_t value);
} // namespace videocontenttypehelpers
} // namespace webrtc
#endif // WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_

View File

@ -54,7 +54,7 @@ class EncodedImage {
size_t _length;
size_t _size;
VideoRotation rotation_ = kVideoRotation_0;
VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
mutable VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
bool _completeFrame = false;
AdaptReason adapt_reason_;
int qp_ = -1; // Quantizer value.

View File

@ -91,11 +91,12 @@ AlrDetector::ParseAlrSettingsFromFieldTrial(const char* experiment_name) {
return ret;
AlrExperimentSettings settings;
if (sscanf(group_name.c_str(), "%f,%" PRId64 ",%d,%d,%d",
if (sscanf(group_name.c_str(), "%f,%" PRId64 ",%d,%d,%d,%d",
&settings.pacing_factor, &settings.max_paced_queue_time,
&settings.alr_bandwidth_usage_percent,
&settings.alr_start_budget_level_percent,
&settings.alr_stop_budget_level_percent) == 5) {
&settings.alr_stop_budget_level_percent,
&settings.group_id) == 6) {
ret.emplace(settings);
LOG(LS_INFO) << "Using ALR experiment settings: "
"pacing factor: "
@ -106,7 +107,8 @@ AlrDetector::ParseAlrSettingsFromFieldTrial(const char* experiment_name) {
<< ", ALR end budget level percent: "
<< settings.alr_start_budget_level_percent
<< ", ALR end budget level percent: "
<< settings.alr_stop_budget_level_percent;
<< settings.alr_stop_budget_level_percent
<< ", ALR experiment group ID: " << settings.group_id;
} else {
LOG(LS_INFO) << "Failed to parse ALR experiment: " << experiment_name;
}

View File

@ -47,6 +47,10 @@ class AlrDetector {
int alr_bandwidth_usage_percent = kDefaultAlrBandwidthUsagePercent;
int alr_start_budget_level_percent = kDefaultAlrStartBudgetLevelPercent;
int alr_stop_budget_level_percent = kDefaultAlrStopBudgetLevelPercent;
// Will be sent to the receive side for stats slicing.
// Can be 0..6, because it's sent as a 3 bits value and there's also
// reserved value to indicate absence of experiment.
int group_id = 0;
};
static rtc::Optional<AlrExperimentSettings> ParseAlrSettingsFromFieldTrial(
const char* experiment_name);

View File

@ -232,7 +232,7 @@ constexpr const char VideoContentTypeExtension::kUri[];
bool VideoContentTypeExtension::Parse(rtc::ArrayView<const uint8_t> data,
VideoContentType* content_type) {
if (data.size() == 1 &&
data[0] < static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
videocontenttypehelpers::IsValidContentType(data[0])) {
*content_type = static_cast<VideoContentType>(data[0]);
return true;
}

View File

@ -452,8 +452,7 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
// | ID | len=0 | Content type |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
if (ptr[0] <
static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
if (videocontenttypehelpers::IsValidContentType(ptr[0])) {
header->extension.hasVideoContentType = true;
header->extension.videoContentType =
static_cast<VideoContentType>(ptr[0]);

View File

@ -104,6 +104,7 @@ rtc_static_library("video_coding") {
"../../rtc_base:rtc_task_queue",
"../../rtc_base:sequenced_task_checker",
"../../system_wrappers",
"../pacing",
"../rtp_rtcp:rtp_rtcp",
"../utility:utility",
]

View File

@ -277,7 +277,8 @@ int FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
RTC_DCHECK(frame);
if (stats_callback_)
stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size());
stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(),
frame->contentType());
FrameKey key(frame->picture_id, frame->spatial_layer);
rtc::CritScope lock(&crit_);

View File

@ -105,7 +105,10 @@ class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback {
public:
MOCK_METHOD2(OnReceiveRatesUpdated,
void(uint32_t bitRate, uint32_t frameRate));
MOCK_METHOD2(OnCompleteFrame, void(bool is_keyframe, size_t size_bytes));
MOCK_METHOD3(OnCompleteFrame,
void(bool is_keyframe,
size_t size_bytes,
VideoContentType content_type));
MOCK_METHOD1(OnDiscardedPacketsUpdated, void(int discarded_packets));
MOCK_METHOD1(OnFrameCountsUpdated, void(const FrameCounts& frame_counts));
MOCK_METHOD7(OnFrameBufferTimingsUpdated,
@ -489,7 +492,8 @@ TEST_F(TestFrameBuffer2, StatsCallback) {
uint32_t ts = Rand();
const int kFrameSize = 5000;
EXPECT_CALL(stats_callback_, OnCompleteFrame(true, kFrameSize));
EXPECT_CALL(stats_callback_,
OnCompleteFrame(true, kFrameSize, VideoContentType::UNSPECIFIED));
EXPECT_CALL(stats_callback_,
OnFrameBufferTimingsUpdated(_, _, _, _, _, _, _));

View File

@ -13,6 +13,7 @@
#include <vector>
#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/modules/pacing/alr_detector.h"
#include "webrtc/modules/video_coding/encoded_frame.h"
#include "webrtc/modules/video_coding/media_optimization.h"
#include "webrtc/rtc_base/checks.h"
@ -20,6 +21,7 @@
#include "webrtc/rtc_base/optional.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/rtc_base/trace_event.h"
#include "webrtc/system_wrappers/include/field_trial.h"
namespace webrtc {
@ -183,7 +185,23 @@ VCMEncodedFrameCallback::VCMEncodedFrameCallback(
media_opt_(media_opt),
framerate_(1),
last_timing_frame_time_ms_(-1),
timing_frames_thresholds_({-1, 0}) {}
timing_frames_thresholds_({-1, 0}) {
rtc::Optional<AlrDetector::AlrExperimentSettings> experiment_settings =
AlrDetector::ParseAlrSettingsFromFieldTrial(
AlrDetector::kStrictPacingAndProbingExperimentName);
if (experiment_settings) {
experiment_groups_[0] = experiment_settings->group_id + 1;
} else {
experiment_groups_[0] = 0;
}
experiment_settings = AlrDetector::ParseAlrSettingsFromFieldTrial(
AlrDetector::kScreenshareProbingBweExperimentName);
if (experiment_settings) {
experiment_groups_[1] = experiment_settings->group_id + 1;
} else {
experiment_groups_[1] = 0;
}
}
VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {}
@ -231,13 +249,15 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
rtc::Optional<size_t> outlier_frame_size;
rtc::Optional<int64_t> encode_start_ms;
size_t num_simulcast_svc_streams = 1;
uint8_t timing_flags = TimingFrameFlags::kInvalid;
{
rtc::CritScope crit(&timing_params_lock_);
// Encoders with internal sources do not call OnEncodeStarted and
// OnFrameRateChanged. |timing_frames_info_| may be not filled here.
if (simulcast_svc_idx < timing_frames_info_.size()) {
num_simulcast_svc_streams = timing_frames_info_.size();
if (simulcast_svc_idx < num_simulcast_svc_streams) {
auto encode_start_map =
&timing_frames_info_[simulcast_svc_idx].encode_start_time_ms;
auto it = encode_start_map->find(encoded_image.capture_time_ms_);
@ -299,6 +319,23 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
encoded_image.timing_.flags = TimingFrameFlags::kInvalid;
}
// Piggyback ALR experiment group id and simulcast id into the content type.
uint8_t experiment_id =
experiment_groups_[videocontenttypehelpers::IsScreenshare(
encoded_image.content_type_)];
// TODO(ilnik): This will force content type extension to be present even
// for realtime video. At the expense of miniscule overhead we will get
// sliced receive statistics.
RTC_CHECK(videocontenttypehelpers::SetExperimentId(
&encoded_image.content_type_, experiment_id));
// We count simulcast streams from 1 on the wire. That's why we set simulcast
// id in content type to +1 of that is actual simulcast index. This is because
// value 0 on the wire is reserved for 'no simulcast stream specified'.
RTC_CHECK(videocontenttypehelpers::SetSimulcastId(
&encoded_image.content_type_,
static_cast<uint8_t>(simulcast_svc_idx + 1)));
Result result = post_encode_callback_->OnEncodedImage(
encoded_image, codec_specific, fragmentation_header);
if (result.error != Result::OK)

View File

@ -82,6 +82,11 @@ class VCMEncodedFrameCallback : public EncodedImageCallback {
int64_t last_timing_frame_time_ms_ GUARDED_BY(timing_params_lock_);
VideoCodec::TimingFrameTriggerThresholds timing_frames_thresholds_
GUARDED_BY(timing_params_lock_);
// Experiment groups parsed from field trials for realtime video ([0]) and
// screenshare ([1]). 0 means no group specified. Positive values are
// experiment group numbers incremented by 1.
uint8_t experiment_groups_[2];
};
class VCMGenericEncoder {

View File

@ -69,7 +69,7 @@ class VCMReceiveCallback {
public:
virtual int32_t FrameToRender(VideoFrame& videoFrame, // NOLINT
rtc::Optional<uint8_t> qp,
VideoContentType /*content_type*/) = 0;
VideoContentType content_type) = 0;
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
return -1;
@ -97,7 +97,9 @@ class VCMSendStatisticsCallback {
class VCMReceiveStatisticsCallback {
public:
virtual void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) = 0;
virtual void OnCompleteFrame(bool is_keyframe, size_t size_bytes) = 0;
virtual void OnCompleteFrame(bool is_keyframe,
size_t size_bytes,
VideoContentType content_type) = 0;
virtual void OnDiscardedPacketsUpdated(int discarded_packets) = 0;
virtual void OnFrameCountsUpdated(const FrameCounts& frame_counts) = 0;
virtual void OnFrameBufferTimingsUpdated(int decode_ms,

View File

@ -92,15 +92,45 @@
webrtc::metrics::HistogramFactoryGetCountsLinear( \
name, min, max, bucket_count))
// Deprecated.
// TODO(asapersson): Remove.
// Slow metrics: pointer to metric is acquired at each call and is not cached.
//
#define RTC_HISTOGRAM_COUNTS_SPARSE_100(name, sample) \
RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 100, 50)
#define RTC_HISTOGRAM_COUNTS_SPARSE_200(name, sample) \
RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 200, 50)
#define RTC_HISTOGRAM_COUNTS_SPARSE_500(name, sample) \
RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 500, 50)
#define RTC_HISTOGRAM_COUNTS_SPARSE_1000(name, sample) \
RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 1000, 50)
#define RTC_HISTOGRAM_COUNTS_SPARSE_10000(name, sample) \
RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 10000, 50)
#define RTC_HISTOGRAM_COUNTS_SPARSE_100000(name, sample) \
RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, 1, 100000, 50)
#define RTC_HISTOGRAM_COUNTS_SPARSE(name, sample, min, max, bucket_count) \
RTC_HISTOGRAM_COMMON_BLOCK_SLOW(name, sample, \
webrtc::metrics::HistogramFactoryGetCounts(name, min, max, bucket_count))
// Histogram for percentage (evenly spaced buckets).
#define RTC_HISTOGRAM_PERCENTAGE_SPARSE(name, sample) \
RTC_HISTOGRAM_ENUMERATION_SPARSE(name, sample, 101)
// Histogram for booleans.
#define RTC_HISTOGRAM_BOOLEAN_SPARSE(name, sample) \
RTC_HISTOGRAM_ENUMERATION_SPARSE(name, sample, 2)
// Histogram for enumerators (evenly spaced buckets).
// |boundary| should be above the max enumerator sample.
#define RTC_HISTOGRAM_ENUMERATION_SPARSE(name, sample, boundary) \
RTC_HISTOGRAM_COMMON_BLOCK_SLOW( \
name, sample, \
webrtc::metrics::HistogramFactoryGetEnumeration(name, boundary))
// Histogram for percentage (evenly spaced buckets).
#define RTC_HISTOGRAM_PERCENTAGE(name, sample) \
RTC_HISTOGRAM_ENUMERATION(name, sample, 101)
@ -154,7 +184,9 @@
// Helper macros.
// Macros for calling a histogram with varying name (e.g. when using a metric
// in different modes such as real-time vs screenshare).
// in different modes such as real-time vs screenshare). Fast, because pointer
// is cached. |index| should be different for different names. Allowed |index|
// values are 0, 1, and 2.
#define RTC_HISTOGRAMS_COUNTS_100(index, name, sample) \
RTC_HISTOGRAMS_COMMON(index, name, sample, \
RTC_HISTOGRAM_COUNTS(name, sample, 1, 100, 50))
@ -188,23 +220,22 @@
RTC_HISTOGRAM_PERCENTAGE(name, sample))
#define RTC_HISTOGRAMS_COMMON(index, name, sample, macro_invocation) \
do { \
switch (index) { \
case 0: \
macro_invocation; \
break; \
case 1: \
macro_invocation; \
break; \
case 2: \
macro_invocation; \
break; \
default: \
RTC_NOTREACHED(); \
} \
do { \
switch (index) { \
case 0: \
macro_invocation; \
break; \
case 1: \
macro_invocation; \
break; \
case 2: \
macro_invocation; \
break; \
default: \
RTC_NOTREACHED(); \
} \
} while (0)
namespace webrtc {
namespace metrics {

View File

@ -2811,7 +2811,9 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
std::string video_prefix =
screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video.";
// The content type extension is disabled in non screenshare test,
// therefore no slicing on simulcast id should be present.
std::string video_suffix = screenshare ? ".S0" : "";
// Verify that stats have been updated once.
EXPECT_EQ(2, metrics::NumSamples("WebRTC.Call.LifetimeInSeconds"));
EXPECT_EQ(1, metrics::NumSamples(
@ -2847,8 +2849,8 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InputHeightInPixels"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "SentWidthInPixels"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "SentHeightInPixels"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.ReceivedHeightInPixels"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "ReceivedWidthInPixels"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "ReceivedHeightInPixels"));
EXPECT_EQ(1, metrics::NumEvents(video_prefix + "InputWidthInPixels",
kDefaultWidth));
@ -2858,9 +2860,9 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
1, metrics::NumEvents(video_prefix + "SentWidthInPixels", kDefaultWidth));
EXPECT_EQ(1, metrics::NumEvents(video_prefix + "SentHeightInPixels",
kDefaultHeight));
EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.ReceivedWidthInPixels",
EXPECT_EQ(1, metrics::NumEvents(video_prefix + "ReceivedWidthInPixels",
kDefaultWidth));
EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.ReceivedHeightInPixels",
EXPECT_EQ(1, metrics::NumEvents(video_prefix + "ReceivedHeightInPixels",
kDefaultHeight));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InputFramesPerSecond"));
@ -2873,10 +2875,14 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InterframeDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InterframeDelayMaxInMs"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs" +
video_suffix));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs" +
video_suffix));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InterframeDelayInMs" +
video_suffix));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InterframeDelayMaxInMs" +
video_suffix));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));

View File

@ -12,13 +12,14 @@
#include <algorithm>
#include <cmath>
#include <sstream>
#include <utility>
#include "webrtc/modules/pacing/alr_detector.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/system_wrappers/include/metrics.h"
namespace webrtc {
@ -48,6 +49,28 @@ const int kMovingMaxWindowMs = 10000;
// How large window we use to calculate the framerate/bitrate.
const int kRateStatisticsWindowSizeMs = 1000;
std::string UmaPrefixForContentType(VideoContentType content_type) {
std::stringstream ss;
ss << "WebRTC.Video";
if (videocontenttypehelpers::IsScreenshare(content_type)) {
ss << ".Screenshare";
}
return ss.str();
}
std::string UmaSuffixForContentType(VideoContentType content_type) {
std::stringstream ss;
int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type);
if (simulcast_id > 0) {
ss << ".S" << simulcast_id - 1;
}
int experiment_id = videocontenttypehelpers::GetExperimentId(content_type);
if (experiment_id > 0) {
ss << ".ExperimentGroup" << experiment_id - 1;
}
return ss.str();
}
} // namespace
ReceiveStatisticsProxy::ReceiveStatisticsProxy(
@ -77,10 +100,6 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
render_fps_tracker_(100, 10u),
render_pixel_tracker_(100, 10u),
total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count
e2e_delay_max_ms_video_(-1),
e2e_delay_max_ms_screenshare_(-1),
interframe_delay_max_ms_video_(-1),
interframe_delay_max_ms_screenshare_(-1),
interframe_delay_max_moving_(kMovingMaxWindowMs),
freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
first_report_block_time_ms_(-1),
@ -99,9 +118,14 @@ ReceiveStatisticsProxy::~ReceiveStatisticsProxy() {
}
void ReceiveStatisticsProxy::UpdateHistograms() {
RTC_HISTOGRAM_COUNTS_100000(
"WebRTC.Video.ReceiveStreamLifetimeInSeconds",
(clock_->TimeInMilliseconds() - start_ms_) / 1000);
int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
if (stats_.frame_counts.key_frames > 0 ||
stats_.frame_counts.delta_frames > 0) {
RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds",
stream_duration_sec);
LOG(LS_INFO) << "WebRTC.Video.ReceiveStreamLifetimeInSeconds "
<< stream_duration_sec;
}
if (first_report_block_time_ms_ != -1 &&
((clock_->TimeInMilliseconds() - first_report_block_time_ms_) / 1000) >=
@ -124,14 +148,7 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
"WebRTC.Video.RenderSqrtPixelsPerSecond",
round(render_pixel_tracker_.ComputeTotalRate()));
}
int width = render_width_counter_.Avg(kMinRequiredSamples);
int height = render_height_counter_.Avg(kMinRequiredSamples);
if (width != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedWidthInPixels", width);
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.ReceivedHeightInPixels", height);
LOG(LS_INFO) << "WebRTC.Video.ReceivedWidthInPixels " << width;
LOG(LS_INFO) << "WebRTC.Video.ReceivedHeightInPixels " << height;
}
int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples);
if (sync_offset_ms != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms);
@ -189,52 +206,131 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
if (delay_ms != -1)
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples);
if (e2e_delay_ms_video != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs",
e2e_delay_ms_video);
LOG(LS_INFO) << "WebRTC.Video.EndToEndDelayInMs " << e2e_delay_ms_video;
// Aggregate content_specific_stats_ by removing experiment or simulcast
// information;
std::map<VideoContentType, ContentSpecificStats> aggregated_stats;
for (auto it : content_specific_stats_) {
// Calculate simulcast specific metrics (".S0" ... ".S2" suffixes).
VideoContentType content_type = it.first;
if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) {
// Aggregate on experiment id.
videocontenttypehelpers::SetExperimentId(&content_type, 0);
aggregated_stats[content_type].Add(it.second);
}
// Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes).
content_type = it.first;
if (videocontenttypehelpers::GetExperimentId(content_type) > 0) {
// Aggregate on simulcast id.
videocontenttypehelpers::SetSimulcastId(&content_type, 0);
aggregated_stats[content_type].Add(it.second);
}
// Calculate aggregated metrics (no suffixes. Aggregated on everything).
content_type = it.first;
videocontenttypehelpers::SetSimulcastId(&content_type, 0);
videocontenttypehelpers::SetExperimentId(&content_type, 0);
aggregated_stats[content_type].Add(it.second);
}
int e2e_delay_ms_screenshare =
e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples);
if (e2e_delay_ms_screenshare != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs",
e2e_delay_ms_screenshare);
}
for (auto it : aggregated_stats) {
// For the metric Foo we report the following slices:
// WebRTC.Video.Foo,
// WebRTC.Video.Screenshare.Foo,
// WebRTC.Video.Foo.S[0-3],
// WebRTC.Video.Foo.ExperimentGroup[0-7],
// WebRTC.Video.Screenshare.Foo.S[0-3],
// WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7].
auto content_type = it.first;
auto stats = it.second;
std::string uma_prefix = UmaPrefixForContentType(content_type);
std::string uma_suffix = UmaSuffixForContentType(content_type);
// Metrics can be sliced on either simulcast id or experiment id but not
// both.
RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 ||
videocontenttypehelpers::GetSimulcastId(content_type) == 0);
int e2e_delay_max_ms_video = e2e_delay_max_ms_video_;
if (e2e_delay_max_ms_video != -1) {
RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs",
e2e_delay_max_ms_video);
}
int e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples);
if (e2e_delay_ms != -1) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".EndToEndDelayInMs" + uma_suffix, e2e_delay_ms);
LOG(LS_INFO) << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " "
<< e2e_delay_ms;
}
int e2e_delay_max_ms = stats.e2e_delay_counter.Max();
if (e2e_delay_max_ms != -1 && e2e_delay_ms != -1) {
RTC_HISTOGRAM_COUNTS_SPARSE_100000(
uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, e2e_delay_max_ms);
LOG(LS_INFO) << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " "
<< e2e_delay_max_ms;
}
int interframe_delay_ms =
stats.interframe_delay_counter.Avg(kMinRequiredSamples);
if (interframe_delay_ms != -1) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".InterframeDelayInMs" + uma_suffix,
interframe_delay_ms);
LOG(LS_INFO) << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " "
<< interframe_delay_ms;
}
int interframe_delay_max_ms = stats.interframe_delay_counter.Max();
if (interframe_delay_max_ms != -1 && interframe_delay_ms != -1) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix,
interframe_delay_max_ms);
LOG(LS_INFO) << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix
<< " " << interframe_delay_max_ms;
}
int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_;
if (e2e_delay_max_ms_screenshare != -1) {
RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs",
e2e_delay_max_ms_screenshare);
}
int width = stats.received_width.Avg(kMinRequiredSamples);
if (width != -1) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, width);
LOG(LS_INFO) << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix
<< " " << width;
}
int interframe_delay_ms_screenshare =
interframe_delay_counter_screenshare_.Avg(kMinRequiredSamples);
if (interframe_delay_ms_screenshare != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.InterframeDelayInMs",
interframe_delay_ms_screenshare);
RTC_DCHECK_GE(interframe_delay_max_ms_screenshare_,
interframe_delay_ms_screenshare);
RTC_HISTOGRAM_COUNTS_10000(
"WebRTC.Video.Screenshare.InterframeDelayMaxInMs",
interframe_delay_max_ms_screenshare_);
}
int height = stats.received_height.Avg(kMinRequiredSamples);
if (height != -1) {
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, height);
LOG(LS_INFO) << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix
<< " " << height;
}
int interframe_delay_ms_video =
interframe_delay_counter_video_.Avg(kMinRequiredSamples);
if (interframe_delay_ms_video != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayInMs",
interframe_delay_ms_video);
RTC_DCHECK_GE(interframe_delay_max_ms_video_, interframe_delay_ms_video);
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.InterframeDelayMaxInMs",
interframe_delay_max_ms_video_);
if (content_type != VideoContentType::UNSPECIFIED) {
// Don't report these 3 metrics unsliced, as more precise variants
// are reported separately in this method.
float flow_duration_sec = stats.flow_duration_ms / 1000.0;
if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) {
int media_bitrate_kbps = static_cast<int>(stats.total_media_bytes * 8 /
flow_duration_sec / 1000);
RTC_HISTOGRAM_COUNTS_SPARSE_10000(
uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix,
media_bitrate_kbps);
LOG(LS_INFO) << uma_prefix << ".MediaBitrateReceivedInKbps"
<< uma_suffix << " " << media_bitrate_kbps;
}
int num_total_frames =
stats.frame_counts.key_frames + stats.frame_counts.delta_frames;
if (num_total_frames >= kMinRequiredSamples) {
int num_key_frames = stats.frame_counts.key_frames;
int key_frames_permille =
(num_key_frames * 1000 + num_total_frames / 2) / num_total_frames;
RTC_HISTOGRAM_COUNTS_SPARSE_1000(
uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix,
key_frames_permille);
LOG(LS_INFO) << uma_prefix << ".KeyFramesReceivedInPermille"
<< uma_suffix << " " << key_frames_permille;
}
int qp = stats.qp_counter.Avg(kMinRequiredSamples);
if (qp != -1) {
RTC_HISTOGRAM_COUNTS_SPARSE_200(
uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, qp);
LOG(LS_INFO) << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " "
<< qp;
}
}
}
StreamDataCounters rtp = stats_.rtp_stats;
@ -250,9 +346,12 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
"WebRTC.Video.BitrateReceivedInKbps",
static_cast<int>(rtp_rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
1000));
RTC_HISTOGRAM_COUNTS_10000(
"WebRTC.Video.MediaBitrateReceivedInKbps",
static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000));
int media_bitrate_kbs =
static_cast<int>(rtp.MediaPayloadBytes() * 8 / elapsed_sec / 1000);
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps",
media_bitrate_kbs);
LOG(LS_INFO) << "WebRTC.Video.MediaBitrateReceivedInKbps "
<< media_bitrate_kbs;
RTC_HISTOGRAM_COUNTS_10000(
"WebRTC.Video.PaddingBitrateReceivedInKbps",
static_cast<int>(rtp_rtx.transmitted.padding_bytes * 8 / elapsed_sec /
@ -529,6 +628,9 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
uint64_t now = clock_->TimeInMilliseconds();
rtc::CritScope lock(&crit_);
ContentSpecificStats* content_specific_stats =
&content_specific_stats_[content_type];
++stats_.frames_decoded;
if (qp) {
if (!stats_.qp_sum) {
@ -540,6 +642,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
stats_.qp_sum = rtc::Optional<uint64_t>(0);
}
*stats_.qp_sum += *qp;
content_specific_stats->qp_counter.Add(*qp);
} else if (stats_.qp_sum) {
LOG(LS_WARNING)
<< "QP sum was already set and no QP was given for a frame.";
@ -551,17 +654,8 @@ void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
int64_t interframe_delay_ms = now - *last_decoded_frame_time_ms_;
RTC_DCHECK_GE(interframe_delay_ms, 0);
interframe_delay_max_moving_.Add(interframe_delay_ms, now);
if (last_content_type_ == VideoContentType::SCREENSHARE) {
interframe_delay_counter_screenshare_.Add(interframe_delay_ms);
if (interframe_delay_max_ms_screenshare_ < interframe_delay_ms) {
interframe_delay_max_ms_screenshare_ = interframe_delay_ms;
}
} else {
interframe_delay_counter_video_.Add(interframe_delay_ms);
if (interframe_delay_max_ms_video_ < interframe_delay_ms) {
interframe_delay_max_ms_video_ = interframe_delay_ms;
}
}
content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms);
content_specific_stats->flow_duration_ms += interframe_delay_ms;
}
last_decoded_frame_time_ms_.emplace(now);
}
@ -572,28 +666,22 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
RTC_DCHECK_GT(width, 0);
RTC_DCHECK_GT(height, 0);
uint64_t now = clock_->TimeInMilliseconds();
rtc::CritScope lock(&crit_);
ContentSpecificStats* content_specific_stats =
&content_specific_stats_[last_content_type_];
renders_fps_estimator_.Update(1, now);
++stats_.frames_rendered;
stats_.width = width;
stats_.height = height;
render_width_counter_.Add(width);
render_height_counter_.Add(height);
render_fps_tracker_.AddSamples(1);
render_pixel_tracker_.AddSamples(sqrt(width * height));
content_specific_stats->received_width.Add(width);
content_specific_stats->received_height.Add(height);
if (frame.ntp_time_ms() > 0) {
int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
if (delay_ms >= 0) {
if (last_content_type_ == VideoContentType::SCREENSHARE) {
e2e_delay_max_ms_screenshare_ =
std::max(delay_ms, e2e_delay_max_ms_screenshare_);
e2e_delay_counter_screenshare_.Add(delay_ms);
} else {
e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_);
e2e_delay_counter_video_.Add(delay_ms);
}
content_specific_stats->e2e_delay_counter.Add(delay_ms);
}
}
}
@ -618,12 +706,24 @@ void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate,
}
void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
size_t size_bytes) {
size_t size_bytes,
VideoContentType content_type) {
rtc::CritScope lock(&crit_);
if (is_keyframe)
if (is_keyframe) {
++stats_.frame_counts.key_frames;
else
} else {
++stats_.frame_counts.delta_frames;
}
ContentSpecificStats* content_specific_stats =
&content_specific_stats_[content_type];
content_specific_stats->total_media_bytes += size_bytes;
if (is_keyframe) {
++content_specific_stats->frame_counts.key_frames;
} else {
++content_specific_stats->frame_counts.delta_frames;
}
int64_t now_ms = clock_->TimeInMilliseconds();
frame_window_.insert(std::make_pair(now_ms, size_bytes));
@ -665,6 +765,16 @@ void ReceiveStatisticsProxy::OnStreamInactive() {
// Records one sample: accumulates the running sum and sample count (used by
// Avg()) and tracks the largest sample seen since the last Reset().
void ReceiveStatisticsProxy::SampleCounter::Add(int sample) {
sum += sample;
++num_samples;
// `max` is an unset optional until the first sample arrives.
if (!max || sample > *max) {
max.emplace(sample);
}
}
// Merges the accumulated statistics of `other` into this counter, so that
// Avg() and Max() afterwards reflect the union of both sample sets.
void ReceiveStatisticsProxy::SampleCounter::Add(const SampleCounter& other) {
sum += other.sum;
num_samples += other.num_samples;
// Keep the larger maximum; `other.max` may be unset if it saw no samples.
if (other.max && (!max || *max < *other.max))
max = other.max;
}
int ReceiveStatisticsProxy::SampleCounter::Avg(
@ -674,9 +784,14 @@ int ReceiveStatisticsProxy::SampleCounter::Avg(
return static_cast<int>(sum / num_samples);
}
// Returns the largest sample recorded since the last Reset(), or -1 if no
// sample has been added yet.
int ReceiveStatisticsProxy::SampleCounter::Max() const {
return max.value_or(-1);
}
// Clears all accumulated state (sum, count and running maximum) so the
// counter can start a fresh measurement period.
void ReceiveStatisticsProxy::SampleCounter::Reset() {
num_samples = 0;
sum = 0;
max.reset();
}
void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
@ -685,4 +800,17 @@ void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
avg_rtt_ms_ = avg_rtt_ms;
}
// Aggregates the per-content-type statistics of `other` into this instance.
// Every member counter is merged; presumably used to combine stats across
// related content types (e.g. simulcast layers or experiment groups) before
// UMA reporting — confirm against the UpdateHistograms() caller.
void ReceiveStatisticsProxy::ContentSpecificStats::Add(
const ContentSpecificStats& other) {
e2e_delay_counter.Add(other.e2e_delay_counter);
interframe_delay_counter.Add(other.interframe_delay_counter);
flow_duration_ms += other.flow_duration_ms;
total_media_bytes += other.total_media_bytes;
received_height.Add(other.received_height);
received_width.Add(other.received_width);
qp_counter.Add(other.qp_counter);
frame_counts.key_frames += other.frame_counts.key_frames;
frame_counts.delta_frames += other.frame_counts.delta_frames;
}
} // namespace webrtc

View File

@ -20,6 +20,7 @@
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/rtc_base/criticalsection.h"
#include "webrtc/rtc_base/moving_max_counter.h"
#include "webrtc/rtc_base/optional.h"
#include "webrtc/rtc_base/rate_statistics.h"
#include "webrtc/rtc_base/ratetracker.h"
#include "webrtc/rtc_base/thread_annotations.h"
@ -66,7 +67,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) override;
void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
void OnDiscardedPacketsUpdated(int discarded_packets) override;
void OnCompleteFrame(bool is_keyframe, size_t size_bytes) override;
void OnCompleteFrame(bool is_keyframe,
size_t size_bytes,
VideoContentType content_type) override;
void OnFrameBufferTimingsUpdated(int decode_ms,
int max_decode_ms,
int current_delay_ms,
@ -98,16 +101,33 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
SampleCounter() : sum(0), num_samples(0) {}
void Add(int sample);
int Avg(int64_t min_required_samples) const;
int Max() const;
void Reset();
void Add(const SampleCounter& other);
private:
int64_t sum;
int64_t num_samples;
rtc::Optional<int> max;
};
struct QpCounters {
SampleCounter vp8;
};
struct ContentSpecificStats {
void Add(const ContentSpecificStats& other);
SampleCounter e2e_delay_counter;
SampleCounter interframe_delay_counter;
int64_t flow_duration_ms = 0;
int64_t total_media_bytes = 0;
SampleCounter received_width;
SampleCounter received_height;
SampleCounter qp_counter;
FrameCounts frame_counts;
};
void UpdateHistograms() EXCLUSIVE_LOCKS_REQUIRED(crit_);
void QualitySample() EXCLUSIVE_LOCKS_REQUIRED(crit_);
@ -140,24 +160,16 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
rtc::RateTracker render_fps_tracker_ GUARDED_BY(crit_);
rtc::RateTracker render_pixel_tracker_ GUARDED_BY(crit_);
rtc::RateTracker total_byte_tracker_ GUARDED_BY(crit_);
SampleCounter render_width_counter_ GUARDED_BY(crit_);
SampleCounter render_height_counter_ GUARDED_BY(crit_);
SampleCounter sync_offset_counter_ GUARDED_BY(crit_);
SampleCounter decode_time_counter_ GUARDED_BY(crit_);
SampleCounter jitter_buffer_delay_counter_ GUARDED_BY(crit_);
SampleCounter target_delay_counter_ GUARDED_BY(crit_);
SampleCounter current_delay_counter_ GUARDED_BY(crit_);
SampleCounter delay_counter_ GUARDED_BY(crit_);
SampleCounter e2e_delay_counter_video_ GUARDED_BY(crit_);
SampleCounter e2e_delay_counter_screenshare_ GUARDED_BY(crit_);
SampleCounter interframe_delay_counter_video_ GUARDED_BY(crit_);
SampleCounter interframe_delay_counter_screenshare_ GUARDED_BY(crit_);
int64_t e2e_delay_max_ms_video_ GUARDED_BY(crit_);
int64_t e2e_delay_max_ms_screenshare_ GUARDED_BY(crit_);
int64_t interframe_delay_max_ms_video_ GUARDED_BY(crit_);
int64_t interframe_delay_max_ms_screenshare_ GUARDED_BY(crit_);
mutable rtc::MovingMaxCounter<int> interframe_delay_max_moving_
GUARDED_BY(crit_);
std::map<VideoContentType, ContentSpecificStats> content_specific_stats_
GUARDED_BY(crit_);
MaxCounter freq_offset_counter_ GUARDED_BY(crit_);
int64_t first_report_block_time_ms_ GUARDED_BY(crit_);
ReportBlockStats report_block_stats_ GUARDED_BY(crit_);

View File

@ -203,7 +203,8 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsDecoderImplementationName) {
TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsOnCompleteFrame) {
const int kFrameSizeBytes = 1000;
statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes);
statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes,
VideoContentType::UNSPECIFIED);
VideoReceiveStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(1, stats.network_frame_rate);
EXPECT_EQ(1, stats.frame_counts.key_frames);
@ -352,6 +353,8 @@ TEST_F(ReceiveStatisticsProxyTest, GetTimingFrameInfoTimingFramesReportedOnce) {
TEST_F(ReceiveStatisticsProxyTest, LifetimeHistogramIsUpdated) {
const int64_t kTimeSec = 3;
fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000);
// Need at least one frame to report stream lifetime.
statistics_proxy_->OnCompleteFrame(true, 1000, VideoContentType::UNSPECIFIED);
// Histograms are updated when the statistics_proxy_ is deleted.
statistics_proxy_.reset();
EXPECT_EQ(1,
@ -360,6 +363,17 @@ TEST_F(ReceiveStatisticsProxyTest, LifetimeHistogramIsUpdated) {
kTimeSec));
}
// Verifies that the stream-lifetime histogram is NOT reported for a stream
// on which no complete frame was ever received, even though time passed.
TEST_F(ReceiveStatisticsProxyTest,
LifetimeHistogramNotReportedForEmptyStreams) {
const int64_t kTimeSec = 3;
fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000);
// No frames received.
// Histograms are updated when the statistics_proxy_ is deleted.
statistics_proxy_.reset();
EXPECT_EQ(0,
metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds"));
}
TEST_F(ReceiveStatisticsProxyTest, BadCallHistogramsAreUpdated) {
// Based on the tuning parameters this will produce 7 uncertain states,
// then 10 certainly bad states. There has to be 10 certain states before
@ -533,7 +547,8 @@ TEST_F(ReceiveStatisticsProxyTest,
const int kFrameSizeBytes = 1000;
for (int i = 0; i < kMinRequiredSamples - 1; ++i)
statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes);
statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes,
VideoContentType::UNSPECIFIED);
EXPECT_EQ(0, statistics_proxy_->GetStats().frame_counts.key_frames);
EXPECT_EQ(kMinRequiredSamples - 1,
@ -549,7 +564,8 @@ TEST_F(ReceiveStatisticsProxyTest,
const int kFrameSizeBytes = 1000;
for (int i = 0; i < kMinRequiredSamples; ++i)
statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes);
statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes,
VideoContentType::UNSPECIFIED);
EXPECT_EQ(0, statistics_proxy_->GetStats().frame_counts.key_frames);
EXPECT_EQ(kMinRequiredSamples,
@ -565,10 +581,12 @@ TEST_F(ReceiveStatisticsProxyTest, KeyFrameHistogramIsUpdated) {
const int kFrameSizeBytes = 1000;
for (int i = 0; i < kMinRequiredSamples; ++i)
statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes);
statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes,
VideoContentType::UNSPECIFIED);
for (int i = 0; i < kMinRequiredSamples; ++i)
statistics_proxy_->OnCompleteFrame(false, kFrameSizeBytes);
statistics_proxy_->OnCompleteFrame(false, kFrameSizeBytes,
VideoContentType::UNSPECIFIED);
EXPECT_EQ(kMinRequiredSamples,
statistics_proxy_->GetStats().frame_counts.key_frames);
@ -777,23 +795,18 @@ TEST_P(ReceiveStatisticsProxyTest, InterFrameDelaysAreReported) {
(kInterFrameDelayMs * (kMinRequiredSamples - 1) +
kInterFrameDelayMs * 2) /
kMinRequiredSamples;
switch (content_type) {
case VideoContentType::UNSPECIFIED:
EXPECT_EQ(kExpectedInterFrame,
metrics::MinSample("WebRTC.Video.InterframeDelayInMs"));
EXPECT_EQ(kInterFrameDelayMs * 2,
metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs"));
break;
case VideoContentType::SCREENSHARE:
EXPECT_EQ(
kExpectedInterFrame,
metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs"));
EXPECT_EQ(kInterFrameDelayMs * 2,
metrics::MinSample(
"WebRTC.Video.Screenshare.InterframeDelayMaxInMs"));
break;
default:
RTC_NOTREACHED();
if (videocontenttypehelpers::IsScreenshare(content_type)) {
EXPECT_EQ(
kExpectedInterFrame,
metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs"));
EXPECT_EQ(
kInterFrameDelayMs * 2,
metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs"));
} else {
EXPECT_EQ(kExpectedInterFrame,
metrics::MinSample("WebRTC.Video.InterframeDelayInMs"));
EXPECT_EQ(kInterFrameDelayMs * 2,
metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs"));
}
}
@ -836,7 +849,7 @@ TEST_P(ReceiveStatisticsProxyTest, MaxInterFrameDelayOnlyWithPause) {
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(), content_type);
statistics_proxy_.reset();
if (content_type == VideoContentType::SCREENSHARE) {
if (videocontenttypehelpers::IsScreenshare(content_type)) {
EXPECT_EQ(
1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples(
@ -857,4 +870,84 @@ TEST_P(ReceiveStatisticsProxyTest, MaxInterFrameDelayOnlyWithPause) {
}
}
// Verifies that inter-frame delay histograms are sliced three ways: overall,
// per simulcast stream id (".S0"/".S1" suffixes) and per experiment group
// (".ExperimentGroup0" suffix), for both screenshare and real-time video
// content. Frames are fed with two different inter-frame delays, one per
// simulcast id, so each slice has a distinguishable expected value.
TEST_P(ReceiveStatisticsProxyTest, StatsAreSlicedOnSimulcastAndExperiment) {
VideoContentType content_type = GetParam();
const uint8_t experiment_id = 1;
videocontenttypehelpers::SetExperimentId(&content_type, experiment_id);
const int kInterFrameDelayMs1 = 30;
const int kInterFrameDelayMs2 = 50;
// First batch of frames arrives on simulcast stream 1 with a 30 ms gap.
videocontenttypehelpers::SetSimulcastId(&content_type, 1);
for (int i = 0; i <= kMinRequiredSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs1);
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(), content_type);
}
// Second batch arrives on simulcast stream 2 with a 50 ms gap.
videocontenttypehelpers::SetSimulcastId(&content_type, 2);
for (int i = 0; i <= kMinRequiredSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs2);
statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(), content_type);
}
// Histograms are reported when the proxy is destroyed.
statistics_proxy_.reset();
if (videocontenttypehelpers::IsScreenshare(content_type)) {
EXPECT_EQ(
1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples(
"WebRTC.Video.Screenshare.InterframeDelayMaxInMs"));
EXPECT_EQ(1, metrics::NumSamples(
"WebRTC.Video.Screenshare.InterframeDelayInMs.S0"));
EXPECT_EQ(1, metrics::NumSamples(
"WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S0"));
EXPECT_EQ(1, metrics::NumSamples(
"WebRTC.Video.Screenshare.InterframeDelayInMs.S1"));
EXPECT_EQ(1, metrics::NumSamples(
"WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S1"));
EXPECT_EQ(1,
metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs"
".ExperimentGroup0"));
EXPECT_EQ(
1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayMaxInMs"
".ExperimentGroup0"));
// Per-simulcast slices carry each batch's own delay; the combined and
// experiment-group slices carry the average of the two batches.
EXPECT_EQ(
kInterFrameDelayMs1,
metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S0"));
EXPECT_EQ(
kInterFrameDelayMs2,
metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S1"));
EXPECT_EQ(
(kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2,
metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs"));
EXPECT_EQ(
kInterFrameDelayMs2,
metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs"));
EXPECT_EQ(
(kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2,
metrics::MinSample(
"WebRTC.Video.Screenshare.InterframeDelayInMs.ExperimentGroup0"));
} else {
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S0"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S0"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S1"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S1"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs"
".ExperimentGroup0"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs"
".ExperimentGroup0"));
// Same expectations as above, for the non-screenshare histogram names.
EXPECT_EQ(kInterFrameDelayMs1,
metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S0"));
EXPECT_EQ(kInterFrameDelayMs2,
metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S1"));
EXPECT_EQ((kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2,
metrics::MinSample("WebRTC.Video.InterframeDelayInMs"));
EXPECT_EQ(kInterFrameDelayMs2,
metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs"));
EXPECT_EQ((kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2,
metrics::MinSample(
"WebRTC.Video.InterframeDelayInMs.ExperimentGroup0"));
}
}
} // namespace webrtc

View File

@ -318,8 +318,8 @@ TEST_F(VideoSendStreamTest, SupportsVideoContentType) {
if (!header.markerBit)
return SEND_PACKET;
EXPECT_TRUE(header.extension.hasVideoContentType);
EXPECT_EQ(VideoContentType::SCREENSHARE,
header.extension.videoContentType);
EXPECT_TRUE(videocontenttypehelpers::IsScreenshare(
header.extension.videoContentType));
observation_complete_.Set();
return SEND_PACKET;
}

View File

@ -122,7 +122,9 @@ void VideoStreamDecoder::OnFrameBufferTimingsUpdated(int decode_ms,
void VideoStreamDecoder::OnTimingFrameInfoUpdated(const TimingFrameInfo& info) {
}
void VideoStreamDecoder::OnCompleteFrame(bool is_keyframe, size_t size_bytes) {}
// Intentionally empty: this callback override ignores complete-frame
// notifications, including the new content_type parameter.
void VideoStreamDecoder::OnCompleteFrame(bool is_keyframe,
size_t size_bytes,
VideoContentType content_type) {}
void VideoStreamDecoder::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
video_receiver_->SetReceiveChannelParameters(max_rtt_ms);

View File

@ -69,7 +69,9 @@ class VideoStreamDecoder : public VCMReceiveCallback,
void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
void OnDiscardedPacketsUpdated(int discarded_packets) override;
void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
void OnCompleteFrame(bool is_keyframe, size_t size_bytes) override;
void OnCompleteFrame(bool is_keyframe,
size_t size_bytes,
VideoContentType content_type) override;
void OnFrameBufferTimingsUpdated(int decode_ms,
int max_decode_ms,
int current_delay_ms,