[Stats] Explicit RTP-RTX and RTP-FEC mappings. Unblocks simulcast stats.

--- Background ---
The webrtc::VideoSendStream::StreamStats are converted into
VideoSenderInfo objects which turn into "outbound-rtp" stats objects in
getStats() (or "ssrc" objects in legacy getStats()).

StreamStats are created for each type of substream: RTP media streams,
RTX streams and FlexFEC streams - each with individual packet counters.

The RTX stream is responsible for retransmissions of a referenced media
stream and the FlexFEC stream is responsible for FEC of a referenced
media stream. RTX/FEC streams do not show up as separate objects in
getStats(). Only the media streams become "outbound-rtp" objects, but
their packet and byte counters have to include the RTX and FEC counters.

--- Overview of this CL ---
This CL adds MergeInfoAboutOutboundRtpSubstreams(). It takes
StreamStats of all kinds as input, and outputs media-only StreamStats
- incorporating the RTX and FEC counters into the relevant media
StreamStats.

The merged StreamStats objects form a smaller set than their non-merged
counterparts, but when aggregating all packet counters together we end
up with the exact same packet and byte counts as before, as sketched
below.
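As an illustration only (not part of this CL): a minimal sketch of the
merge. The function SketchMerge and the SSRC values are made up, the
include paths assume a WebRTC checkout, and the test-only wrapper added
by this CL is used to reach the merge logic.

  #include <cstdint>
  #include <map>

  #include "call/video_send_stream.h"
  #include "media/engine/webrtc_video_engine.h"

  void SketchMerge() {
    using StreamStats = webrtc::VideoSendStream::StreamStats;
    std::map<uint32_t, StreamStats> substreams;
    // Media stream on SSRC 10.
    substreams[10].type = StreamStats::StreamType::kMedia;
    substreams[10].rtp_stats.transmitted.payload_bytes = 100;
    substreams[10].rtp_stats.transmitted.packets = 5;
    // RTX stream on SSRC 11, retransmitting for SSRC 10.
    substreams[11].type = StreamStats::StreamType::kRtx;
    substreams[11].referenced_media_ssrc = 10;
    substreams[11].rtp_stats.transmitted.payload_bytes = 20;
    substreams[11].rtp_stats.transmitted.packets = 1;

    std::map<uint32_t, StreamStats> merged =
        cricket::MergeInfoAboutOutboundRtpSubstreamsForTesting(substreams);
    // Only SSRC 10 remains; its rtp_stats now include the RTX counters:
    // merged.size() == 1, transmitted.payload_bytes == 120, packets == 6.
  }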

Because WebRtcVideoSendStream::GetVideoSenderInfo() currently aggregates
the StreamStats into a single VideoSenderInfo (single "outbound-rtp"),
this CL should not have any observable side-effects. Prior to this CL:
aggregate StreamStats. After this CL: merge StreamStats and then
aggregate them.

However, when simulcast stats are implemented (WIP CL:
https://webrtc-review.googlesource.com/c/src/+/168120) each RTP media
stream should turn into an individual "outbound-rtp" object. We will
then no longer aggregate all StreamStats into a single "info". This CL
unblocks simulcast stats by providing StreamStats objects that could be
turned into individual VideoSenderInfos.
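
For illustration, a hypothetical sketch (neither this CL nor the WIP CL
verbatim) of how merged StreamStats could map to per-layer
VideoSenderInfos. The function PerLayerSenderInfos is made up, the
include paths assume a WebRTC checkout, and it goes through the
test-only wrapper this CL adds; only fields touched by this CL are set.

  #include <vector>

  #include "call/video_send_stream.h"
  #include "media/base/media_channel.h"
  #include "media/engine/webrtc_video_engine.h"

  std::vector<cricket::VideoSenderInfo> PerLayerSenderInfos(
      const webrtc::VideoSendStream::Stats& stats) {
    std::vector<cricket::VideoSenderInfo> infos;
    for (const auto& pair :
         cricket::MergeInfoAboutOutboundRtpSubstreamsForTesting(
             stats.substreams)) {
      cricket::VideoSenderInfo info;
      info.add_ssrc(pair.first);  // The kMedia SSRC for this layer.
      info.payload_bytes_sent =
          pair.second.rtp_stats.transmitted.payload_bytes;
      info.packets_sent = pair.second.rtp_stats.transmitted.packets;
      info.retransmitted_bytes_sent =
          pair.second.rtp_stats.retransmitted.payload_bytes;
      info.retransmitted_packets_sent =
          pair.second.rtp_stats.retransmitted.packets;
      infos.push_back(info);
    }
    return infos;
  }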

--- The Changes ---
1. Methods added to RtpConfig to easily tell the relationship between
   RTP, RTX and FEC ssrcs (see the sketch after this list).
2. StreamStats gets a StreamType (kMedia, kRtx or kFlexfec) that
   replaces the booleans (is_rtx, is_flexfec).
3. "referenced_media_ssrc" is added to StreamStats, making it possible
   to tell which kRtx/kFlexfec stream stats need to be merged with which
   kMedia StreamStats.
4. MergeInfoAboutOutboundRtpSubstreams() added and used.
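
As an illustration of change (1), a sketch of the new RtpConfig helpers
(not part of this CL): the SSRC and payload type values and the function
SketchSsrcMapping are made up, and the include path assumes a WebRTC
checkout.

  #include "call/rtp_config.h"

  void SketchSsrcMapping() {
    webrtc::RtpConfig rtp_config;
    rtp_config.ssrcs = {10, 20};      // Two simulcast media streams.
    rtp_config.rtx.ssrcs = {11, 21};  // ssrcs[i] <-> rtx.ssrcs[i].
    rtp_config.flexfec.payload_type = 123;
    rtp_config.flexfec.ssrc = 30;
    rtp_config.flexfec.protected_media_ssrcs = {10};

    rtp_config.IsMediaSsrc(10);                             // true
    rtp_config.IsRtxSsrc(21);                               // true
    rtp_config.IsFlexfecSsrc(30);                           // true
    rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(20);       // 21
    rtp_config.GetMediaSsrcAssociatedWithRtxSsrc(11);       // 10
    rtp_config.GetMediaSsrcAssociatedWithFlexfecSsrc(30);   // 10
  }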

Bug: webrtc:11439
Change-Id: Iaf9002041169a054ddfd32c7ea06bd1dc36c6bca
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/170826
Commit-Queue: Henrik Boström <hbos@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#30869}
Author: Henrik Boström, 2020-03-24 13:30:50 +01:00 (committed by Commit Bot)
parent 536c395391
commit f45ca3787f
12 changed files with 427 additions and 58 deletions

View File

@ -97,7 +97,9 @@ rtc_library("rtp_interfaces") {
"../api/transport:bitrate_settings",
"../api/units:timestamp",
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
}

View File

@ -12,11 +12,29 @@
#include <cstdint>
#include "absl/algorithm/container.h"
#include "api/array_view.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
namespace {
uint32_t FindAssociatedSsrc(uint32_t ssrc,
const std::vector<uint32_t>& ssrcs,
const std::vector<uint32_t>& associated_ssrcs) {
RTC_DCHECK_EQ(ssrcs.size(), associated_ssrcs.size());
for (size_t i = 0; i < ssrcs.size(); ++i) {
if (ssrcs[i] == ssrc)
return associated_ssrcs[i];
}
RTC_NOTREACHED();
return 0;
}
} // namespace
std::string LntfConfig::ToString() const {
return enabled ? "{enabled: true}" : "{enabled: false}";
}
@ -124,4 +142,51 @@ std::string RtpConfig::Rtx::ToString() const {
ss << '}';
return ss.str();
}
bool RtpConfig::IsMediaSsrc(uint32_t ssrc) const {
return absl::c_linear_search(ssrcs, ssrc);
}
bool RtpConfig::IsRtxSsrc(uint32_t ssrc) const {
return absl::c_linear_search(rtx.ssrcs, ssrc);
}
bool RtpConfig::IsFlexfecSsrc(uint32_t ssrc) const {
return flexfec.payload_type != -1 && ssrc == flexfec.ssrc;
}
absl::optional<uint32_t> RtpConfig::GetRtxSsrcAssociatedWithMediaSsrc(
uint32_t media_ssrc) const {
RTC_DCHECK(IsMediaSsrc(media_ssrc));
// If we don't use RTX there is no association.
if (rtx.ssrcs.empty())
return absl::nullopt;
// If we use RTX there MUST be an association ssrcs[i] <-> rtx.ssrcs[i].
RTC_DCHECK_EQ(ssrcs.size(), rtx.ssrcs.size());
return FindAssociatedSsrc(media_ssrc, ssrcs, rtx.ssrcs);
}
uint32_t RtpConfig::GetMediaSsrcAssociatedWithRtxSsrc(uint32_t rtx_ssrc) const {
RTC_DCHECK(IsRtxSsrc(rtx_ssrc));
// If we use RTX there MUST be an association ssrcs[i] <-> rtx.ssrcs[i].
RTC_DCHECK_EQ(ssrcs.size(), rtx.ssrcs.size());
return FindAssociatedSsrc(rtx_ssrc, rtx.ssrcs, ssrcs);
}
uint32_t RtpConfig::GetMediaSsrcAssociatedWithFlexfecSsrc(
uint32_t flexfec_ssrc) const {
RTC_DCHECK(IsFlexfecSsrc(flexfec_ssrc));
// If we use FlexFEC there MUST be an associated media ssrc.
//
// TODO(brandtr/hbos): The current implementation only supports an association
// with a single media ssrc. If multiple ssrcs are to be supported in the
// future, in order not to break GetStats()'s packet and byte counters, we
// must be able to tell how many packets and bytes have contributed to which
// SSRC.
RTC_DCHECK_EQ(1u, flexfec.protected_media_ssrcs.size());
uint32_t media_ssrc = flexfec.protected_media_ssrcs[0];
RTC_DCHECK(IsMediaSsrc(media_ssrc));
return media_ssrc;
}
} // namespace webrtc

View File

@ -17,6 +17,7 @@
#include <string>
#include <vector>
#include "absl/types/optional.h"
#include "api/rtp_headers.h"
#include "api/rtp_parameters.h"
@ -157,6 +158,14 @@ struct RtpConfig {
// RTCP CNAME, see RFC 3550.
std::string c_name;
bool IsMediaSsrc(uint32_t ssrc) const;
bool IsRtxSsrc(uint32_t ssrc) const;
bool IsFlexfecSsrc(uint32_t ssrc) const;
absl::optional<uint32_t> GetRtxSsrcAssociatedWithMediaSsrc(
uint32_t media_ssrc) const;
uint32_t GetMediaSsrcAssociatedWithRtxSsrc(uint32_t rtx_ssrc) const;
uint32_t GetMediaSsrcAssociatedWithFlexfecSsrc(uint32_t flexfec_ssrc) const;
};
} // namespace webrtc
#endif // CALL_RTP_CONFIG_H_

View File

@ -231,7 +231,7 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
std::vector<RtpStreamSender> rtp_streams;
RTC_DCHECK(rtp_config.rtx.ssrcs.empty() ||
rtp_config.rtx.ssrcs.size() == rtp_config.rtx.ssrcs.size());
rtp_config.rtx.ssrcs.size() == rtp_config.ssrcs.size());
for (size_t i = 0; i < rtp_config.ssrcs.size(); ++i) {
RTPSenderVideo::Config video_config;
configuration.local_media_ssrc = rtp_config.ssrcs[i];
@ -241,9 +241,10 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
configuration.fec_generator = fec_generator.get();
video_config.fec_generator = fec_generator.get();
if (rtp_config.rtx.ssrcs.size() > i) {
configuration.rtx_send_ssrc = rtp_config.rtx.ssrcs[i];
}
configuration.rtx_send_ssrc =
rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(rtp_config.ssrcs[i]);
RTC_DCHECK_EQ(configuration.rtx_send_ssrc.has_value(),
!rtp_config.rtx.ssrcs.empty());
configuration.need_rtp_packet_infos = rtp_config.lntf.enabled;

View File

@ -17,12 +17,31 @@
namespace webrtc {
namespace {
const char* StreamTypeToString(VideoSendStream::StreamStats::StreamType type) {
switch (type) {
case VideoSendStream::StreamStats::StreamType::kMedia:
return "media";
case VideoSendStream::StreamStats::StreamType::kRtx:
return "rtx";
case VideoSendStream::StreamStats::StreamType::kFlexfec:
return "flexfec";
}
}
} // namespace
VideoSendStream::StreamStats::StreamStats() = default;
VideoSendStream::StreamStats::~StreamStats() = default;
std::string VideoSendStream::StreamStats::ToString() const {
char buf[1024];
rtc::SimpleStringBuilder ss(buf);
ss << "type: " << StreamTypeToString(type);
if (referenced_media_ssrc.has_value())
ss << " (for: " << referenced_media_ssrc.value() << ")";
ss << ", ";
ss << "width: " << width << ", ";
ss << "height: " << height << ", ";
ss << "key: " << frame_counts.key_frames << ", ";
@ -64,7 +83,8 @@ std::string VideoSendStream::Stats::ToString(int64_t time_ms) const {
ss << "#quality_adaptations: " << number_of_quality_adapt_changes;
ss << '}';
for (const auto& substream : substreams) {
if (!substream.second.is_rtx && !substream.second.is_flexfec) {
if (substream.second.type ==
VideoSendStream::StreamStats::StreamType::kMedia) {
ss << " {ssrc: " << substream.first << ", ";
ss << substream.second.ToString();
ss << '}';

View File

@ -40,15 +40,35 @@ class FrameEncryptorInterface;
class VideoSendStream {
public:
// Multiple StreamStats objects are present if simulcast is used (multiple
// kMedia streams) or if RTX or FlexFEC is negotiated. Multiple SVC layers, on
// the other hand, do not cause additional StreamStats.
struct StreamStats {
enum class StreamType {
// A media stream is an RTP stream for audio or video. Retransmissions and
// FEC are either sent over the same SSRC or negotiated to be sent over
// separate SSRCs, in which case separate StreamStats objects exist with
// references to this media stream's SSRC.
kMedia,
// RTX streams are streams dedicated to retransmissions. They have a
// dependency on a single kMedia stream: |referenced_media_ssrc|.
kRtx,
// FlexFEC streams are streams dedicated to FlexFEC. They have a
// dependency on a single kMedia stream: |referenced_media_ssrc|.
kFlexfec,
};
StreamStats();
~StreamStats();
std::string ToString() const;
StreamType type = StreamType::kMedia;
// If |type| is kRtx or kFlexfec this value is present. The referenced SSRC
// is the kMedia stream that this stream is performing retransmissions or
// FEC for. If |type| is kMedia, this value is null.
absl::optional<uint32_t> referenced_media_ssrc;
FrameCounts frame_counts;
bool is_rtx = false;
bool is_flexfec = false;
int width = 0;
int height = 0;
// TODO(holmer): Move bitrate_bps out to the webrtc::Call layer.
@ -63,6 +83,13 @@ class VideoSendStream {
// A snapshot of the most recent Report Block with additional data of
// interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats.
absl::optional<ReportBlockData> report_block_data;
// These booleans are redundant; this information is already exposed in
// |type|.
// TODO(hbos): Update downstream projects to use |type| instead and delete
// these members.
bool is_flexfec = false;
bool is_rtx = false;
};
struct Stats {

View File

@ -324,6 +324,53 @@ int NumActiveStreams(const webrtc::RtpParameters& rtp_parameters) {
return res;
}
std::map<uint32_t, webrtc::VideoSendStream::StreamStats>
MergeInfoAboutOutboundRtpSubstreams(
const std::map<uint32_t, webrtc::VideoSendStream::StreamStats>&
substreams) {
std::map<uint32_t, webrtc::VideoSendStream::StreamStats> rtp_substreams;
// Add substreams for all RTP media streams.
for (const auto& pair : substreams) {
uint32_t ssrc = pair.first;
const webrtc::VideoSendStream::StreamStats& substream = pair.second;
switch (substream.type) {
case webrtc::VideoSendStream::StreamStats::StreamType::kMedia:
break;
case webrtc::VideoSendStream::StreamStats::StreamType::kRtx:
case webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec:
continue;
}
rtp_substreams.insert(std::make_pair(ssrc, substream));
}
// Complement the kMedia substream stats with the associated kRtx and kFlexfec
// substream stats.
for (const auto& pair : substreams) {
switch (pair.second.type) {
case webrtc::VideoSendStream::StreamStats::StreamType::kMedia:
continue;
case webrtc::VideoSendStream::StreamStats::StreamType::kRtx:
case webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec:
break;
}
// The associated substream is an RTX or FlexFEC substream that is
// referencing an RTP media substream.
const webrtc::VideoSendStream::StreamStats& associated_substream =
pair.second;
RTC_DCHECK(associated_substream.referenced_media_ssrc.has_value());
uint32_t media_ssrc = associated_substream.referenced_media_ssrc.value();
RTC_DCHECK(substreams.find(media_ssrc) != substreams.end());
webrtc::VideoSendStream::StreamStats& rtp_substream =
rtp_substreams[media_ssrc];
// We only merge |rtp_stats|. All other metrics are not applicable for RTX
// and FlexFEC.
// TODO(hbos): kRtx and kFlexfec stats should use a separate struct to make
// it clear what is or is not applicable.
rtp_substream.rtp_stats.Add(associated_substream.rtp_stats);
}
return rtp_substreams;
}
} // namespace
// This constant is really an on/off, lower-level configurable NACK history
@ -335,6 +382,13 @@ static const int kDefaultRtcpReceiverReportSsrc = 1;
// Minimum time interval for logging stats.
static const int64_t kStatsLogIntervalMs = 10000;
std::map<uint32_t, webrtc::VideoSendStream::StreamStats>
MergeInfoAboutOutboundRtpSubstreamsForTesting(
const std::map<uint32_t, webrtc::VideoSendStream::StreamStats>&
substreams) {
return MergeInfoAboutOutboundRtpSubstreams(substreams);
}
rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
const VideoCodec& codec) {
@ -2420,32 +2474,24 @@ VideoSenderInfo WebRtcVideoChannel::WebRtcVideoSendStream::GetVideoSenderInfo(
info.send_frame_width = 0;
info.send_frame_height = 0;
info.total_packet_send_delay_ms = 0;
for (std::map<uint32_t, webrtc::VideoSendStream::StreamStats>::iterator it =
stats.substreams.begin();
it != stats.substreams.end(); ++it) {
std::map<uint32_t, webrtc::VideoSendStream::StreamStats>
outbound_rtp_substreams =
MergeInfoAboutOutboundRtpSubstreams(stats.substreams);
for (const auto& pair : outbound_rtp_substreams) {
// TODO(pbos): Wire up additional stats, such as padding bytes.
webrtc::VideoSendStream::StreamStats stream_stats = it->second;
const webrtc::VideoSendStream::StreamStats& stream_stats = pair.second;
RTC_DCHECK_EQ(stream_stats.type,
webrtc::VideoSendStream::StreamStats::StreamType::kMedia);
info.payload_bytes_sent += stream_stats.rtp_stats.transmitted.payload_bytes;
info.header_and_padding_bytes_sent +=
stream_stats.rtp_stats.transmitted.header_bytes +
stream_stats.rtp_stats.transmitted.padding_bytes;
info.packets_sent += stream_stats.rtp_stats.transmitted.packets;
info.total_packet_send_delay_ms += stream_stats.total_packet_send_delay_ms;
if (!stream_stats.is_flexfec) {
// Retransmissions can happen over the same SSRC that media is sent over,
// or a separate RTX stream is negotiated per SSRC, in which case there
// will be a |stream_stats| with "is_rtx == true". Since we are currently
// aggregating all substreams' counters into a single "info" we do not
// need to know the relationship between RTX streams and RTP streams here.
// TODO(https://crbug.com/webrtc/11439): To unblock simulcast-aware stats,
// where substreams are not aggregated, we need to know the relationship
// between RTX streams and RTP streams so that the correct "info" object
// accounts for the correct RTX retransmissions.
info.retransmitted_bytes_sent +=
stream_stats.rtp_stats.retransmitted.payload_bytes;
info.retransmitted_packets_sent +=
stream_stats.rtp_stats.retransmitted.packets;
}
info.retransmitted_bytes_sent +=
stream_stats.rtp_stats.retransmitted.payload_bytes;
info.retransmitted_packets_sent +=
stream_stats.rtp_stats.retransmitted.packets;
info.packets_lost += stream_stats.rtcp_stats.packets_lost;
if (stream_stats.width > info.send_frame_width)
info.send_frame_width = stream_stats.width;
@ -2454,8 +2500,7 @@ VideoSenderInfo WebRtcVideoChannel::WebRtcVideoSendStream::GetVideoSenderInfo(
info.firs_rcvd += stream_stats.rtcp_packet_type_counts.fir_packets;
info.nacks_rcvd += stream_stats.rtcp_packet_type_counts.nack_packets;
info.plis_rcvd += stream_stats.rtcp_packet_type_counts.pli_packets;
if (stream_stats.report_block_data.has_value() && !stream_stats.is_rtx &&
!stream_stats.is_flexfec) {
if (stream_stats.report_block_data.has_value()) {
info.report_block_datas.push_back(stream_stats.report_block_data.value());
}
}

View File

@ -51,6 +51,19 @@ namespace cricket {
class WebRtcVideoChannel;
// Public for testing.
// Takes StreamStats for all types of substreams (kMedia, kRtx, kFlexfec) as
// input and merges any non-kMedia substream stats object into its referenced
// kMedia-type substream. The resulting substreams are all kMedia. This means,
// for example, that packet and byte counters of RTX and FlexFEC streams are
// accounted for in the relevant RTP media stream's stats. This makes the
// resulting StreamStats objects ready to be turned into "outbound-rtp" stats
// objects for GetStats(), which does not create separate stream stats objects
// for complementary streams.
std::map<uint32_t, webrtc::VideoSendStream::StreamStats>
MergeInfoAboutOutboundRtpSubstreamsForTesting(
const std::map<uint32_t, webrtc::VideoSendStream::StreamStats>& substreams);
class UnsignalledSsrcHandler {
public:
enum Action {

View File

@ -5269,12 +5269,112 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsAdaptationAndBandwidthStats) {
EXPECT_EQ(stats.number_of_cpu_adapt_changes, info.senders[0].adapt_changes);
}
TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) {
const uint32_t kFirstMediaStreamSsrc = 10;
const uint32_t kSecondMediaStreamSsrc = 20;
const uint32_t kRtxSsrc = 30;
const uint32_t kFlexfecSsrc = 40;
std::map<uint32_t, webrtc::VideoSendStream::StreamStats> substreams;
// First kMedia stream.
substreams[kFirstMediaStreamSsrc].type =
webrtc::VideoSendStream::StreamStats::StreamType::kMedia;
substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted.header_bytes = 1;
substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted.padding_bytes = 2;
substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted.payload_bytes = 3;
substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted.packets = 4;
substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted.header_bytes = 5;
substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted.padding_bytes = 6;
substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted.payload_bytes = 7;
substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted.packets = 8;
substreams[kFirstMediaStreamSsrc].referenced_media_ssrc = absl::nullopt;
substreams[kFirstMediaStreamSsrc].width = 1280;
substreams[kFirstMediaStreamSsrc].height = 720;
// Second kMedia stream.
substreams[kSecondMediaStreamSsrc].type =
webrtc::VideoSendStream::StreamStats::StreamType::kMedia;
substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted.header_bytes = 10;
substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted.padding_bytes = 11;
substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted.payload_bytes = 12;
substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted.packets = 13;
substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted.header_bytes = 14;
substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted.padding_bytes = 15;
substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted.payload_bytes = 16;
substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted.packets = 17;
substreams[kSecondMediaStreamSsrc].referenced_media_ssrc = absl::nullopt;
substreams[kSecondMediaStreamSsrc].width = 640;
substreams[kSecondMediaStreamSsrc].height = 480;
// kRtx stream referencing the first kMedia stream.
substreams[kRtxSsrc].type =
webrtc::VideoSendStream::StreamStats::StreamType::kRtx;
substreams[kRtxSsrc].rtp_stats.transmitted.header_bytes = 19;
substreams[kRtxSsrc].rtp_stats.transmitted.padding_bytes = 20;
substreams[kRtxSsrc].rtp_stats.transmitted.payload_bytes = 21;
substreams[kRtxSsrc].rtp_stats.transmitted.packets = 22;
substreams[kRtxSsrc].rtp_stats.retransmitted.header_bytes = 23;
substreams[kRtxSsrc].rtp_stats.retransmitted.padding_bytes = 24;
substreams[kRtxSsrc].rtp_stats.retransmitted.payload_bytes = 25;
substreams[kRtxSsrc].rtp_stats.retransmitted.packets = 26;
substreams[kRtxSsrc].referenced_media_ssrc = kFirstMediaStreamSsrc;
// kFlexfec stream referencing the second kMedia stream.
substreams[kFlexfecSsrc].type =
webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec;
substreams[kFlexfecSsrc].rtp_stats.transmitted.header_bytes = 19;
substreams[kFlexfecSsrc].rtp_stats.transmitted.padding_bytes = 20;
substreams[kFlexfecSsrc].rtp_stats.transmitted.payload_bytes = 21;
substreams[kFlexfecSsrc].rtp_stats.transmitted.packets = 22;
substreams[kFlexfecSsrc].rtp_stats.retransmitted.header_bytes = 23;
substreams[kFlexfecSsrc].rtp_stats.retransmitted.padding_bytes = 24;
substreams[kFlexfecSsrc].rtp_stats.retransmitted.payload_bytes = 25;
substreams[kFlexfecSsrc].rtp_stats.retransmitted.packets = 26;
substreams[kFlexfecSsrc].referenced_media_ssrc = kSecondMediaStreamSsrc;
auto merged_substreams =
MergeInfoAboutOutboundRtpSubstreamsForTesting(substreams);
// Only kMedia substreams remain.
EXPECT_TRUE(merged_substreams.find(kFirstMediaStreamSsrc) !=
merged_substreams.end());
EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].type,
webrtc::VideoSendStream::StreamStats::StreamType::kMedia);
EXPECT_TRUE(merged_substreams.find(kSecondMediaStreamSsrc) !=
merged_substreams.end());
EXPECT_EQ(merged_substreams[kSecondMediaStreamSsrc].type,
webrtc::VideoSendStream::StreamStats::StreamType::kMedia);
EXPECT_FALSE(merged_substreams.find(kRtxSsrc) != merged_substreams.end());
EXPECT_FALSE(merged_substreams.find(kFlexfecSsrc) != merged_substreams.end());
// Expect kFirstMediaStreamSsrc's rtp_stats to be merged with kRtxSsrc.
webrtc::StreamDataCounters first_media_expected_rtp_stats =
substreams[kFirstMediaStreamSsrc].rtp_stats;
first_media_expected_rtp_stats.Add(substreams[kRtxSsrc].rtp_stats);
EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted,
first_media_expected_rtp_stats.transmitted);
EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted,
first_media_expected_rtp_stats.retransmitted);
// Expect kSecondMediaStreamSsrc's rtp_stats to be merged with kFlexfecSsrc.
webrtc::StreamDataCounters second_media_expected_rtp_stats =
substreams[kSecondMediaStreamSsrc].rtp_stats;
second_media_expected_rtp_stats.Add(substreams[kFlexfecSsrc].rtp_stats);
EXPECT_EQ(merged_substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted,
second_media_expected_rtp_stats.transmitted);
EXPECT_EQ(merged_substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted,
second_media_expected_rtp_stats.retransmitted);
// Expect other metrics to come from the original kMedia stats.
EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].width,
substreams[kFirstMediaStreamSsrc].width);
EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].height,
substreams[kFirstMediaStreamSsrc].height);
EXPECT_EQ(merged_substreams[kSecondMediaStreamSsrc].width,
substreams[kSecondMediaStreamSsrc].width);
EXPECT_EQ(merged_substreams[kSecondMediaStreamSsrc].height,
substreams[kSecondMediaStreamSsrc].height);
}
TEST_F(WebRtcVideoChannelTest,
GetStatsReportsTransmittedAndRetransmittedBytesAndPacketsCorrectly) {
FakeVideoSendStream* stream = AddSendStream();
webrtc::VideoSendStream::Stats stats;
// Simulcast layer 1, RTP stream. header+padding=10, payload=20, packets=3.
stats.substreams[101].is_rtx = false;
stats.substreams[101].type =
webrtc::VideoSendStream::StreamStats::StreamType::kMedia;
stats.substreams[101].rtp_stats.transmitted.header_bytes = 5;
stats.substreams[101].rtp_stats.transmitted.padding_bytes = 5;
stats.substreams[101].rtp_stats.transmitted.payload_bytes = 20;
@ -5283,16 +5383,20 @@ TEST_F(WebRtcVideoChannelTest,
stats.substreams[101].rtp_stats.retransmitted.padding_bytes = 0;
stats.substreams[101].rtp_stats.retransmitted.payload_bytes = 0;
stats.substreams[101].rtp_stats.retransmitted.packets = 0;
stats.substreams[101].referenced_media_ssrc = absl::nullopt;
// Simulcast layer 1, RTX stream. header+padding=5, payload=10, packets=1.
stats.substreams[102].is_rtx = true;
stats.substreams[102].type =
webrtc::VideoSendStream::StreamStats::StreamType::kRtx;
stats.substreams[102].rtp_stats.retransmitted.header_bytes = 3;
stats.substreams[102].rtp_stats.retransmitted.padding_bytes = 2;
stats.substreams[102].rtp_stats.retransmitted.payload_bytes = 10;
stats.substreams[102].rtp_stats.retransmitted.packets = 1;
stats.substreams[102].rtp_stats.transmitted =
stats.substreams[102].rtp_stats.retransmitted;
stats.substreams[102].referenced_media_ssrc = 101;
// Simulcast layer 2, RTP stream. header+padding=20, payload=40, packets=7.
stats.substreams[201].is_rtx = false;
stats.substreams[201].type =
webrtc::VideoSendStream::StreamStats::StreamType::kMedia;
stats.substreams[201].rtp_stats.transmitted.header_bytes = 10;
stats.substreams[201].rtp_stats.transmitted.padding_bytes = 10;
stats.substreams[201].rtp_stats.transmitted.payload_bytes = 40;
@ -5301,14 +5405,30 @@ TEST_F(WebRtcVideoChannelTest,
stats.substreams[201].rtp_stats.retransmitted.padding_bytes = 0;
stats.substreams[201].rtp_stats.retransmitted.payload_bytes = 0;
stats.substreams[201].rtp_stats.retransmitted.packets = 0;
stats.substreams[201].referenced_media_ssrc = absl::nullopt;
// Simulcast layer 2, RTX stream. header+padding=10, payload=20, packets=4.
stats.substreams[202].is_rtx = true;
stats.substreams[202].type =
webrtc::VideoSendStream::StreamStats::StreamType::kRtx;
stats.substreams[202].rtp_stats.retransmitted.header_bytes = 6;
stats.substreams[202].rtp_stats.retransmitted.padding_bytes = 4;
stats.substreams[202].rtp_stats.retransmitted.payload_bytes = 20;
stats.substreams[202].rtp_stats.retransmitted.packets = 4;
stats.substreams[202].rtp_stats.transmitted =
stats.substreams[202].rtp_stats.retransmitted;
stats.substreams[202].referenced_media_ssrc = 201;
// FlexFEC stream associated with Simulcast layer 2.
// header+padding=15, payload=17, packets=5.
stats.substreams[301].type =
webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec;
stats.substreams[301].rtp_stats.transmitted.header_bytes = 13;
stats.substreams[301].rtp_stats.transmitted.padding_bytes = 2;
stats.substreams[301].rtp_stats.transmitted.payload_bytes = 17;
stats.substreams[301].rtp_stats.transmitted.packets = 5;
stats.substreams[301].rtp_stats.retransmitted.header_bytes = 0;
stats.substreams[301].rtp_stats.retransmitted.padding_bytes = 0;
stats.substreams[301].rtp_stats.retransmitted.payload_bytes = 0;
stats.substreams[301].rtp_stats.retransmitted.packets = 0;
stats.substreams[301].referenced_media_ssrc = 201;
stream->SetStats(stats);
cricket::VideoMediaInfo info;
@ -5321,9 +5441,9 @@ TEST_F(WebRtcVideoChannelTest,
// to be set up correctly, it may need to be updated such that the
// relationship between RTP and RTX streams is known. See also
// https://crbug.com/webrtc/11439.
EXPECT_EQ(45u, info.senders[0].header_and_padding_bytes_sent);
EXPECT_EQ(90u, info.senders[0].payload_bytes_sent);
EXPECT_EQ(15, info.senders[0].packets_sent);
EXPECT_EQ(60u, info.senders[0].header_and_padding_bytes_sent);
EXPECT_EQ(107u, info.senders[0].payload_bytes_sent);
EXPECT_EQ(20, info.senders[0].packets_sent);
EXPECT_EQ(30u, info.senders[0].retransmitted_bytes_sent);
EXPECT_EQ(5u, info.senders[0].retransmitted_packets_sent);
}

View File

@ -314,6 +314,12 @@ struct RtpPacketCounter {
packets -= other.packets;
}
bool operator==(const RtpPacketCounter& other) const {
return header_bytes == other.header_bytes &&
payload_bytes == other.payload_bytes &&
padding_bytes == other.padding_bytes && packets == other.packets;
}
// Not inlined, since use of RtpPacket would result in circular includes.
void AddPacket(const RtpPacket& packet);

View File

@ -16,7 +16,6 @@
#include <limits>
#include <utility>
#include "absl/algorithm/container.h"
#include "api/video/video_codec_constants.h"
#include "api/video/video_codec_type.h"
#include "api/video_codecs/video_codec.h"
@ -206,12 +205,17 @@ void SendStatisticsProxy::UmaSamplesContainer::InitializeBitrateCounters(
retransmit_byte_counter_.SetLast(
it.second.rtp_stats.retransmitted.TotalBytes(), ssrc);
fec_byte_counter_.SetLast(it.second.rtp_stats.fec.TotalBytes(), ssrc);
if (it.second.is_rtx) {
rtx_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(),
ssrc);
} else {
media_byte_counter_.SetLast(it.second.rtp_stats.MediaPayloadBytes(),
switch (it.second.type) {
case VideoSendStream::StreamStats::StreamType::kMedia:
media_byte_counter_.SetLast(it.second.rtp_stats.MediaPayloadBytes(),
ssrc);
break;
case VideoSendStream::StreamStats::StreamType::kRtx:
rtx_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(),
ssrc);
break;
case VideoSendStream::StreamStats::StreamType::kFlexfec:
break;
}
}
}
@ -761,17 +765,42 @@ VideoSendStream::StreamStats* SendStatisticsProxy::GetStatsEntry(
if (it != stats_.substreams.end())
return &it->second;
bool is_media = absl::c_linear_search(rtp_config_.ssrcs, ssrc);
bool is_media = rtp_config_.IsMediaSsrc(ssrc);
bool is_flexfec = rtp_config_.flexfec.payload_type != -1 &&
ssrc == rtp_config_.flexfec.ssrc;
bool is_rtx = absl::c_linear_search(rtp_config_.rtx.ssrcs, ssrc);
bool is_rtx = rtp_config_.IsRtxSsrc(ssrc);
if (!is_media && !is_flexfec && !is_rtx)
return nullptr;
// Insert new entry and return ptr.
VideoSendStream::StreamStats* entry = &stats_.substreams[ssrc];
entry->is_rtx = is_rtx;
entry->is_flexfec = is_flexfec;
if (is_media) {
entry->type = VideoSendStream::StreamStats::StreamType::kMedia;
} else if (is_rtx) {
entry->type = VideoSendStream::StreamStats::StreamType::kRtx;
} else if (is_flexfec) {
entry->type = VideoSendStream::StreamStats::StreamType::kFlexfec;
} else {
RTC_NOTREACHED();
}
switch (entry->type) {
case VideoSendStream::StreamStats::StreamType::kMedia:
break;
case VideoSendStream::StreamStats::StreamType::kRtx:
entry->referenced_media_ssrc =
rtp_config_.GetMediaSsrcAssociatedWithRtxSsrc(ssrc);
entry->is_rtx = true;
break;
case VideoSendStream::StreamStats::StreamType::kFlexfec:
entry->referenced_media_ssrc =
rtp_config_.GetMediaSsrcAssociatedWithFlexfecSsrc(ssrc);
entry->is_flexfec = true;
break;
}
// TODO(hbos): Remove these booleans once downstream projects stop depending
// on them, reading the value of |type| instead.
RTC_DCHECK_EQ(entry->is_rtx, is_rtx);
RTC_DCHECK_EQ(entry->is_flexfec, is_flexfec);
return entry;
}
@ -1252,7 +1281,7 @@ void SendStatisticsProxy::DataCountersUpdated(
VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
RTC_DCHECK(stats) << "DataCountersUpdated reported for unknown ssrc " << ssrc;
if (stats->is_flexfec) {
if (stats->type == VideoSendStream::StreamStats::StreamType::kFlexfec) {
// The same counters are reported for both the media ssrc and flexfec ssrc.
// Bitrate stats are summed for all SSRCs. Use fec stats from media update.
return;
@ -1273,11 +1302,17 @@ void SendStatisticsProxy::DataCountersUpdated(
uma_container_->retransmit_byte_counter_.Set(
counters.retransmitted.TotalBytes(), ssrc);
uma_container_->fec_byte_counter_.Set(counters.fec.TotalBytes(), ssrc);
if (stats->is_rtx) {
uma_container_->rtx_byte_counter_.Set(counters.transmitted.TotalBytes(),
ssrc);
} else {
uma_container_->media_byte_counter_.Set(counters.MediaPayloadBytes(), ssrc);
switch (stats->type) {
case VideoSendStream::StreamStats::StreamType::kMedia:
uma_container_->media_byte_counter_.Set(counters.MediaPayloadBytes(),
ssrc);
break;
case VideoSendStream::StreamStats::StreamType::kRtx:
uma_container_->rtx_byte_counter_.Set(counters.transmitted.TotalBytes(),
ssrc);
break;
case VideoSendStream::StreamStats::StreamType::kFlexfec:
break;
}
}

View File

@ -65,10 +65,16 @@ class SendStatisticsProxyTest : public ::testing::Test {
&fake_clock_, GetTestConfig(),
VideoEncoderConfig::ContentType::kRealtimeVideo));
expected_ = VideoSendStream::Stats();
for (const auto& ssrc : config_.rtp.ssrcs)
expected_.substreams[ssrc].is_rtx = false;
for (const auto& ssrc : config_.rtp.rtx.ssrcs)
expected_.substreams[ssrc].is_rtx = true;
for (const auto& ssrc : config_.rtp.ssrcs) {
expected_.substreams[ssrc].type =
VideoSendStream::StreamStats::StreamType::kMedia;
}
for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
uint32_t ssrc = config_.rtp.rtx.ssrcs[i];
expected_.substreams[ssrc].type =
VideoSendStream::StreamStats::StreamType::kRtx;
expected_.substreams[ssrc].referenced_media_ssrc = config_.rtp.ssrcs[i];
}
}
VideoSendStream::Config GetTestConfig() {
@ -89,6 +95,7 @@ class SendStatisticsProxyTest : public ::testing::Test {
config.rtp.rtx.ssrcs.push_back(kSecondRtxSsrc);
config.rtp.flexfec.payload_type = 50;
config.rtp.flexfec.ssrc = kFlexFecSsrc;
config.rtp.flexfec.protected_media_ssrcs = {kFirstSsrc};
return config;
}
@ -123,7 +130,7 @@ class SendStatisticsProxyTest : public ::testing::Test {
const VideoSendStream::StreamStats& a = it->second;
const VideoSendStream::StreamStats& b = corresponding_it->second;
EXPECT_EQ(a.is_rtx, b.is_rtx);
EXPECT_EQ(a.type, b.type);
EXPECT_EQ(a.frame_counts.key_frames, b.frame_counts.key_frames);
EXPECT_EQ(a.frame_counts.delta_frames, b.frame_counts.delta_frames);
EXPECT_EQ(a.total_bitrate_bps, b.total_bitrate_bps);
@ -2379,6 +2386,21 @@ TEST_F(SendStatisticsProxyTest, ResetsRtcpCountersOnContentChange) {
4 * 100 / 5));
}
TEST_F(SendStatisticsProxyTest, GetStatsReportsIsRtx) {
StreamDataCountersCallback* proxy =
static_cast<StreamDataCountersCallback*>(statistics_proxy_.get());
StreamDataCounters counters;
proxy->DataCountersUpdated(counters, kFirstSsrc);
proxy->DataCountersUpdated(counters, kFirstRtxSsrc);
EXPECT_NE(GetStreamStats(kFirstSsrc).type,
VideoSendStream::StreamStats::StreamType::kRtx);
EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, absl::nullopt);
EXPECT_EQ(GetStreamStats(kFirstRtxSsrc).type,
VideoSendStream::StreamStats::StreamType::kRtx);
EXPECT_EQ(GetStreamStats(kFirstRtxSsrc).referenced_media_ssrc, kFirstSsrc);
}
TEST_F(SendStatisticsProxyTest, GetStatsReportsIsFlexFec) {
statistics_proxy_.reset(
new SendStatisticsProxy(&fake_clock_, GetTestConfigWithFlexFec(),
@ -2390,8 +2412,12 @@ TEST_F(SendStatisticsProxyTest, GetStatsReportsIsFlexFec) {
proxy->DataCountersUpdated(counters, kFirstSsrc);
proxy->DataCountersUpdated(counters, kFlexFecSsrc);
EXPECT_FALSE(GetStreamStats(kFirstSsrc).is_flexfec);
EXPECT_TRUE(GetStreamStats(kFlexFecSsrc).is_flexfec);
EXPECT_NE(GetStreamStats(kFirstSsrc).type,
VideoSendStream::StreamStats::StreamType::kFlexfec);
EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, absl::nullopt);
EXPECT_EQ(GetStreamStats(kFlexFecSsrc).type,
VideoSendStream::StreamStats::StreamType::kFlexfec);
EXPECT_EQ(GetStreamStats(kFlexFecSsrc).referenced_media_ssrc, kFirstSsrc);
}
TEST_F(SendStatisticsProxyTest, SendBitratesAreReportedWithFlexFecEnabled) {