Add media related stats (audio level etc.) to unsignaled streams.

The media-related stats weren't working for unsignaled streams because there
was no mapping between the receiver_info and unsignaled tracks.

This CL fixes the issue by adding special logic to the TrackMediaInfoMap
that creates the mapping.

BUG=b/37836881
BUG=webrtc:7685

TBR=deadbeef@webrtc.org

Review-Url: https://codereview.webrtc.org/2883943003
Cr-Commit-Position: refs/heads/master@{#18217}
This commit is contained in:
zhihuang 2017-05-19 13:09:47 -07:00 committed by Commit bot
parent 56e119e2e8
commit f816493c4f
2 changed files with 59 additions and 10 deletions

View File

@ -116,6 +116,18 @@ void RemoveSsrcsAndMsids(cricket::SessionDescription* desc) {
desc->set_msid_supported(false);
}
// Returns the index of the first entry in |media_stats_vec| whose |kind|
// equals the given string (e.g. "audio" or "video"), or -1 if no entry
// matches.
int FindFirstMediaStatsIndexByKind(
    const std::string& kind,
    const std::vector<const webrtc::RTCMediaStreamTrackStats*>&
        media_stats_vec) {
  for (size_t i = 0; i < media_stats_vec.size(); ++i) {
    if (media_stats_vec[i]->kind.ValueToString() == kind) {
      // Explicit cast avoids an implicit size_t -> int narrowing conversion.
      return static_cast<int>(i);
    }
  }
  return -1;
}
class SignalingMessageReceiver {
public:
virtual void ReceiveSdpMessage(const std::string& type,
@ -1926,9 +1938,31 @@ TEST_F(PeerConnectionIntegrationTest,
ASSERT_EQ(1U, inbound_stream_stats.size());
ASSERT_TRUE(inbound_stream_stats[0]->bytes_received.is_defined());
ASSERT_GT(*inbound_stream_stats[0]->bytes_received, 0U);
// TODO(deadbeef): Test that track_id is defined. This is not currently
// working since SSRCs are used to match RtpReceivers (and their tracks) with
// received stream stats in TrackMediaInfoMap.
ASSERT_TRUE(inbound_stream_stats[0]->track_id.is_defined());
}
// Test that media-related stats (e.g. audio level) are reported for a stream
// whose SSRCs/MSIDs were stripped from the offer, i.e. an "unsignaled"
// stream that the callee cannot match to a receiver via SDP.
TEST_F(PeerConnectionIntegrationTest,
GetMediaStatsForUnsignaledStreamWithNewStatsApi) {
ASSERT_TRUE(CreatePeerConnectionWrappers());
ConnectFakeSignaling();
caller()->AddAudioVideoMediaStream();
// Remove SSRCs and MSIDs from the received offer SDP so the callee sees the
// incoming media as unsignaled.
callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
caller()->CreateAndSetAndSignalOffer();
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
// Wait for media to actually flow before querying stats.
// NOTE(review): the (0, 0, 1, 1) arguments appear to request one audio and
// one video frame at the callee — confirm against the helper's signature.
ExpectNewFramesReceivedWithWait(0, 0, 1, 1, kMaxWaitForFramesMs);
rtc::scoped_refptr<const webrtc::RTCStatsReport> report =
callee()->NewGetStats();
ASSERT_NE(nullptr, report);
// Find the audio track stats entry and verify that a media-level metric
// (audio_level) was populated despite the stream being unsignaled.
auto media_stats = report->GetStatsOfType<webrtc::RTCMediaStreamTrackStats>();
auto audio_index = FindFirstMediaStatsIndexByKind("audio", media_stats);
ASSERT_GE(audio_index, 0);
EXPECT_TRUE(media_stats[audio_index]->audio_level.is_defined());
}
// Test that DTLS 1.0 is used if both sides only support DTLS 1.0.

View File

@ -34,7 +34,9 @@ void GetAudioAndVideoTrackBySsrc(
std::map<uint32_t, AudioTrackInterface*>* local_audio_track_by_ssrc,
std::map<uint32_t, VideoTrackInterface*>* local_video_track_by_ssrc,
std::map<uint32_t, AudioTrackInterface*>* remote_audio_track_by_ssrc,
std::map<uint32_t, VideoTrackInterface*>* remote_video_track_by_ssrc) {
std::map<uint32_t, VideoTrackInterface*>* remote_video_track_by_ssrc,
AudioTrackInterface** unsignaled_audio_track,
VideoTrackInterface** unsignaled_video_track) {
RTC_DCHECK(local_audio_track_by_ssrc->empty());
RTC_DCHECK(local_video_track_by_ssrc->empty());
RTC_DCHECK(remote_audio_track_by_ssrc->empty());
@ -80,6 +82,12 @@ void GetAudioAndVideoTrackBySsrc(
RtpParameters params = rtp_receiver->GetParameters();
for (const RtpEncodingParameters& encoding : params.encodings) {
if (!encoding.ssrc) {
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
*unsignaled_audio_track = static_cast<AudioTrackInterface*>(track);
} else {
RTC_DCHECK(media_type == cricket::MEDIA_TYPE_VIDEO);
*unsignaled_video_track = static_cast<VideoTrackInterface*>(track);
}
continue;
}
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
@ -110,12 +118,13 @@ TrackMediaInfoMap::TrackMediaInfoMap(
std::map<uint32_t, VideoTrackInterface*> local_video_track_by_ssrc;
std::map<uint32_t, AudioTrackInterface*> remote_audio_track_by_ssrc;
std::map<uint32_t, VideoTrackInterface*> remote_video_track_by_ssrc;
GetAudioAndVideoTrackBySsrc(rtp_senders,
rtp_receivers,
&local_audio_track_by_ssrc,
&local_video_track_by_ssrc,
&remote_audio_track_by_ssrc,
&remote_video_track_by_ssrc);
AudioTrackInterface* unsignaled_audio_track = nullptr;
VideoTrackInterface* unsignaled_video_track = nullptr;
GetAudioAndVideoTrackBySsrc(
rtp_senders, rtp_receivers, &local_audio_track_by_ssrc,
&local_video_track_by_ssrc, &remote_audio_track_by_ssrc,
&remote_video_track_by_ssrc, &unsignaled_audio_track,
&unsignaled_video_track);
if (voice_media_info_) {
for (auto& sender_info : voice_media_info_->senders) {
AudioTrackInterface* associated_track =
@ -137,6 +146,9 @@ TrackMediaInfoMap::TrackMediaInfoMap(
RTC_DCHECK(voice_info_by_remote_track_.find(associated_track) ==
voice_info_by_remote_track_.end());
voice_info_by_remote_track_[associated_track] = &receiver_info;
} else if (unsignaled_audio_track) {
audio_track_by_receiver_info_[&receiver_info] = unsignaled_audio_track;
voice_info_by_remote_track_[unsignaled_audio_track] = &receiver_info;
}
}
}
@ -161,6 +173,9 @@ TrackMediaInfoMap::TrackMediaInfoMap(
RTC_DCHECK(video_info_by_remote_track_.find(associated_track) ==
video_info_by_remote_track_.end());
video_info_by_remote_track_[associated_track] = &receiver_info;
} else if (unsignaled_video_track) {
video_track_by_receiver_info_[&receiver_info] = unsignaled_video_track;
video_info_by_remote_track_[unsignaled_video_track] = &receiver_info;
}
}
}