Add estimatedPlayoutTimestamp to RTCInboundRTPStreamStats.
https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp

Partial implementation: currently only populated when a/v sync is enabled.

Bug: webrtc:7065
Change-Id: I8595cc848d080d7c3bef152462a9becf0e5a2196
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/155621
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Reviewed-by: Oskar Sundbom <ossu@webrtc.org>
Reviewed-by: Henrik Boström <hbos@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#29581}
This commit is contained in:
parent 261fc5197e
commit fcf79cca7b
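The change threads the estimate through the audio and video receive paths: each stream caches the most recent playout NTP timestamp together with the local time at which it was stored, and extrapolates by the elapsed wall-clock time whenever stats are queried (see GetCurrentEstimatedPlayoutNtpTimestampMs in the diff below). A minimal standalone sketch of that bookkeeping, using hypothetical names (PlayoutNtpEstimator, Set, Estimate) rather than the actual WebRTC classes:

#include <cstdint>
#include <optional>

// Caches the last estimated playout NTP timestamp and the local time at which
// it was stored; queries extrapolate by the elapsed local time.
class PlayoutNtpEstimator {
 public:
  void Set(int64_t ntp_timestamp_ms, int64_t now_ms) {
    ntp_timestamp_ms_ = ntp_timestamp_ms;
    stored_at_ms_ = now_ms;
  }

  std::optional<int64_t> Estimate(int64_t now_ms) const {
    // No estimate until A/V sync has stored one.
    if (!ntp_timestamp_ms_ || !stored_at_ms_)
      return std::nullopt;
    // Extrapolate the cached NTP estimate by the elapsed local time.
    return *ntp_timestamp_ms_ + (now_ms - *stored_at_ms_);
  }

 private:
  std::optional<int64_t> ntp_timestamp_ms_;
  std::optional<int64_t> stored_at_ms_;
};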
@@ -446,6 +446,8 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats {
   RTCStatsMember<double> total_decode_time;
   // https://henbos.github.io/webrtc-provisional-stats/#dom-rtcinboundrtpstreamstats-contenttype
   RTCStatsMember<std::string> content_type;
+  // TODO(asapersson): Currently only populated if audio/video sync is enabled.
+  RTCStatsMember<double> estimated_playout_timestamp;
   // TODO(hbos): This is only implemented for video; implement it for audio as
   // well.
   RTCStatsMember<std::string> decoder_implementation;
@@ -206,6 +206,9 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const {
   stats.audio_level = channel_receive_->GetSpeechOutputLevelFullRange();
   stats.total_output_energy = channel_receive_->GetTotalOutputEnergy();
   stats.total_output_duration = channel_receive_->GetTotalOutputDuration();
+  stats.estimated_playout_ntp_timestamp_ms =
+      channel_receive_->GetCurrentEstimatedPlayoutNtpTimestampMs(
+          rtc::TimeMillis());
 
   // Get jitter buffer and total delay (alg + jitter + playout) stats.
   auto ns = channel_receive_->GetNetworkStatistics();

@@ -310,9 +313,18 @@ absl::optional<Syncable::Info> AudioReceiveStream::GetInfo() const {
   return info;
 }
 
-uint32_t AudioReceiveStream::GetPlayoutTimestamp() const {
+bool AudioReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                                                int64_t* time_ms) const {
   // Called on video capture thread.
-  return channel_receive_->GetPlayoutTimestamp();
+  return channel_receive_->GetPlayoutRtpTimestamp(rtp_timestamp, time_ms);
 }
 
+void AudioReceiveStream::SetEstimatedPlayoutNtpTimestampMs(
+    int64_t ntp_timestamp_ms,
+    int64_t time_ms) {
+  // Called on video capture thread.
+  channel_receive_->SetEstimatedPlayoutNtpTimestampMs(ntp_timestamp_ms,
+                                                      time_ms);
+}
+
 void AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {

@@ -87,7 +87,10 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream,
   // Syncable
   int id() const override;
   absl::optional<Syncable::Info> GetInfo() const override;
-  uint32_t GetPlayoutTimestamp() const override;
+  bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                              int64_t* time_ms) const override;
+  void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+                                         int64_t time_ms) override;
   void SetMinimumPlayoutDelay(int delay_ms) override;
 
   void AssociateSendStream(AudioSendStream* send_stream);
@@ -62,6 +62,7 @@ const int kPlayoutBufferDelay = 302;
 const unsigned int kSpeechOutputLevel = 99;
 const double kTotalOutputEnergy = 0.25;
 const double kTotalOutputDuration = 0.5;
+const int64_t kPlayoutNtpTimestampMs = 5678;
 
 const CallReceiveStatistics kCallStats = {678, 234, -12, 567, 78, 890, 123};
 const std::pair<int, SdpAudioFormat> kReceiveCodec = {

@@ -145,6 +146,8 @@ struct ConfigHelper {
         .WillOnce(Return(kAudioDecodeStats));
     EXPECT_CALL(*channel_receive_, GetReceiveCodec())
         .WillOnce(Return(kReceiveCodec));
+    EXPECT_CALL(*channel_receive_, GetCurrentEstimatedPlayoutNtpTimestampMs(_))
+        .WillOnce(Return(kPlayoutNtpTimestampMs));
   }
 
  private:

@@ -315,6 +318,7 @@ TEST(AudioReceiveStreamTest, GetStats) {
             stats.decoding_muted_output);
   EXPECT_EQ(kCallStats.capture_start_ntp_time_ms_,
             stats.capture_start_ntp_time_ms);
+  EXPECT_EQ(kPlayoutNtpTimestampMs, stats.estimated_playout_ntp_timestamp_ms);
 }
 
 TEST(AudioReceiveStreamTest, SetGain) {
@@ -141,7 +141,12 @@ class ChannelReceive : public ChannelReceiveInterface,
   // Audio+Video Sync.
   uint32_t GetDelayEstimate() const override;
   void SetMinimumPlayoutDelay(int delayMs) override;
-  uint32_t GetPlayoutTimestamp() const override;
+  bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                              int64_t* time_ms) const override;
+  void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+                                         int64_t time_ms) override;
+  absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
+      int64_t now_ms) const override;
 
   // Audio quality.
   bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override;

@@ -178,7 +183,7 @@ class ChannelReceive : public ChannelReceiveInterface,
                       size_t packet_length,
                       const RTPHeader& header);
   int ResendPackets(const uint16_t* sequence_numbers, int length);
-  void UpdatePlayoutTimestamp(bool rtcp);
+  void UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms);
 
   int GetRtpTimestampRateHz() const;
   int64_t GetRTT() const;

@@ -242,7 +247,13 @@ class ChannelReceive : public ChannelReceiveInterface,
 
   rtc::CriticalSection video_sync_lock_;
   uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(video_sync_lock_);
+  absl::optional<int64_t> playout_timestamp_rtp_time_ms_
+      RTC_GUARDED_BY(video_sync_lock_);
   uint32_t playout_delay_ms_ RTC_GUARDED_BY(video_sync_lock_);
+  absl::optional<int64_t> playout_timestamp_ntp_
+      RTC_GUARDED_BY(video_sync_lock_);
+  absl::optional<int64_t> playout_timestamp_ntp_time_ms_
+      RTC_GUARDED_BY(video_sync_lock_);
 
   rtc::CriticalSection ts_stats_lock_;
@@ -573,7 +584,7 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) {
   }
 
   // Store playout timestamp for the received RTP packet
-  UpdatePlayoutTimestamp(false);
+  UpdatePlayoutTimestamp(false, now_ms);
 
   const auto& it = payload_type_frequencies_.find(packet.PayloadType());
   if (it == payload_type_frequencies_.end())

@@ -638,7 +649,7 @@ void ChannelReceive::ReceivePacket(const uint8_t* packet,
 // May be called on either worker thread or network thread.
 void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
   // Store playout timestamp for the received RTCP packet
-  UpdatePlayoutTimestamp(true);
+  UpdatePlayoutTimestamp(true, rtc::TimeMillis());
 
   // Deliver RTCP packet to RTP/RTCP module for parsing
   _rtpRtcpModule->IncomingRtcpPacket(data, length);

@@ -806,14 +817,38 @@ void ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) {
   }
 }
 
-uint32_t ChannelReceive::GetPlayoutTimestamp() const {
+bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                                            int64_t* time_ms) const {
   RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
   {
     rtc::CritScope lock(&video_sync_lock_);
-    return playout_timestamp_rtp_;
+    if (!playout_timestamp_rtp_time_ms_)
+      return false;
+    *rtp_timestamp = playout_timestamp_rtp_;
+    *time_ms = playout_timestamp_rtp_time_ms_.value();
+    return true;
   }
 }
 
+void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+                                                       int64_t time_ms) {
+  RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
+  rtc::CritScope lock(&video_sync_lock_);
+  playout_timestamp_ntp_ = ntp_timestamp_ms;
+  playout_timestamp_ntp_time_ms_ = time_ms;
+}
+
+absl::optional<int64_t>
+ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const {
+  RTC_DCHECK(worker_thread_checker_.IsCurrent());
+  rtc::CritScope lock(&video_sync_lock_);
+  if (!playout_timestamp_ntp_ || !playout_timestamp_ntp_time_ms_)
+    return absl::nullopt;
+
+  int64_t elapsed_ms = now_ms - *playout_timestamp_ntp_time_ms_;
+  return *playout_timestamp_ntp_ + elapsed_ms;
+}
+
 bool ChannelReceive::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
   return acm_receiver_.SetBaseMinimumDelayMs(delay_ms);
 }

@@ -841,7 +876,7 @@ absl::optional<Syncable::Info> ChannelReceive::GetSyncInfo() const {
   return info;
 }
 
-void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp) {
+void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) {
   jitter_buffer_playout_timestamp_ = acm_receiver_.GetPlayoutTimestamp();
 
   if (!jitter_buffer_playout_timestamp_) {

@@ -868,6 +903,7 @@ void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp) {
     rtc::CritScope lock(&video_sync_lock_);
     if (!rtcp) {
       playout_timestamp_rtp_ = playout_timestamp;
+      playout_timestamp_rtp_time_ms_ = now_ms;
     }
     playout_delay_ms_ = delay_ms;
   }
@@ -105,7 +105,12 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface {
   // Audio+Video Sync.
   virtual uint32_t GetDelayEstimate() const = 0;
   virtual void SetMinimumPlayoutDelay(int delay_ms) = 0;
-  virtual uint32_t GetPlayoutTimestamp() const = 0;
+  virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                                      int64_t* time_ms) const = 0;
+  virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+                                                 int64_t time_ms) = 0;
+  virtual absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
+      int64_t now_ms) const = 0;
 
   // Audio quality.
   // Base minimum delay sets lower bound on minimum delay value which
@@ -49,7 +49,12 @@ class MockChannelReceive : public voe::ChannelReceiveInterface {
   MOCK_CONST_METHOD0(PreferredSampleRate, int());
   MOCK_METHOD1(SetAssociatedSendChannel,
                void(const voe::ChannelSendInterface* send_channel));
-  MOCK_CONST_METHOD0(GetPlayoutTimestamp, uint32_t());
+  MOCK_CONST_METHOD2(GetPlayoutRtpTimestamp,
+                     bool(uint32_t* rtp_timestamp, int64_t* time_ms));
+  MOCK_METHOD2(SetEstimatedPlayoutNtpTimestampMs,
+               void(int64_t ntp_timestamp_ms, int64_t time_ms));
+  MOCK_CONST_METHOD1(GetCurrentEstimatedPlayoutNtpTimestampMs,
+                     absl::optional<int64_t>(int64_t now_ms));
   MOCK_CONST_METHOD0(GetSyncInfo, absl::optional<Syncable::Info>());
   MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms));
   MOCK_METHOD1(SetBaseMinimumPlayoutDelayMs, bool(int delay_ms));
@@ -87,6 +87,8 @@ class AudioReceiveStream {
     double relative_packet_arrival_delay_seconds = 0.0;
     int32_t interruption_count = 0;
     int32_t total_interruption_duration_ms = 0;
+    // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp
+    absl::optional<int64_t> estimated_playout_ntp_timestamp_ms;
   };
 
   struct Config {
@@ -35,8 +35,11 @@ class Syncable {
 
   virtual int id() const = 0;
   virtual absl::optional<Info> GetInfo() const = 0;
-  virtual uint32_t GetPlayoutTimestamp() const = 0;
+  virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                                      int64_t* time_ms) const = 0;
   virtual void SetMinimumPlayoutDelay(int delay_ms) = 0;
+  virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+                                                 int64_t time_ms) = 0;
 };
 }  // namespace webrtc
@@ -110,6 +110,8 @@ class VideoReceiveStream {
 
     VideoContentType content_type = VideoContentType::UNSPECIFIED;
 
+    // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp
+    absl::optional<int64_t> estimated_playout_ntp_timestamp_ms;
     int sync_offset_ms = std::numeric_limits<int>::max();
 
     uint32_t ssrc = 0;
@@ -460,6 +460,8 @@ struct MediaReceiverInfo {
   // local clock when it was received - not the RTP timestamp of that packet.
   // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-lastpacketreceivedtimestamp
   absl::optional<int64_t> last_packet_received_timestamp_ms;
+  // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp
+  absl::optional<int64_t> estimated_playout_ntp_timestamp_ms;
   std::string codec_name;
   absl::optional<int> codec_payload_type;
   std::vector<SsrcReceiverInfo> local_stats;
@@ -2829,6 +2829,8 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo(
   info.total_decode_time_ms = stats.total_decode_time_ms;
   info.last_packet_received_timestamp_ms =
       stats.rtp_stats.last_packet_received_timestamp_ms;
+  info.estimated_playout_ntp_timestamp_ms =
+      stats.estimated_playout_ntp_timestamp_ms;
   info.first_frame_received_to_decoded_ms =
       stats.first_frame_received_to_decoded_ms;
   info.interframe_delay_max_ms = stats.interframe_delay_max_ms;
@@ -2271,6 +2271,8 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
     rinfo.capture_start_ntp_time_ms = stats.capture_start_ntp_time_ms;
     rinfo.last_packet_received_timestamp_ms =
        stats.last_packet_received_timestamp_ms;
+    rinfo.estimated_playout_ntp_timestamp_ms =
+        stats.estimated_playout_ntp_timestamp_ms;
     rinfo.jitter_buffer_flushes = stats.jitter_buffer_flushes;
     rinfo.relative_packet_arrival_delay_seconds =
         stats.relative_packet_arrival_delay_seconds;
@@ -285,6 +285,10 @@ void SetInboundRTPStreamStatsFromVoiceReceiverInfo(
             *voice_receiver_info.last_packet_received_timestamp_ms) /
         rtc::kNumMillisecsPerSec;
   }
+  if (voice_receiver_info.estimated_playout_ntp_timestamp_ms) {
+    inbound_audio->estimated_playout_timestamp = static_cast<double>(
+        *voice_receiver_info.estimated_playout_ntp_timestamp_ms);
+  }
   inbound_audio->fec_packets_received =
       voice_receiver_info.fec_packets_received;
   inbound_audio->fec_packets_discarded =

@@ -322,6 +326,10 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo(
             *video_receiver_info.last_packet_received_timestamp_ms) /
         rtc::kNumMillisecsPerSec;
   }
+  if (video_receiver_info.estimated_playout_ntp_timestamp_ms) {
+    inbound_video->estimated_playout_timestamp = static_cast<double>(
+        *video_receiver_info.estimated_playout_ntp_timestamp_ms);
+  }
   // TODO(https://crbug.com/webrtc/10529): When info's |content_info| is
   // optional, support the "unspecified" value.
   if (video_receiver_info.content_type == VideoContentType::SCREENSHARE)
@@ -1789,6 +1789,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Audio) {
   // Set previously undefined values and "GetStats" again.
   voice_media_info.receivers[0].last_packet_received_timestamp_ms = 3000;
   expected_audio.last_packet_received_timestamp = 3.0;
+  voice_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = 4567;
+  expected_audio.estimated_playout_timestamp = 4567;
   voice_media_channel->SetStats(voice_media_info);
 
   report = stats_->GetFreshStatsReport();

@@ -1824,6 +1826,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) {
   video_media_info.receivers[0].last_packet_received_timestamp_ms =
       absl::nullopt;
   video_media_info.receivers[0].content_type = VideoContentType::UNSPECIFIED;
+  video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms =
+      absl::nullopt;
   video_media_info.receivers[0].decoder_implementation_name = "";
 
   RtpCodecParameters codec_parameters;

@@ -1872,11 +1876,13 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) {
 
   // Set previously undefined values and "GetStats" again.
   video_media_info.receivers[0].qp_sum = 9;
-  video_media_info.receivers[0].last_packet_received_timestamp_ms = 1000;
   expected_video.qp_sum = 9;
+  video_media_info.receivers[0].last_packet_received_timestamp_ms = 1000;
   expected_video.last_packet_received_timestamp = 1.0;
   video_media_info.receivers[0].content_type = VideoContentType::SCREENSHARE;
   expected_video.content_type = "screenshare";
+  video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = 1234;
+  expected_video.estimated_playout_timestamp = 1234;
   video_media_info.receivers[0].decoder_implementation_name = "libfoodecoder";
   expected_video.decoder_implementation = "libfoodecoder";
   video_media_channel->SetStats(video_media_info);
@@ -821,6 +821,9 @@ class RTCStatsReportVerifier {
     verifier.TestMemberIsUndefined(inbound_stream.burst_discard_rate);
     verifier.TestMemberIsUndefined(inbound_stream.gap_loss_rate);
     verifier.TestMemberIsUndefined(inbound_stream.gap_discard_rate);
+    // Test runtime too short to get an estimate (at least two RTCP sender
+    // reports need to be received).
+    verifier.MarkMemberTested(inbound_stream.estimated_playout_timestamp, true);
     if (inbound_stream.media_type.is_defined() &&
         *inbound_stream.media_type == "video") {
       verifier.TestMemberIsDefined(inbound_stream.frames_decoded);
@@ -617,6 +617,7 @@ WEBRTC_RTCSTATS_IMPL(
     &key_frames_decoded,
     &total_decode_time,
     &content_type,
+    &estimated_playout_timestamp,
     &decoder_implementation)
 // clang-format on
 

@@ -650,6 +651,7 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id,
       key_frames_decoded("keyFramesDecoded"),
       total_decode_time("totalDecodeTime"),
       content_type("contentType"),
+      estimated_playout_timestamp("estimatedPlayoutTimestamp"),
       decoder_implementation("decoderImplementation") {}
 
 RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(

@@ -678,6 +680,7 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(
       key_frames_decoded(other.key_frames_decoded),
       total_decode_time(other.total_decode_time),
       content_type(other.content_type),
+      estimated_playout_timestamp(other.estimated_playout_timestamp),
       decoder_implementation(other.decoder_implementation) {}
 
 RTCInboundRTPStreamStats::~RTCInboundRTPStreamStats() {}
@@ -611,6 +611,17 @@ void ReceiveStatisticsProxy::UpdateDecodeTimeHistograms(
   }
 }
 
+absl::optional<int64_t>
+ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs(
+    int64_t now_ms) const {
+  if (!last_estimated_playout_ntp_timestamp_ms_ ||
+      !last_estimated_playout_time_ms_) {
+    return absl::nullopt;
+  }
+  int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_;
+  return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms;
+}
+
 VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
   rtc::CritScope lock(&crit_);
   // Get current frame rates here, as only updating them on new frames prevents

@@ -637,6 +648,8 @@ VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
       static_cast<double>(current_delay_counter_.Sum(1).value_or(0)) /
       rtc::kNumMillisecsPerSec;
   stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples();
+  stats_.estimated_playout_ntp_timestamp_ms =
+      GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms);
   return stats_;
 }
@@ -813,11 +826,14 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
   QualitySample();
 }
 
-void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms,
+void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
+                                                 int64_t sync_offset_ms,
                                                  double estimated_freq_khz) {
   rtc::CritScope lock(&crit_);
   sync_offset_counter_.Add(std::abs(sync_offset_ms));
   stats_.sync_offset_ms = sync_offset_ms;
+  last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
+  last_estimated_playout_time_ms_ = clock_->TimeInMilliseconds();
 
   const double kMaxFreqKhz = 10000.0;
   int offset_khz = kMaxFreqKhz;
@@ -52,7 +52,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
                       absl::optional<uint8_t> qp,
                       int32_t decode_time_ms,
                       VideoContentType content_type);
-  void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
+  void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
+                           int64_t sync_offset_ms,
+                           double estimated_freq_khz);
   void OnRenderedFrame(const VideoFrame& frame);
   void OnIncomingPayloadType(int payload_type);
   void OnDecoderImplementationName(const char* implementation_name);

@@ -133,6 +135,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
                                   int decode_time_ms) const
       RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
 
+  absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
+      int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
   Clock* const clock_;
   // Ownership of this object lies with the owner of the ReceiveStatisticsProxy
   // instance. Lifetime is guaranteed to outlive |this|.

@@ -187,6 +192,10 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
   mutable rtc::MovingMaxCounter<TimingFrameInfo> timing_frame_info_counter_
       RTC_GUARDED_BY(&crit_);
   absl::optional<int> num_unique_frames_ RTC_GUARDED_BY(crit_);
+  absl::optional<int64_t> last_estimated_playout_ntp_timestamp_ms_
+      RTC_GUARDED_BY(&crit_);
+  absl::optional<int64_t> last_estimated_playout_time_ms_
+      RTC_GUARDED_BY(&crit_);
   rtc::ThreadChecker decode_thread_;
   rtc::ThreadChecker network_thread_;
   rtc::ThreadChecker main_thread_;
@@ -598,20 +598,40 @@ TEST_F(ReceiveStatisticsProxyTest, PacketLossHistogramIsUpdated) {
       1, metrics::NumEvents("WebRTC.Video.ReceivedPacketsLostInPercent", 10));
 }
 
+TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsPlayoutTimestamp) {
+  const int64_t kVideoNtpMs = 21;
+  const int64_t kSyncOffsetMs = 22;
+  const double kFreqKhz = 90.0;
+  EXPECT_EQ(absl::nullopt,
+            statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
+  EXPECT_EQ(kVideoNtpMs,
+            statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+  fake_clock_.AdvanceTimeMilliseconds(13);
+  EXPECT_EQ(kVideoNtpMs + 13,
+            statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+  fake_clock_.AdvanceTimeMilliseconds(5);
+  EXPECT_EQ(kVideoNtpMs + 13 + 5,
+            statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+}
+
 TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsAvSyncOffset) {
+  const int64_t kVideoNtpMs = 21;
   const int64_t kSyncOffsetMs = 22;
   const double kFreqKhz = 90.0;
   EXPECT_EQ(std::numeric_limits<int>::max(),
             statistics_proxy_->GetStats().sync_offset_ms);
-  statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
   EXPECT_EQ(kSyncOffsetMs, statistics_proxy_->GetStats().sync_offset_ms);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, AvSyncOffsetHistogramIsUpdated) {
+  const int64_t kVideoNtpMs = 21;
   const int64_t kSyncOffsetMs = 22;
   const double kFreqKhz = 90.0;
   for (int i = 0; i < kMinRequiredSamples; ++i)
-    statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
+    statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+                                           kFreqKhz);
   statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
                                       nullptr);
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs"));
@@ -620,18 +640,23 @@ TEST_F(ReceiveStatisticsProxyTest, AvSyncOffsetHistogramIsUpdated) {
 }
 
 TEST_F(ReceiveStatisticsProxyTest, RtpToNtpFrequencyOffsetHistogramIsUpdated) {
+  const int64_t kVideoNtpMs = 21;
   const int64_t kSyncOffsetMs = 22;
   const double kFreqKhz = 90.0;
-  statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
-  statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz + 2.2);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+                                         kFreqKhz + 2.2);
   fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs);
   // Process interval passed, max diff: 2.
-  statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz + 1.1);
-  statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz - 4.2);
-  statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz - 0.9);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+                                         kFreqKhz + 1.1);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+                                         kFreqKhz - 4.2);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+                                         kFreqKhz - 0.9);
   fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs);
   // Process interval passed, max diff: 4.
-  statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
+  statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
   statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
                                       nullptr);
   // Average reported: (2 + 4) / 2 = 3.
@@ -118,9 +118,13 @@ void RtpStreamsSynchronizer::Process() {
   syncable_video_->SetMinimumPlayoutDelay(target_video_delay_ms);
 }
 
+// TODO(https://bugs.webrtc.org/7065): Move RtpToNtpEstimator out of
+// RtpStreamsSynchronizer and into respective receive stream to always populate
+// the estimated playout timestamp.
 bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs(
-    uint32_t timestamp,
+    uint32_t rtp_timestamp,
     int64_t render_time_ms,
+    int64_t* video_playout_ntp_ms,
     int64_t* stream_offset_ms,
     double* estimated_freq_khz) const {
   rtc::CritScope lock(&crit_);

@@ -128,23 +132,37 @@ bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs(
     return false;
   }
 
-  uint32_t playout_timestamp = syncable_audio_->GetPlayoutTimestamp();
+  uint32_t audio_rtp_timestamp;
+  int64_t time_ms;
+  if (!syncable_audio_->GetPlayoutRtpTimestamp(&audio_rtp_timestamp,
+                                               &time_ms)) {
+    return false;
+  }
 
   int64_t latest_audio_ntp;
-  if (!audio_measurement_.rtp_to_ntp.Estimate(playout_timestamp,
+  if (!audio_measurement_.rtp_to_ntp.Estimate(audio_rtp_timestamp,
                                               &latest_audio_ntp)) {
     return false;
   }
 
+  syncable_audio_->SetEstimatedPlayoutNtpTimestampMs(latest_audio_ntp, time_ms);
+
   int64_t latest_video_ntp;
-  if (!video_measurement_.rtp_to_ntp.Estimate(timestamp, &latest_video_ntp)) {
+  if (!video_measurement_.rtp_to_ntp.Estimate(rtp_timestamp,
+                                              &latest_video_ntp)) {
     return false;
   }
 
-  int64_t time_to_render_ms = render_time_ms - rtc::TimeMillis();
-  if (time_to_render_ms > 0)
-    latest_video_ntp += time_to_render_ms;
+  // Current audio ntp.
+  int64_t now_ms = rtc::TimeMillis();
+  latest_audio_ntp += (now_ms - time_ms);
+
+  // Remove video playout delay.
+  int64_t time_to_render_ms = render_time_ms - now_ms;
+  if (time_to_render_ms > 0)
+    latest_video_ntp -= time_to_render_ms;
 
+  *video_playout_ntp_ms = latest_video_ntp;
   *stream_offset_ms = latest_audio_ntp - latest_video_ntp;
   *estimated_freq_khz = video_measurement_.rtp_to_ntp.params()->frequency_khz;
   return true;
@@ -36,12 +36,14 @@ class RtpStreamsSynchronizer : public Module {
   int64_t TimeUntilNextProcess() override;
   void Process() override;
 
-  // Gets the sync offset between the current played out audio frame and the
-  // video |frame|. Returns true on success, false otherwise.
-  // The estimated frequency is the frequency used in the RTP to NTP timestamp
+  // Gets the estimated playout NTP timestamp for the video frame with
+  // |rtp_timestamp| and the sync offset between the current played out audio
+  // frame and the video frame. Returns true on success, false otherwise.
+  // The |estimated_freq_khz| is the frequency used in the RTP to NTP timestamp
   // conversion.
-  bool GetStreamSyncOffsetInMs(uint32_t timestamp,
+  bool GetStreamSyncOffsetInMs(uint32_t rtp_timestamp,
                                int64_t render_time_ms,
+                               int64_t* video_playout_ntp_ms,
                                int64_t* stream_offset_ms,
                                double* estimated_freq_khz) const;
@@ -502,6 +502,7 @@ int VideoReceiveStream::GetBaseMinimumPlayoutDelayMs() const {
 
 // TODO(tommi): This method grabs a lock 6 times.
 void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) {
+  int64_t video_playout_ntp_ms;
   int64_t sync_offset_ms;
   double estimated_freq_khz;
   // TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the

@@ -510,9 +511,10 @@ void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) {
   // succeeds most of the time, which leads to grabbing a fourth lock.
   if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
           video_frame.timestamp(), video_frame.render_time_ms(),
-          &sync_offset_ms, &estimated_freq_khz)) {
+          &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) {
     // TODO(tommi): OnSyncOffsetUpdated grabs a lock.
-    stats_proxy_.OnSyncOffsetUpdated(sync_offset_ms, estimated_freq_khz);
+    stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms,
+                                     estimated_freq_khz);
   }
   source_tracker_.OnFrameDelivered(video_frame.packet_infos());
@@ -603,11 +605,18 @@ absl::optional<Syncable::Info> VideoReceiveStream::GetInfo() const {
   return info;
 }
 
-uint32_t VideoReceiveStream::GetPlayoutTimestamp() const {
+bool VideoReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                                                int64_t* time_ms) const {
   RTC_NOTREACHED();
   return 0;
 }
 
+void VideoReceiveStream::SetEstimatedPlayoutNtpTimestampMs(
+    int64_t ntp_timestamp_ms,
+    int64_t time_ms) {
+  RTC_NOTREACHED();
+}
+
 void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
   RTC_DCHECK_RUN_ON(&module_process_sequence_checker_);
   rtc::CritScope cs(&playout_delay_lock_);
@@ -124,7 +124,10 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
   // Implements Syncable.
   int id() const override;
   absl::optional<Syncable::Info> GetInfo() const override;
-  uint32_t GetPlayoutTimestamp() const override;
+  bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+                              int64_t* time_ms) const override;
+  void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+                                         int64_t time_ms) override;
 
   // SetMinimumPlayoutDelay is only called by A/V sync.
   void SetMinimumPlayoutDelay(int delay_ms) override;
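As a usage illustration, a minimal sketch of reading the new member from an already collected report in native code; it assumes the report comes from the usual GetStats() callback and relies on the existing api/stats accessors (GetStatsOfType, is_defined), so treat the exact calls as indicative rather than verified against this revision:

#include <cstdio>

#include "api/stats/rtc_stats_report.h"
#include "api/stats/rtcstats_objects.h"

// Prints estimatedPlayoutTimestamp (an NTP time in milliseconds) for every
// inbound RTP stream in an already collected report. The member is only
// defined once A/V sync has produced an estimate, so check is_defined()
// before dereferencing.
void PrintEstimatedPlayoutTimestamps(const webrtc::RTCStatsReport& report) {
  for (const webrtc::RTCInboundRTPStreamStats* inbound :
       report.GetStatsOfType<webrtc::RTCInboundRTPStreamStats>()) {
    if (inbound->estimated_playout_timestamp.is_defined()) {
      std::printf("%s: estimatedPlayoutTimestamp=%.0f\n",
                  inbound->id().c_str(),
                  *inbound->estimated_playout_timestamp);
    }
  }
}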