Use NtpTime in RtcpMeasurement instead of uint sec/uint frac.

BUG=webrtc:6579

Review-Url: https://codereview.webrtc.org/2435053004
Cr-Commit-Position: refs/heads/master@{#15125}
This commit is contained in:
asapersson 2016-11-17 02:27:14 -08:00 committed by Commit bot
parent 4da304407c
commit b7e7b49551
5 changed files with 51 additions and 47 deletions

View File

@ -13,6 +13,7 @@
#include <list>
#include "webrtc/system_wrappers/include/ntp_time.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@ -22,8 +23,7 @@ struct RtcpMeasurement {
RtcpMeasurement(uint32_t ntp_secs, uint32_t ntp_frac, uint32_t timestamp);
bool IsEqual(const RtcpMeasurement& other) const;
uint32_t ntp_secs;
uint32_t ntp_frac;
NtpTime ntp_time;
uint32_t rtp_timestamp;
};

View File

@ -15,6 +15,9 @@
namespace webrtc {
namespace {
// Number of RTCP SR reports to use to map between RTP and NTP.
const size_t kNumRtcpReportsToUse = 2;
// Calculates the RTP timestamp frequency from two pairs of NTP/RTP timestamps.
bool CalculateFrequency(int64_t rtcp_ntp_ms1,
uint32_t rtp_timestamp1,
@ -45,19 +48,17 @@ bool CompensateForWrapAround(uint32_t new_timestamp,
} // namespace
// Class holding RTP and NTP timestamp from an RTCP SR report.
RtcpMeasurement::RtcpMeasurement()
: ntp_secs(0), ntp_frac(0), rtp_timestamp(0) {}
RtcpMeasurement::RtcpMeasurement() : ntp_time(0, 0), rtp_timestamp(0) {}
RtcpMeasurement::RtcpMeasurement(uint32_t ntp_secs,
uint32_t ntp_frac,
uint32_t timestamp)
: ntp_secs(ntp_secs), ntp_frac(ntp_frac), rtp_timestamp(timestamp) {}
: ntp_time(ntp_secs, ntp_frac), rtp_timestamp(timestamp) {}
bool RtcpMeasurement::IsEqual(const RtcpMeasurement& other) const {
// Use || since two equal timestamps will result in zero frequency and in
// RtpToNtpMs, |rtp_timestamp_ms| is estimated by dividing by the frequency.
return (ntp_secs == other.ntp_secs && ntp_frac == other.ntp_frac) ||
(rtp_timestamp == other.rtp_timestamp);
return (ntp_time == other.ntp_time) || (rtp_timestamp == other.rtp_timestamp);
}
// Class holding a list of RTP and NTP timestamp pairs.
@ -73,13 +74,12 @@ bool RtcpMeasurements::Contains(const RtcpMeasurement& other) const {
}
bool RtcpMeasurements::IsValid(const RtcpMeasurement& other) const {
if (other.ntp_secs == 0 && other.ntp_frac == 0) {
// Invalid or not defined.
if (!other.ntp_time.Valid())
return false;
}
int64_t ntp_ms_new = Clock::NtpToMs(other.ntp_secs, other.ntp_frac);
int64_t ntp_ms_new = other.ntp_time.ToMs();
for (const auto& it : list) {
if (ntp_ms_new <= Clock::NtpToMs(it.ntp_secs, it.ntp_frac)) {
if (ntp_ms_new <= it.ntp_time.ToMs()) {
// Old report.
return false;
}
@ -97,7 +97,7 @@ bool RtcpMeasurements::IsValid(const RtcpMeasurement& other) const {
}
void RtcpMeasurements::UpdateParameters() {
if (list.size() != 2)
if (list.size() != kNumRtcpReportsToUse)
return;
int64_t timestamp_new = list.front().rtp_timestamp;
@ -105,10 +105,8 @@ void RtcpMeasurements::UpdateParameters() {
if (!CompensateForWrapAround(timestamp_new, timestamp_old, &timestamp_new))
return;
int64_t ntp_ms_new =
Clock::NtpToMs(list.front().ntp_secs, list.front().ntp_frac);
int64_t ntp_ms_old =
Clock::NtpToMs(list.back().ntp_secs, list.back().ntp_frac);
int64_t ntp_ms_new = list.front().ntp_time.ToMs();
int64_t ntp_ms_old = list.back().ntp_time.ToMs();
if (!CalculateFrequency(ntp_ms_new, timestamp_new, ntp_ms_old, timestamp_old,
&params.frequency_khz)) {
@ -137,9 +135,8 @@ bool UpdateRtcpList(uint32_t ntp_secs,
return false;
}
// Two RTCP SR reports are needed to map between RTP and NTP.
// More than two will not improve the mapping.
if (rtcp_measurements->list.size() == 2)
// Insert new RTCP SR report.
if (rtcp_measurements->list.size() == kNumRtcpReportsToUse)
rtcp_measurements->list.pop_back();
rtcp_measurements->list.push_front(measurement);

View File

@ -136,8 +136,8 @@ TEST(UpdateRtcpListTests, InjectRtcpSr) {
EXPECT_TRUE(UpdateRtcpList(kNtpSec, kNtpFrac, kTs, &rtcp, &new_sr));
EXPECT_TRUE(new_sr);
EXPECT_EQ(1u, rtcp.list.size());
EXPECT_EQ(kNtpSec, rtcp.list.front().ntp_secs);
EXPECT_EQ(kNtpFrac, rtcp.list.front().ntp_frac);
EXPECT_EQ(kNtpSec, rtcp.list.front().ntp_time.seconds());
EXPECT_EQ(kNtpFrac, rtcp.list.front().ntp_time.fractions());
EXPECT_EQ(kTs, rtcp.list.front().rtp_timestamp);
// Add second report.
EXPECT_TRUE(UpdateRtcpList(kNtpSec, kNtpFrac + kOneMsInNtpFrac, kTs + 1,

View File

@ -24,28 +24,29 @@
namespace webrtc {
namespace {
int UpdateMeasurements(StreamSynchronization::Measurements* stream,
RtpRtcp* rtp_rtcp, RtpReceiver* receiver) {
bool UpdateMeasurements(StreamSynchronization::Measurements* stream,
RtpRtcp* rtp_rtcp,
RtpReceiver* receiver) {
if (!receiver->Timestamp(&stream->latest_timestamp))
return -1;
return false;
if (!receiver->LastReceivedTimeMs(&stream->latest_receive_time_ms))
return -1;
return false;
uint32_t ntp_secs = 0;
uint32_t ntp_frac = 0;
uint32_t rtp_timestamp = 0;
if (rtp_rtcp->RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr,
&rtp_timestamp) != 0) {
return -1;
return false;
}
bool new_rtcp_sr = false;
if (!UpdateRtcpList(ntp_secs, ntp_frac, rtp_timestamp, &stream->rtcp,
&new_rtcp_sr)) {
return -1;
return false;
}
return 0;
return true;
}
} // namespace
@ -124,13 +125,13 @@ void RtpStreamsSynchronizer::Process() {
playout_buffer_delay_ms;
int64_t last_video_receive_ms = video_measurement_.latest_receive_time_ms;
if (UpdateMeasurements(&video_measurement_, video_rtp_rtcp_,
video_rtp_receiver_) != 0) {
if (!UpdateMeasurements(&video_measurement_, video_rtp_rtcp_,
video_rtp_receiver_)) {
return;
}
if (UpdateMeasurements(&audio_measurement_, audio_rtp_rtcp_,
audio_rtp_receiver_) != 0) {
if (!UpdateMeasurements(&audio_measurement_, audio_rtp_rtcp_,
audio_rtp_receiver_)) {
return;
}

View File

@ -36,19 +36,20 @@ class Time {
RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const {
RtcpMeasurement rtcp;
NowNtp(&rtcp.ntp_secs, &rtcp.ntp_frac);
rtcp.rtp_timestamp = NowRtp(frequency, offset);
rtcp.ntp_time = GetNowNtp();
rtcp.rtp_timestamp = GetNowRtp(frequency, offset);
return rtcp;
}
void NowNtp(uint32_t* ntp_secs, uint32_t* ntp_frac) const {
*ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
NtpTime GetNowNtp() const {
uint32_t ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
int64_t remainder_ms = time_now_ms_ % 1000;
*ntp_frac = static_cast<uint32_t>(
uint32_t ntp_frac = static_cast<uint32_t>(
static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
return NtpTime(ntp_secs, ntp_frac);
}
uint32_t NowRtp(int frequency, uint32_t offset) const {
uint32_t GetNowRtp(int frequency, uint32_t offset) const {
return frequency * time_now_ms_ / 1000 + offset;
}
@ -105,31 +106,36 @@ class StreamSynchronizationTest : public ::testing::Test {
// Generate NTP/RTP timestamp pair for both streams corresponding to RTCP.
RtcpMeasurement rtcp =
send_time_->GenerateRtcp(audio_frequency, audio_offset);
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp,
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(),
rtcp.ntp_time.fractions(), rtcp.rtp_timestamp,
&audio.rtcp, &new_sr));
send_time_->IncreaseTimeMs(100);
receive_time_->IncreaseTimeMs(100);
rtcp = send_time_->GenerateRtcp(video_frequency, video_offset);
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp,
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(),
rtcp.ntp_time.fractions(), rtcp.rtp_timestamp,
&video.rtcp, &new_sr));
send_time_->IncreaseTimeMs(900);
receive_time_->IncreaseTimeMs(900);
rtcp = send_time_->GenerateRtcp(audio_frequency, audio_offset);
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp,
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(),
rtcp.ntp_time.fractions(), rtcp.rtp_timestamp,
&audio.rtcp, &new_sr));
send_time_->IncreaseTimeMs(100);
receive_time_->IncreaseTimeMs(100);
rtcp = send_time_->GenerateRtcp(video_frequency, video_offset);
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp,
EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(),
rtcp.ntp_time.fractions(), rtcp.rtp_timestamp,
&video.rtcp, &new_sr));
send_time_->IncreaseTimeMs(900);
receive_time_->IncreaseTimeMs(900);
// Capture an audio and a video frame at the same time.
audio.latest_timestamp = send_time_->NowRtp(audio_frequency,
audio_offset);
video.latest_timestamp = send_time_->NowRtp(video_frequency,
video_offset);
audio.latest_timestamp =
send_time_->GetNowRtp(audio_frequency, audio_offset);
video.latest_timestamp =
send_time_->GetNowRtp(video_frequency, video_offset);
if (audio_delay_ms > video_delay_ms) {
// Audio later than video.