Propagate base minimum delay from video jitter buffer to webrtc/api.

At the API level, two methods, GetLatency and SetLatency, were added to VideoSourceInterface in
api/media_stream_interface.cc. Latency is measured in seconds and delay in milliseconds, but both
describe the same concept.
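
Illustrative sketch (not part of this change): how a latency set in seconds at the api level maps
to the base minimum playout delay in milliseconds, mirroring the conversion added in
pc/playout_latency.cc. The helper below is hypothetical and exists only to show the mapping.

  // Hypothetical helper; mirrors the conversion in PlayoutLatency::SetLatency.
  // Latency is expressed in seconds on the API surface and delay in
  // milliseconds inside the jitter buffer. A delay at or below 10 ms is
  // rounded to 0, which the jitter buffer treats as unconstrained.
  int LatencyToDelayMs(double latency_s) {
    constexpr int kRoundToZeroThresholdMs = 10;  // Same threshold as in this CL.
    int delay_ms = static_cast<int>(latency_s * 1000);
    if (delay_ms <= kRoundToZeroThresholdMs) {
      delay_ms = 0;
    }
    return delay_ms;
  }

  // Example: SetLatency(0.5) results in SetBaseMinimumPlayoutDelayMs(ssrc, 500)
  // on the media channel, while SetLatency(0.005) results in a delay of 0 ms.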


Bug: webrtc:10287
Change-Id: Ib8dc62a4d73f63fab7e10b82c716096ee6199957
Reviewed-on: https://webrtc-review.googlesource.com/c/123482
Commit-Queue: Ruslan Burakov <kuddai@google.com>
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Reviewed-by: Steve Anton <steveanton@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26877}
Ruslan Burakov 2019-02-27 15:32:48 +01:00 committed by Commit Bot
parent 48e7065ac6
commit 493a650b1e
32 changed files with 884 additions and 205 deletions

View File

@ -32,7 +32,7 @@ const cricket::AudioOptions AudioSourceInterface::options() const {
return {};
}
double AudioSourceInterface::GetLatency() const {
double MediaSourceInterface::GetLatency() const {
return 0.0;
}

View File

@ -61,6 +61,13 @@ class MediaSourceInterface : public rtc::RefCountInterface,
virtual bool remote() const = 0;
// Sets the minimum latency of the remote source until audio playout. Actual
// observed latency may differ depending on the source. |latency| is in the
// range of [0.0, 10.0] seconds.
// TODO(kuddai): Make pure virtual once latency is supported by more than just remote tracks.
virtual void SetLatency(double latency) {}
virtual double GetLatency() const;
protected:
~MediaSourceInterface() override = default;
};
@ -201,12 +208,6 @@ class AudioSourceInterface : public MediaSourceInterface {
// be applied in the track in a way that does not affect clones of the track.
virtual void SetVolume(double volume) {}
// Sets the minimum latency of the remote source until audio playout. Actual
// observered latency may differ depending on the source. |latency| is in the
// range of [0.0, 10.0] seconds.
virtual void SetLatency(double latency) {}
virtual double GetLatency() const;
// Registers/unregisters observers to the audio source.
virtual void RegisterAudioObserver(AudioObserver* observer) {}
virtual void UnregisterAudioObserver(AudioObserver* observer) {}

View File

@ -32,6 +32,8 @@ PROXY_WORKER_METHOD2(void,
rtc::VideoSinkInterface<VideoFrame>*,
const rtc::VideoSinkWants&)
PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
PROXY_WORKER_METHOD1(void, SetLatency, double)
PROXY_WORKER_CONSTMETHOD0(double, GetLatency)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY_MAP()

View File

@ -252,6 +252,15 @@ class VideoReceiveStream {
virtual std::vector<RtpSource> GetSources() const = 0;
// Sets a base minimum for the playout delay. The base minimum delay sets a
// lower bound on the minimum delay value, which in turn puts a lower bound on
// the playout delay.
//
// Returns true if the value was successfully set, false otherwise.
virtual bool SetBaseMinimumPlayoutDelayMs(int delay_ms) = 0;
// Returns current value of base minimum delay in milliseconds.
virtual int GetBaseMinimumPlayoutDelayMs() const = 0;
protected:
virtual ~VideoReceiveStream() {}
};

View File

@ -88,6 +88,7 @@ rtc_static_library("rtc_media_base") {
"base/audio_source.h",
"base/codec.cc",
"base/codec.h",
"base/delayable.h",
"base/media_channel.cc",
"base/media_channel.h",
"base/media_constants.cc",

media/base/delayable.h Normal file
View File

@ -0,0 +1,38 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MEDIA_BASE_DELAYABLE_H_
#define MEDIA_BASE_DELAYABLE_H_
#include <stdint.h>
#include "absl/types/optional.h"
namespace cricket {
// Delayable is used by user code through the ApplyConstraints algorithm. Its
// methods must take precedence over similar functionality in |syncable.h|.
class Delayable {
public:
virtual ~Delayable() {}
// Sets the base minimum delay of the receive stream with the specified SSRC.
// The base minimum delay sets a lower bound on the minimum delay value, which
// determines the minimum delay until audio playout.
// Returns false if there is no stream with the given SSRC.
virtual bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) = 0;
// Returns current value of base minimum delay in milliseconds.
virtual absl::optional<int> GetBaseMinimumPlayoutDelayMs(
uint32_t ssrc) const = 0;
};
} // namespace cricket
#endif // MEDIA_BASE_DELAYABLE_H_

View File

@ -341,12 +341,14 @@ bool FakeVideoMediaChannel::AddRecvStream(const StreamParams& sp) {
if (!RtpHelper<VideoMediaChannel>::AddRecvStream(sp))
return false;
sinks_[sp.first_ssrc()] = NULL;
output_delays_[sp.first_ssrc()] = 0;
return true;
}
bool FakeVideoMediaChannel::RemoveRecvStream(uint32_t ssrc) {
if (!RtpHelper<VideoMediaChannel>::RemoveRecvStream(ssrc))
return false;
sinks_.erase(ssrc);
output_delays_.erase(ssrc);
return true;
}
void FakeVideoMediaChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) {
@ -358,6 +360,23 @@ std::vector<webrtc::RtpSource> FakeVideoMediaChannel::GetSources(
uint32_t ssrc) const {
return {};
}
bool FakeVideoMediaChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc,
int delay_ms) {
if (output_delays_.find(ssrc) == output_delays_.end()) {
return false;
} else {
output_delays_[ssrc] = delay_ms;
return true;
}
}
absl::optional<int> FakeVideoMediaChannel::GetBaseMinimumPlayoutDelayMs(
uint32_t ssrc) const {
const auto it = output_delays_.find(ssrc);
if (it != output_delays_.end()) {
return it->second;
}
return absl::nullopt;
}
bool FakeVideoMediaChannel::SetRecvCodecs(
const std::vector<VideoCodec>& codecs) {
if (fail_set_recv_codecs()) {

View File

@ -441,6 +441,10 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const override;
bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override;
absl::optional<int> GetBaseMinimumPlayoutDelayMs(
uint32_t ssrc) const override;
private:
bool SetRecvCodecs(const std::vector<VideoCodec>& codecs);
bool SetSendCodecs(const std::vector<VideoCodec>& codecs);
@ -452,6 +456,7 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
std::vector<VideoCodec> send_codecs_;
std::map<uint32_t, rtc::VideoSinkInterface<webrtc::VideoFrame>*> sinks_;
std::map<uint32_t, rtc::VideoSourceInterface<webrtc::VideoFrame>*> sources_;
std::map<uint32_t, int> output_delays_;
VideoOptions options_;
int max_bps_;
};

View File

@ -32,6 +32,7 @@
#include "api/video/video_timing.h"
#include "api/video_codecs/video_encoder_config.h"
#include "media/base/codec.h"
#include "media/base/delayable.h"
#include "media/base/media_config.h"
#include "media/base/media_constants.h"
#include "media/base/stream_params.h"
@ -707,7 +708,7 @@ struct AudioSendParameters : RtpSendParameters<AudioCodec> {
struct AudioRecvParameters : RtpParameters<AudioCodec> {};
class VoiceMediaChannel : public MediaChannel {
class VoiceMediaChannel : public MediaChannel, public Delayable {
public:
VoiceMediaChannel() {}
explicit VoiceMediaChannel(const MediaConfig& config)
@ -738,13 +739,6 @@ class VoiceMediaChannel : public MediaChannel {
AudioSource* source) = 0;
// Set speaker output volume of the specified ssrc.
virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0;
// Set base minimum delay of the receive stream with specified ssrc.
// Base minimum delay sets lower bound on minimum delay value which
// determines minimum delay until audio playout.
// Returns false if there is no stream with given ssrc.
virtual bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) = 0;
virtual absl::optional<int> GetBaseMinimumPlayoutDelayMs(
uint32_t ssrc) const = 0;
// Returns if the telephone-event has been negotiated.
virtual bool CanInsertDtmf() = 0;
// Send a DTMF |event|. The DTMF out-of-band signal will be used.
@ -783,7 +777,7 @@ struct VideoSendParameters : RtpSendParameters<VideoCodec> {
// encapsulate all the parameters needed for a video RtpReceiver.
struct VideoRecvParameters : RtpParameters<VideoCodec> {};
class VideoMediaChannel : public MediaChannel {
class VideoMediaChannel : public MediaChannel, public Delayable {
public:
VideoMediaChannel() {}
explicit VideoMediaChannel(const MediaConfig& config)

View File

@ -410,6 +410,15 @@ const std::vector<FakeVideoReceiveStream*>& FakeCall::GetVideoReceiveStreams() {
return video_receive_streams_;
}
const FakeVideoReceiveStream* FakeCall::GetVideoReceiveStream(uint32_t ssrc) {
for (const auto* p : GetVideoReceiveStreams()) {
if (p->GetConfig().rtp.remote_ssrc == ssrc) {
return p;
}
}
return nullptr;
}
const std::vector<FakeAudioSendStream*>& FakeCall::GetAudioSendStreams() {
return audio_send_streams_;
}

View File

@ -222,6 +222,10 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream {
return std::vector<webrtc::RtpSource>();
}
int base_mininum_playout_delay_ms() const {
return base_mininum_playout_delay_ms_;
}
private:
// webrtc::VideoReceiveStream implementation.
void Start() override;
@ -229,10 +233,21 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream {
webrtc::VideoReceiveStream::Stats GetStats() const override;
bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override {
base_mininum_playout_delay_ms_ = delay_ms;
return true;
}
int GetBaseMinimumPlayoutDelayMs() const override {
return base_mininum_playout_delay_ms_;
}
webrtc::VideoReceiveStream::Config config_;
bool receiving_;
webrtc::VideoReceiveStream::Stats stats_;
int base_mininum_playout_delay_ms_ = 0;
int num_added_secondary_sinks_;
int num_removed_secondary_sinks_;
};
@ -268,6 +283,7 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
const FakeAudioSendStream* GetAudioSendStream(uint32_t ssrc);
const std::vector<FakeAudioReceiveStream*>& GetAudioReceiveStreams();
const FakeAudioReceiveStream* GetAudioReceiveStream(uint32_t ssrc);
const FakeVideoReceiveStream* GetVideoReceiveStream(uint32_t ssrc);
const std::vector<FakeFlexfecReceiveStream*>& GetFlexfecReceiveStreams();

View File

@ -418,10 +418,17 @@ UnsignalledSsrcHandler::Action DefaultUnsignalledSsrcHandler::OnUnsignalledSsrc(
RTC_LOG(LS_INFO) << "Creating default receive stream for SSRC=" << ssrc
<< ".";
if (!channel->AddRecvStream(sp, true)) {
if (!channel->AddRecvStream(sp, /*default_stream=*/true)) {
RTC_LOG(LS_WARNING) << "Could not create default receive stream.";
}
// SSRC 0 returns default_recv_base_minimum_delay_ms.
const int unsignaled_ssrc = 0;
int default_recv_base_minimum_delay_ms =
channel->GetBaseMinimumPlayoutDelayMs(unsignaled_ssrc).value_or(0);
// Set base minimum delay if it was set before for the default receive stream.
channel->SetBaseMinimumPlayoutDelayMs(ssrc,
default_recv_base_minimum_delay_ms);
channel->SetSink(ssrc, default_sink_);
return kDeliverPacket;
}
@ -1497,6 +1504,51 @@ void WebRtcVideoChannel::SetFrameEncryptor(
}
}
bool WebRtcVideoChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc,
int delay_ms) {
absl::optional<uint32_t> default_ssrc = GetDefaultReceiveStreamSsrc();
rtc::CritScope stream_lock(&stream_crit_);
// SSRC of 0 represents the default receive stream.
if (ssrc == 0) {
default_recv_base_minimum_delay_ms_ = delay_ms;
}
if (ssrc == 0 && !default_ssrc) {
return true;
}
if (ssrc == 0 && default_ssrc) {
ssrc = default_ssrc.value();
}
auto stream = receive_streams_.find(ssrc);
if (stream != receive_streams_.end()) {
stream->second->SetBaseMinimumPlayoutDelayMs(delay_ms);
return true;
} else {
RTC_LOG(LS_ERROR) << "No stream found to set base minimum playout delay";
return false;
}
}
absl::optional<int> WebRtcVideoChannel::GetBaseMinimumPlayoutDelayMs(
uint32_t ssrc) const {
rtc::CritScope stream_lock(&stream_crit_);
// SSRC of 0 represents the default receive stream.
if (ssrc == 0) {
return default_recv_base_minimum_delay_ms_;
}
auto stream = receive_streams_.find(ssrc);
if (stream != receive_streams_.end()) {
return stream->second->GetBaseMinimumPlayoutDelayMs();
} else {
RTC_LOG(LS_ERROR) << "No stream found to get base minimum playout delay";
return absl::nullopt;
}
}
absl::optional<uint32_t> WebRtcVideoChannel::GetDefaultReceiveStreamSsrc() {
rtc::CritScope stream_lock(&stream_crit_);
absl::optional<uint32_t> ssrc;
@ -2386,7 +2438,9 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters(
}
void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() {
absl::optional<int> base_minimum_playout_delay_ms;
if (stream_) {
base_minimum_playout_delay_ms = stream_->GetBaseMinimumPlayoutDelayMs();
MaybeDissociateFlexfecFromVideo();
call_->DestroyVideoReceiveStream(stream_);
stream_ = nullptr;
@ -2395,6 +2449,10 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() {
config.rtp.protected_by_flexfec = (flexfec_stream_ != nullptr);
config.stream_id = stream_params_.id;
stream_ = call_->CreateVideoReceiveStream(std::move(config));
if (base_minimum_playout_delay_ms) {
stream_->SetBaseMinimumPlayoutDelayMs(
base_minimum_playout_delay_ms.value());
}
MaybeAssociateFlexfecWithVideo();
stream_->Start();
}
@ -2457,6 +2515,16 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFrameDecryptor(
}
}
bool WebRtcVideoChannel::WebRtcVideoReceiveStream::SetBaseMinimumPlayoutDelayMs(
int delay_ms) {
return stream_ ? stream_->SetBaseMinimumPlayoutDelayMs(delay_ms) : false;
}
int WebRtcVideoChannel::WebRtcVideoReceiveStream::GetBaseMinimumPlayoutDelayMs()
const {
return stream_ ? stream_->GetBaseMinimumPlayoutDelayMs() : 0;
}
void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetSink(
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
rtc::CritScope crit(&sink_lock_);

View File

@ -170,6 +170,11 @@ class WebRtcVideoChannel : public VideoMediaChannel, public webrtc::Transport {
rtc::scoped_refptr<webrtc::FrameEncryptorInterface>
frame_encryptor) override;
bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override;
absl::optional<int> GetBaseMinimumPlayoutDelayMs(
uint32_t ssrc) const override;
// Implemented for VideoMediaChannelTest.
bool sending() const { return sending_; }
@ -393,6 +398,10 @@ class WebRtcVideoChannel : public VideoMediaChannel, public webrtc::Transport {
void SetFrameDecryptor(
rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor);
bool SetBaseMinimumPlayoutDelayMs(int delay_ms);
int GetBaseMinimumPlayoutDelayMs() const;
void SetSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink);
VideoReceiverInfo GetVideoReceiverInfo(bool log_stats);
@ -470,6 +479,9 @@ class WebRtcVideoChannel : public VideoMediaChannel, public webrtc::Transport {
DefaultUnsignalledSsrcHandler default_unsignalled_ssrc_handler_;
UnsignalledSsrcHandler* const unsignalled_ssrc_handler_;
// Delay for unsignaled streams, which may be set before the stream exists.
int default_recv_base_minimum_delay_ms_ = 0;
const MediaConfig::Video video_config_;
rtc::CriticalSection stream_crit_;

View File

@ -5012,6 +5012,56 @@ TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) {
fake_call_->GetVideoReceiveStreams()[0]->GetConfig().sync_group.empty());
}
// Test BaseMinimumPlayoutDelayMs on receive streams.
TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMs) {
// Test that set won't work for non-existing receive streams.
EXPECT_FALSE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc + 2, 200));
// Test that get won't work for non-existing receive streams.
EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrc + 2));
EXPECT_TRUE(AddRecvStream());
// Test that set works for the existing receive stream.
EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(last_ssrc_, 200));
auto* recv_stream = fake_call_->GetVideoReceiveStream(last_ssrc_);
EXPECT_TRUE(recv_stream);
EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 200);
EXPECT_EQ(channel_->GetBaseMinimumPlayoutDelayMs(last_ssrc_).value_or(0),
200);
}
// Test BaseMinimumPlayoutDelayMs on unsignaled receive streams.
TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMsUnsignaledRecvStream) {
absl::optional<int> delay_ms;
const FakeVideoReceiveStream* recv_stream;
// Set default stream with SSRC 0
EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(0, 200));
EXPECT_EQ(200, channel_->GetBaseMinimumPlayoutDelayMs(0).value_or(0));
// Spawn an unsignaled stream by sending a packet; it should inherit the
// default delay of 200.
const size_t kDataLength = 12;
uint8_t data[kDataLength];
memset(data, 0, sizeof(data));
rtc::SetBE32(&data[8], kIncomingUnsignalledSsrc);
rtc::CopyOnWriteBuffer packet(data, kDataLength);
channel_->OnPacketReceived(&packet, /* packet_time_us */ -1);
recv_stream = fake_call_->GetVideoReceiveStream(kIncomingUnsignalledSsrc);
EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 200);
delay_ms = channel_->GetBaseMinimumPlayoutDelayMs(kIncomingUnsignalledSsrc);
EXPECT_EQ(200, delay_ms.value_or(0));
// Check that now if we change delay for SSRC 0 it will change delay for the
// default receiving stream as well.
EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(0, 300));
EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(0).value_or(0));
delay_ms = channel_->GetBaseMinimumPlayoutDelayMs(kIncomingUnsignalledSsrc);
EXPECT_EQ(300, delay_ms.value_or(0));
recv_stream = fake_call_->GetVideoReceiveStream(kIncomingUnsignalledSsrc);
EXPECT_EQ(recv_stream->base_mininum_playout_delay_ms(), 300);
}
void WebRtcVideoChannelTest::TestReceiveUnsignaledSsrcPacket(
uint8_t payload_type,
bool expect_created_receive_stream) {

View File

@ -347,19 +347,6 @@ bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
return true;
}
void FrameBuffer::UpdatePlayoutDelays(const EncodedFrame& frame) {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdatePlayoutDelays");
PlayoutDelay playout_delay = frame.EncodedImage().playout_delay_;
if (playout_delay.min_ms >= 0)
timing_->set_min_playout_delay(playout_delay.min_ms);
if (playout_delay.max_ms >= 0)
timing_->set_max_playout_delay(playout_delay.max_ms);
if (!frame.delayed_by_retransmission())
timing_->IncomingTimestamp(frame.Timestamp(), frame.ReceivedTime());
}
int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
RTC_DCHECK(frame);
@ -449,7 +436,9 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
return last_continuous_picture_id;
UpdatePlayoutDelays(*frame);
if (!frame->delayed_by_retransmission())
timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime());
info->second.frame = std::move(frame);

View File

@ -118,11 +118,6 @@ class FrameBuffer {
// Check that the references of |frame| are valid.
bool ValidReferences(const EncodedFrame& frame) const;
// Updates the minimal and maximal playout delays
// depending on the frame.
void UpdatePlayoutDelays(const EncodedFrame& frame)
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Update all directly dependent and indirectly dependent frames and mark
// them as continuous if all their references has been fulfilled.
void PropagateContinuity(FrameMap::iterator start)

View File

@ -271,16 +271,6 @@ TEST_F(TestFrameBuffer2, OneSuperFrame) {
CheckFrame(0, pid, 1);
}
TEST_F(TestFrameBuffer2, SetPlayoutDelay) {
const PlayoutDelay kPlayoutDelayMs = {123, 321};
std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
test_frame->id.picture_id = 0;
test_frame->SetPlayoutDelay(kPlayoutDelayMs);
buffer_->InsertFrame(std::move(test_frame));
EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_.min_playout_delay());
EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_.max_playout_delay());
}
TEST_F(TestFrameBuffer2, ZeroPlayoutDelay) {
VCMTiming timing(&clock_);
buffer_.reset(

View File

@ -151,6 +151,10 @@ rtc_static_library("peerconnection") {
"peer_connection_factory.cc",
"peer_connection_factory.h",
"peer_connection_internal.h",
"playout_latency.cc",
"playout_latency.h",
"playout_latency_interface.h",
"playout_latency_proxy.h",
"remote_audio_source.cc",
"remote_audio_source.h",
"rtc_stats_collector.cc",
@ -396,6 +400,7 @@ if (rtc_include_tests) {
"test/frame_generator_capturer_video_track_source.h",
"test/mock_channel_interface.h",
"test/mock_data_channel.h",
"test/mock_delayable.h",
"test/mock_peer_connection_observers.h",
"test/mock_rtp_receiver_internal.h",
"test/mock_rtp_sender_internal.h",
@ -476,6 +481,7 @@ if (rtc_include_tests) {
"peer_connection_simulcast_unittest.cc",
"peer_connection_wrapper.cc",
"peer_connection_wrapper.h",
"playout_latency_unittest.cc",
"proxy_unittest.cc",
"rtc_stats_collector_unittest.cc",
"rtc_stats_integrationtest.cc",

pc/playout_latency.cc Normal file
View File

@ -0,0 +1,87 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "pc/playout_latency.h"
#include "iostream"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_checker.h"
namespace {
constexpr int kDefaultLatency = 0;
constexpr int kRoundToZeroThresholdMs = 10;
} // namespace
namespace webrtc {
PlayoutLatency::PlayoutLatency(rtc::Thread* worker_thread)
: signaling_thread_(rtc::Thread::Current()), worker_thread_(worker_thread) {
RTC_DCHECK(worker_thread_);
}
void PlayoutLatency::OnStart(cricket::Delayable* media_channel, uint32_t ssrc) {
RTC_DCHECK_RUN_ON(signaling_thread_);
media_channel_ = media_channel;
ssrc_ = ssrc;
// Trying to apply cached latency for the audio stream.
if (cached_latency_) {
SetLatency(cached_latency_.value());
}
}
void PlayoutLatency::OnStop() {
RTC_DCHECK_RUN_ON(signaling_thread_);
// Assume that audio stream is no longer present for latency calls.
media_channel_ = nullptr;
ssrc_ = absl::nullopt;
}
void PlayoutLatency::SetLatency(double latency) {
RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK_GE(latency, 0);
RTC_DCHECK_LE(latency, 10);
int delay_ms = rtc::dchecked_cast<int>(latency * 1000);
// In the jitter buffer, a delay of 0 has the special meaning of an
// unconstrained value, which is why we round the delay down to 0 if it is
// small enough when converting from latency.
if (delay_ms <= kRoundToZeroThresholdMs) {
delay_ms = 0;
}
cached_latency_ = latency;
if (media_channel_ && ssrc_) {
media_channel_->SetBaseMinimumPlayoutDelayMs(ssrc_.value(), delay_ms);
}
}
double PlayoutLatency::GetLatency() const {
RTC_DCHECK_RUN_ON(worker_thread_);
absl::optional<int> delay_ms;
if (media_channel_ && ssrc_) {
delay_ms = media_channel_->GetBaseMinimumPlayoutDelayMs(ssrc_.value());
}
if (delay_ms) {
return delay_ms.value() / 1000.0;
} else {
return cached_latency_.value_or(kDefaultLatency);
}
}
} // namespace webrtc

pc/playout_latency.h Normal file
View File

@ -0,0 +1,53 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PC_PLAYOUT_LATENCY_H_
#define PC_PLAYOUT_LATENCY_H_
#include <stdint.h>
#include "absl/types/optional.h"
#include "media/base/delayable.h"
#include "pc/playout_latency_interface.h"
#include "rtc_base/thread.h"
namespace webrtc {
// PlayoutLatency converts latency measured in seconds into delay measured in
// milliseconds for the underlying media channel. It also handles the case
// where the user sets the latency before the media channel has started, by
// caching the request. Note that this class is not thread safe; its
// thread-safe version is defined in pc/playout_latency_proxy.h.
class PlayoutLatency : public PlayoutLatencyInterface {
public:
// Must be called on signaling thread.
explicit PlayoutLatency(rtc::Thread* worker_thread);
void OnStart(cricket::Delayable* media_channel, uint32_t ssrc) override;
void OnStop() override;
void SetLatency(double latency) override;
double GetLatency() const override;
private:
// Throughout the WebRTC source, this is sometimes also referred to as |main_thread_|.
rtc::Thread* const signaling_thread_;
rtc::Thread* const worker_thread_;
// Media channel and ssrc together uniquely identify the audio stream.
cricket::Delayable* media_channel_ = nullptr;
absl::optional<uint32_t> ssrc_;
absl::optional<double> cached_latency_;
};
} // namespace webrtc
#endif // PC_PLAYOUT_LATENCY_H_

View File

@ -0,0 +1,43 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PC_PLAYOUT_LATENCY_INTERFACE_H_
#define PC_PLAYOUT_LATENCY_INTERFACE_H_
#include <stdint.h>
#include "media/base/delayable.h"
#include "rtc_base/ref_count.h"
namespace webrtc {
// PlayoutLatency delivers the user's latency queries to the underlying media
// channel. It can describe either video or audio latency for a receiving
// stream. The "Interface" suffix in the interface name is required for
// compatibility with api/proxy.cc.
class PlayoutLatencyInterface : public rtc::RefCountInterface {
public:
// OnStart uniquely identifies, through the |media_channel| and |ssrc| pair,
// the receiving stream to which the playout latency must correspond.
virtual void OnStart(cricket::Delayable* media_channel, uint32_t ssrc) = 0;
// Indicates that underlying receiving stream is stopped.
virtual void OnStop() = 0;
// Sets latency in seconds.
virtual void SetLatency(double latency) = 0;
// Returns latency in seconds.
virtual double GetLatency() const = 0;
};
} // namespace webrtc
#endif // PC_PLAYOUT_LATENCY_INTERFACE_H_

View File

@ -0,0 +1,32 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PC_PLAYOUT_LATENCY_PROXY_H_
#define PC_PLAYOUT_LATENCY_PROXY_H_
#include <stdint.h>
#include "api/proxy.h"
#include "media/base/delayable.h"
#include "pc/playout_latency_interface.h"
namespace webrtc {
BEGIN_PROXY_MAP(PlayoutLatency)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_METHOD2(void, OnStart, cricket::Delayable*, uint32_t)
PROXY_METHOD0(void, OnStop)
PROXY_WORKER_METHOD1(void, SetLatency, double)
PROXY_WORKER_CONSTMETHOD0(double, GetLatency)
END_PROXY_MAP()
} // namespace webrtc
#endif // PC_PLAYOUT_LATENCY_PROXY_H_

View File

@ -0,0 +1,103 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdint.h>
#include "absl/types/optional.h"
#include "api/scoped_refptr.h"
#include "pc/playout_latency.h"
#include "pc/test/mock_delayable.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/thread.h"
#include "test/gmock.h"
#include "test/gtest.h"
using ::testing::Return;
namespace {
constexpr int kSsrc = 1234;
} // namespace
namespace webrtc {
class PlayoutLatencyTest : public testing::Test {
public:
PlayoutLatencyTest()
: latency_(
new rtc::RefCountedObject<PlayoutLatency>(rtc::Thread::Current())) {
}
protected:
rtc::scoped_refptr<PlayoutLatencyInterface> latency_;
MockDelayable delayable_;
};
TEST_F(PlayoutLatencyTest, DefaultValue) {
EXPECT_DOUBLE_EQ(0.0, latency_->GetLatency());
}
TEST_F(PlayoutLatencyTest, GetLatency) {
latency_->OnStart(&delayable_, kSsrc);
EXPECT_CALL(delayable_, GetBaseMinimumPlayoutDelayMs(kSsrc))
.WillOnce(Return(2000));
// Latency in seconds.
EXPECT_DOUBLE_EQ(2.0, latency_->GetLatency());
EXPECT_CALL(delayable_, GetBaseMinimumPlayoutDelayMs(kSsrc))
.WillOnce(Return(absl::nullopt));
// When no value is returned by GetBaseMinimumPlayoutDelayMs and there is no
// cached value, the default value is returned.
EXPECT_DOUBLE_EQ(0.0, latency_->GetLatency());
}
TEST_F(PlayoutLatencyTest, SetLatency) {
latency_->OnStart(&delayable_, kSsrc);
EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 3000))
.WillOnce(Return(true));
// Latency in seconds.
latency_->SetLatency(3.0);
}
TEST_F(PlayoutLatencyTest, Caching) {
// Check that value is cached before start.
latency_->SetLatency(4.0);
// Latency in seconds.
EXPECT_DOUBLE_EQ(4.0, latency_->GetLatency());
// Check that the cached value is applied on start.
EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 4000))
.WillOnce(Return(true));
latency_->OnStart(&delayable_, kSsrc);
EXPECT_CALL(delayable_, GetBaseMinimumPlayoutDelayMs(kSsrc))
.WillOnce(Return(absl::nullopt));
// When the channel returns no value, the latest cached value is returned.
EXPECT_DOUBLE_EQ(4.0, latency_->GetLatency());
latency_->OnStop();
// Check that after stop it returns last cached value.
EXPECT_DOUBLE_EQ(4.0, latency_->GetLatency());
}
TEST_F(PlayoutLatencyTest, Rounding) {
latency_->OnStart(&delayable_, kSsrc);
// In the jitter buffer (audio or video), a delay of 0 has the special
// meaning of an unconstrained value, which is why a small enough latency is
// rounded down to a delay of 0.
EXPECT_CALL(delayable_, SetBaseMinimumPlayoutDelayMs(kSsrc, 0))
.WillOnce(Return(true));
latency_->SetLatency(0.005);
}
} // namespace webrtc

View File

@ -16,6 +16,8 @@
#include "absl/algorithm/container.h"
#include "absl/memory/memory.h"
#include "api/scoped_refptr.h"
#include "pc/playout_latency.h"
#include "pc/playout_latency_proxy.h"
#include "rtc_base/checks.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/location.h"
@ -26,11 +28,6 @@
namespace webrtc {
namespace {
constexpr int kDefaultLatency = 0;
constexpr int kRoundToZeroThresholdMs = 10;
} // namespace
// This proxy is passed to the underlying media engine to receive audio data as
// they come in. The data will then be passed back up to the RemoteAudioSource
// which will fan it out to all the sinks that have been added to it.
@ -55,7 +52,11 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface {
RemoteAudioSource::RemoteAudioSource(rtc::Thread* worker_thread)
: main_thread_(rtc::Thread::Current()),
worker_thread_(worker_thread),
state_(MediaSourceInterface::kLive) {
state_(MediaSourceInterface::kLive),
latency_(PlayoutLatencyProxy::Create(
main_thread_,
worker_thread_,
new rtc::RefCountedObject<PlayoutLatency>(worker_thread))) {
RTC_DCHECK(main_thread_);
RTC_DCHECK(worker_thread_);
}
@ -70,12 +71,6 @@ void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel,
uint32_t ssrc) {
RTC_DCHECK_RUN_ON(main_thread_);
RTC_DCHECK(media_channel);
// Check that there are no consecutive start calls.
RTC_DCHECK(!media_channel_ && !ssrc_);
// Remember media channel ssrc pair for latency calls.
media_channel_ = media_channel;
ssrc_ = ssrc;
// Register for callbacks immediately before AddSink so that we always get
// notified when a channel goes out of scope (signaled when "AudioDataProxy"
@ -85,10 +80,8 @@ void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel,
absl::make_unique<AudioDataProxy>(this));
});
// Trying to apply cached latency for the audio stream.
if (cached_latency_) {
SetLatency(cached_latency_.value());
}
// Apply latency to the audio stream if |SetLatency| was called before.
latency_->OnStart(media_channel, ssrc);
}
void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel,
@ -96,9 +89,7 @@ void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel,
RTC_DCHECK_RUN_ON(main_thread_);
RTC_DCHECK(media_channel);
// Assume that audio stream is no longer present for latency calls.
media_channel_ = nullptr;
ssrc_ = absl::nullopt;
latency_->OnStop();
worker_thread_->Invoke<void>(
RTC_FROM_HERE, [&] { media_channel->SetRawAudioSink(ssrc, nullptr); });
@ -123,50 +114,11 @@ void RemoteAudioSource::SetVolume(double volume) {
}
void RemoteAudioSource::SetLatency(double latency) {
RTC_DCHECK_GE(latency, 0);
RTC_DCHECK_LE(latency, 10);
int delay_ms = rtc::dchecked_cast<int>(latency * 1000);
// In NetEq 0 delay has special meaning of being unconstrained value that is
// why we round delay to 0 if it is small enough during conversion from
// latency.
if (delay_ms <= kRoundToZeroThresholdMs) {
delay_ms = 0;
}
cached_latency_ = latency;
SetDelayMs(delay_ms);
latency_->SetLatency(latency);
}
double RemoteAudioSource::GetLatency() const {
absl::optional<int> delay_ms = GetDelayMs();
if (delay_ms) {
return delay_ms.value() / 1000.0;
} else {
return cached_latency_.value_or(kDefaultLatency);
}
}
bool RemoteAudioSource::SetDelayMs(int delay_ms) {
if (!media_channel_ || !ssrc_) {
return false;
}
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
media_channel_->SetBaseMinimumPlayoutDelayMs(ssrc_.value(), delay_ms);
});
return true;
}
absl::optional<int> RemoteAudioSource::GetDelayMs() const {
if (!media_channel_ || !ssrc_) {
return absl::nullopt;
}
return worker_thread_->Invoke<absl::optional<int>>(RTC_FROM_HERE, [&] {
return media_channel_->GetBaseMinimumPlayoutDelayMs(ssrc_.value());
});
return latency_->GetLatency();
}
void RemoteAudioSource::RegisterAudioObserver(AudioObserver* observer) {

View File

@ -17,6 +17,7 @@
#include "api/call/audio_sink.h"
#include "api/notifier.h"
#include "pc/channel.h"
#include "pc/playout_latency_interface.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/message_handler.h"
@ -65,19 +66,15 @@ class RemoteAudioSource : public Notifier<AudioSourceInterface>,
void OnMessage(rtc::Message* msg) override;
bool SetDelayMs(int delay_ms);
absl::optional<int> GetDelayMs() const;
rtc::Thread* const main_thread_;
rtc::Thread* const worker_thread_;
std::list<AudioObserver*> audio_observers_;
rtc::CriticalSection sink_lock_;
std::list<AudioTrackSinkInterface*> sinks_;
SourceState state_;
// Media channel and ssrc together uniqely identify audio stream.
cricket::VoiceMediaChannel* media_channel_ = nullptr;
absl::optional<uint32_t> ssrc_;
absl::optional<double> cached_latency_;
// Allows thread-safe changes to the playout latency. Handles caching if
// |SetLatency| is called before start.
rtc::scoped_refptr<PlayoutLatencyInterface> latency_;
};
} // namespace webrtc

View File

@ -456,6 +456,41 @@ class RtpSenderReceiverTest
RunSetLastLayerAsInactiveTest(video_rtp_sender_.get());
}
void VerifyTrackLatencyBehaviour(cricket::Delayable* media_channel,
MediaStreamTrackInterface* track,
MediaSourceInterface* source,
uint32_t ssrc) {
absl::optional<int> delay_ms; // In milliseconds.
double latency_s = 0.5; // In seconds.
source->SetLatency(latency_s);
delay_ms = media_channel->GetBaseMinimumPlayoutDelayMs(ssrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// Disabling the track should have no effect on the previously set value.
track->set_enabled(false);
delay_ms = media_channel->GetBaseMinimumPlayoutDelayMs(ssrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// When the track is disabled, we still should be able to set latency.
latency_s = 0.3;
source->SetLatency(latency_s);
delay_ms = media_channel->GetBaseMinimumPlayoutDelayMs(ssrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// Enabling the track should have no effect on the previously set value.
track->set_enabled(true);
delay_ms = media_channel->GetBaseMinimumPlayoutDelayMs(ssrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// We still should be able to change latency.
latency_s = 0.0;
source->SetLatency(latency_s);
delay_ms = media_channel->GetBaseMinimumPlayoutDelayMs(ssrc);
EXPECT_EQ(0, delay_ms.value_or(-1));
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
}
protected:
rtc::Thread* const network_thread_;
rtc::Thread* const worker_thread_;
@ -636,101 +671,36 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
DestroyAudioRtpReceiver();
}
TEST_F(RtpSenderReceiverTest, RemoteAudioSourceLatencyCaching) {
TEST_F(RtpSenderReceiverTest, RemoteAudioSourceLatency) {
absl::optional<int> delay_ms; // In milliseconds.
double latency_s = 0.5; // In seconds.
rtc::scoped_refptr<RemoteAudioSource> source =
new rtc::RefCountedObject<RemoteAudioSource>(rtc::Thread::Current());
// Check default value.
EXPECT_DOUBLE_EQ(source->GetLatency(), 0.0);
// Check caching behaviour.
source->SetLatency(latency_s);
EXPECT_DOUBLE_EQ(source->GetLatency(), latency_s);
// Check that cached value applied on the start.
source->Start(voice_media_channel_, kAudioSsrc);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// Check that setting latency changes delay.
latency_s = 0.8;
source->SetLatency(latency_s);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
EXPECT_DOUBLE_EQ(latency_s, source->GetLatency());
// Check that if underlying delay is changed then remote source will reflect
// it.
delay_ms = 300;
voice_media_channel_->SetBaseMinimumPlayoutDelayMs(kAudioSsrc,
delay_ms.value());
EXPECT_DOUBLE_EQ(source->GetLatency(), delay_ms.value() / 1000.0);
// Check that after stop we get last cached value.
source->Stop(voice_media_channel_, kAudioSsrc);
EXPECT_DOUBLE_EQ(latency_s, source->GetLatency());
// Check that if we start source again with new ssrc then cached value is
// applied.
source->Start(voice_media_channel_, kAudioSsrc2);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc2);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// Check rounding behavior.
source->SetLatency(2 / 1000.0);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc2);
EXPECT_EQ(0, delay_ms.value_or(-1));
EXPECT_DOUBLE_EQ(0, source->GetLatency());
}
TEST_F(RtpSenderReceiverTest, RemoteAudioSourceLatencyNoCaching) {
int delay_ms = 300; // In milliseconds.
rtc::scoped_refptr<RemoteAudioSource> source =
new rtc::RefCountedObject<RemoteAudioSource>(rtc::Thread::Current());
// Set it to value different from default zero.
voice_media_channel_->SetBaseMinimumPlayoutDelayMs(kAudioSsrc, delay_ms);
voice_media_channel_->SetBaseMinimumPlayoutDelayMs(kAudioSsrc, 300);
// Check that calling GetLatency on the source that hasn't been started yet
// won't trigger caching.
// won't trigger caching and will return the default value.
EXPECT_DOUBLE_EQ(source->GetLatency(), 0);
// Check that cached latency will be applied on start.
source->SetLatency(0.4);
EXPECT_DOUBLE_EQ(source->GetLatency(), 0.4);
source->Start(voice_media_channel_, kAudioSsrc);
EXPECT_DOUBLE_EQ(source->GetLatency(), delay_ms / 1000.0);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_EQ(400, delay_ms);
}
TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetLatency) {
TEST_F(RtpSenderReceiverTest, RemoteAudioTrackLatency) {
CreateAudioRtpReceiver();
VerifyTrackLatencyBehaviour(voice_media_channel_, audio_track_.get(),
audio_track_->GetSource(), kAudioSsrc);
}
absl::optional<int> delay_ms; // In milliseconds.
double latency_s = 0.5; // In seconds.
audio_track_->GetSource()->SetLatency(latency_s);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// Disabling the track should take no effect on previously set value.
audio_track_->set_enabled(false);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// When the track is disabled, we still should be able to set latency.
latency_s = 0.3;
audio_track_->GetSource()->SetLatency(latency_s);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// Enabling the track should take no effect on previously set value.
audio_track_->set_enabled(true);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
// We still should be able to change latency.
latency_s = 0.0;
audio_track_->GetSource()->SetLatency(latency_s);
delay_ms = voice_media_channel_->GetBaseMinimumPlayoutDelayMs(kAudioSsrc);
EXPECT_EQ(0, delay_ms.value_or(-1));
EXPECT_DOUBLE_EQ(latency_s, delay_ms.value_or(0) / 1000.0);
TEST_F(RtpSenderReceiverTest, RemoteVideoTrackLatency) {
CreateVideoRtpReceiver();
VerifyTrackLatencyBehaviour(video_media_channel_, video_track_.get(),
video_track_->GetSource(), kVideoSsrc);
}
// Test that the media channel isn't enabled for sending if the audio sender

pc/test/mock_delayable.h Normal file
View File

@ -0,0 +1,31 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PC_TEST_MOCK_DELAYABLE_H_
#define PC_TEST_MOCK_DELAYABLE_H_
#include <stdint.h>
#include "absl/types/optional.h"
#include "media/base/delayable.h"
#include "test/gmock.h"
namespace webrtc {
class MockDelayable : public cricket::Delayable {
public:
MOCK_METHOD2(SetBaseMinimumPlayoutDelayMs, bool(uint32_t ssrc, int delay_ms));
MOCK_CONST_METHOD1(GetBaseMinimumPlayoutDelayMs,
absl::optional<int>(uint32_t ssrc));
};
} // namespace webrtc
#endif // PC_TEST_MOCK_DELAYABLE_H_

View File

@ -39,7 +39,7 @@ VideoRtpReceiver::VideoRtpReceiver(
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams)
: worker_thread_(worker_thread),
id_(receiver_id),
source_(new RefCountedObject<VideoRtpTrackSource>()),
source_(new RefCountedObject<VideoRtpTrackSource>(worker_thread_)),
track_(VideoTrackProxy::Create(
rtc::Thread::Current(),
worker_thread,
@ -123,6 +123,7 @@ void VideoRtpReceiver::Stop() {
// media channel has already been deleted.
SetSink(nullptr);
}
source_->Stop();
stopped_ = true;
}
@ -142,6 +143,8 @@ void VideoRtpReceiver::SetupMediaChannel(uint32_t ssrc) {
// Attach any existing frame decryptor to the media channel.
MaybeAttachFrameDecryptorToMediaChannel(
ssrc_, worker_thread_, frame_decryptor_, media_channel_, stopped_);
source_->Start(media_channel_, ssrc);
}
void VideoRtpReceiver::set_stream_ids(std::vector<std::string> stream_ids) {

View File

@ -27,6 +27,8 @@
#include "api/video/video_source_interface.h"
#include "media/base/media_channel.h"
#include "media/base/video_broadcaster.h"
#include "pc/playout_latency.h"
#include "pc/playout_latency_proxy.h"
#include "pc/rtp_receiver.h"
#include "pc/video_track_source.h"
#include "rtc_base/ref_counted_object.h"
@ -37,7 +39,7 @@ namespace webrtc {
class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal> {
public:
// An SSRC of 0 will create a receiver that will match the first SSRC it
// sees.
// sees. Must be called on signaling thread.
VideoRtpReceiver(rtc::Thread* worker_thread,
std::string receiver_id,
std::vector<std::string> streams_ids);
@ -103,23 +105,42 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal> {
std::vector<RtpSource> GetSources() const override;
private:
class VideoRtpTrackSource : public VideoTrackSource {
public:
VideoRtpTrackSource() : VideoTrackSource(true /* remote */) {}
explicit VideoRtpTrackSource(rtc::Thread* worker_thread)
: VideoTrackSource(true /* remote */),
latency_(PlayoutLatencyProxy::Create(
rtc::Thread::Current(),
worker_thread,
new rtc::RefCountedObject<PlayoutLatency>(worker_thread))) {}
rtc::VideoSourceInterface<VideoFrame>* source() override {
return &broadcaster_;
}
rtc::VideoSinkInterface<VideoFrame>* sink() { return &broadcaster_; }
void SetLatency(double latency) override { latency_->SetLatency(latency); }
void Start(cricket::VideoMediaChannel* media_channel, uint32_t ssrc) {
latency_->OnStart(media_channel, ssrc);
}
void Stop() { latency_->OnStop(); }
double GetLatency() const override { return latency_->GetLatency(); }
private:
// Allows thread-safe changes to the playout latency. Handles caching if
// |SetLatency| is called before start.
rtc::scoped_refptr<PlayoutLatencyInterface> latency_;
// |broadcaster_| is needed since the decoder can only handle one sink.
// It might be better if the decoder can handle multiple sinks and consider
// the VideoSinkWants.
rtc::VideoBroadcaster broadcaster_;
};
private:
bool SetSink(rtc::VideoSinkInterface<VideoFrame>* sink);
rtc::Thread* const worker_thread_;

View File

@ -54,6 +54,9 @@
namespace webrtc {
namespace {
constexpr int kMinBaseMinimumDelayMs = 0;
constexpr int kMaxBaseMinimumDelayMs = 10000;
VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) {
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
@ -166,12 +169,14 @@ VideoReceiveStream::VideoReceiveStream(
PacketRouter* packet_router,
VideoReceiveStream::Config config,
ProcessThread* process_thread,
CallStats* call_stats)
CallStats* call_stats,
Clock* clock,
VCMTiming* timing)
: transport_adapter_(config.rtcp_send_transport),
config_(std::move(config)),
num_cpu_cores_(num_cpu_cores),
process_thread_(process_thread),
clock_(Clock::GetRealTimeClock()),
clock_(clock),
decode_thread_(&DecodeThreadFunction,
this,
"DecodingThread",
@ -180,8 +185,11 @@ VideoReceiveStream::VideoReceiveStream(
stats_proxy_(&config_, clock_),
rtp_receive_statistics_(
ReceiveStatistics::Create(clock_, &stats_proxy_, &stats_proxy_)),
timing_(new VCMTiming(clock_)),
video_receiver_(clock_, timing_.get(), this, this),
timing_(timing),
video_receiver_(clock_,
timing_.get(),
this, // NackSender
this), // KeyFrameRequestSender
rtp_video_stream_receiver_(&transport_adapter_,
call_stats,
packet_router,
@ -201,6 +209,7 @@ VideoReceiveStream::VideoReceiveStream(
RTC_DCHECK(call_stats_);
module_process_sequence_checker_.Detach();
network_sequence_checker_.Detach();
RTC_DCHECK(!config_.decoders.empty());
std::set<int> decoder_payload_types;
@ -241,6 +250,22 @@ VideoReceiveStream::VideoReceiveStream(
}
}
VideoReceiveStream::VideoReceiveStream(
RtpStreamReceiverControllerInterface* receiver_controller,
int num_cpu_cores,
PacketRouter* packet_router,
VideoReceiveStream::Config config,
ProcessThread* process_thread,
CallStats* call_stats)
: VideoReceiveStream(receiver_controller,
num_cpu_cores,
packet_router,
std::move(config),
process_thread,
call_stats,
Clock::GetRealTimeClock(),
new VCMTiming(Clock::GetRealTimeClock())) {}
VideoReceiveStream::~VideoReceiveStream() {
RTC_DCHECK_CALLED_SEQUENTIALLY(&worker_sequence_checker_);
RTC_LOG(LS_INFO) << "~VideoReceiveStream: " << config_.ToString();
@ -393,6 +418,25 @@ void VideoReceiveStream::RemoveSecondarySink(
rtp_video_stream_receiver_.RemoveSecondarySink(sink);
}
bool VideoReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&worker_sequence_checker_);
if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) {
return false;
}
rtc::CritScope cs(&playout_delay_lock_);
base_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
return true;
}
int VideoReceiveStream::GetBaseMinimumPlayoutDelayMs() const {
RTC_DCHECK_CALLED_SEQUENTIALLY(&worker_sequence_checker_);
rtc::CritScope cs(&playout_delay_lock_);
return base_minimum_playout_delay_ms_;
}
// TODO(tommi): This method grabs a lock 6 times.
void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) {
int64_t sync_offset_ms;
@ -428,6 +472,7 @@ void VideoReceiveStream::RequestKeyFrame() {
void VideoReceiveStream::OnCompleteFrame(
std::unique_ptr<video_coding::EncodedFrame> frame) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&network_sequence_checker_);
// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround.
int64_t time_now_ms = rtc::TimeMillis();
if (last_complete_frame_time_ms_ > 0 &&
@ -436,6 +481,19 @@ void VideoReceiveStream::OnCompleteFrame(
}
last_complete_frame_time_ms_ = time_now_ms;
const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_;
if (playout_delay.min_ms >= 0) {
rtc::CritScope cs(&playout_delay_lock_);
frame_minimum_playout_delay_ms_ = playout_delay.min_ms;
UpdatePlayoutDelays();
}
if (playout_delay.max_ms >= 0) {
rtc::CritScope cs(&playout_delay_lock_);
frame_maximum_playout_delay_ms_ = playout_delay.max_ms;
UpdatePlayoutDelays();
}
int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame));
if (last_continuous_pid != -1)
rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
@ -482,7 +540,9 @@ uint32_t VideoReceiveStream::GetPlayoutTimestamp() const {
void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&module_process_sequence_checker_);
video_receiver_.SetMinimumPlayoutDelay(delay_ms);
rtc::CritScope cs(&playout_delay_lock_);
syncable_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
}
void VideoReceiveStream::DecodeThreadFunction(void* ptr) {
@ -565,6 +625,20 @@ bool VideoReceiveStream::Decode() {
return true;
}
void VideoReceiveStream::UpdatePlayoutDelays() const {
const int minimum_delay_ms =
std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_,
syncable_minimum_playout_delay_ms_});
if (minimum_delay_ms >= 0) {
timing_->set_min_playout_delay(minimum_delay_ms);
}
const int maximum_delay_ms = frame_maximum_playout_delay_ms_;
if (maximum_delay_ms >= 0) {
timing_->set_max_playout_delay(maximum_delay_ms);
}
}
std::vector<webrtc::RtpSource> VideoReceiveStream::GetSources() const {
return rtp_video_stream_receiver_.GetSources();
}

View File

@ -52,6 +52,14 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
public MediaTransportVideoSinkInterface,
public MediaTransportRttObserver {
public:
VideoReceiveStream(RtpStreamReceiverControllerInterface* receiver_controller,
int num_cpu_cores,
PacketRouter* packet_router,
VideoReceiveStream::Config config,
ProcessThread* process_thread,
CallStats* call_stats,
Clock* clock,
VCMTiming* timing);
VideoReceiveStream(RtpStreamReceiverControllerInterface* receiver_controller,
int num_cpu_cores,
PacketRouter* packet_router,
@ -76,6 +84,12 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
void AddSecondarySink(RtpPacketSinkInterface* sink) override;
void RemoveSecondarySink(const RtpPacketSinkInterface* sink) override;
// SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called
// from the webrtc/api level and requested by user code, e.g. the blink/JS
// layer in Chromium.
bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override;
int GetBaseMinimumPlayoutDelayMs() const override;
// Implements rtc::VideoSinkInterface<VideoFrame>.
void OnFrame(const VideoFrame& video_frame) override;
@ -104,6 +118,8 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
int id() const override;
absl::optional<Syncable::Info> GetInfo() const override;
uint32_t GetPlayoutTimestamp() const override;
// SetMinimumPlayoutDelay is only called by A/V sync.
void SetMinimumPlayoutDelay(int delay_ms) override;
std::vector<webrtc::RtpSource> GetSources() const override;
@ -111,9 +127,12 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
private:
static void DecodeThreadFunction(void* ptr);
bool Decode();
void UpdatePlayoutDelays() const
RTC_EXCLUSIVE_LOCKS_REQUIRED(playout_delay_lock_);
rtc::SequencedTaskChecker worker_sequence_checker_;
rtc::SequencedTaskChecker module_process_sequence_checker_;
rtc::SequencedTaskChecker network_sequence_checker_;
TransportAdapter transport_adapter_;
const VideoReceiveStream::Config config_;
@ -158,6 +177,23 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
int64_t last_keyframe_request_ms_ = 0;
int64_t last_complete_frame_time_ms_ = 0;
rtc::CriticalSection playout_delay_lock_;
// Each of these tries to change the current min_playout_delay on |timing_|,
// but the source of the change request is different in each case. The largest
// of these delays is used. -1 means use the default value from |timing_|.
//
// Minimum delay as decided by the RTP playout delay extension.
int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
// Minimum delay as decided by the setLatency function in "webrtc/api".
int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
// Minimum delay as decided by the A/V synchronization feature.
int syncable_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) =
-1;
// Maximum delay as decided by the RTP playout delay extension.
int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
};
} // namespace internal
} // namespace webrtc

View File

@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <utility>
#include <vector>
#include "test/gmock.h"
@ -59,6 +60,13 @@ class MockVideoDecoder : public VideoDecoder {
const char* ImplementationName() const { return "MockVideoDecoder"; }
};
class FrameObjectFake : public video_coding::EncodedFrame {
public:
int64_t ReceivedTime() const override { return 0; }
int64_t RenderTime() const override { return _renderTimeMs; }
};
} // namespace
class VideoReceiveStreamTest : public testing::Test {
@ -88,9 +96,12 @@ class VideoReceiveStreamTest : public testing::Test {
null_decoder.decoder_factory = &null_decoder_factory_;
config_.decoders.push_back(null_decoder);
Clock* clock = Clock::GetRealTimeClock();
timing_ = new VCMTiming(clock);
video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream(
&rtp_stream_receiver_controller_, kDefaultNumCpuCores, &packet_router_,
config_.Copy(), process_thread_.get(), &call_stats_));
config_.Copy(), process_thread_.get(), &call_stats_, clock, timing_));
}
protected:
@ -106,6 +117,7 @@ class VideoReceiveStreamTest : public testing::Test {
PacketRouter packet_router_;
RtpStreamReceiverController rtp_stream_receiver_controller_;
std::unique_ptr<webrtc::internal::VideoReceiveStream> video_receive_stream_;
VCMTiming* timing_;
};
TEST_F(VideoReceiveStreamTest, CreateFrameFromH264FmtpSpropAndIdr) {
@ -136,4 +148,65 @@ TEST_F(VideoReceiveStreamTest, CreateFrameFromH264FmtpSpropAndIdr) {
init_decode_event_.Wait(kDefaultTimeOutMs);
}
TEST_F(VideoReceiveStreamTest, PlayoutDelay) {
const PlayoutDelay kPlayoutDelayMs = {123, 321};
std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
test_frame->id.picture_id = 0;
test_frame->SetPlayoutDelay(kPlayoutDelayMs);
video_receive_stream_->OnCompleteFrame(std::move(test_frame));
EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
// Check that the biggest minimum delay is chosen.
video_receive_stream_->SetMinimumPlayoutDelay(400);
EXPECT_EQ(400, timing_->min_playout_delay());
// Check base minimum delay validation.
EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(12345));
EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(-1));
EXPECT_TRUE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(500));
EXPECT_EQ(500, timing_->min_playout_delay());
// Check that intermediate values are remembered and the biggest remembered
// value is chosen.
video_receive_stream_->SetBaseMinimumPlayoutDelayMs(0);
EXPECT_EQ(400, timing_->min_playout_delay());
video_receive_stream_->SetMinimumPlayoutDelay(0);
EXPECT_EQ(123, timing_->min_playout_delay());
}
TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMaxValue) {
const int default_max_playout_latency = timing_->max_playout_delay();
const PlayoutDelay kPlayoutDelayMs = {123, -1};
std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
test_frame->id.picture_id = 0;
test_frame->SetPlayoutDelay(kPlayoutDelayMs);
video_receive_stream_->OnCompleteFrame(std::move(test_frame));
// Ensure that -1 preserves default maximum value from |timing_|.
EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
EXPECT_NE(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
EXPECT_EQ(default_max_playout_latency, timing_->max_playout_delay());
}
TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMinValue) {
const int default_min_playout_latency = timing_->min_playout_delay();
const PlayoutDelay kPlayoutDelayMs = {-1, 321};
std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
test_frame->id.picture_id = 0;
test_frame->SetPlayoutDelay(kPlayoutDelayMs);
video_receive_stream_->OnCompleteFrame(std::move(test_frame));
// Ensure that -1 preserves default minimum value from |timing_|.
EXPECT_NE(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
EXPECT_EQ(default_min_playout_latency, timing_->min_playout_delay());
}
} // namespace webrtc