Revert of Use VoiceChannel/VideoChannel directly from RtpSender/RtpReceiver. (patchset #3 id:40001 of https://codereview.webrtc.org/2046173002/ )

Reason for revert:
Broke peerconnection_unittest, apparently due to the introduction of a thread check. Will fix and reland.

Original issue's description:
> Use VoiceChannel/VideoChannel directly from RtpSender/RtpReceiver.
>
> This eliminates the need for the extra layer of indirection provided by
> mediastreamprovider.h. It will thus make it easier to implement new
> functionality in RtpSender/RtpReceiver.
>
> It also brings us one step closer to the end goal of combining "senders"
> and "send streams". Currently the sender still needs to go through the
> BaseChannel and MediaChannel, using an SSRC as a key.
>
> R=pthatcher@webrtc.org
>
> Committed: https://crrev.com/bc5831999d3354509d75357b659b4bb8e39f8a6c
> Cr-Commit-Position: refs/heads/master@{#13285}

TBR=pthatcher@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review-Url: https://codereview.webrtc.org/2099843003
Cr-Commit-Position: refs/heads/master@{#13286}
Author: deadbeef
Date: 2016-06-24 14:13:06 -07:00
Committed by: Commit bot
parent bc5831999d
commit 1a7162dbc9
18 changed files with 905 additions and 654 deletions
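
For readers skimming the diff below: the description above contrasts two designs. The original CL had the RtpSenders/RtpReceivers hold a cricket::VoiceChannel*/cricket::VideoChannel* directly; this revert restores the AudioProviderInterface/VideoProviderInterface indirection from mediastreamprovider.h, with every operation keyed by an SSRC. The following is a minimal editorial sketch of the restored shape, not WebRTC source; the signatures are heavily simplified (the real SetAudioSend also takes cricket::AudioOptions and a cricket::AudioSource*).

#include <cstdint>

// Simplified stand-in for the provider indirection this revert restores.
class AudioProvider {
 public:
  virtual ~AudioProvider() {}
  // Keyed by SSRC, as in AudioProviderInterface::SetAudioSend.
  virtual void SetAudioSend(uint32_t ssrc, bool enable) = 0;
};

// The sender only knows the provider, never the concrete VoiceChannel, so
// PeerConnection/WebRtcSession can create or destroy channels without the
// sender holding a channel pointer.
class SenderSketch {
 public:
  explicit SenderSketch(AudioProvider* provider) : provider_(provider) {}
  void SetSsrc(uint32_t ssrc) {
    ssrc_ = ssrc;
    provider_->SetAudioSend(ssrc_, /*enable=*/true);
  }
  void Stop() {
    if (ssrc_ != 0) {
      provider_->SetAudioSend(ssrc_, /*enable=*/false);
    }
  }

 private:
  AudioProvider* provider_;
  uint32_t ssrc_ = 0;
};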

View File

@ -52,6 +52,7 @@ source_set("libjingle_peerconnection") {
"mediastreaminterface.h",
"mediastreamobserver.cc",
"mediastreamobserver.h",
"mediastreamprovider.h",
"mediastreamproxy.h",
"mediastreamtrack.h",
"mediastreamtrackproxy.h",

View File

@ -128,6 +128,7 @@
'mediastreaminterface.h',
'mediastreamobserver.cc',
'mediastreamobserver.h',
'mediastreamprovider.h',
'mediastreamproxy.h',
'mediastreamtrack.h',
'mediastreamtrackproxy.h',

View File

@ -13,6 +13,7 @@
#include <memory>
#include <string>
#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/api/sctputils.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/refcount.h"

View File

@ -0,0 +1,120 @@
/*
* Copyright 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_MEDIASTREAMPROVIDER_H_
#define WEBRTC_API_MEDIASTREAMPROVIDER_H_
#include <memory>
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
namespace cricket {
class AudioSource;
class VideoFrame;
struct AudioOptions;
struct VideoOptions;
} // namespace cricket
namespace webrtc {
class AudioSinkInterface;
// TODO(deadbeef): Change the key from an ssrc to a "sender_id" or
// "receiver_id" string, which will be the MSID in the short term and MID in
// the long term.
// TODO(deadbeef): These interfaces are effectively just a way for the
// RtpSenders/Receivers to get to the BaseChannels. These interfaces should be
// refactored away eventually, as the classes converge.
// This interface is called by AudioRtpSender/Receivers to change the settings
of an audio track connected to a certain PeerConnection.
class AudioProviderInterface {
public:
// Enable/disable the audio playout of a remote audio track with |ssrc|.
virtual void SetAudioPlayout(uint32_t ssrc, bool enable) = 0;
// Enable/disable sending audio on the local audio track with |ssrc|.
// When |enable| is true |options| should be applied to the audio track.
virtual void SetAudioSend(uint32_t ssrc,
bool enable,
const cricket::AudioOptions& options,
cricket::AudioSource* source) = 0;
// Sets the audio playout volume of a remote audio track with |ssrc|.
// |volume| is in the range of [0, 10].
virtual void SetAudioPlayoutVolume(uint32_t ssrc, double volume) = 0;
// Allows for setting a direct audio sink for an incoming audio source.
// Only one audio sink is supported per ssrc and ownership of the sink is
// passed to the provider.
virtual void SetRawAudioSink(
uint32_t ssrc,
std::unique_ptr<webrtc::AudioSinkInterface> sink) = 0;
virtual RtpParameters GetAudioRtpSendParameters(uint32_t ssrc) const = 0;
virtual bool SetAudioRtpSendParameters(uint32_t ssrc,
const RtpParameters& parameters) = 0;
virtual RtpParameters GetAudioRtpReceiveParameters(uint32_t ssrc) const = 0;
virtual bool SetAudioRtpReceiveParameters(
uint32_t ssrc,
const RtpParameters& parameters) = 0;
// Called when the first audio packet is received.
sigslot::signal0<> SignalFirstAudioPacketReceived;
protected:
virtual ~AudioProviderInterface() {}
};
// This interface is called by VideoRtpSender/Receivers to change the settings
// of a video track connected to a certain PeerConnection.
class VideoProviderInterface {
public:
// Enable/disable the video playout of a remote video track with |ssrc|.
virtual void SetVideoPlayout(
uint32_t ssrc,
bool enable,
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) = 0;
// Enable/disable sending video on the local video track with |ssrc|.
// TODO(deadbeef): Make |options| a reference parameter.
// TODO(deadbeef): Eventually, |enable| and |options| will be contained
// in |source|. When that happens, remove those parameters and rename
// this to SetVideoSource.
virtual void SetVideoSend(
uint32_t ssrc,
bool enable,
const cricket::VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) = 0;
virtual RtpParameters GetVideoRtpSendParameters(uint32_t ssrc) const = 0;
virtual bool SetVideoRtpSendParameters(uint32_t ssrc,
const RtpParameters& parameters) = 0;
virtual RtpParameters GetVideoRtpReceiveParameters(uint32_t ssrc) const = 0;
virtual bool SetVideoRtpReceiveParameters(
uint32_t ssrc,
const RtpParameters& parameters) = 0;
// Called when the first video packet is received.
sigslot::signal0<> SignalFirstVideoPacketReceived;
protected:
virtual ~VideoProviderInterface() {}
};
} // namespace webrtc
#endif // WEBRTC_API_MEDIASTREAMPROVIDER_H_
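
As a hedged illustration of how the receivers later in this commit consume these interfaces: operations are keyed by SSRC, and Stop() disables playout and then nulls the provider pointer so that repeat calls become no-ops. The sketch below is self-contained and uses a simplified provider with only the playout method; it is not the real AudioRtpReceiver.

#include <cstdint>
#include <iostream>

// Simplified stand-in for AudioProviderInterface, playout only.
class SimpleAudioProvider {
 public:
  virtual ~SimpleAudioProvider() {}
  virtual void SetAudioPlayout(uint32_t ssrc, bool enable) = 0;
};

class LoggingProvider : public SimpleAudioProvider {
 public:
  void SetAudioPlayout(uint32_t ssrc, bool enable) override {
    std::cout << "SetAudioPlayout(" << ssrc << ", " << enable << ")\n";
  }
};

// Mirrors the AudioRtpReceiver pattern in this commit: enable playout up
// front, disable it in Stop(), then drop the pointer so repeat calls no-op.
class PlayoutSketch {
 public:
  PlayoutSketch(SimpleAudioProvider* provider, uint32_t ssrc)
      : provider_(provider), ssrc_(ssrc) {
    provider_->SetAudioPlayout(ssrc_, true);
  }
  void Stop() {
    if (!provider_) {
      return;
    }
    provider_->SetAudioPlayout(ssrc_, false);
    provider_ = nullptr;  // Set to null in Stop(), as in the real receiver.
  }

 private:
  SimpleAudioProvider* provider_;
  uint32_t ssrc_;
};

int main() {
  LoggingProvider provider;
  PlayoutSketch receiver(&provider, 99);
  receiver.Stop();
  receiver.Stop();  // No-op: the provider pointer was cleared above.
  return 0;
}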

View File

@ -396,34 +396,6 @@ uint32_t ConvertIceTransportTypeToCandidateFilter(
return cricket::CF_NONE;
}
// Helper method to set a voice/video channel on all applicable senders
// and receivers when one is created/destroyed by WebRtcSession.
//
// Used by On(Voice|Video)Channel(Created|Destroyed)
template <class SENDER,
class RECEIVER,
class CHANNEL,
class SENDERS,
class RECEIVERS>
void SetChannelOnSendersAndReceivers(CHANNEL* channel,
SENDERS& senders,
RECEIVERS& receivers,
cricket::MediaType media_type) {
for (auto& sender : senders) {
if (sender->media_type() == media_type) {
static_cast<SENDER*>(sender->internal())->SetChannel(channel);
}
}
for (auto& receiver : receivers) {
if (receiver->media_type() == media_type) {
if (!channel) {
receiver->internal()->Stop();
}
static_cast<RECEIVER*>(receiver->internal())->SetChannel(channel);
}
}
}
} // namespace
namespace webrtc {
@ -636,12 +608,8 @@ bool PeerConnection::Initialize(
// All the callbacks will be posted to the application from PeerConnection.
session_->RegisterIceObserver(this);
session_->SignalState.connect(this, &PeerConnection::OnSessionStateChange);
session_->SignalVoiceChannelCreated.connect(
this, &PeerConnection::OnVoiceChannelCreated);
session_->SignalVoiceChannelDestroyed.connect(
this, &PeerConnection::OnVoiceChannelDestroyed);
session_->SignalVideoChannelCreated.connect(
this, &PeerConnection::OnVideoChannelCreated);
session_->SignalVideoChannelDestroyed.connect(
this, &PeerConnection::OnVideoChannelDestroyed);
session_->SignalDataChannelCreated.connect(
@ -745,7 +713,7 @@ rtc::scoped_refptr<RtpSenderInterface> PeerConnection::AddTrack(
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(),
new AudioRtpSender(static_cast<AudioTrackInterface*>(track),
session_->voice_channel(), stats_.get()));
session_.get(), stats_.get()));
if (!streams.empty()) {
new_sender->internal()->set_stream_id(streams[0]->label());
}
@ -758,7 +726,7 @@ rtc::scoped_refptr<RtpSenderInterface> PeerConnection::AddTrack(
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(),
new VideoRtpSender(static_cast<VideoTrackInterface*>(track),
session_->video_channel()));
session_.get()));
if (!streams.empty()) {
new_sender->internal()->set_stream_id(streams[0]->label());
}
@ -823,11 +791,10 @@ rtc::scoped_refptr<RtpSenderInterface> PeerConnection::CreateSender(
rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> new_sender;
if (kind == MediaStreamTrackInterface::kAudioKind) {
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(),
new AudioRtpSender(session_->voice_channel(), stats_.get()));
signaling_thread(), new AudioRtpSender(session_.get(), stats_.get()));
} else if (kind == MediaStreamTrackInterface::kVideoKind) {
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(), new VideoRtpSender(session_->video_channel()));
signaling_thread(), new VideoRtpSender(session_.get()));
} else {
LOG(LS_ERROR) << "CreateSender called with invalid kind: " << kind;
return new_sender;
@ -1355,8 +1322,8 @@ void PeerConnection::CreateAudioReceiver(MediaStreamInterface* stream,
uint32_t ssrc) {
receivers_.push_back(
RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(), new AudioRtpReceiver(stream, track_id, ssrc,
session_->voice_channel())));
signaling_thread(),
new AudioRtpReceiver(stream, track_id, ssrc, session_.get())));
}
void PeerConnection::CreateVideoReceiver(MediaStreamInterface* stream,
@ -1366,7 +1333,7 @@ void PeerConnection::CreateVideoReceiver(MediaStreamInterface* stream,
RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(),
new VideoRtpReceiver(stream, track_id, factory_->worker_thread(),
ssrc, session_->video_channel())));
ssrc, session_.get())));
}
// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote
@ -1382,6 +1349,19 @@ void PeerConnection::DestroyReceiver(const std::string& track_id) {
}
}
void PeerConnection::StopReceivers(cricket::MediaType media_type) {
TrackInfos* current_tracks = GetRemoteTracks(media_type);
for (const auto& track_info : *current_tracks) {
auto it = FindReceiverForTrack(track_info.track_id);
if (it == receivers_.end()) {
LOG(LS_WARNING) << "RtpReceiver for track with id " << track_info.track_id
<< " doesn't exist.";
} else {
(*it)->internal()->Stop();
}
}
}
void PeerConnection::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
RTC_DCHECK(signaling_thread()->IsCurrent());
@ -1447,9 +1427,8 @@ void PeerConnection::OnAudioTrackAdded(AudioTrackInterface* track,
// Normal case; we've never seen this track before.
rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> new_sender =
RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(),
new AudioRtpSender(track, stream->label(), session_->voice_channel(),
stats_.get()));
signaling_thread(), new AudioRtpSender(track, stream->label(),
session_.get(), stats_.get()));
senders_.push_back(new_sender);
// If the sender has already been configured in SDP, we call SetSsrc,
// which will connect the sender to the underlying transport. This can
@ -1491,8 +1470,8 @@ void PeerConnection::OnVideoTrackAdded(VideoTrackInterface* track,
// Normal case; we've never seen this track before.
rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> new_sender =
RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(), new VideoRtpSender(track, stream->label(),
session_->video_channel()));
signaling_thread(),
new VideoRtpSender(track, stream->label(), session_.get()));
senders_.push_back(new_sender);
const TrackInfo* track_info =
FindTrackInfo(local_video_tracks_, stream->label(), track->id());
@ -2034,28 +2013,12 @@ void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) {
}
}
void PeerConnection::OnVoiceChannelCreated() {
SetChannelOnSendersAndReceivers<AudioRtpSender, AudioRtpReceiver>(
session_->voice_channel(), senders_, receivers_,
cricket::MEDIA_TYPE_AUDIO);
}
void PeerConnection::OnVoiceChannelDestroyed() {
SetChannelOnSendersAndReceivers<AudioRtpSender, AudioRtpReceiver,
cricket::VoiceChannel>(
nullptr, senders_, receivers_, cricket::MEDIA_TYPE_AUDIO);
}
void PeerConnection::OnVideoChannelCreated() {
SetChannelOnSendersAndReceivers<VideoRtpSender, VideoRtpReceiver>(
session_->video_channel(), senders_, receivers_,
cricket::MEDIA_TYPE_VIDEO);
StopReceivers(cricket::MEDIA_TYPE_AUDIO);
}
void PeerConnection::OnVideoChannelDestroyed() {
SetChannelOnSendersAndReceivers<VideoRtpSender, VideoRtpReceiver,
cricket::VideoChannel>(
nullptr, senders_, receivers_, cricket::MEDIA_TYPE_VIDEO);
StopReceivers(cricket::MEDIA_TYPE_VIDEO);
}
void PeerConnection::OnDataChannelCreated() {

View File

@ -174,6 +174,7 @@ class PeerConnection : public PeerConnectionInterface,
void CreateVideoReceiver(MediaStreamInterface* stream,
const std::string& track_id,
uint32_t ssrc);
void StopReceivers(cricket::MediaType media_type);
void DestroyReceiver(const std::string& track_id);
void DestroyAudioSender(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
@ -324,9 +325,7 @@ class PeerConnection : public PeerConnectionInterface,
void OnSctpDataChannelClosed(DataChannel* channel);
// Notifications from WebRtcSession relating to BaseChannels.
void OnVoiceChannelCreated();
void OnVoiceChannelDestroyed();
void OnVideoChannelCreated();
void OnVideoChannelDestroyed();
void OnDataChannelCreated();
void OnDataChannelDestroyed();

View File

@ -15,6 +15,7 @@
#include <memory>
#include <utility>
#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
@ -41,7 +42,7 @@ class RemoteAudioSource::MessageHandler : public rtc::MessageHandler {
class RemoteAudioSource::Sink : public AudioSinkInterface {
public:
explicit Sink(RemoteAudioSource* source) : source_(source) {}
~Sink() override { source_->OnAudioChannelGone(); }
~Sink() override { source_->OnAudioProviderGone(); }
private:
void OnData(const AudioSinkInterface::Data& audio) override {
@ -55,10 +56,10 @@ class RemoteAudioSource::Sink : public AudioSinkInterface {
rtc::scoped_refptr<RemoteAudioSource> RemoteAudioSource::Create(
uint32_t ssrc,
cricket::VoiceChannel* channel) {
AudioProviderInterface* provider) {
rtc::scoped_refptr<RemoteAudioSource> ret(
new rtc::RefCountedObject<RemoteAudioSource>());
ret->Initialize(ssrc, channel);
ret->Initialize(ssrc, provider);
return ret;
}
@ -75,12 +76,12 @@ RemoteAudioSource::~RemoteAudioSource() {
}
void RemoteAudioSource::Initialize(uint32_t ssrc,
cricket::VoiceChannel* channel) {
AudioProviderInterface* provider) {
RTC_DCHECK(main_thread_->IsCurrent());
// To make sure we always get notified when the channel goes out of scope,
// To make sure we always get notified when the provider goes out of scope,
// we register for callbacks here and not on demand in AddSink.
if (channel) { // May be null in tests.
channel->SetRawAudioSink(
if (provider) { // May be null in tests.
provider->SetRawAudioSink(
ssrc, std::unique_ptr<AudioSinkInterface>(new Sink(this)));
}
}
@ -144,8 +145,8 @@ void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) {
}
}
void RemoteAudioSource::OnAudioChannelGone() {
// Called when the audio channel is deleted. It may be the worker thread
void RemoteAudioSource::OnAudioProviderGone() {
// Called when the data provider is deleted. It may be the worker thread
// in libjingle or may be a different worker thread.
main_thread_->Post(RTC_FROM_HERE, new MessageHandler(this));
}

View File

@ -14,10 +14,10 @@
#include <list>
#include <string>
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/notifier.h"
#include "webrtc/audio_sink.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/pc/channel.h"
namespace rtc {
struct Message;
@ -26,13 +26,15 @@ class Thread;
namespace webrtc {
class AudioProviderInterface;
// This class implements the audio source used by the remote audio track.
class RemoteAudioSource : public Notifier<AudioSourceInterface> {
public:
// Creates an instance of RemoteAudioSource.
static rtc::scoped_refptr<RemoteAudioSource> Create(
uint32_t ssrc,
cricket::VoiceChannel* channel);
AudioProviderInterface* provider);
// MediaSourceInterface implementation.
MediaSourceInterface::SourceState state() const override;
@ -47,7 +49,7 @@ class RemoteAudioSource : public Notifier<AudioSourceInterface> {
// Post construction initialize where we can do things like save a reference
// to ourselves (need to be fully constructed).
void Initialize(uint32_t ssrc, cricket::VoiceChannel* channel);
void Initialize(uint32_t ssrc, AudioProviderInterface* provider);
private:
typedef std::list<AudioObserver*> AudioObserverList;
@ -59,7 +61,7 @@ class RemoteAudioSource : public Notifier<AudioSourceInterface> {
class Sink;
void OnData(const AudioSinkInterface::Data& audio);
void OnAudioChannelGone();
void OnAudioProviderGone();
class MessageHandler;
void OnMessage(rtc::Message* msg);

View File

@ -21,24 +21,22 @@ namespace webrtc {
AudioRtpReceiver::AudioRtpReceiver(MediaStreamInterface* stream,
const std::string& track_id,
uint32_t ssrc,
cricket::VoiceChannel* channel)
AudioProviderInterface* provider)
: id_(track_id),
ssrc_(ssrc),
channel_(channel),
provider_(provider),
track_(AudioTrackProxy::Create(
rtc::Thread::Current(),
AudioTrack::Create(track_id,
RemoteAudioSource::Create(ssrc, channel)))),
RemoteAudioSource::Create(ssrc, provider)))),
cached_track_enabled_(track_->enabled()) {
RTC_DCHECK(track_->GetSource()->remote());
track_->RegisterObserver(this);
track_->GetSource()->RegisterAudioObserver(this);
Reconfigure();
stream->AddTrack(track_);
if (channel_) {
channel_->SignalFirstPacketReceived.connect(
this, &AudioRtpReceiver::OnFirstPacketReceived);
}
provider_->SignalFirstAudioPacketReceived.connect(
this, &AudioRtpReceiver::OnFirstAudioPacketReceived);
}
AudioRtpReceiver::~AudioRtpReceiver() {
@ -55,78 +53,48 @@ void AudioRtpReceiver::OnChanged() {
}
void AudioRtpReceiver::OnSetVolume(double volume) {
RTC_DCHECK(volume >= 0 && volume <= 10);
cached_volume_ = volume;
if (!channel_) {
LOG(LS_ERROR) << "AudioRtpReceiver::OnSetVolume: No audio channel exists.";
return;
}
// When the track is disabled, the volume of the source, which is the
// corresponding WebRTC Voice Engine channel, will be 0. So we do not allow
// setting the volume to the source when the track is disabled.
if (!stopped_ && track_->enabled()) {
RTC_DCHECK(channel_->SetOutputVolume(ssrc_, cached_volume_));
}
if (provider_ && track_->enabled())
provider_->SetAudioPlayoutVolume(ssrc_, volume);
}
RtpParameters AudioRtpReceiver::GetParameters() const {
if (!channel_ || stopped_) {
return RtpParameters();
}
return channel_->GetRtpReceiveParameters(ssrc_);
return provider_->GetAudioRtpReceiveParameters(ssrc_);
}
bool AudioRtpReceiver::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "AudioRtpReceiver::SetParameters");
if (!channel_ || stopped_) {
return false;
}
return channel_->SetRtpReceiveParameters(ssrc_, parameters);
return provider_->SetAudioRtpReceiveParameters(ssrc_, parameters);
}
void AudioRtpReceiver::Stop() {
// TODO(deadbeef): Need to do more here to fully stop receiving packets.
if (stopped_) {
if (!provider_) {
return;
}
if (channel_) {
// Allow SetOutputVolume to fail. This is the normal case when the
// underlying media channel has already been deleted.
channel_->SetOutputVolume(ssrc_, 0);
}
stopped_ = true;
provider_->SetAudioPlayout(ssrc_, false);
provider_ = nullptr;
}
void AudioRtpReceiver::Reconfigure() {
RTC_DCHECK(!stopped_);
if (!channel_) {
LOG(LS_ERROR) << "AudioRtpReceiver::Reconfigure: No audio channel exists.";
if (!provider_) {
return;
}
RTC_DCHECK(
channel_->SetOutputVolume(ssrc_, track_->enabled() ? cached_volume_ : 0));
provider_->SetAudioPlayout(ssrc_, track_->enabled());
}
void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
observer_ = observer;
// Deliver any notifications the observer may have missed by being set late.
// If the first packet was received before the observer was set, call the
// observer.
if (received_first_packet_) {
observer_->OnFirstPacketReceived(media_type());
}
}
void AudioRtpReceiver::SetChannel(cricket::VoiceChannel* channel) {
if (channel_) {
channel_->SignalFirstPacketReceived.disconnect(this);
}
channel_ = channel;
if (channel_) {
channel_->SignalFirstPacketReceived.connect(
this, &AudioRtpReceiver::OnFirstPacketReceived);
}
}
void AudioRtpReceiver::OnFirstPacketReceived(cricket::BaseChannel* channel) {
void AudioRtpReceiver::OnFirstAudioPacketReceived() {
if (observer_) {
observer_->OnFirstPacketReceived(media_type());
}
@ -137,10 +105,10 @@ VideoRtpReceiver::VideoRtpReceiver(MediaStreamInterface* stream,
const std::string& track_id,
rtc::Thread* worker_thread,
uint32_t ssrc,
cricket::VideoChannel* channel)
VideoProviderInterface* provider)
: id_(track_id),
ssrc_(ssrc),
channel_(channel),
provider_(provider),
source_(new RefCountedObject<VideoTrackSource>(&broadcaster_,
true /* remote */)),
track_(VideoTrackProxy::Create(
@ -152,77 +120,48 @@ VideoRtpReceiver::VideoRtpReceiver(MediaStreamInterface* stream,
worker_thread,
source_)))) {
source_->SetState(MediaSourceInterface::kLive);
if (!channel_) {
LOG(LS_ERROR)
<< "VideoRtpReceiver::VideoRtpReceiver: No video channel exists.";
} else {
RTC_DCHECK(channel_->SetSink(ssrc_, &broadcaster_));
}
provider_->SetVideoPlayout(ssrc_, true, &broadcaster_);
stream->AddTrack(track_);
if (channel_) {
channel_->SignalFirstPacketReceived.connect(
this, &VideoRtpReceiver::OnFirstPacketReceived);
}
provider_->SignalFirstVideoPacketReceived.connect(
this, &VideoRtpReceiver::OnFirstVideoPacketReceived);
}
VideoRtpReceiver::~VideoRtpReceiver() {
// Since cricket::VideoRenderer is not reference counted,
// we need to remove it from the channel before we are deleted.
// we need to remove it from the provider before we are deleted.
Stop();
}
RtpParameters VideoRtpReceiver::GetParameters() const {
if (!channel_ || stopped_) {
return RtpParameters();
}
return channel_->GetRtpReceiveParameters(ssrc_);
return provider_->GetVideoRtpReceiveParameters(ssrc_);
}
bool VideoRtpReceiver::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "VideoRtpReceiver::SetParameters");
if (!channel_ || stopped_) {
return false;
}
return channel_->SetRtpReceiveParameters(ssrc_, parameters);
return provider_->SetVideoRtpReceiveParameters(ssrc_, parameters);
}
void VideoRtpReceiver::Stop() {
// TODO(deadbeef): Need to do more here to fully stop receiving packets.
if (stopped_) {
if (!provider_) {
return;
}
source_->SetState(MediaSourceInterface::kEnded);
source_->OnSourceDestroyed();
if (!channel_) {
LOG(LS_WARNING) << "VideoRtpReceiver::Stop: No video channel exists.";
} else {
// Allow SetSink to fail. This is the normal case when the underlying
// media channel has already been deleted.
channel_->SetSink(ssrc_, nullptr);
}
stopped_ = true;
provider_->SetVideoPlayout(ssrc_, false, nullptr);
provider_ = nullptr;
}
void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
observer_ = observer;
// Deliver any notifications the observer may have missed by being set late.
// If the first packet was received before the observer was set, call the
// observer.
if (received_first_packet_) {
observer_->OnFirstPacketReceived(media_type());
}
}
void VideoRtpReceiver::SetChannel(cricket::VideoChannel* channel) {
if (channel_) {
channel_->SignalFirstPacketReceived.disconnect(this);
}
channel_ = channel;
if (channel_) {
channel_->SignalFirstPacketReceived.connect(
this, &VideoRtpReceiver::OnFirstPacketReceived);
}
}
void VideoRtpReceiver::OnFirstPacketReceived(cricket::BaseChannel* channel) {
void VideoRtpReceiver::OnFirstVideoPacketReceived() {
if (observer_) {
observer_->OnFirstPacketReceived(media_type());
}

View File

@ -10,21 +10,20 @@
// This file contains classes that implement RtpReceiverInterface.
// An RtpReceiver associates a MediaStreamTrackInterface with an underlying
// transport (provided by cricket::VoiceChannel/cricket::VideoChannel)
// transport (provided by AudioProviderInterface/VideoProviderInterface)
#ifndef WEBRTC_API_RTPRECEIVER_H_
#define WEBRTC_API_RTPRECEIVER_H_
#include <string>
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/api/remoteaudiosource.h"
#include "webrtc/api/videotracksource.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/media/base/videobroadcaster.h"
#include "webrtc/pc/channel.h"
namespace webrtc {
@ -42,7 +41,7 @@ class AudioRtpReceiver : public ObserverInterface,
AudioRtpReceiver(MediaStreamInterface* stream,
const std::string& track_id,
uint32_t ssrc,
cricket::VoiceChannel* channel);
AudioProviderInterface* provider);
virtual ~AudioRtpReceiver();
@ -61,10 +60,6 @@ class AudioRtpReceiver : public ObserverInterface,
return track_.get();
}
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_AUDIO;
}
std::string id() const override { return id_; }
RtpParameters GetParameters() const override;
@ -75,21 +70,17 @@ class AudioRtpReceiver : public ObserverInterface,
void SetObserver(RtpReceiverObserverInterface* observer) override;
// Does not take ownership.
// Should call SetChannel(nullptr) before |channel| is destroyed.
void SetChannel(cricket::VoiceChannel* channel);
cricket::MediaType media_type() override { return cricket::MEDIA_TYPE_AUDIO; }
private:
void Reconfigure();
void OnFirstPacketReceived(cricket::BaseChannel* channel);
void OnFirstAudioPacketReceived();
const std::string id_;
const uint32_t ssrc_;
cricket::VoiceChannel* channel_;
AudioProviderInterface* provider_; // Set to null in Stop().
const rtc::scoped_refptr<AudioTrackInterface> track_;
bool cached_track_enabled_;
double cached_volume_ = 1;
bool stopped_ = false;
RtpReceiverObserverInterface* observer_ = nullptr;
bool received_first_packet_ = false;
};
@ -101,7 +92,7 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
const std::string& track_id,
rtc::Thread* worker_thread,
uint32_t ssrc,
cricket::VideoChannel* channel);
VideoProviderInterface* provider);
virtual ~VideoRtpReceiver();
@ -114,10 +105,6 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
return track_.get();
}
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_VIDEO;
}
std::string id() const override { return id_; }
RtpParameters GetParameters() const override;
@ -128,16 +115,14 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
void SetObserver(RtpReceiverObserverInterface* observer) override;
// Does not take ownership.
// Should call SetChannel(nullptr) before |channel| is destroyed.
void SetChannel(cricket::VideoChannel* channel);
cricket::MediaType media_type() override { return cricket::MEDIA_TYPE_VIDEO; }
private:
void OnFirstPacketReceived(cricket::BaseChannel* channel);
void OnFirstVideoPacketReceived();
std::string id_;
uint32_t ssrc_;
cricket::VideoChannel* channel_;
VideoProviderInterface* provider_;
// |broadcaster_| is needed since the decoder can only handle one sink.
// It might be better if the decoder can handle multiple sinks and consider
// the VideoSinkWants.
@ -146,7 +131,6 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
// the VideoRtpReceiver is stopped.
rtc::scoped_refptr<VideoTrackSource> source_;
rtc::scoped_refptr<VideoTrackInterface> track_;
bool stopped_ = false;
RtpReceiverObserverInterface* observer_ = nullptr;
bool received_first_packet_ = false;
};

View File

@ -18,7 +18,6 @@
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/proxy.h"
#include "webrtc/api/rtpparameters.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/pc/mediasession.h"
@ -27,12 +26,6 @@ namespace webrtc {
class RtpReceiverObserverInterface {
public:
// Note: Currently if there are multiple RtpReceivers of the same media type,
// they will all call OnFirstPacketReceived at once.
//
// In the future, it's likely that an RtpReceiver will only call
// OnFirstPacketReceived when a packet is received specifically for its
// SSRC/mid.
virtual void OnFirstPacketReceived(cricket::MediaType media_type) = 0;
protected:
@ -43,9 +36,6 @@ class RtpReceiverInterface : public rtc::RefCountInterface {
public:
virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
// Audio or video receiver?
virtual cricket::MediaType media_type() const = 0;
// Not to be confused with "mid", this is a field we can temporarily use
// to uniquely identify a receiver until we implement Unified Plan SDP.
virtual std::string id() const = 0;
@ -56,10 +46,10 @@ class RtpReceiverInterface : public rtc::RefCountInterface {
virtual RtpParameters GetParameters() const = 0;
virtual bool SetParameters(const RtpParameters& parameters) = 0;
// Does not take ownership of observer.
// Must call SetObserver(nullptr) before the observer is destroyed.
virtual void SetObserver(RtpReceiverObserverInterface* observer) = 0;
virtual cricket::MediaType media_type() = 0;
protected:
virtual ~RtpReceiverInterface() {}
};
@ -67,11 +57,11 @@ class RtpReceiverInterface : public rtc::RefCountInterface {
// Define proxy for RtpReceiverInterface.
BEGIN_SIGNALING_PROXY_MAP(RtpReceiver)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
PROXY_CONSTMETHOD0(std::string, id)
PROXY_CONSTMETHOD0(RtpParameters, GetParameters);
PROXY_METHOD1(bool, SetParameters, const RtpParameters&)
PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*);
PROXY_METHOD0(cricket::MediaType, media_type);
END_SIGNALING_PROXY()
} // namespace webrtc
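
One detail worth calling out from the receiver code in this commit: the first-packet signal can fire before SetObserver() is called, so the receiver records it in received_first_packet_ and replays the notification when the observer is finally attached. Below is a self-contained sketch of that late-observer pattern; the class and method names are illustrative, not WebRTC API.

#include <iostream>

// Illustrative observer, loosely modeled on RtpReceiverObserverInterface.
class FirstPacketObserver {
 public:
  virtual ~FirstPacketObserver() {}
  virtual void OnFirstPacketReceived() = 0;
};

class ReceiverSketch {
 public:
  void SetObserver(FirstPacketObserver* observer) {
    observer_ = observer;
    // Deliver the notification the observer missed by being set late.
    if (received_first_packet_ && observer_) {
      observer_->OnFirstPacketReceived();
    }
  }

  // Called when the first packet for this receiver arrives.
  void HandleFirstPacket() {
    received_first_packet_ = true;
    if (observer_) {
      observer_->OnFirstPacketReceived();
    }
  }

 private:
  FirstPacketObserver* observer_ = nullptr;
  bool received_first_packet_ = false;
};

class PrintingObserver : public FirstPacketObserver {
 public:
  void OnFirstPacketReceived() override {
    std::cout << "first packet received\n";
  }
};

int main() {
  ReceiverSketch receiver;
  receiver.HandleFirstPacket();     // Packet arrives before any observer.
  PrintingObserver observer;
  receiver.SetObserver(&observer);  // Missed notification is delivered here.
  return 0;
}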

View File

@ -45,38 +45,40 @@ void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) {
AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
const std::string& stream_id,
cricket::VoiceChannel* channel,
AudioProviderInterface* provider,
StatsCollector* stats)
: id_(track->id()),
stream_id_(stream_id),
channel_(channel),
provider_(provider),
stats_(stats),
track_(track),
cached_track_enabled_(track->enabled()),
sink_adapter_(new LocalAudioSinkAdapter()) {
RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
track_->AddSink(sink_adapter_.get());
}
AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
cricket::VoiceChannel* channel,
AudioProviderInterface* provider,
StatsCollector* stats)
: id_(track->id()),
stream_id_(rtc::CreateRandomUuid()),
channel_(channel),
provider_(provider),
stats_(stats),
track_(track),
cached_track_enabled_(track->enabled()),
sink_adapter_(new LocalAudioSinkAdapter()) {
RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
track_->AddSink(sink_adapter_.get());
}
AudioRtpSender::AudioRtpSender(cricket::VoiceChannel* channel,
AudioRtpSender::AudioRtpSender(AudioProviderInterface* provider,
StatsCollector* stats)
: id_(rtc::CreateRandomUuid()),
stream_id_(rtc::CreateRandomUuid()),
channel_(channel),
provider_(provider),
stats_(stats),
sink_adapter_(new LocalAudioSinkAdapter()) {}
@ -130,31 +132,26 @@ bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
track_->AddSink(sink_adapter_.get());
}
// Update audio channel.
// Update audio provider.
if (can_send_track()) {
SetAudioSend();
if (stats_) {
stats_->AddLocalAudioTrack(track_.get(), ssrc_);
}
} else if (prev_can_send_track) {
ClearAudioSend();
cricket::AudioOptions options;
provider_->SetAudioSend(ssrc_, false, options, nullptr);
}
return true;
}
RtpParameters AudioRtpSender::GetParameters() const {
if (!channel_ || stopped_) {
return RtpParameters();
}
return channel_->GetRtpSendParameters(ssrc_);
return provider_->GetAudioRtpSendParameters(ssrc_);
}
bool AudioRtpSender::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "AudioRtpSender::SetParameters");
if (!channel_ || stopped_) {
return false;
}
return channel_->SetRtpSendParameters(ssrc_, parameters);
return provider_->SetAudioRtpSendParameters(ssrc_, parameters);
}
void AudioRtpSender::SetSsrc(uint32_t ssrc) {
@ -164,7 +161,8 @@ void AudioRtpSender::SetSsrc(uint32_t ssrc) {
}
// If we are already sending with a particular SSRC, stop sending.
if (can_send_track()) {
ClearAudioSend();
cricket::AudioOptions options;
provider_->SetAudioSend(ssrc_, false, options, nullptr);
if (stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
@ -189,7 +187,8 @@ void AudioRtpSender::Stop() {
track_->UnregisterObserver(this);
}
if (can_send_track()) {
ClearAudioSend();
cricket::AudioOptions options;
provider_->SetAudioSend(ssrc_, false, options, nullptr);
if (stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
@ -199,10 +198,6 @@ void AudioRtpSender::Stop() {
void AudioRtpSender::SetAudioSend() {
RTC_DCHECK(!stopped_ && can_send_track());
if (!channel_) {
LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
#if !defined(WEBRTC_CHROMIUM_BUILD)
// TODO(tommi): Remove this hack when we move CreateAudioSource out of
@ -217,50 +212,37 @@ void AudioRtpSender::SetAudioSend() {
#endif
cricket::AudioSource* source = sink_adapter_.get();
RTC_DCHECK(source != nullptr);
if (!channel_->SetAudioSend(ssrc_, track_->enabled(), &options, source)) {
LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc_;
}
}
void AudioRtpSender::ClearAudioSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!channel_) {
LOG(LS_WARNING) << "ClearAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
if (!channel_->SetAudioSend(ssrc_, false, &options, nullptr)) {
LOG(LS_WARNING) << "ClearAudioSend: ssrc is incorrect: " << ssrc_;
}
ASSERT(source != nullptr);
provider_->SetAudioSend(ssrc_, track_->enabled(), options, source);
}
VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
const std::string& stream_id,
cricket::VideoChannel* channel)
VideoProviderInterface* provider)
: id_(track->id()),
stream_id_(stream_id),
channel_(channel),
provider_(provider),
track_(track),
cached_track_enabled_(track->enabled()) {
RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
}
VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
cricket::VideoChannel* channel)
VideoProviderInterface* provider)
: id_(track->id()),
stream_id_(rtc::CreateRandomUuid()),
channel_(channel),
provider_(provider),
track_(track),
cached_track_enabled_(track->enabled()) {
RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
}
VideoRtpSender::VideoRtpSender(cricket::VideoChannel* channel)
VideoRtpSender::VideoRtpSender(VideoProviderInterface* provider)
: id_(rtc::CreateRandomUuid()),
stream_id_(rtc::CreateRandomUuid()),
channel_(channel) {}
provider_(provider) {}
VideoRtpSender::~VideoRtpSender() {
Stop();
@ -306,7 +288,7 @@ bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
track_->RegisterObserver(this);
}
// Update video channel.
// Update video provider.
if (can_send_track()) {
SetVideoSend();
} else if (prev_can_send_track) {
@ -316,18 +298,12 @@ bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
}
RtpParameters VideoRtpSender::GetParameters() const {
if (!channel_ || stopped_) {
return RtpParameters();
}
return channel_->GetRtpSendParameters(ssrc_);
return provider_->GetVideoRtpSendParameters(ssrc_);
}
bool VideoRtpSender::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "VideoRtpSender::SetParameters");
if (!channel_ || stopped_) {
return false;
}
return channel_->SetRtpSendParameters(ssrc_, parameters);
return provider_->SetVideoRtpSendParameters(ssrc_, parameters);
}
void VideoRtpSender::SetSsrc(uint32_t ssrc) {
@ -362,31 +338,19 @@ void VideoRtpSender::Stop() {
void VideoRtpSender::SetVideoSend() {
RTC_DCHECK(!stopped_ && can_send_track());
if (!channel_) {
LOG(LS_ERROR) << "SetVideoSend: No video channel exists.";
return;
}
cricket::VideoOptions options;
VideoTrackSourceInterface* source = track_->GetSource();
if (source) {
options.is_screencast = rtc::Optional<bool>(source->is_screencast());
options.video_noise_reduction = source->needs_denoising();
}
RTC_DCHECK(
channel_->SetVideoSend(ssrc_, track_->enabled(), &options, track_));
provider_->SetVideoSend(ssrc_, track_->enabled(), &options, track_);
}
void VideoRtpSender::ClearVideoSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!channel_) {
LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
return;
}
// Allow SetVideoSend to fail since |enable| is false and |source| is null.
// This is the normal case when the underlying media channel has already been
// deleted.
channel_->SetVideoSend(ssrc_, false, nullptr, nullptr);
RTC_DCHECK(provider_ != nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr, nullptr);
}
} // namespace webrtc

View File

@ -18,13 +18,12 @@
#include <memory>
#include <string>
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/api/statscollector.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/media/base/audiosource.h"
#include "webrtc/pc/channel.h"
namespace webrtc {
@ -73,21 +72,18 @@ class AudioRtpSender : public ObserverInterface,
public:
// StatsCollector provided so that Add/RemoveLocalAudioTrack can be called
// at the appropriate times.
// |channel| can be null if one does not exist yet.
AudioRtpSender(AudioTrackInterface* track,
const std::string& stream_id,
cricket::VoiceChannel* channel,
AudioProviderInterface* provider,
StatsCollector* stats);
// Randomly generates stream_id.
// |channel| can be null if one does not exist yet.
AudioRtpSender(AudioTrackInterface* track,
cricket::VoiceChannel* channel,
AudioProviderInterface* provider,
StatsCollector* stats);
// Randomly generates id and stream_id.
// |channel| can be null if one does not exist yet.
AudioRtpSender(cricket::VoiceChannel* channel, StatsCollector* stats);
AudioRtpSender(AudioProviderInterface* provider, StatsCollector* stats);
virtual ~AudioRtpSender();
@ -126,10 +122,6 @@ class AudioRtpSender : public ObserverInterface,
void Stop() override;
// Does not take ownership.
// Should call SetChannel(nullptr) before |channel| is destroyed.
void SetChannel(cricket::VoiceChannel* channel) { channel_ = channel; }
private:
// TODO(nisse): Since SSRC == 0 is technically valid, figure out
// some other way to test if we have a valid SSRC.
@ -137,12 +129,10 @@ class AudioRtpSender : public ObserverInterface,
// Helper function to construct options for
// AudioProviderInterface::SetAudioSend.
void SetAudioSend();
// Helper function to call SetAudioSend with "stop sending" parameters.
void ClearAudioSend();
std::string id_;
std::string stream_id_;
cricket::VoiceChannel* channel_ = nullptr;
AudioProviderInterface* provider_;
StatsCollector* stats_;
rtc::scoped_refptr<AudioTrackInterface> track_;
uint32_t ssrc_ = 0;
@ -157,18 +147,15 @@ class AudioRtpSender : public ObserverInterface,
class VideoRtpSender : public ObserverInterface,
public rtc::RefCountedObject<RtpSenderInternal> {
public:
// |channel| can be null if one does not exist yet.
VideoRtpSender(VideoTrackInterface* track,
const std::string& stream_id,
cricket::VideoChannel* channel);
VideoProviderInterface* provider);
// Randomly generates stream_id.
// |channel| can be null if one does not exist yet.
VideoRtpSender(VideoTrackInterface* track, cricket::VideoChannel* channel);
VideoRtpSender(VideoTrackInterface* track, VideoProviderInterface* provider);
// Randomly generates id and stream_id.
// |channel| can be null if one does not exist yet.
explicit VideoRtpSender(cricket::VideoChannel* channel);
explicit VideoRtpSender(VideoProviderInterface* provider);
virtual ~VideoRtpSender();
@ -207,10 +194,6 @@ class VideoRtpSender : public ObserverInterface,
void Stop() override;
// Does not take ownership.
// Should call SetChannel(nullptr) before |channel| is destroyed.
void SetChannel(cricket::VideoChannel* channel) { channel_ = channel; }
private:
bool can_send_track() const { return track_ && ssrc_; }
// Helper function to construct options for
@ -221,7 +204,7 @@ class VideoRtpSender : public ObserverInterface,
std::string id_;
std::string stream_id_;
cricket::VideoChannel* channel_ = nullptr;
VideoProviderInterface* provider_;
rtc::scoped_refptr<VideoTrackInterface> track_;
uint32_t ssrc_ = 0;
bool cached_track_enabled_ = false;

View File

@ -15,8 +15,6 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/api/audiotrack.h"
#include "webrtc/api/fakemediacontroller.h"
#include "webrtc/api/localaudiosource.h"
#include "webrtc/api/mediastream.h"
#include "webrtc/api/remoteaudiosource.h"
#include "webrtc/api/rtpreceiver.h"
@ -27,10 +25,6 @@
#include "webrtc/api/videotrack.h"
#include "webrtc/base/gunit.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/fakemediaengine.h"
#include "webrtc/media/engine/fakewebrtccall.h"
#include "webrtc/p2p/base/faketransportcontroller.h"
#include "webrtc/pc/channelmanager.h"
using ::testing::_;
using ::testing::Exactly;
@ -47,56 +41,69 @@ static const uint32_t kAudioSsrc2 = 101;
namespace webrtc {
class RtpSenderReceiverTest : public testing::Test {
// Helper class to test RtpSender/RtpReceiver.
class MockAudioProvider : public AudioProviderInterface {
public:
RtpSenderReceiverTest()
: // Create fake media engine/etc. so we can create channels to use to
// test RtpSenders/RtpReceivers.
media_engine_(new cricket::FakeMediaEngine()),
channel_manager_(media_engine_,
rtc::Thread::Current(),
rtc::Thread::Current()),
fake_call_(webrtc::Call::Config()),
fake_media_controller_(&channel_manager_, &fake_call_),
stream_(MediaStream::Create(kStreamLabel1)) {
// Create channels to be used by the RtpSenders and RtpReceivers.
channel_manager_.Init();
voice_channel_ = channel_manager_.CreateVoiceChannel(
&fake_media_controller_, &fake_transport_controller_, cricket::CN_AUDIO,
nullptr, false, cricket::AudioOptions());
video_channel_ = channel_manager_.CreateVideoChannel(
&fake_media_controller_, &fake_transport_controller_, cricket::CN_VIDEO,
nullptr, false, cricket::VideoOptions());
voice_media_channel_ = media_engine_->GetVoiceChannel(0);
video_media_channel_ = media_engine_->GetVideoChannel(0);
RTC_CHECK(voice_channel_);
RTC_CHECK(video_channel_);
RTC_CHECK(voice_media_channel_);
RTC_CHECK(video_media_channel_);
// TODO(nisse): Valid overrides commented out, because the gmock
// methods don't use any override declarations, and we want to avoid
// warnings from -Winconsistent-missing-override. See
// http://crbug.com/428099.
~MockAudioProvider() /* override */ {}
// Create streams for predefined SSRCs. Streams need to exist in order
// for the senders and receivers to apply parameters to them.
// Normally these would be created by SetLocalDescription and
// SetRemoteDescription.
voice_media_channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kAudioSsrc));
voice_media_channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(kAudioSsrc));
voice_media_channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kAudioSsrc2));
voice_media_channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(kAudioSsrc2));
video_media_channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kVideoSsrc));
video_media_channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(kVideoSsrc));
video_media_channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kVideoSsrc2));
video_media_channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(kVideoSsrc2));
MOCK_METHOD2(SetAudioPlayout,
void(uint32_t ssrc,
bool enable));
MOCK_METHOD4(SetAudioSend,
void(uint32_t ssrc,
bool enable,
const cricket::AudioOptions& options,
cricket::AudioSource* source));
MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32_t ssrc, double volume));
MOCK_CONST_METHOD1(GetAudioRtpSendParameters, RtpParameters(uint32_t ssrc));
MOCK_METHOD2(SetAudioRtpSendParameters,
bool(uint32_t ssrc, const RtpParameters&));
MOCK_CONST_METHOD1(GetAudioRtpReceiveParameters,
RtpParameters(uint32_t ssrc));
MOCK_METHOD2(SetAudioRtpReceiveParameters,
bool(uint32_t ssrc, const RtpParameters&));
void SetRawAudioSink(
uint32_t, std::unique_ptr<AudioSinkInterface> sink) /* override */ {
sink_ = std::move(sink);
}
void TearDown() override { channel_manager_.Terminate(); }
private:
std::unique_ptr<AudioSinkInterface> sink_;
};
// Helper class to test RtpSender/RtpReceiver.
class MockVideoProvider : public VideoProviderInterface {
public:
virtual ~MockVideoProvider() {}
MOCK_METHOD3(SetVideoPlayout,
void(uint32_t ssrc,
bool enable,
rtc::VideoSinkInterface<cricket::VideoFrame>* sink));
MOCK_METHOD4(SetVideoSend,
void(uint32_t ssrc,
bool enable,
const cricket::VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source));
MOCK_CONST_METHOD1(GetVideoRtpSendParameters, RtpParameters(uint32_t ssrc));
MOCK_METHOD2(SetVideoRtpSendParameters,
bool(uint32_t ssrc, const RtpParameters&));
MOCK_CONST_METHOD1(GetVideoRtpReceiveParameters,
RtpParameters(uint32_t ssrc));
MOCK_METHOD2(SetVideoRtpReceiveParameters,
bool(uint32_t ssrc, const RtpParameters&));
};
class RtpSenderReceiverTest : public testing::Test {
public:
virtual void SetUp() {
stream_ = MediaStream::Create(kStreamLabel1);
}
void AddVideoTrack() {
rtc::scoped_refptr<VideoTrackSourceInterface> source(
@ -105,128 +112,68 @@ class RtpSenderReceiverTest : public testing::Test {
EXPECT_TRUE(stream_->AddTrack(video_track_));
}
void CreateAudioRtpSender() { CreateAudioRtpSender(nullptr); }
void CreateAudioRtpSender(rtc::scoped_refptr<LocalAudioSource> source) {
audio_track_ = AudioTrack::Create(kAudioTrackId, source);
void CreateAudioRtpSender() {
audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
EXPECT_TRUE(stream_->AddTrack(audio_track_));
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
audio_rtp_sender_ =
new AudioRtpSender(stream_->GetAudioTracks()[0], stream_->label(),
voice_channel_, nullptr);
&audio_provider_, nullptr);
audio_rtp_sender_->SetSsrc(kAudioSsrc);
VerifyVoiceChannelInput();
}
void CreateVideoRtpSender() {
AddVideoTrack();
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc, true, _, video_track_.get()));
video_rtp_sender_ = new VideoRtpSender(stream_->GetVideoTracks()[0],
stream_->label(), video_channel_);
stream_->label(), &video_provider_);
video_rtp_sender_->SetSsrc(kVideoSsrc);
VerifyVideoChannelInput();
}
void DestroyAudioRtpSender() {
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _))
.Times(1);
audio_rtp_sender_ = nullptr;
VerifyVoiceChannelNoInput();
}
void DestroyVideoRtpSender() {
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _, nullptr))
.Times(1);
video_rtp_sender_ = nullptr;
VerifyVideoChannelNoInput();
}
void CreateAudioRtpReceiver() {
audio_track_ = AudioTrack::Create(
kAudioTrackId, RemoteAudioSource::Create(kAudioSsrc, NULL));
EXPECT_TRUE(stream_->AddTrack(audio_track_));
EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
audio_rtp_receiver_ = new AudioRtpReceiver(stream_, kAudioTrackId,
kAudioSsrc, voice_channel_);
kAudioSsrc, &audio_provider_);
audio_track_ = audio_rtp_receiver_->audio_track();
VerifyVoiceChannelOutput();
}
void CreateVideoRtpReceiver() {
EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, true, _));
video_rtp_receiver_ =
new VideoRtpReceiver(stream_, kVideoTrackId, rtc::Thread::Current(),
kVideoSsrc, video_channel_);
kVideoSsrc, &video_provider_);
video_track_ = video_rtp_receiver_->video_track();
VerifyVideoChannelOutput();
}
void DestroyAudioRtpReceiver() {
EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
audio_rtp_receiver_ = nullptr;
VerifyVoiceChannelNoOutput();
}
void DestroyVideoRtpReceiver() {
EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, NULL));
video_rtp_receiver_ = nullptr;
VerifyVideoChannelNoOutput();
}
void VerifyVoiceChannelInput() { VerifyVoiceChannelInput(kAudioSsrc); }
void VerifyVoiceChannelInput(uint32_t ssrc) {
// Verify that the media channel has an audio source, and the stream isn't
// muted.
EXPECT_TRUE(voice_media_channel_->HasSource(ssrc));
EXPECT_FALSE(voice_media_channel_->IsStreamMuted(ssrc));
}
void VerifyVideoChannelInput() { VerifyVideoChannelInput(kVideoSsrc); }
void VerifyVideoChannelInput(uint32_t ssrc) {
// Verify that the media channel has a video source,
EXPECT_TRUE(video_media_channel_->HasSource(ssrc));
}
void VerifyVoiceChannelNoInput() { VerifyVoiceChannelNoInput(kAudioSsrc); }
void VerifyVoiceChannelNoInput(uint32_t ssrc) {
// Verify that the media channel's source is reset.
EXPECT_FALSE(voice_media_channel_->HasSource(ssrc));
}
void VerifyVideoChannelNoInput() { VerifyVideoChannelNoInput(kVideoSsrc); }
void VerifyVideoChannelNoInput(uint32_t ssrc) {
// Verify that the media channel's source is reset.
EXPECT_FALSE(video_media_channel_->HasSource(ssrc));
}
void VerifyVoiceChannelOutput() {
// Verify that the volume is initialized to 1.
double volume;
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(1, volume);
}
void VerifyVideoChannelOutput() {
// Verify that the media channel has a sink.
EXPECT_TRUE(video_media_channel_->HasSink(kVideoSsrc));
}
void VerifyVoiceChannelNoOutput() {
// Verify that the volume is reset to 0.
double volume;
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0, volume);
}
void VerifyVideoChannelNoOutput() {
// Verify that the media channel's sink is reset.
EXPECT_FALSE(video_media_channel_->HasSink(kVideoSsrc));
}
protected:
cricket::FakeMediaEngine* media_engine_;
cricket::FakeTransportController fake_transport_controller_;
cricket::ChannelManager channel_manager_;
cricket::FakeCall fake_call_;
cricket::FakeMediaController fake_media_controller_;
cricket::VoiceChannel* voice_channel_;
cricket::VideoChannel* video_channel_;
cricket::FakeVoiceMediaChannel* voice_media_channel_;
cricket::FakeVideoMediaChannel* video_media_channel_;
MockAudioProvider audio_provider_;
MockVideoProvider video_provider_;
rtc::scoped_refptr<AudioRtpSender> audio_rtp_sender_;
rtc::scoped_refptr<VideoRtpSender> video_rtp_sender_;
rtc::scoped_refptr<AudioRtpReceiver> audio_rtp_receiver_;
@ -236,96 +183,72 @@ class RtpSenderReceiverTest : public testing::Test {
rtc::scoped_refptr<AudioTrackInterface> audio_track_;
};
// Test that |voice_channel_| is updated when an audio track is associated
// Test that |audio_provider_| is notified when an audio track is associated
// and disassociated with an AudioRtpSender.
TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpSender) {
CreateAudioRtpSender();
DestroyAudioRtpSender();
}
// Test that |video_channel_| is updated when a video track is associated and
// Test that |video_provider_| is notified when a video track is associated and
// disassociated with a VideoRtpSender.
TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpSender) {
CreateVideoRtpSender();
DestroyVideoRtpSender();
}
// Test that |voice_channel_| is updated when a remote audio track is
// Test that |audio_provider_| is notified when a remote audio track is
// associated and disassociated with an AudioRtpReceiver.
TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpReceiver) {
CreateAudioRtpReceiver();
DestroyAudioRtpReceiver();
}
// Test that |video_channel_| is updated when a remote video track is
// associated and disassociated with a VideoRtpReceiver.
// Test that |video_provider_| is notified when a remote
// video track is associated and disassociated with a VideoRtpReceiver.
TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpReceiver) {
CreateVideoRtpReceiver();
DestroyVideoRtpReceiver();
}
// Test that the AudioRtpSender applies options from the local audio source.
TEST_F(RtpSenderReceiverTest, LocalAudioSourceOptionsApplied) {
cricket::AudioOptions options;
options.echo_cancellation = rtc::Optional<bool>(true);
auto source = LocalAudioSource::Create(
PeerConnectionFactoryInterface::Options(), &options);
CreateAudioRtpSender(source.get());
EXPECT_EQ(rtc::Optional<bool>(true),
voice_media_channel_->options().echo_cancellation);
DestroyAudioRtpSender();
}
// Test that the stream is muted when the track is disabled, and unmuted when
// the track is enabled.
TEST_F(RtpSenderReceiverTest, LocalAudioTrackDisable) {
CreateAudioRtpSender();
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _));
audio_track_->set_enabled(false);
EXPECT_TRUE(voice_media_channel_->IsStreamMuted(kAudioSsrc));
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
audio_track_->set_enabled(true);
EXPECT_FALSE(voice_media_channel_->IsStreamMuted(kAudioSsrc));
DestroyAudioRtpSender();
}
// Test that the volume is set to 0 when the track is disabled, and back to
// 1 when the track is enabled.
TEST_F(RtpSenderReceiverTest, RemoteAudioTrackDisable) {
CreateAudioRtpReceiver();
double volume;
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(1, volume);
EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
audio_track_->set_enabled(false);
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0, volume);
EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
audio_track_->set_enabled(true);
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(1, volume);
DestroyAudioRtpReceiver();
}
// Currently no action is taken when a remote video track is disabled or
// enabled, so there's nothing to test here, other than what is normally
// verified in DestroyVideoRtpSender.
TEST_F(RtpSenderReceiverTest, LocalVideoTrackDisable) {
CreateVideoRtpSender();
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc, false, _, video_track_.get()));
video_track_->set_enabled(false);
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc, true, _, video_track_.get()));
video_track_->set_enabled(true);
DestroyVideoRtpSender();
}
// Test that the state of the video track created by the VideoRtpReceiver is
// updated when the receiver is destroyed.
TEST_F(RtpSenderReceiverTest, RemoteVideoTrackState) {
CreateVideoRtpReceiver();
@ -340,268 +263,282 @@ TEST_F(RtpSenderReceiverTest, RemoteVideoTrackState) {
video_track_->GetSource()->state());
}
// Currently no action is taken when a remote video track is disabled or
// enabled, so there's nothing to test here, other than what is normally
// verified in DestroyVideoRtpReceiver.
TEST_F(RtpSenderReceiverTest, RemoteVideoTrackDisable) {
CreateVideoRtpReceiver();
video_track_->set_enabled(false);
video_track_->set_enabled(true);
DestroyVideoRtpReceiver();
}
// Test that the AudioRtpReceiver applies volume changes from the track source
// to the media channel.
TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
CreateAudioRtpReceiver();
double volume;
audio_track_->GetSource()->SetVolume(0.5);
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0.5, volume);
double volume = 0.5;
EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, volume));
audio_track_->GetSource()->SetVolume(volume);
// Disable the audio track, this should prevent setting the volume.
EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
audio_track_->set_enabled(false);
audio_track_->GetSource()->SetVolume(0.8);
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0, volume);
audio_track_->GetSource()->SetVolume(1.0);
// When the track is enabled, the previously set volume should take effect.
EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
audio_track_->set_enabled(true);
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0.8, volume);
// Try changing volume one more time.
audio_track_->GetSource()->SetVolume(0.9);
EXPECT_TRUE(voice_media_channel_->GetOutputVolume(kAudioSsrc, &volume));
EXPECT_EQ(0.9, volume);
double new_volume = 0.8;
EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, new_volume));
audio_track_->GetSource()->SetVolume(new_volume);
DestroyAudioRtpReceiver();
}
// Test that the media channel isn't enabled for sending if the audio sender
// doesn't have both a track and SSRC.
// Test that provider methods aren't called without both a track and an SSRC.
TEST_F(RtpSenderReceiverTest, AudioSenderWithoutTrackAndSsrc) {
audio_rtp_sender_ = new AudioRtpSender(voice_channel_, nullptr);
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(&audio_provider_, nullptr);
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
// Track but no SSRC.
EXPECT_TRUE(audio_rtp_sender_->SetTrack(track));
VerifyVoiceChannelNoInput();
// SSRC but no track.
EXPECT_TRUE(audio_rtp_sender_->SetTrack(nullptr));
audio_rtp_sender_->SetSsrc(kAudioSsrc);
VerifyVoiceChannelNoInput();
EXPECT_TRUE(sender->SetTrack(track));
EXPECT_TRUE(sender->SetTrack(nullptr));
sender->SetSsrc(kAudioSsrc);
sender->SetSsrc(0);
// Just let it get destroyed and make sure it doesn't call any methods on the
// provider interface.
}
// Test that the media channel isn't enabled for sending if the video sender
// doesn't have both a track and SSRC.
// Test that provider methods aren't called without both a track and an SSRC.
TEST_F(RtpSenderReceiverTest, VideoSenderWithoutTrackAndSsrc) {
video_rtp_sender_ = new VideoRtpSender(video_channel_);
// Track but no SSRC.
EXPECT_TRUE(video_rtp_sender_->SetTrack(video_track_));
VerifyVideoChannelNoInput();
// SSRC but no track.
EXPECT_TRUE(video_rtp_sender_->SetTrack(nullptr));
video_rtp_sender_->SetSsrc(kVideoSsrc);
VerifyVideoChannelNoInput();
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
EXPECT_TRUE(sender->SetTrack(video_track_));
EXPECT_TRUE(sender->SetTrack(nullptr));
sender->SetSsrc(kVideoSsrc);
sender->SetSsrc(0);
// Just let it get destroyed and make sure it doesn't call any methods on the
// provider interface.
}
// Test that the media channel is enabled for sending when the audio sender
// has a track and SSRC, when the SSRC is set first.
// Test that an audio sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set first.
TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupSsrcThenTrack) {
audio_rtp_sender_ = new AudioRtpSender(voice_channel_, nullptr);
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(&audio_provider_, nullptr);
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
audio_rtp_sender_->SetSsrc(kAudioSsrc);
audio_rtp_sender_->SetTrack(track);
VerifyVoiceChannelInput();
sender->SetSsrc(kAudioSsrc);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
sender->SetTrack(track);
DestroyAudioRtpSender();
// Calls expected from destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
}
// Test that the media channel is enabled for sending when the audio sender
// has a track and SSRC, when the SSRC is set last.
// Test that an audio sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set last.
TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupTrackThenSsrc) {
audio_rtp_sender_ = new AudioRtpSender(voice_channel_, nullptr);
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(&audio_provider_, nullptr);
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
audio_rtp_sender_->SetTrack(track);
audio_rtp_sender_->SetSsrc(kAudioSsrc);
VerifyVoiceChannelInput();
sender->SetTrack(track);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
sender->SetSsrc(kAudioSsrc);
DestroyAudioRtpSender();
// Calls expected from destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
}
// Test that the media channel is enabled for sending when the video sender
// has a track and SSRC, when the SSRC is set first.
// Test that a video sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set first.
TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupSsrcThenTrack) {
AddVideoTrack();
video_rtp_sender_ = new VideoRtpSender(video_channel_);
video_rtp_sender_->SetSsrc(kVideoSsrc);
video_rtp_sender_->SetTrack(video_track_);
VerifyVideoChannelInput();
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
sender->SetSsrc(kVideoSsrc);
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc, true, _, video_track_.get()));
sender->SetTrack(video_track_);
DestroyVideoRtpSender();
// Calls expected from destructor.
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _, nullptr))
.Times(1);
}
// Test that the media channel is enabled for sending when the video sender
// has a track and SSRC, when the SSRC is set last.
// Test that a video sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set last.
TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupTrackThenSsrc) {
AddVideoTrack();
video_rtp_sender_ = new VideoRtpSender(video_channel_);
video_rtp_sender_->SetTrack(video_track_);
video_rtp_sender_->SetSsrc(kVideoSsrc);
VerifyVideoChannelInput();
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
sender->SetTrack(video_track_);
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc, true, _, video_track_.get()));
sender->SetSsrc(kVideoSsrc);
DestroyVideoRtpSender();
// Calls expected from destructor.
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _, nullptr))
.Times(1);
}
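// All four early-warmup tests rely on the same gating rule: the sender leaves
// the provider untouched until it has both a track and a nonzero SSRC. A rough
// sketch of that rule for the audio case; the struct and member names are
// illustrative, not the actual AudioRtpSender implementation.
struct AudioSenderWarmupSketch {
  webrtc::AudioProviderInterface* provider = nullptr;
  rtc::scoped_refptr<webrtc::AudioTrackInterface> track;
  uint32_t ssrc = 0;
  cricket::AudioOptions options;
  cricket::AudioSource* source = nullptr;

  // Called whenever the track or the SSRC changes.
  void MaybeStartSending() {
    if (!track || ssrc == 0) {
      return;  // Not fully configured yet; nothing is sent to the provider.
    }
    provider->SetAudioSend(ssrc, track->enabled(), options, source);
  }
};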
// Test that the media channel stops sending when the audio sender's SSRC is set
// to 0.
// Test that the sender is disconnected from the provider when its SSRC is
// set to 0.
TEST_F(RtpSenderReceiverTest, AudioSenderSsrcSetToZero) {
CreateAudioRtpSender();
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
sender->SetSsrc(kAudioSsrc);
audio_rtp_sender_->SetSsrc(0);
VerifyVoiceChannelNoInput();
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
sender->SetSsrc(0);
// Make sure it's SetSsrc that called methods on the provider, and not the
// destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
}
// Test that the media channel stops sending when the video sender's SSRC is set
// to 0.
// Test that the sender is disconnected from the provider when its SSRC is
// set to 0.
TEST_F(RtpSenderReceiverTest, VideoSenderSsrcSetToZero) {
CreateAudioRtpSender();
AddVideoTrack();
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc, true, _, video_track_.get()));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
audio_rtp_sender_->SetSsrc(0);
VerifyVideoChannelNoInput();
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _, nullptr))
.Times(1);
sender->SetSsrc(0);
// Make sure it's SetSsrc that called methods on the provider, and not the
// destructor.
EXPECT_CALL(video_provider_, SetVideoSend(_, _, _, _)).Times(0);
}
// Test that the media channel stops sending when the audio sender's track is
// set to null.
TEST_F(RtpSenderReceiverTest, AudioSenderTrackSetToNull) {
CreateAudioRtpSender();
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
sender->SetSsrc(kAudioSsrc);
EXPECT_TRUE(audio_rtp_sender_->SetTrack(nullptr));
VerifyVoiceChannelNoInput();
// Expect that SetAudioSend will be called before the reference to the track
// is released.
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, nullptr))
.Times(1)
.WillOnce(InvokeWithoutArgs([&track] {
EXPECT_LT(2, track->AddRef());
track->Release();
}));
EXPECT_TRUE(sender->SetTrack(nullptr));
// Make sure it's SetTrack that called methods on the provider, and not the
// destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
}
// Test that the media channel stops sending when the video sender's track is
// set to null.
TEST_F(RtpSenderReceiverTest, VideoSenderTrackSetToNull) {
CreateVideoRtpSender();
rtc::scoped_refptr<VideoTrackSourceInterface> source(
FakeVideoTrackSource::Create());
rtc::scoped_refptr<VideoTrackInterface> track =
VideoTrack::Create(kVideoTrackId, source);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _, track.get()));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(track, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
video_rtp_sender_->SetSsrc(0);
VerifyVideoChannelNoInput();
// Expect that SetVideoSend will be called before the reference to the track
// is released.
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _, nullptr))
.Times(1)
.WillOnce(InvokeWithoutArgs([&track] {
EXPECT_LT(2, track->AddRef());
track->Release();
}));
EXPECT_TRUE(sender->SetTrack(nullptr));
// Make sure it's SetTrack that called methods on the provider, and not the
// destructor.
EXPECT_CALL(video_provider_, SetVideoSend(_, _, _, _)).Times(0);
}
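// Both TrackSetToNull tests pin down an ordering detail: the provider is told
// to stop sending before the sender drops its reference to the track, so the
// media layer never sees a dangling track. A sketch of that order for the
// video case; the function name is illustrative.
void ClearVideoTrackSketch(
    webrtc::VideoProviderInterface* provider,
    uint32_t ssrc,
    rtc::scoped_refptr<webrtc::VideoTrackInterface>* track) {
  // Detach from the media layer while |*track| is still alive...
  provider->SetVideoSend(ssrc, false, nullptr, nullptr);
  // ...and only then release the reference.
  *track = nullptr;
}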
// Test that when the audio sender's SSRC is changed, the media channel stops
// sending with the old SSRC and starts sending with the new one.
TEST_F(RtpSenderReceiverTest, AudioSenderSsrcChanged) {
CreateAudioRtpSender();
AddVideoTrack();
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
sender->SetSsrc(kAudioSsrc);
audio_rtp_sender_->SetSsrc(kAudioSsrc2);
VerifyVoiceChannelNoInput(kAudioSsrc);
VerifyVoiceChannelInput(kAudioSsrc2);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, true, _, _)).Times(1);
sender->SetSsrc(kAudioSsrc2);
audio_rtp_sender_ = nullptr;
VerifyVoiceChannelNoInput(kAudioSsrc2);
// Calls expected from destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, false, _, _)).Times(1);
}
// Test that when the video sender's SSRC is changed, the media channel stops
// sending with the old SSRC and starts sending with the new one.
TEST_F(RtpSenderReceiverTest, VideoSenderSsrcChanged) {
CreateVideoRtpSender();
AddVideoTrack();
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc, true, _, video_track_.get()));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
video_rtp_sender_->SetSsrc(kVideoSsrc2);
VerifyVideoChannelNoInput(kVideoSsrc);
VerifyVideoChannelInput(kVideoSsrc2);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _, nullptr))
.Times(1);
EXPECT_CALL(video_provider_,
SetVideoSend(kVideoSsrc2, true, _, video_track_.get()))
.Times(1);
sender->SetSsrc(kVideoSsrc2);
video_rtp_sender_ = nullptr;
VerifyVideoChannelNoInput(kVideoSsrc2);
// Calls expected from destructor.
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, false, _, nullptr))
.Times(1);
}
TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParameters) {
CreateAudioRtpSender();
EXPECT_CALL(audio_provider_, GetAudioRtpSendParameters(kAudioSsrc))
.WillOnce(Return(RtpParameters()));
EXPECT_CALL(audio_provider_, SetAudioRtpSendParameters(kAudioSsrc, _))
.WillOnce(Return(true));
RtpParameters params = audio_rtp_sender_->GetParameters();
EXPECT_EQ(1u, params.encodings.size());
EXPECT_TRUE(audio_rtp_sender_->SetParameters(params));
DestroyAudioRtpSender();
}
TEST_F(RtpSenderReceiverTest, SetAudioMaxSendBitrate) {
CreateAudioRtpSender();
EXPECT_EQ(-1, voice_media_channel_->max_bps());
webrtc::RtpParameters params = audio_rtp_sender_->GetParameters();
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(-1, params.encodings[0].max_bitrate_bps);
params.encodings[0].max_bitrate_bps = 1000;
EXPECT_TRUE(audio_rtp_sender_->SetParameters(params));
// Read back the parameters and verify they have been changed.
params = audio_rtp_sender_->GetParameters();
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the audio channel received the new parameters.
params = voice_media_channel_->GetRtpSendParameters(kAudioSsrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the global bitrate limit has not been changed.
EXPECT_EQ(-1, voice_media_channel_->max_bps());
DestroyAudioRtpSender();
}
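// The per-encoding cap exercised above maps directly onto the public sender
// API. A minimal sketch, assuming |sender| is a webrtc::RtpSenderInterface
// obtained from the PeerConnection; the helper name is illustrative.
bool CapSenderBitrate(webrtc::RtpSenderInterface* sender, int max_bitrate_bps) {
  webrtc::RtpParameters parameters = sender->GetParameters();
  if (parameters.encodings.empty()) {
    return false;
  }
  // Only the first (and currently only) encoding is capped; the channel-wide
  // limit (max_bps) is left untouched.
  parameters.encodings[0].max_bitrate_bps = max_bitrate_bps;
  return sender->SetParameters(parameters);
}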
TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParameters) {
CreateVideoRtpSender();
EXPECT_CALL(video_provider_, GetVideoRtpSendParameters(kVideoSsrc))
.WillOnce(Return(RtpParameters()));
EXPECT_CALL(video_provider_, SetVideoRtpSendParameters(kVideoSsrc, _))
.WillOnce(Return(true));
RtpParameters params = video_rtp_sender_->GetParameters();
EXPECT_EQ(1u, params.encodings.size());
EXPECT_TRUE(video_rtp_sender_->SetParameters(params));
DestroyVideoRtpSender();
}
TEST_F(RtpSenderReceiverTest, SetVideoMaxSendBitrate) {
CreateVideoRtpSender();
EXPECT_EQ(-1, video_media_channel_->max_bps());
webrtc::RtpParameters params = video_rtp_sender_->GetParameters();
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(-1, params.encodings[0].max_bitrate_bps);
params.encodings[0].max_bitrate_bps = 1000;
EXPECT_TRUE(video_rtp_sender_->SetParameters(params));
// Read back the parameters and verify they have been changed.
params = video_rtp_sender_->GetParameters();
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the video channel received the new parameters.
params = video_media_channel_->GetRtpSendParameters(kVideoSsrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the global bitrate limit has not been changed.
EXPECT_EQ(-1, video_media_channel_->max_bps());
DestroyVideoRtpSender();
}
TEST_F(RtpSenderReceiverTest, AudioReceiverCanSetParameters) {
CreateAudioRtpReceiver();
EXPECT_CALL(audio_provider_, GetAudioRtpReceiveParameters(kAudioSsrc))
.WillOnce(Return(RtpParameters()));
EXPECT_CALL(audio_provider_, SetAudioRtpReceiveParameters(kAudioSsrc, _))
.WillOnce(Return(true));
RtpParameters params = audio_rtp_receiver_->GetParameters();
EXPECT_EQ(1u, params.encodings.size());
EXPECT_TRUE(audio_rtp_receiver_->SetParameters(params));
DestroyAudioRtpReceiver();
@ -610,8 +547,11 @@ TEST_F(RtpSenderReceiverTest, AudioReceiverCanSetParameters) {
TEST_F(RtpSenderReceiverTest, VideoReceiverCanSetParameters) {
CreateVideoRtpReceiver();
EXPECT_CALL(video_provider_, GetVideoRtpReceiveParameters(kVideoSsrc))
.WillOnce(Return(RtpParameters()));
EXPECT_CALL(video_provider_, SetVideoRtpReceiveParameters(kVideoSsrc, _))
.WillOnce(Return(true));
RtpParameters params = video_rtp_receiver_->GetParameters();
EXPECT_EQ(1u, params.encodings.size());
EXPECT_TRUE(video_rtp_receiver_->SetParameters(params));
DestroyVideoRtpReceiver();

View File

@ -1169,6 +1169,161 @@ std::string WebRtcSession::BadStateErrMsg(State state) {
return desc.str();
}
void WebRtcSession::SetAudioPlayout(uint32_t ssrc, bool enable) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_) {
LOG(LS_ERROR) << "SetAudioPlayout: No audio channel exists.";
return;
}
if (!voice_channel_->SetOutputVolume(ssrc, enable ? 1 : 0)) {
// Allow SetOutputVolume to fail if |enable| is false, but assert otherwise.
// This is the normal case when the underlying media channel has already been
// deleted.
ASSERT(enable == false);
}
}
void WebRtcSession::SetAudioSend(uint32_t ssrc,
bool enable,
const cricket::AudioOptions& options,
cricket::AudioSource* source) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_) {
LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
return;
}
if (!voice_channel_->SetAudioSend(ssrc, enable, &options, source)) {
LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc;
}
}
void WebRtcSession::SetAudioPlayoutVolume(uint32_t ssrc, double volume) {
ASSERT(signaling_thread()->IsCurrent());
ASSERT(volume >= 0 && volume <= 10);
if (!voice_channel_) {
LOG(LS_ERROR) << "SetAudioPlayoutVolume: No audio channel exists.";
return;
}
if (!voice_channel_->SetOutputVolume(ssrc, volume)) {
ASSERT(false);
}
}
void WebRtcSession::SetRawAudioSink(uint32_t ssrc,
std::unique_ptr<AudioSinkInterface> sink) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_)
return;
voice_channel_->SetRawAudioSink(ssrc, std::move(sink));
}
RtpParameters WebRtcSession::GetAudioRtpSendParameters(uint32_t ssrc) const {
ASSERT(signaling_thread()->IsCurrent());
if (voice_channel_) {
return voice_channel_->GetRtpSendParameters(ssrc);
}
return RtpParameters();
}
bool WebRtcSession::SetAudioRtpSendParameters(uint32_t ssrc,
const RtpParameters& parameters) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_) {
return false;
}
return voice_channel_->SetRtpSendParameters(ssrc, parameters);
}
RtpParameters WebRtcSession::GetAudioRtpReceiveParameters(uint32_t ssrc) const {
ASSERT(signaling_thread()->IsCurrent());
if (voice_channel_) {
return voice_channel_->GetRtpReceiveParameters(ssrc);
}
return RtpParameters();
}
bool WebRtcSession::SetAudioRtpReceiveParameters(
uint32_t ssrc,
const RtpParameters& parameters) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_) {
return false;
}
return voice_channel_->SetRtpReceiveParameters(ssrc, parameters);
}
void WebRtcSession::SetVideoPlayout(
uint32_t ssrc,
bool enable,
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
ASSERT(signaling_thread()->IsCurrent());
if (!video_channel_) {
LOG(LS_WARNING) << "SetVideoPlayout: No video channel exists.";
return;
}
if (!video_channel_->SetSink(ssrc, enable ? sink : NULL)) {
// Allow SetSink to fail if |sink| is NULL, but assert otherwise. This is the
// normal case when the underlying media channel has already been deleted.
ASSERT(sink == NULL);
}
}
void WebRtcSession::SetVideoSend(
uint32_t ssrc,
bool enable,
const cricket::VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
ASSERT(signaling_thread()->IsCurrent());
if (!video_channel_) {
LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
return;
}
if (!video_channel_->SetVideoSend(ssrc, enable, options, source)) {
// Allow SetVideoSend to fail if |enable| is false and |source| is NULL, but
// assert otherwise. This is the normal case when the underlying media
// channel has already been deleted.
ASSERT(enable == false && source == nullptr);
}
}
RtpParameters WebRtcSession::GetVideoRtpSendParameters(uint32_t ssrc) const {
ASSERT(signaling_thread()->IsCurrent());
if (video_channel_) {
return video_channel_->GetRtpSendParameters(ssrc);
}
return RtpParameters();
}
bool WebRtcSession::SetVideoRtpSendParameters(uint32_t ssrc,
const RtpParameters& parameters) {
ASSERT(signaling_thread()->IsCurrent());
if (!video_channel_) {
return false;
}
return video_channel_->SetRtpSendParameters(ssrc, parameters);
}
RtpParameters WebRtcSession::GetVideoRtpReceiveParameters(uint32_t ssrc) const {
ASSERT(signaling_thread()->IsCurrent());
if (video_channel_) {
return video_channel_->GetRtpReceiveParameters(ssrc);
}
return RtpParameters();
}
bool WebRtcSession::SetVideoRtpReceiveParameters(
uint32_t ssrc,
const RtpParameters& parameters) {
ASSERT(signaling_thread()->IsCurrent());
if (!video_channel_) {
return false;
}
return video_channel_->SetRtpReceiveParameters(ssrc, parameters);
}
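// The Get*/Set*RtpSendParameters forwarders above all share the same shape:
// return a default (or false) when the channel is gone, otherwise forward the
// call keyed by SSRC. A condensed sketch of that pattern; the template is
// illustrative and not code from this file.
template <typename ChannelT>
webrtc::RtpParameters GetSendParametersOrDefault(ChannelT* channel,
                                                 uint32_t ssrc) {
  // Fall back to empty RtpParameters instead of dereferencing a channel that
  // has already been destroyed.
  return channel ? channel->GetRtpSendParameters(ssrc)
                 : webrtc::RtpParameters();
}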
bool WebRtcSession::CanInsertDtmf(const std::string& track_id) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_) {
@ -1612,6 +1767,8 @@ bool WebRtcSession::CreateVoiceChannel(const cricket::ContentInfo* content,
voice_channel_->SignalDtlsSetupFailure.connect(
this, &WebRtcSession::OnDtlsSetupFailure);
voice_channel_->SignalFirstPacketReceived.connect(
this, &WebRtcSession::OnChannelFirstPacketReceived);
SignalVoiceChannelCreated();
voice_channel_->SignalSentPacket.connect(this,
@ -1635,6 +1792,8 @@ bool WebRtcSession::CreateVideoChannel(const cricket::ContentInfo* content,
}
video_channel_->SignalDtlsSetupFailure.connect(
this, &WebRtcSession::OnDtlsSetupFailure);
video_channel_->SignalFirstPacketReceived.connect(
this, &WebRtcSession::OnChannelFirstPacketReceived);
SignalVideoChannelCreated();
video_channel_->SignalSentPacket.connect(this,
@ -1676,6 +1835,21 @@ void WebRtcSession::OnDtlsSetupFailure(cricket::BaseChannel*, bool rtcp) {
rtcp ? kDtlsSetupFailureRtcp : kDtlsSetupFailureRtp);
}
void WebRtcSession::OnChannelFirstPacketReceived(
cricket::BaseChannel* channel) {
ASSERT(signaling_thread()->IsCurrent());
if (!received_first_audio_packet_ &&
channel->media_type() == cricket::MEDIA_TYPE_AUDIO) {
received_first_audio_packet_ = true;
SignalFirstAudioPacketReceived();
} else if (!received_first_video_packet_ &&
channel->media_type() == cricket::MEDIA_TYPE_VIDEO) {
received_first_video_packet_ = true;
SignalFirstVideoPacketReceived();
}
}
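// OnChannelFirstPacketReceived latches each media type once and re-raises a
// session-level signal. A sketch of how a consumer could subscribe, assuming
// SignalFirstAudioPacketReceived and SignalFirstVideoPacketReceived are
// publicly accessible, argument-free sigslot signals; the observer class is
// illustrative.
class FirstPacketObserverSketch : public sigslot::has_slots<> {
 public:
  explicit FirstPacketObserverSketch(webrtc::WebRtcSession* session) {
    session->SignalFirstAudioPacketReceived.connect(
        this, &FirstPacketObserverSketch::OnFirstAudioPacket);
    session->SignalFirstVideoPacketReceived.connect(
        this, &FirstPacketObserverSketch::OnFirstVideoPacket);
  }

 private:
  void OnFirstAudioPacket() { /* e.g. report time-to-first-audio. */ }
  void OnFirstVideoPacket() { /* e.g. report time-to-first-video. */ }
};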
void WebRtcSession::OnDataChannelMessageReceived(
cricket::DataChannel* channel,
const cricket::ReceiveDataParams& params,

View File

@ -19,6 +19,7 @@
#include "webrtc/api/datachannel.h"
#include "webrtc/api/dtmfsender.h"
#include "webrtc/api/mediacontroller.h"
#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/api/statstypes.h"
#include "webrtc/base/constructormagic.h"
@ -114,11 +115,11 @@ struct SessionStats {
// participates in the network-level negotiation. The individual streams of
// packets are represented by TransportChannels. The application-level protocol
// is represented by SessionDescription objects.
class WebRtcSession :
public DtmfProviderInterface,
public DataChannelProviderInterface,
public sigslot::has_slots<> {
class WebRtcSession : public AudioProviderInterface,
public VideoProviderInterface,
public DtmfProviderInterface,
public DataChannelProviderInterface,
public sigslot::has_slots<> {
public:
enum State {
STATE_INIT = 0,
@ -233,6 +234,41 @@ class WebRtcSession :
virtual bool GetLocalTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
virtual bool GetRemoteTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
// AudioProviderInterface implementation.
void SetAudioPlayout(uint32_t ssrc, bool enable) override;
void SetAudioSend(uint32_t ssrc,
bool enable,
const cricket::AudioOptions& options,
cricket::AudioSource* source) override;
void SetAudioPlayoutVolume(uint32_t ssrc, double volume) override;
void SetRawAudioSink(uint32_t ssrc,
std::unique_ptr<AudioSinkInterface> sink) override;
RtpParameters GetAudioRtpSendParameters(uint32_t ssrc) const override;
bool SetAudioRtpSendParameters(uint32_t ssrc,
const RtpParameters& parameters) override;
RtpParameters GetAudioRtpReceiveParameters(uint32_t ssrc) const override;
bool SetAudioRtpReceiveParameters(uint32_t ssrc,
const RtpParameters& parameters) override;
// Implements VideoProviderInterface.
void SetVideoPlayout(
uint32_t ssrc,
bool enable,
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
void SetVideoSend(
uint32_t ssrc,
bool enable,
const cricket::VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) override;
RtpParameters GetVideoRtpSendParameters(uint32_t ssrc) const override;
bool SetVideoRtpSendParameters(uint32_t ssrc,
const RtpParameters& parameters) override;
RtpParameters GetVideoRtpReceiveParameters(uint32_t ssrc) const override;
bool SetVideoRtpReceiveParameters(uint32_t ssrc,
const RtpParameters& parameters) override;
// Implements DtmfProviderInterface.
bool CanInsertDtmf(const std::string& track_id) override;
bool InsertDtmf(const std::string& track_id,
@ -274,6 +310,8 @@ class WebRtcSession :
void OnCertificateReady(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
void OnDtlsSetupFailure(cricket::BaseChannel*, bool rtcp);
// Called when a channel receives its first packet.
void OnChannelFirstPacketReceived(cricket::BaseChannel*);
// For unit test.
bool waiting_for_certificate_for_testing() const;

View File

@ -253,6 +253,11 @@ class WebRtcSessionForTest : public webrtc::WebRtcSession {
return rtcp_transport_channel(data_channel());
}
using webrtc::WebRtcSession::SetAudioPlayout;
using webrtc::WebRtcSession::SetAudioSend;
using webrtc::WebRtcSession::SetVideoPlayout;
using webrtc::WebRtcSession::SetVideoSend;
private:
cricket::TransportChannel* rtp_transport_channel(cricket::BaseChannel* ch) {
if (!ch) {
@ -3387,6 +3392,163 @@ TEST_F(WebRtcSessionTest, TestDisabledRtcpMuxWithBundleEnabled) {
SetLocalDescriptionWithoutError(offer);
}
TEST_F(WebRtcSessionTest, SetAudioPlayout) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->recv_streams().size());
uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
double volume;
EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
EXPECT_EQ(1, volume);
session_->SetAudioPlayout(receive_ssrc, false);
EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
EXPECT_EQ(0, volume);
session_->SetAudioPlayout(receive_ssrc, true);
EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
EXPECT_EQ(1, volume);
}
TEST_F(WebRtcSessionTest, SetAudioMaxSendBitrate) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_EQ(-1, channel->max_bps());
webrtc::RtpParameters params = session_->GetAudioRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(-1, params.encodings[0].max_bitrate_bps);
params.encodings[0].max_bitrate_bps = 1000;
EXPECT_TRUE(session_->SetAudioRtpSendParameters(send_ssrc, params));
// Read back the parameters and verify they have been changed.
params = session_->GetAudioRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the audio channel received the new parameters.
params = channel->GetRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the global bitrate limit has not been changed.
EXPECT_EQ(-1, channel->max_bps());
}
TEST_F(WebRtcSessionTest, SetAudioSend) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->send_streams().size());
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
cricket::AudioOptions options;
options.echo_cancellation = rtc::Optional<bool>(true);
std::unique_ptr<FakeAudioSource> source(new FakeAudioSource());
session_->SetAudioSend(send_ssrc, false, options, source.get());
EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
EXPECT_EQ(rtc::Optional<bool>(), channel->options().echo_cancellation);
EXPECT_TRUE(source->sink() != nullptr);
// This will trigger SetSink(nullptr) on the |source|.
session_->SetAudioSend(send_ssrc, true, options, nullptr);
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
EXPECT_EQ(rtc::Optional<bool>(true), channel->options().echo_cancellation);
EXPECT_TRUE(source->sink() == nullptr);
}
TEST_F(WebRtcSessionTest, AudioSourceForLocalStream) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->send_streams().size());
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
std::unique_ptr<FakeAudioSource> source(new FakeAudioSource());
cricket::AudioOptions options;
session_->SetAudioSend(send_ssrc, true, options, source.get());
EXPECT_TRUE(source->sink() != nullptr);
// Deleting the |source| triggers OnClose() on the sink, which invalidates
// the sink's |source_| pointer and prevents a later SetSink(nullptr)
// callback.
source.reset();
// This would trigger SetSink(nullptr) on the deleted |source| if the
// OnClose() callback had not been delivered.
session_->SetAudioSend(send_ssrc, true, options, nullptr);
}
TEST_F(WebRtcSessionTest, SetVideoPlayout) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_LT(0u, channel->sinks().size());
EXPECT_TRUE(channel->sinks().begin()->second == NULL);
ASSERT_EQ(1u, channel->recv_streams().size());
uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
cricket::FakeVideoRenderer renderer;
session_->SetVideoPlayout(receive_ssrc, true, &renderer);
EXPECT_TRUE(channel->sinks().begin()->second == &renderer);
session_->SetVideoPlayout(receive_ssrc, false, &renderer);
EXPECT_TRUE(channel->sinks().begin()->second == NULL);
}
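// SetVideoPlayout simply swaps the sink that the video channel renders a given
// SSRC to, as the test above shows. A receiver-side sketch of attaching and
// detaching a renderer; |provider| and the function names are illustrative.
void AttachRenderer(webrtc::VideoProviderInterface* provider,
                    uint32_t ssrc,
                    rtc::VideoSinkInterface<cricket::VideoFrame>* renderer) {
  provider->SetVideoPlayout(ssrc, true, renderer);  // Start rendering.
}

void DetachRenderer(webrtc::VideoProviderInterface* provider, uint32_t ssrc) {
  // Passing enable=false clears the sink; this is tolerated even after the
  // underlying media channel has been deleted.
  provider->SetVideoPlayout(ssrc, false, nullptr);
}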
TEST_F(WebRtcSessionTest, SetVideoMaxSendBitrate) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
ASSERT_TRUE(channel != NULL);
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_EQ(-1, channel->max_bps());
webrtc::RtpParameters params = session_->GetVideoRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(-1, params.encodings[0].max_bitrate_bps);
params.encodings[0].max_bitrate_bps = 1000;
EXPECT_TRUE(session_->SetVideoRtpSendParameters(send_ssrc, params));
// Read back the parameters and verify they have been changed.
params = session_->GetVideoRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the video channel received the new parameters.
params = channel->GetRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the global bitrate limit has not been changed.
EXPECT_EQ(-1, channel->max_bps());
}
TEST_F(WebRtcSessionTest, SetVideoSend) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->send_streams().size());
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
cricket::VideoOptions* options = NULL;
session_->SetVideoSend(send_ssrc, false, options, nullptr);
EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
session_->SetVideoSend(send_ssrc, true, options, nullptr);
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
}
TEST_F(WebRtcSessionTest, CanNotInsertDtmf) {
TestCanInsertDtmf(false);
}

View File

@ -339,11 +339,6 @@ class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
}
return true;
}
bool HasSource(uint32_t ssrc) const {
return local_sinks_.find(ssrc) != local_sinks_.end();
}
virtual bool AddRecvStream(const StreamParams& sp) {
if (!RtpHelper<VoiceMediaChannel>::AddRecvStream(sp))
return false;
@ -550,9 +545,6 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
}
return true;
}
bool HasSink(uint32_t ssrc) const {
return sinks_.find(ssrc) != sinks_.end() && sinks_.at(ssrc) != nullptr;
}
bool SetSend(bool send) override { return set_sending(send); }
bool SetVideoSend(
@ -564,17 +556,14 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
return false;
}
if (enable && options) {
if (!SetOptions(*options)) {
return false;
}
return SetOptions(*options);
}
sources_[ssrc] = source;
return true;
}
bool HasSource(uint32_t ssrc) const {
return sources_.find(ssrc) != sources_.end() &&
sources_.at(ssrc) != nullptr;
return sources_.find(ssrc) != sources_.end();
}
bool AddRecvStream(const StreamParams& sp) override {
if (!RtpHelper<VideoMediaChannel>::AddRecvStream(sp))