Configure media flow correctly with Unified Plan

This also changes RtpReceiver and RemoteAudioSource to have two-step
initialization, since in Unified Plan RtpReceivers are created much
earlier than in Plan B.

Bug: webrtc:7600
Change-Id: Ia135d25eb8bcab22969007b3a825a5a43ce62bf4
Reviewed-on: https://webrtc-review.googlesource.com/39382
Reviewed-by: Peter Thatcher <pthatcher@webrtc.org>
Commit-Queue: Steve Anton <steveanton@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21681}
This commit is contained in:
Steve Anton 2018-01-17 17:41:02 -08:00 committed by Commit Bot
parent 9b045fa316
commit d367921eb1
8 changed files with 205 additions and 133 deletions

View File

@@ -151,22 +151,29 @@ void OrtcRtpReceiverAdapter::MaybeRecreateInternalReceiver() {
internal_receiver_ = nullptr;
switch (kind_) {
case cricket::MEDIA_TYPE_AUDIO: {
auto* audio_receiver =
new AudioRtpReceiver(rtp_transport_controller_->worker_thread(),
rtc::CreateRandomUuid(), {});
auto* voice_channel = rtp_transport_controller_->voice_channel();
internal_receiver_ = new AudioRtpReceiver(
rtp_transport_controller_->worker_thread(), rtc::CreateRandomUuid(),
{}, ssrc, (voice_channel ? voice_channel->media_channel() : nullptr));
RTC_DCHECK(voice_channel);
audio_receiver->SetMediaChannel(voice_channel->media_channel());
internal_receiver_ = audio_receiver;
break;
}
case cricket::MEDIA_TYPE_VIDEO: {
auto* video_receiver =
new VideoRtpReceiver(rtp_transport_controller_->worker_thread(),
rtc::CreateRandomUuid(), {});
auto* video_channel = rtp_transport_controller_->video_channel();
internal_receiver_ = new VideoRtpReceiver(
rtp_transport_controller_->worker_thread(), rtc::CreateRandomUuid(),
{}, ssrc, (video_channel ? video_channel->media_channel() : nullptr));
RTC_DCHECK(video_channel);
video_receiver->SetMediaChannel(video_channel->media_channel());
internal_receiver_ = video_receiver;
break;
}
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
}
internal_receiver_->SetupMediaChannel(ssrc);
}
} // namespace webrtc

View File

@@ -1314,12 +1314,12 @@ PeerConnection::CreateReceiver(cricket::MediaType media_type,
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(),
new AudioRtpReceiver(worker_thread(), receiver_id, {}, 0, nullptr));
new AudioRtpReceiver(worker_thread(), receiver_id, {}));
} else {
RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO);
receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(),
new VideoRtpReceiver(worker_thread(), receiver_id, {}, 0, nullptr));
new VideoRtpReceiver(worker_thread(), receiver_id, {}));
}
return receiver;
}
@@ -1766,7 +1766,27 @@ RTCError PeerConnection::ApplyLocalDescription(
AllocateSctpSids(role);
}
if (!IsUnifiedPlan()) {
if (IsUnifiedPlan()) {
for (auto transceiver : transceivers_) {
const ContentInfo* content =
FindMediaSectionForTransceiver(transceiver, local_description());
if (!content) {
continue;
}
if (content->rejected && !transceiver->stopped()) {
transceiver->Stop();
}
if (!content->rejected) {
const auto& stream = content->media_description()->streams()[0];
transceiver->internal()->sender_internal()->set_stream_ids(
{stream.sync_label});
transceiver->internal()->sender_internal()->SetSsrc(
stream.first_ssrc());
}
}
} else {
// Plan B semantics.
// Update state and SSRC of local MediaStreams and DataChannels based on the
// local session description.
const cricket::ContentInfo* audio_content =
@@ -2029,6 +2049,11 @@ RTCError PeerConnection::ApplyRemoteDescription(
if (content->rejected && !transceiver->stopped()) {
transceiver->Stop();
}
if (!content->rejected) {
const auto& stream = content->media_description()->streams()[0];
transceiver->internal()->receiver_internal()->SetupMediaChannel(
stream.first_ssrc());
}
}
for (auto event : track_events) {
observer_->OnAddTrack(event.receiver, event.streams);
@@ -2773,12 +2798,12 @@ void PeerConnection::CreateAudioReceiver(
const RtpSenderInfo& remote_sender_info) {
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams;
streams.push_back(rtc::scoped_refptr<MediaStreamInterface>(stream));
rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(),
new AudioRtpReceiver(worker_thread(), remote_sender_info.sender_id,
streams, remote_sender_info.first_ssrc,
voice_media_channel()));
auto* audio_receiver = new AudioRtpReceiver(
worker_thread(), remote_sender_info.sender_id, streams);
audio_receiver->SetMediaChannel(voice_media_channel());
audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc);
auto receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(), audio_receiver);
GetAudioTransceiver()->internal()->AddReceiver(receiver);
observer_->OnAddTrack(receiver, std::move(streams));
}
@@ -2788,12 +2813,12 @@ void PeerConnection::CreateVideoReceiver(
const RtpSenderInfo& remote_sender_info) {
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams;
streams.push_back(rtc::scoped_refptr<MediaStreamInterface>(stream));
rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(),
new VideoRtpReceiver(worker_thread(), remote_sender_info.sender_id,
streams, remote_sender_info.first_ssrc,
video_media_channel()));
auto* video_receiver = new VideoRtpReceiver(
worker_thread(), remote_sender_info.sender_id, streams);
video_receiver->SetMediaChannel(video_media_channel());
video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc);
auto receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
signaling_thread(), video_receiver);
GetVideoTransceiver()->internal()->AddReceiver(receiver);
observer_->OnAddTrack(receiver, std::move(streams));
}

View File

@@ -84,6 +84,7 @@ using webrtc::PeerConnectionFactory;
using webrtc::PeerConnectionProxy;
using webrtc::RTCErrorType;
using webrtc::RtpReceiverInterface;
using webrtc::SdpSemantics;
using webrtc::SdpType;
using webrtc::SessionDescriptionInterface;
using webrtc::StreamCollectionInterface;
@@ -3625,6 +3626,23 @@ TEST_F(PeerConnectionIntegrationTest, ClosingConnectionStopsPacketFlow) {
EXPECT_EQ(sent_packets_a, sent_packets_b);
}
// Test that a basic 1 audio and 1 video track call works when Unified Plan
// semantics configured for both sides.
TEST_F(PeerConnectionIntegrationTest, UnifiedPlanMediaFlows) {
PeerConnectionInterface::RTCConfiguration config;
config.sdp_semantics = SdpSemantics::kUnifiedPlan;
ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
ConnectFakeSignaling();
caller()->AddAudioVideoTracks();
callee()->AddAudioVideoTracks();
caller()->CreateAndSetAndSignalOffer();
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
ExpectNewFramesReceivedWithWait(
kDefaultExpectedAudioFrameCount, kDefaultExpectedVideoFrameCount,
kDefaultExpectedAudioFrameCount, kDefaultExpectedVideoFrameCount,
kMaxWaitForFramesMs);
}
} // namespace
#endif // if !defined(THREAD_SANITIZER)

View File

@@ -23,35 +23,33 @@
namespace webrtc {
class RemoteAudioSource::Sink : public AudioSinkInterface {
// This proxy is passed to the underlying media engine to receive audio data as
// they come in. The data will then be passed back up to the RemoteAudioSource
// which will fan it out to all the sinks that have been added to it.
class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface {
public:
explicit Sink(RemoteAudioSource* source) : source_(source) {}
~Sink() override { source_->OnAudioChannelGone(); }
explicit AudioDataProxy(RemoteAudioSource* source) : source_(source) {
RTC_DCHECK(source);
}
~AudioDataProxy() override { source_->OnAudioChannelGone(); }
private:
// AudioSinkInterface implementation.
void OnData(const AudioSinkInterface::Data& audio) override {
if (source_)
source_->OnData(audio);
source_->OnData(audio);
}
private:
const rtc::scoped_refptr<RemoteAudioSource> source_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Sink);
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioDataProxy);
};
rtc::scoped_refptr<RemoteAudioSource> RemoteAudioSource::Create(
rtc::Thread* worker_thread,
cricket::VoiceMediaChannel* media_channel,
uint32_t ssrc) {
rtc::scoped_refptr<RemoteAudioSource> ret(
new rtc::RefCountedObject<RemoteAudioSource>());
ret->Initialize(worker_thread, media_channel, ssrc);
return ret;
}
RemoteAudioSource::RemoteAudioSource()
RemoteAudioSource::RemoteAudioSource(rtc::Thread* worker_thread)
: main_thread_(rtc::Thread::Current()),
worker_thread_(worker_thread),
state_(MediaSourceInterface::kLive) {
RTC_DCHECK(main_thread_);
RTC_DCHECK(worker_thread_);
}
RemoteAudioSource::~RemoteAudioSource() {
@@ -60,17 +58,24 @@ RemoteAudioSource::~RemoteAudioSource() {
RTC_DCHECK(sinks_.empty());
}
void RemoteAudioSource::Initialize(rtc::Thread* worker_thread,
cricket::VoiceMediaChannel* media_channel,
uint32_t ssrc) {
RTC_DCHECK(main_thread_->IsCurrent());
// To make sure we always get notified when the channel goes out of scope,
// we register for callbacks here and not on demand in AddSink.
if (media_channel) { // May be null in tests.
worker_thread->Invoke<void>(RTC_FROM_HERE, [&] {
media_channel->SetRawAudioSink(ssrc, rtc::MakeUnique<Sink>(this));
});
}
void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel,
uint32_t ssrc) {
RTC_DCHECK_RUN_ON(main_thread_);
RTC_DCHECK(media_channel);
// Register for callbacks immediately before AddSink so that we always get
// notified when a channel goes out of scope (signaled when "AudioDataProxy"
// is destroyed).
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&] {
media_channel->SetRawAudioSink(ssrc, rtc::MakeUnique<AudioDataProxy>(this));
});
}
void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel,
uint32_t ssrc) {
RTC_DCHECK_RUN_ON(main_thread_);
RTC_DCHECK(media_channel);
worker_thread_->Invoke<void>(
RTC_FROM_HERE, [&] { media_channel->SetRawAudioSink(ssrc, nullptr); });
}
MediaSourceInterface::SourceState RemoteAudioSource::state() const {
@@ -86,8 +91,9 @@ bool RemoteAudioSource::remote() const {
void RemoteAudioSource::SetVolume(double volume) {
RTC_DCHECK_GE(volume, 0);
RTC_DCHECK_LE(volume, 10);
for (auto* observer : audio_observers_)
for (auto* observer : audio_observers_) {
observer->OnSetVolume(volume);
}
}
void RemoteAudioSource::RegisterAudioObserver(AudioObserver* observer) {

View File

@@ -28,50 +28,46 @@ class Thread;
namespace webrtc {
// This class implements the audio source used by the remote audio track.
// This class works by configuring itself as a sink with the underlying media
// engine, then when receiving data will fan out to all added sinks.
class RemoteAudioSource : public Notifier<AudioSourceInterface>,
rtc::MessageHandler {
public:
// Creates an instance of RemoteAudioSource.
static rtc::scoped_refptr<RemoteAudioSource> Create(
rtc::Thread* worker_thread,
cricket::VoiceMediaChannel* media_channel,
uint32_t ssrc);
explicit RemoteAudioSource(rtc::Thread* worker_thread);
// Register and unregister remote audio source with the underlying media
// engine.
void Start(cricket::VoiceMediaChannel* media_channel, uint32_t ssrc);
void Stop(cricket::VoiceMediaChannel* media_channel, uint32_t ssrc);
// MediaSourceInterface implementation.
MediaSourceInterface::SourceState state() const override;
bool remote() const override;
void AddSink(AudioTrackSinkInterface* sink) override;
void RemoveSink(AudioTrackSinkInterface* sink) override;
protected:
RemoteAudioSource();
~RemoteAudioSource() override;
// Post construction initialize where we can do things like save a reference
// to ourselves (need to be fully constructed).
void Initialize(rtc::Thread* worker_thread,
cricket::VoiceMediaChannel* media_channel,
uint32_t ssrc);
private:
typedef std::list<AudioObserver*> AudioObserverList;
// AudioSourceInterface implementation.
void SetVolume(double volume) override;
void RegisterAudioObserver(AudioObserver* observer) override;
void UnregisterAudioObserver(AudioObserver* observer) override;
class Sink;
void AddSink(AudioTrackSinkInterface* sink) override;
void RemoveSink(AudioTrackSinkInterface* sink) override;
protected:
~RemoteAudioSource() override;
private:
// These are callbacks from the media engine.
class AudioDataProxy;
void OnData(const AudioSinkInterface::Data& audio);
void OnAudioChannelGone();
void OnMessage(rtc::Message* msg) override;
AudioObserverList audio_observers_;
rtc::Thread* const main_thread_;
rtc::Thread* const worker_thread_;
std::list<AudioObserver*> audio_observers_;
rtc::CriticalSection sink_lock_;
std::list<AudioTrackSinkInterface*> sinks_;
rtc::Thread* const main_thread_;
SourceState state_;
};

View File

@@ -35,17 +35,12 @@ int GenerateUniqueId() {
AudioRtpReceiver::AudioRtpReceiver(
rtc::Thread* worker_thread,
const std::string& receiver_id,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
uint32_t ssrc,
cricket::VoiceMediaChannel* media_channel)
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams)
: worker_thread_(worker_thread),
id_(receiver_id),
ssrc_(ssrc),
track_(AudioTrackProxy::Create(
rtc::Thread::Current(),
AudioTrack::Create(
receiver_id,
RemoteAudioSource::Create(worker_thread, media_channel, ssrc)))),
source_(new rtc::RefCountedObject<RemoteAudioSource>(worker_thread)),
track_(AudioTrackProxy::Create(rtc::Thread::Current(),
AudioTrack::Create(receiver_id, source_))),
cached_track_enabled_(track_->enabled()),
attachment_id_(GenerateUniqueId()) {
RTC_DCHECK(worker_thread_);
@@ -53,8 +48,6 @@ AudioRtpReceiver::AudioRtpReceiver(
track_->RegisterObserver(this);
track_->GetSource()->RegisterAudioObserver(this);
SetStreams(streams);
SetMediaChannel(media_channel);
Reconfigure();
}
AudioRtpReceiver::~AudioRtpReceiver() {
@@ -74,8 +67,9 @@ bool AudioRtpReceiver::SetOutputVolume(double volume) {
RTC_DCHECK_GE(volume, 0.0);
RTC_DCHECK_LE(volume, 10.0);
RTC_DCHECK(media_channel_);
RTC_DCHECK(ssrc_);
return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->SetOutputVolume(ssrc_, volume);
return media_channel_->SetOutputVolume(*ssrc_, volume);
});
}
@@ -83,7 +77,7 @@ void AudioRtpReceiver::OnSetVolume(double volume) {
RTC_DCHECK_GE(volume, 0);
RTC_DCHECK_LE(volume, 10);
cached_volume_ = volume;
if (!media_channel_) {
if (!media_channel_ || !ssrc_) {
RTC_LOG(LS_ERROR)
<< "AudioRtpReceiver::OnSetVolume: No audio channel exists.";
return;
@@ -99,21 +93,21 @@ void AudioRtpReceiver::OnSetVolume(double volume) {
}
RtpParameters AudioRtpReceiver::GetParameters() const {
if (!media_channel_ || stopped_) {
if (!media_channel_ || !ssrc_ || stopped_) {
return RtpParameters();
}
return worker_thread_->Invoke<RtpParameters>(RTC_FROM_HERE, [&] {
return media_channel_->GetRtpReceiveParameters(ssrc_);
return media_channel_->GetRtpReceiveParameters(*ssrc_);
});
}
bool AudioRtpReceiver::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "AudioRtpReceiver::SetParameters");
if (!media_channel_ || stopped_) {
if (!media_channel_ || !ssrc_ || stopped_) {
return false;
}
return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->SetRtpReceiveParameters(ssrc_, parameters);
return media_channel_->SetRtpReceiveParameters(*ssrc_, parameters);
});
}
@@ -122,7 +116,7 @@ void AudioRtpReceiver::Stop() {
if (stopped_) {
return;
}
if (media_channel_) {
if (media_channel_ && ssrc_) {
// Allow that SetOutputVolume fail. This is the normal case when the
// underlying media channel has already been deleted.
SetOutputVolume(0.0);
@@ -130,6 +124,23 @@ void AudioRtpReceiver::Stop() {
stopped_ = true;
}
void AudioRtpReceiver::SetupMediaChannel(uint32_t ssrc) {
if (!media_channel_) {
RTC_LOG(LS_ERROR)
<< "AudioRtpReceiver::SetupMediaChannel: No audio channel exists.";
return;
}
if (ssrc_ == ssrc) {
return;
}
if (ssrc_) {
source_->Stop(media_channel_, *ssrc_);
}
ssrc_ = ssrc;
source_->Start(media_channel_, *ssrc_);
Reconfigure();
}
void AudioRtpReceiver::SetStreams(
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
// Remove remote track from any streams that are going away.
@@ -164,13 +175,16 @@ void AudioRtpReceiver::SetStreams(
}
std::vector<RtpSource> AudioRtpReceiver::GetSources() const {
if (!media_channel_ || !ssrc_ || stopped_) {
return {};
}
return worker_thread_->Invoke<std::vector<RtpSource>>(
RTC_FROM_HERE, [&] { return media_channel_->GetSources(ssrc_); });
RTC_FROM_HERE, [&] { return media_channel_->GetSources(*ssrc_); });
}
void AudioRtpReceiver::Reconfigure() {
RTC_DCHECK(!stopped_);
if (!media_channel_) {
if (!media_channel_ || !ssrc_) {
RTC_LOG(LS_ERROR)
<< "AudioRtpReceiver::Reconfigure: No audio channel exists.";
return;
@@ -203,12 +217,9 @@ void AudioRtpReceiver::NotifyFirstPacketReceived() {
VideoRtpReceiver::VideoRtpReceiver(
rtc::Thread* worker_thread,
const std::string& receiver_id,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
uint32_t ssrc,
cricket::VideoMediaChannel* media_channel)
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams)
: worker_thread_(worker_thread),
id_(receiver_id),
ssrc_(ssrc),
source_(new RefCountedObject<VideoTrackSource>(&broadcaster_,
true /* remote */)),
track_(VideoTrackProxy::Create(
@@ -224,7 +235,6 @@ VideoRtpReceiver::VideoRtpReceiver(
RTC_DCHECK(worker_thread_);
SetStreams(streams);
source_->SetState(MediaSourceInterface::kLive);
SetMediaChannel(media_channel);
}
VideoRtpReceiver::~VideoRtpReceiver() {
@@ -235,26 +245,27 @@ VideoRtpReceiver::~VideoRtpReceiver() {
bool VideoRtpReceiver::SetSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
RTC_DCHECK(media_channel_);
RTC_DCHECK(ssrc_);
return worker_thread_->Invoke<bool>(
RTC_FROM_HERE, [&] { return media_channel_->SetSink(ssrc_, sink); });
RTC_FROM_HERE, [&] { return media_channel_->SetSink(*ssrc_, sink); });
}
RtpParameters VideoRtpReceiver::GetParameters() const {
if (!media_channel_ || stopped_) {
if (!media_channel_ || !ssrc_ || stopped_) {
return RtpParameters();
}
return worker_thread_->Invoke<RtpParameters>(RTC_FROM_HERE, [&] {
return media_channel_->GetRtpReceiveParameters(ssrc_);
return media_channel_->GetRtpReceiveParameters(*ssrc_);
});
}
bool VideoRtpReceiver::SetParameters(const RtpParameters& parameters) {
TRACE_EVENT0("webrtc", "VideoRtpReceiver::SetParameters");
if (!media_channel_ || stopped_) {
if (!media_channel_ || !ssrc_ || stopped_) {
return false;
}
return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return media_channel_->SetRtpReceiveParameters(ssrc_, parameters);
return media_channel_->SetRtpReceiveParameters(*ssrc_, parameters);
});
}
@@ -265,7 +276,7 @@ void VideoRtpReceiver::Stop() {
}
source_->SetState(MediaSourceInterface::kEnded);
source_->OnSourceDestroyed();
if (!media_channel_) {
if (!media_channel_ || !ssrc_) {
RTC_LOG(LS_WARNING) << "VideoRtpReceiver::Stop: No video channel exists.";
} else {
// Allow that SetSink fail. This is the normal case when the underlying
@@ -275,6 +286,21 @@ void VideoRtpReceiver::Stop() {
stopped_ = true;
}
void VideoRtpReceiver::SetupMediaChannel(uint32_t ssrc) {
if (!media_channel_) {
RTC_LOG(LS_ERROR)
<< "VideoRtpReceiver::SetupMediaChannel: No video channel exists.";
}
if (ssrc_ == ssrc) {
return;
}
if (ssrc_) {
SetSink(nullptr);
}
ssrc_ = ssrc;
SetSink(&broadcaster_);
}
void VideoRtpReceiver::SetStreams(
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
// Remove remote track from any streams that are going away.
@@ -318,15 +344,7 @@ void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
void VideoRtpReceiver::SetMediaChannel(
cricket::VideoMediaChannel* media_channel) {
if (media_channel_) {
SetSink(nullptr);
}
media_channel_ = media_channel;
if (media_channel_) {
if (!SetSink(&broadcaster_)) {
RTC_NOTREACHED();
}
}
}
void VideoRtpReceiver::NotifyFirstPacketReceived() {

View File

@@ -34,6 +34,11 @@ class RtpReceiverInternal : public RtpReceiverInterface {
public:
virtual void Stop() = 0;
// Configures the RtpReceiver with the underlying media channel, with the
// given SSRC as the stream identifier. If |ssrc| is 0, the receiver will
// receive packets on unsignaled SSRCs.
virtual void SetupMediaChannel(uint32_t ssrc) = 0;
// This SSRC is used as an identifier for the receiver between the API layer
// and the WebRtcVideoEngine, WebRtcVoiceEngine layer.
virtual uint32_t ssrc() const = 0;
@@ -53,16 +58,10 @@ class AudioRtpReceiver : public ObserverInterface,
public AudioSourceInterface::AudioObserver,
public rtc::RefCountedObject<RtpReceiverInternal> {
public:
// An SSRC of 0 will create a receiver that will match the first SSRC it
// sees.
// TODO(deadbeef): Use rtc::Optional, or have another constructor that
// doesn't take an SSRC, and make this one DCHECK(ssrc != 0).
AudioRtpReceiver(
rtc::Thread* worker_thread,
const std::string& receiver_id,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
uint32_t ssrc,
cricket::VoiceMediaChannel* media_channel);
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
virtual ~AudioRtpReceiver();
// ObserverInterface implementation
@@ -95,7 +94,8 @@ class AudioRtpReceiver : public ObserverInterface,
// RtpReceiverInternal implementation.
void Stop() override;
uint32_t ssrc() const override { return ssrc_; }
void SetupMediaChannel(uint32_t ssrc) override;
uint32_t ssrc() const override { return ssrc_.value_or(0); }
void NotifyFirstPacketReceived() override;
void SetStreams(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
streams) override;
@@ -115,9 +115,10 @@ class AudioRtpReceiver : public ObserverInterface,
rtc::Thread* const worker_thread_;
const std::string id_;
const uint32_t ssrc_;
cricket::VoiceMediaChannel* media_channel_ = nullptr;
const rtc::scoped_refptr<RemoteAudioSource> source_;
const rtc::scoped_refptr<AudioTrackInterface> track_;
cricket::VoiceMediaChannel* media_channel_ = nullptr;
rtc::Optional<uint32_t> ssrc_;
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_;
bool cached_track_enabled_;
double cached_volume_ = 1;
@@ -134,9 +135,7 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal> {
VideoRtpReceiver(
rtc::Thread* worker_thread,
const std::string& receiver_id,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
uint32_t ssrc,
cricket::VideoMediaChannel* media_channel);
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
virtual ~VideoRtpReceiver();
@@ -164,7 +163,8 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal> {
// RtpReceiverInternal implementation.
void Stop() override;
uint32_t ssrc() const override { return ssrc_; }
void SetupMediaChannel(uint32_t ssrc) override;
uint32_t ssrc() const override { return ssrc_.value_or(0); }
void NotifyFirstPacketReceived() override;
void SetStreams(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
streams) override;
@@ -180,8 +180,8 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal> {
rtc::Thread* const worker_thread_;
const std::string id_;
uint32_t ssrc_;
cricket::VideoMediaChannel* media_channel_ = nullptr;
rtc::Optional<uint32_t> ssrc_;
// |broadcaster_| is needed since the decoder can only handle one sink.
// It might be better if the decoder can handle multiple sinks and consider
// the VideoSinkWants.

View File

@@ -183,8 +183,9 @@ class RtpSenderReceiverTest : public testing::Test,
void CreateAudioRtpReceiver(
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
audio_rtp_receiver_ = new AudioRtpReceiver(
rtc::Thread::Current(), kAudioTrackId, std::move(streams), kAudioSsrc,
voice_media_channel_);
rtc::Thread::Current(), kAudioTrackId, std::move(streams));
audio_rtp_receiver_->SetMediaChannel(voice_media_channel_);
audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc);
audio_track_ = audio_rtp_receiver_->audio_track();
VerifyVoiceChannelOutput();
}
@@ -192,8 +193,9 @@ class RtpSenderReceiverTest : public testing::Test,
void CreateVideoRtpReceiver(
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
video_rtp_receiver_ = new VideoRtpReceiver(
rtc::Thread::Current(), kVideoTrackId, std::move(streams), kVideoSsrc,
video_media_channel_);
rtc::Thread::Current(), kVideoTrackId, std::move(streams));
video_rtp_receiver_->SetMediaChannel(video_media_channel_);
video_rtp_receiver_->SetupMediaChannel(kVideoSsrc);
video_track_ = video_rtp_receiver_->video_track();
VerifyVideoChannelOutput();
}