Removed MediaStreamTrackInterface::set_state
The track state should be implicitly set by the underlying source. This removes the public method and cleans up how AudioRtpReceiver is created. Furthermore, it cleans up how the RtpReceivers are destroyed. Note that this CL depends on https://codereview.webrtc.org/1790633002.

BUG=webrtc:5426
Review URL: https://codereview.webrtc.org/1816143002
Cr-Commit-Position: refs/heads/master@{#12115}
This commit is contained in:
parent
e29f0e2515
commit
d61bf803d2
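For context, a minimal sketch of the idea behind this change, in the same spirit but not the actual WebRTC code (FakeRemoteSource and FakeRemoteTrack are made-up names): instead of callers flipping a public set_state(), the track observes its source and derives its state from it, so the state ends when the source ends.

#include <functional>
#include <iostream>
#include <utility>

// Hypothetical, simplified stand-ins for the interfaces touched by this CL;
// real WebRTC names and signatures differ.
enum class TrackState { kLive, kEnded };

class FakeRemoteSource {
 public:
  // The receiver/source signals end-of-stream; the track reacts to it.
  void SetOnEnded(std::function<void()> callback) {
    on_ended_ = std::move(callback);
  }
  void End() {
    if (on_ended_) on_ended_();
  }

 private:
  std::function<void()> on_ended_;
};

class FakeRemoteTrack {
 public:
  explicit FakeRemoteTrack(FakeRemoteSource* source) {
    // State is driven by the underlying source, not by a public setter.
    source->SetOnEnded([this] { state_ = TrackState::kEnded; });
  }
  TrackState state() const { return state_; }

 private:
  TrackState state_ = TrackState::kLive;
};

int main() {
  FakeRemoteSource source;
  FakeRemoteTrack track(&source);
  std::cout << (track.state() == TrackState::kLive) << "\n";   // prints 1
  source.End();  // e.g. the owning media channel is destroyed
  std::cout << (track.state() == TrackState::kEnded) << "\n";  // prints 1
}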
@@ -92,11 +92,6 @@ class RTCMediaStreamTrackObserver : public ObserverInterface {
  return [RTCEnumConverter convertTrackStateToObjC:self.mediaTrack->state()];
}

- (BOOL)setState:(RTCTrackState)state {
  return self.mediaTrack->set_state(
      [RTCEnumConverter convertTrackStateToNative:state]);
}

@end

@implementation RTCMediaStreamTrack (Internal)

@@ -48,7 +48,6 @@
- (BOOL)isEnabled;
- (BOOL)setEnabled:(BOOL)enabled;
- (RTCTrackState)state;
- (BOOL)setState:(RTCTrackState)state;

#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Disallow init and don't add to documentation

@@ -1956,14 +1956,6 @@ JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
      reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
}

JOW(jboolean, MediaStreamTrack_nativeSetState)(
    JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
  MediaStreamTrackInterface::TrackState new_state =
      (MediaStreamTrackInterface::TrackState)j_new_state;
  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
      ->set_state(new_state);
}

JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
    JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)

@@ -41,10 +41,6 @@ public class MediaStreamTrack {
    return nativeState(nativeTrack);
  }

  public boolean setState(State newState) {
    return nativeSetState(nativeTrack, newState.ordinal());
  }

  public void dispose() {
    free(nativeTrack);
  }

@@ -60,8 +56,5 @@ public class MediaStreamTrack {

  private static native State nativeState(long nativeTrack);

  private static native boolean nativeSetState(
      long nativeTrack, int newState);

  private static native void free(long nativeTrack);
}

@@ -80,11 +80,6 @@ class MediaStreamTest: public testing::Test {
        .Times(Exactly(1));
    track->set_enabled(false);
    EXPECT_FALSE(track->enabled());

    EXPECT_CALL(observer, OnChanged())
        .Times(Exactly(1));
    track->set_state(MediaStreamTrackInterface::kEnded);
    EXPECT_EQ(MediaStreamTrackInterface::kEnded, track->state());
  }

  scoped_refptr<MediaStreamInterface> stream_;

@@ -92,8 +92,6 @@ class MediaStreamTrackInterface : public rtc::RefCountInterface,
  virtual bool enabled() const = 0;
  virtual TrackState state() const = 0;
  virtual bool set_enabled(bool enable) = 0;
  // These methods should be called by implementation only.
  virtual bool set_state(TrackState new_state) = 0;

 protected:
  virtual ~MediaStreamTrackInterface() {}

@@ -25,12 +25,12 @@ class MediaStreamTrack : public Notifier<T> {
 public:
  typedef typename T::TrackState TypedTrackState;

  virtual std::string id() const { return id_; }
  virtual MediaStreamTrackInterface::TrackState state() const {
  std::string id() const override { return id_; }
  MediaStreamTrackInterface::TrackState state() const override {
    return state_;
  }
  virtual bool enabled() const { return enabled_; }
  virtual bool set_enabled(bool enable) {
  bool enabled() const override { return enabled_; }
  bool set_enabled(bool enable) override {
    bool fire_on_change = (enable != enabled_);
    enabled_ = enable;
    if (fire_on_change) {

@@ -38,7 +38,12 @@ class MediaStreamTrack : public Notifier<T> {
    }
    return fire_on_change;
  }
  virtual bool set_state(MediaStreamTrackInterface::TrackState new_state) {

 protected:
  explicit MediaStreamTrack(const std::string& id)
      : enabled_(true), id_(id), state_(MediaStreamTrackInterface::kLive) {}

  bool set_state(MediaStreamTrackInterface::TrackState new_state) {
    bool fire_on_change = (state_ != new_state);
    state_ = new_state;
    if (fire_on_change)

@@ -46,10 +51,6 @@ class MediaStreamTrack : public Notifier<T> {
    return true;
  }

 protected:
  explicit MediaStreamTrack(const std::string& id)
      : enabled_(true), id_(id), state_(MediaStreamTrackInterface::kLive) {}

 private:
  bool enabled_;
  std::string id_;

@@ -30,10 +30,7 @@ BEGIN_PROXY_MAP(AudioTrack)
  PROXY_METHOD1(bool, GetSignalLevel, int*)
  PROXY_METHOD0(rtc::scoped_refptr<AudioProcessorInterface>,
                GetAudioProcessor)

  PROXY_METHOD1(bool, set_enabled, bool)
  PROXY_METHOD1(bool, set_state, TrackState)

  PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
  PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY()

@@ -44,8 +41,6 @@ BEGIN_PROXY_MAP(VideoTrack)
  PROXY_CONSTMETHOD0(TrackState, state)
  PROXY_CONSTMETHOD0(bool, enabled)
  PROXY_METHOD1(bool, set_enabled, bool)
  PROXY_METHOD1(bool, set_state, TrackState)

  PROXY_METHOD2(void,
                AddOrUpdateSink,
                rtc::VideoSinkInterface<cricket::VideoFrame>*,

@@ -377,43 +377,6 @@ void AddSendStreams(

namespace webrtc {

// Factory class for creating remote MediaStreams and MediaStreamTracks.
class RemoteMediaStreamFactory {
 public:
  explicit RemoteMediaStreamFactory(rtc::Thread* signaling_thread)
      : signaling_thread_(signaling_thread) {}

  rtc::scoped_refptr<MediaStreamInterface> CreateMediaStream(
      const std::string& stream_label) {
    return MediaStreamProxy::Create(signaling_thread_,
                                    MediaStream::Create(stream_label));
  }

  AudioTrackInterface* AddAudioTrack(uint32_t ssrc,
                                     AudioProviderInterface* provider,
                                     webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
        stream, track_id, RemoteAudioSource::Create(ssrc, provider));
  }

 private:
  template <typename TI, typename T, typename TP, typename S>
  TI* AddTrack(MediaStreamInterface* stream,
               const std::string& track_id,
               const S& source) {
    rtc::scoped_refptr<TI> track(
        TP::Create(signaling_thread_, T::Create(track_id, source)));
    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
    if (stream->AddTrack(track)) {
      return track;
    }
    return nullptr;
  }

  rtc::Thread* signaling_thread_;
};

bool ExtractMediaSessionOptions(
    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
    bool is_offer,

@@ -608,9 +571,6 @@ bool PeerConnection::Initialize(

  media_controller_.reset(factory_->CreateMediaController(media_config));

  remote_stream_factory_.reset(
      new RemoteMediaStreamFactory(factory_->signaling_thread()));

  session_.reset(
      new WebRtcSession(media_controller_.get(), factory_->signaling_thread(),
                        factory_->worker_thread(), port_allocator_.get()));

@@ -1320,29 +1280,28 @@ void PeerConnection::OnMessage(rtc::Message* msg) {
}

void PeerConnection::CreateAudioReceiver(MediaStreamInterface* stream,
                                         AudioTrackInterface* audio_track,
                                         const std::string& track_id,
                                         uint32_t ssrc) {
  receivers_.push_back(RtpReceiverProxy::Create(
      signaling_thread(),
      new AudioRtpReceiver(audio_track, ssrc, session_.get())));
      new AudioRtpReceiver(stream, track_id, ssrc, session_.get())));
}

void PeerConnection::CreateVideoReceiver(MediaStreamInterface* stream,
                                         const std::string& track_id,
                                         uint32_t ssrc) {
  VideoRtpReceiver* video_receiver = new VideoRtpReceiver(
      stream, track_id, factory_->worker_thread(), ssrc, session_.get());
  receivers_.push_back(
      RtpReceiverProxy::Create(signaling_thread(), video_receiver));
  receivers_.push_back(RtpReceiverProxy::Create(
      signaling_thread(),
      new VideoRtpReceiver(stream, track_id, factory_->worker_thread(), ssrc,
                           session_.get())));
}

// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote
// description.
void PeerConnection::DestroyAudioReceiver(MediaStreamInterface* stream,
                                          AudioTrackInterface* audio_track) {
  auto it = FindReceiverForTrack(audio_track);
void PeerConnection::DestroyReceiver(const std::string& track_id) {
  auto it = FindReceiverForTrack(track_id);
  if (it == receivers_.end()) {
    LOG(LS_WARNING) << "RtpReceiver for track with id " << audio_track->id()
    LOG(LS_WARNING) << "RtpReceiver for track with id " << track_id
                    << " doesn't exist.";
  } else {
    (*it)->Stop();

@@ -1350,15 +1309,16 @@ void PeerConnection::DestroyAudioReceiver(MediaStreamInterface* stream,
  }
}

void PeerConnection::DestroyVideoReceiver(MediaStreamInterface* stream,
                                          VideoTrackInterface* video_track) {
  auto it = FindReceiverForTrack(video_track);
  if (it == receivers_.end()) {
    LOG(LS_WARNING) << "RtpReceiver for track with id " << video_track->id()
                    << " doesn't exist.";
  } else {
    (*it)->Stop();
    receivers_.erase(it);
void PeerConnection::StopReceivers(cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);
  for (const auto& track_info : *current_tracks) {
    auto it = FindReceiverForTrack(track_info.track_id);
    if (it == receivers_.end()) {
      LOG(LS_WARNING) << "RtpReceiver for track with id " << track_info.track_id
                      << " doesn't exist.";
    } else {
      (*it)->Stop();
    }
  }
}

@@ -1639,7 +1599,8 @@ void PeerConnection::UpdateRemoteStreamsList(
      remote_streams_->find(stream_label);
  if (!stream) {
    // This is a new MediaStream. Create a new remote MediaStream.
    stream = remote_stream_factory_->CreateMediaStream(stream_label);
    stream = MediaStreamProxy::Create(rtc::Thread::Current(),
                                      MediaStream::Create(stream_label));
    remote_streams_->AddStream(stream);
    new_streams->AddStream(stream);
  }

@@ -1658,8 +1619,8 @@ void PeerConnection::UpdateRemoteStreamsList(
      remote_streams_->find(kDefaultStreamLabel);
  if (!default_stream) {
    // Create the new default MediaStream.
    default_stream =
        remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
    default_stream = MediaStreamProxy::Create(
        rtc::Thread::Current(), MediaStream::Create(kDefaultStreamLabel));
    remote_streams_->AddStream(default_stream);
    new_streams->AddStream(default_stream);
  }

@@ -1683,9 +1644,7 @@ void PeerConnection::OnRemoteTrackSeen(const std::string& stream_label,
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track = remote_stream_factory_->AddAudioTrack(
        ssrc, session_.get(), stream, track_id);
    CreateAudioReceiver(stream, audio_track, ssrc);
    CreateAudioReceiver(stream, track_id, ssrc);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    CreateVideoReceiver(stream, track_id, ssrc);
  } else {

@@ -1699,21 +1658,24 @@ void PeerConnection::OnRemoteTrackRemoved(const std::string& stream_label,
  MediaStreamInterface* stream = remote_streams_->find(stream_label);

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    // When the MediaEngine audio channel is destroyed, the RemoteAudioSource
    // will be notified which will end the AudioRtpReceiver::track().
    DestroyReceiver(track_id);
    rtc::scoped_refptr<AudioTrackInterface> audio_track =
        stream->FindAudioTrack(track_id);
    if (audio_track) {
      audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      stream->RemoveTrack(audio_track);
      DestroyAudioReceiver(stream, audio_track);
    }
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    // Stopping or destroying a VideoRtpReceiver will end the
    // VideoRtpReceiver::track().
    DestroyReceiver(track_id);
    rtc::scoped_refptr<VideoTrackInterface> video_track =
        stream->FindVideoTrack(track_id);
    if (video_track) {
      // There's no guarantee the track is still available, e.g. the track may
      // have been removed from the stream by an application.
      stream->RemoveTrack(video_track);
      // Stopping or destroying a VideoRtpReceiver will end the
      // VideoRtpReceiver::track().
      DestroyVideoReceiver(stream, video_track);
    }
  } else {
    ASSERT(false && "Invalid media type");

@@ -1735,31 +1697,6 @@ void PeerConnection::UpdateEndedRemoteMediaStreams() {
  }
}

void PeerConnection::EndRemoteTracks(cricket::MediaType media_type) {
  TrackInfos* current_tracks = GetRemoteTracks(media_type);
  for (TrackInfos::iterator track_it = current_tracks->begin();
       track_it != current_tracks->end(); ++track_it) {
    const TrackInfo& info = *track_it;
    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
      // There's no guarantee the track is still available, e.g. the track may
      // have been removed from the stream by javascript.
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    }
    if (media_type == cricket::MEDIA_TYPE_VIDEO) {
      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
      // There's no guarantee the track is still available, e.g. the track may
      // have been removed from the stream by javascript.
      if (track) {
        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
      }
    }
  }
}

void PeerConnection::UpdateLocalTracks(
    const std::vector<cricket::StreamParams>& streams,
    cricket::MediaType media_type) {

@@ -2019,11 +1956,11 @@ void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) {
}

void PeerConnection::OnVoiceChannelDestroyed() {
  EndRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
  StopReceivers(cricket::MEDIA_TYPE_AUDIO);
}

void PeerConnection::OnVideoChannelDestroyed() {
  EndRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
  StopReceivers(cricket::MEDIA_TYPE_VIDEO);
}

void PeerConnection::OnDataChannelCreated() {

@@ -2081,11 +2018,11 @@ PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) {
}

std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
PeerConnection::FindReceiverForTrack(MediaStreamTrackInterface* track) {
PeerConnection::FindReceiverForTrack(const std::string& track_id) {
  return std::find_if(
      receivers_.begin(), receivers_.end(),
      [track](const rtc::scoped_refptr<RtpReceiverInterface>& receiver) {
        return receiver->track() == track;
      [track_id](const rtc::scoped_refptr<RtpReceiverInterface>& receiver) {
        return receiver->id() == track_id;
      });
}

@@ -12,6 +12,8 @@
#define WEBRTC_API_PEERCONNECTION_H_

#include <string>
#include <map>
#include <vector>

#include "webrtc/api/dtlsidentitystore.h"
#include "webrtc/api/peerconnectionfactory.h"

@@ -26,7 +28,6 @@
namespace webrtc {

class MediaStreamObserver;
class RemoteMediaStreamFactory;
class VideoRtpReceiver;

// Populates |session_options| from |rtc_options|, and returns true if options

@@ -143,7 +144,7 @@ class PeerConnection : public PeerConnectionInterface,
  virtual const std::vector<rtc::scoped_refptr<DataChannel>>&
  sctp_data_channels() const {
    return sctp_data_channels_;
  };
  }

 protected:
  ~PeerConnection() override;

@@ -169,16 +170,14 @@ class PeerConnection : public PeerConnectionInterface,
  void OnMessage(rtc::Message* msg) override;

  void CreateAudioReceiver(MediaStreamInterface* stream,
                           AudioTrackInterface* audio_track,
                           const std::string& track_id,
                           uint32_t ssrc);

  void CreateVideoReceiver(MediaStreamInterface* stream,
                           const std::string& track_id,
                           uint32_t ssrc);
  void DestroyAudioReceiver(MediaStreamInterface* stream,
                            AudioTrackInterface* audio_track);
  void DestroyVideoReceiver(MediaStreamInterface* stream,
                            VideoTrackInterface* video_track);
  void StopReceivers(cricket::MediaType media_type);
  void DestroyReceiver(const std::string& track_id);
  void DestroyAudioSender(MediaStreamInterface* stream,
                          AudioTrackInterface* audio_track,
                          uint32_t ssrc);

@@ -278,10 +277,6 @@ class PeerConnection : public PeerConnectionInterface,
  // exist.
  void UpdateEndedRemoteMediaStreams();

  // Set the MediaStreamTrackInterface::TrackState to |kEnded| on all remote
  // tracks of type |media_type|.
  void EndRemoteTracks(cricket::MediaType media_type);

  // Loops through the vector of |streams| and finds added and removed
  // StreamParams since last time this method was called.
  // For each new or removed StreamParam, OnLocalTrackSeen or

@@ -344,7 +339,7 @@ class PeerConnection : public PeerConnectionInterface,
  std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
  FindSenderForTrack(MediaStreamTrackInterface* track);
  std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
  FindReceiverForTrack(MediaStreamTrackInterface* track);
  FindReceiverForTrack(const std::string& track_id);

  TrackInfos* GetRemoteTracks(cricket::MediaType media_type);
  TrackInfos* GetLocalTracks(cricket::MediaType media_type);

@@ -394,7 +389,6 @@ class PeerConnection : public PeerConnectionInterface,
  std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_to_free_;

  bool remote_peer_supports_msid_ = false;
  rtc::scoped_ptr<RemoteMediaStreamFactory> remote_stream_factory_;

  std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders_;
  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers_;

@@ -11,6 +11,7 @@
#include <string>
#include <utility>

#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/api/audiotrack.h"
#include "webrtc/api/jsepsessiondescription.h"
#include "webrtc/api/mediastream.h"

@@ -240,6 +241,7 @@ static const char kSdpStringMs1Video1[] =

using rtc::scoped_ptr;
using rtc::scoped_refptr;
using ::testing::Exactly;
using webrtc::AudioSourceInterface;
using webrtc::AudioTrack;
using webrtc::AudioTrackInterface;

@@ -255,6 +257,8 @@ using webrtc::MockCreateSessionDescriptionObserver;
using webrtc::MockDataChannelObserver;
using webrtc::MockSetSessionDescriptionObserver;
using webrtc::MockStatsObserver;
using webrtc::NotifierInterface;
using webrtc::ObserverInterface;
using webrtc::PeerConnectionInterface;
using webrtc::PeerConnectionObserver;
using webrtc::RtpReceiverInterface;

@@ -387,6 +391,29 @@ bool CompareStreamCollections(StreamCollectionInterface* s1,
  return true;
}

// Helper class to test Observer.
class MockTrackObserver : public ObserverInterface {
 public:
  explicit MockTrackObserver(NotifierInterface* notifier)
      : notifier_(notifier) {
    notifier_->RegisterObserver(this);
  }

  ~MockTrackObserver() { Unregister(); }

  void Unregister() {
    if (notifier_) {
      notifier_->UnregisterObserver(this);
      notifier_ = nullptr;
    }
  }

  MOCK_METHOD0(OnChanged, void());

 private:
  NotifierInterface* notifier_;
};

class MockPeerConnectionObserver : public PeerConnectionObserver {
 public:
  MockPeerConnectionObserver() : remote_streams_(StreamCollection::Create()) {}

@@ -1843,8 +1870,9 @@ TEST_F(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) {
      pc_->remote_streams()->at(0);
  EXPECT_EQ(MediaStreamTrackInterface::kEnded,
            remote_stream->GetVideoTracks()[0]->state());
  EXPECT_EQ(MediaStreamTrackInterface::kEnded,
            remote_stream->GetAudioTracks()[0]->state());
  // Audio source state changes are posted.
  EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded,
                 remote_stream->GetAudioTracks()[0]->state(), 1);
}

// Test that PeerConnection methods fails gracefully after

@@ -1949,13 +1977,28 @@ TEST_F(PeerConnectionInterfaceTest,
  EXPECT_TRUE(DoSetRemoteDescription(desc_ms1_two_tracks.release()));
  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
                                       reference_collection_));
  scoped_refptr<AudioTrackInterface> audio_track2 =
      observer_.remote_streams()->at(0)->GetAudioTracks()[1];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, audio_track2->state());
  scoped_refptr<VideoTrackInterface> video_track2 =
      observer_.remote_streams()->at(0)->GetVideoTracks()[1];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, video_track2->state());

  // Remove the extra audio and video tracks.
  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms2 =
      CreateSessionDescriptionAndReference(1, 1);
  MockTrackObserver audio_track_observer(audio_track2);
  MockTrackObserver video_track_observer(video_track2);

  EXPECT_CALL(audio_track_observer, OnChanged()).Times(Exactly(1));
  EXPECT_CALL(video_track_observer, OnChanged()).Times(Exactly(1));
  EXPECT_TRUE(DoSetRemoteDescription(desc_ms2.release()));
  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
                                       reference_collection_));
  // Audio source state changes are posted.
  EXPECT_EQ_WAIT(webrtc::MediaStreamTrackInterface::kEnded,
                 audio_track2->state(), 1);
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, video_track2->state());
}

// This tests that remote tracks are ended if a local session description is set

@@ -2001,7 +2044,9 @@ TEST_F(PeerConnectionInterfaceTest, RejectMediaContent) {
  audio_info->rejected = true;
  EXPECT_TRUE(DoSetLocalDescription(local_offer.release()));
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
  // Audio source state changes are posted.
  EXPECT_EQ_WAIT(webrtc::MediaStreamTrackInterface::kEnded,
                 remote_audio->state(), 1);
}

// This tests that we won't crash if the remote track has been removed outside

@@ -11,22 +11,28 @@
#include "webrtc/api/rtpreceiver.h"

#include "webrtc/api/mediastreamtrackproxy.h"
#include "webrtc/api/audiotrack.h"
#include "webrtc/api/videotrack.h"

namespace webrtc {

AudioRtpReceiver::AudioRtpReceiver(AudioTrackInterface* track,
AudioRtpReceiver::AudioRtpReceiver(MediaStreamInterface* stream,
                                   const std::string& track_id,
                                   uint32_t ssrc,
                                   AudioProviderInterface* provider)
    : id_(track->id()),
      track_(track),
    : id_(track_id),
      ssrc_(ssrc),
      provider_(provider),
      cached_track_enabled_(track->enabled()) {
      track_(AudioTrackProxy::Create(
          rtc::Thread::Current(),
          AudioTrack::Create(track_id,
                             RemoteAudioSource::Create(ssrc, provider)))),
      cached_track_enabled_(track_->enabled()) {
  RTC_DCHECK(track_->GetSource()->remote());
  track_->RegisterObserver(this);
  track_->GetSource()->RegisterAudioObserver(this);
  Reconfigure();
  stream->AddTrack(track_);
}

AudioRtpReceiver::~AudioRtpReceiver() {

@@ -19,6 +19,7 @@

#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/api/remoteaudiosource.h"
#include "webrtc/api/videotracksource.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/media/base/videobroadcaster.h"

@@ -29,7 +30,8 @@ class AudioRtpReceiver : public ObserverInterface,
                         public AudioSourceInterface::AudioObserver,
                         public rtc::RefCountedObject<RtpReceiverInterface> {
 public:
  AudioRtpReceiver(AudioTrackInterface* track,
  AudioRtpReceiver(MediaStreamInterface* stream,
                   const std::string& track_id,
                   uint32_t ssrc,
                   AudioProviderInterface* provider);

@@ -41,6 +43,10 @@ class AudioRtpReceiver : public ObserverInterface,
  // AudioSourceInterface::AudioObserver implementation
  void OnSetVolume(double volume) override;

  rtc::scoped_refptr<AudioTrackInterface> audio_track() const {
    return track_.get();
  }

  // RtpReceiverInterface implementation
  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
    return track_.get();

@@ -54,9 +60,9 @@ class AudioRtpReceiver : public ObserverInterface,
  void Reconfigure();

  const std::string id_;
  const rtc::scoped_refptr<AudioTrackInterface> track_;
  const uint32_t ssrc_;
  AudioProviderInterface* provider_;  // Set to null in Stop().
  const rtc::scoped_refptr<AudioTrackInterface> track_;
  bool cached_track_enabled_;
};

@@ -137,8 +137,9 @@ class RtpSenderReceiverTest : public testing::Test {
        kAudioTrackId, RemoteAudioSource::Create(kAudioSsrc, NULL));
    EXPECT_TRUE(stream_->AddTrack(audio_track_));
    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
    audio_rtp_receiver_ = new AudioRtpReceiver(stream_->GetAudioTracks()[0],
    audio_rtp_receiver_ = new AudioRtpReceiver(stream_, kAudioTrackId,
                                               kAudioSsrc, &audio_provider_);
    audio_track_ = audio_rtp_receiver_->audio_track();
  }

  void CreateVideoRtpReceiver() {
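The CL also switches PeerConnection from looking up receivers by track pointer to looking them up by track id (see the new FindReceiverForTrack(track_id) above). A standalone sketch of that lookup pattern, with a simplified Receiver stand-in rather than the real RtpReceiverInterface:

#include <algorithm>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Simplified stand-in for an RTP receiver that knows its track id.
struct Receiver {
  explicit Receiver(std::string track_id) : id(std::move(track_id)) {}
  void Stop() { stopped = true; }
  std::string id;
  bool stopped = false;
};

// Mirrors the shape of the new lookup: find a receiver by track id instead of
// by comparing track pointers.
std::vector<std::unique_ptr<Receiver>>::iterator FindReceiverForTrack(
    std::vector<std::unique_ptr<Receiver>>& receivers,
    const std::string& track_id) {
  return std::find_if(receivers.begin(), receivers.end(),
                      [&track_id](const std::unique_ptr<Receiver>& r) {
                        return r->id == track_id;
                      });
}

int main() {
  std::vector<std::unique_ptr<Receiver>> receivers;
  receivers.push_back(std::make_unique<Receiver>("audio_1"));
  receivers.push_back(std::make_unique<Receiver>("video_1"));

  auto it = FindReceiverForTrack(receivers, "audio_1");
  if (it != receivers.end()) {
    (*it)->Stop();  // analogous to the Stop() calls in DestroyReceiver/StopReceivers
  }
  std::cout << receivers[0]->stopped << "\n";  // prints 1
}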