Fix for the scenario where an m-line is revived after being set to port 0.

When this is detected, we'll now "reconfigure" the senders and
receivers, which will reconnect the capturers/renderers to the
underlying streams which have been recreated.

BUG=webrtc:2136

Review URL: https://codereview.webrtc.org/1428243005

Cr-Commit-Position: refs/heads/master@{#10628}
This commit is contained in:
deadbeef 2015-11-12 15:33:07 -08:00 committed by Commit bot
parent 69d0d46c25
commit faac497af5
3 changed files with 124 additions and 28 deletions

View File

@ -1014,19 +1014,27 @@ void PeerConnection::SetLocalDescription(
const cricket::ContentInfo* audio_content =
GetFirstAudioContent(desc->description());
if (audio_content) {
const cricket::AudioContentDescription* audio_desc =
static_cast<const cricket::AudioContentDescription*>(
audio_content->description);
UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
if (audio_content->rejected) {
RemoveTracks(cricket::MEDIA_TYPE_AUDIO);
} else {
const cricket::AudioContentDescription* audio_desc =
static_cast<const cricket::AudioContentDescription*>(
audio_content->description);
UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
}
}
const cricket::ContentInfo* video_content =
GetFirstVideoContent(desc->description());
if (video_content) {
const cricket::VideoContentDescription* video_desc =
static_cast<const cricket::VideoContentDescription*>(
video_content->description);
UpdateLocalTracks(video_desc->streams(), video_desc->type());
if (video_content->rejected) {
RemoveTracks(cricket::MEDIA_TYPE_VIDEO);
} else {
const cricket::VideoContentDescription* video_desc =
static_cast<const cricket::VideoContentDescription*>(
video_content->description);
UpdateLocalTracks(video_desc->streams(), video_desc->type());
}
}
const cricket::ContentInfo* data_content =
@ -1088,26 +1096,36 @@ void PeerConnection::SetRemoteDescription(
// and MediaStreams.
const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
if (audio_content) {
const cricket::AudioContentDescription* desc =
static_cast<const cricket::AudioContentDescription*>(
audio_content->description);
UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(), new_streams);
remote_info_.default_audio_track_needed =
!remote_desc->msid_supported() && desc->streams().empty() &&
MediaContentDirectionHasSend(desc->direction());
if (audio_content->rejected) {
RemoveTracks(cricket::MEDIA_TYPE_AUDIO);
} else {
const cricket::AudioContentDescription* desc =
static_cast<const cricket::AudioContentDescription*>(
audio_content->description);
UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(),
new_streams);
remote_info_.default_audio_track_needed =
!remote_desc->msid_supported() && desc->streams().empty() &&
MediaContentDirectionHasSend(desc->direction());
}
}
// Find all video rtp streams and create corresponding remote VideoTracks
// and MediaStreams.
const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
if (video_content) {
const cricket::VideoContentDescription* desc =
static_cast<const cricket::VideoContentDescription*>(
video_content->description);
UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(), new_streams);
remote_info_.default_video_track_needed =
!remote_desc->msid_supported() && desc->streams().empty() &&
MediaContentDirectionHasSend(desc->direction());
if (video_content->rejected) {
RemoveTracks(cricket::MEDIA_TYPE_VIDEO);
} else {
const cricket::VideoContentDescription* desc =
static_cast<const cricket::VideoContentDescription*>(
video_content->description);
UpdateRemoteStreamsList(GetActiveStreams(desc), desc->type(),
new_streams);
remote_info_.default_video_track_needed =
!remote_desc->msid_supported() && desc->streams().empty() &&
MediaContentDirectionHasSend(desc->direction());
}
}
// Update the DataChannels with the information from the remote peer.
@ -1472,6 +1490,12 @@ bool PeerConnection::GetOptionsForAnswer(
return true;
}
// Removes every local and remote track of |media_type|. Called when a media
// section is rejected (its m-line port set to 0), so tracks of that type must
// be torn down on both the local and remote side.
void PeerConnection::RemoveTracks(cricket::MediaType media_type) {
  // Passing an empty StreamParams list tells the update helpers that no
  // streams of this type remain, which makes them drop all existing tracks.
  const std::vector<cricket::StreamParams> no_streams;
  UpdateLocalTracks(no_streams, media_type);
  UpdateRemoteStreamsList(no_streams, media_type, nullptr);
}
void PeerConnection::UpdateRemoteStreamsList(
const cricket::StreamParamsVec& streams,
cricket::MediaType media_type,

View File

@ -246,6 +246,10 @@ class PeerConnection : public PeerConnectionInterface,
const MediaConstraintsInterface* constraints,
cricket::MediaSessionOptions* session_options);
// Remove all local and remote tracks of type |media_type|.
// Called when a media type is rejected (m-line set to port 0).
void RemoveTracks(cricket::MediaType media_type);
// Makes sure a MediaStream Track is created for each StreamParam in
// |streams|. |media_type| is the type of the |streams| and can be either
// audio or video.

View File

@ -78,11 +78,13 @@ using webrtc::DtmfSenderInterface;
using webrtc::DtmfSenderObserverInterface;
using webrtc::FakeConstraints;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStreamInterface;
using webrtc::MediaStreamTrackInterface;
using webrtc::MockCreateSessionDescriptionObserver;
using webrtc::MockDataChannelObserver;
using webrtc::MockSetSessionDescriptionObserver;
using webrtc::MockStatsObserver;
using webrtc::ObserverInterface;
using webrtc::PeerConnectionInterface;
using webrtc::PeerConnectionFactory;
using webrtc::SessionDescriptionInterface;
@ -139,7 +141,8 @@ class SignalingMessageReceiver {
};
class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
public SignalingMessageReceiver {
public SignalingMessageReceiver,
public ObserverInterface {
public:
static PeerConnectionTestClient* CreateClient(
const std::string& id,
@ -206,7 +209,8 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
webrtc::PeerConnectionInterface::SignalingState new_state) override {
EXPECT_EQ(pc()->signaling_state(), new_state);
}
void OnAddStream(webrtc::MediaStreamInterface* media_stream) override {
void OnAddStream(MediaStreamInterface* media_stream) override {
media_stream->RegisterObserver(this);
for (size_t i = 0; i < media_stream->GetVideoTracks().size(); ++i) {
const std::string id = media_stream->GetVideoTracks()[i]->id();
ASSERT_TRUE(fake_video_renderers_.find(id) ==
@ -215,7 +219,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
new webrtc::FakeVideoTrackRenderer(media_stream->GetVideoTracks()[i]);
}
}
void OnRemoveStream(webrtc::MediaStreamInterface* media_stream) override {}
void OnRemoveStream(MediaStreamInterface* media_stream) override {}
void OnRenegotiationNeeded() override {}
void OnIceConnectionChange(
webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
@ -238,6 +242,40 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp);
}
// MediaStreamInterface callback
void OnChanged() override {
// Track added or removed from MediaStream, so update our renderers.
rtc::scoped_refptr<StreamCollectionInterface> remote_streams =
pc()->remote_streams();
// Remove renderers for tracks that were removed.
for (auto it = fake_video_renderers_.begin();
it != fake_video_renderers_.end();) {
if (remote_streams->FindVideoTrack(it->first) == nullptr) {
auto to_delete = it++;
delete to_delete->second;
fake_video_renderers_.erase(to_delete);
} else {
++it;
}
}
// Create renderers for new video tracks.
for (size_t stream_index = 0; stream_index < remote_streams->count();
++stream_index) {
MediaStreamInterface* remote_stream = remote_streams->at(stream_index);
for (size_t track_index = 0;
track_index < remote_stream->GetVideoTracks().size();
++track_index) {
const std::string id =
remote_stream->GetVideoTracks()[track_index]->id();
if (fake_video_renderers_.find(id) != fake_video_renderers_.end()) {
continue;
}
fake_video_renderers_[id] = new webrtc::FakeVideoTrackRenderer(
remote_stream->GetVideoTracks()[track_index]);
}
}
}
void SetVideoConstraints(const webrtc::FakeConstraints& video_constraint) {
video_constraints_ = video_constraint;
}
@ -246,7 +284,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
std::string stream_label =
kStreamLabelBase +
rtc::ToString<int>(static_cast<int>(pc()->local_streams()->count()));
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
rtc::scoped_refptr<MediaStreamInterface> stream =
peer_connection_factory_->CreateLocalMediaStream(stream_label);
if (audio && can_receive_audio()) {
@ -276,6 +314,12 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable;
}
// Automatically add a stream when receiving an offer, if we don't have one.
// Defaults to true.
void set_auto_add_stream(bool auto_add_stream) {
auto_add_stream_ = auto_add_stream;
}
void set_signaling_message_receiver(
SignalingMessageReceiver* signaling_message_receiver) {
signaling_message_receiver_ = signaling_message_receiver;
@ -705,7 +749,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
void HandleIncomingOffer(const std::string& msg) {
LOG(INFO) << id_ << "HandleIncomingOffer ";
if (NumberOfLocalMediaStreams() == 0) {
if (NumberOfLocalMediaStreams() == 0 && auto_add_stream_) {
// If we are not sending any streams ourselves it is time to add some.
AddMediaStream(true, true);
}
@ -812,6 +856,8 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
peer_connection_factory_;
bool auto_add_stream_ = true;
typedef std::pair<std::string, std::string> IceUfragPwdPair;
std::map<int, IceUfragPwdPair> ice_ufrag_pwd_;
bool expect_ice_restart_ = false;
@ -963,7 +1009,6 @@ class JsepPeerConnectionP2PTestClient : public testing::Test {
ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
VerifySessionDescriptions();
int audio_frame_count = kEndAudioFrameCount;
// TODO(ronghuawu): Add test to cover the case of sendonly and recvonly.
if (!initiating_client_->can_receive_audio() ||
@ -1562,6 +1607,29 @@ TEST_F(JsepPeerConnectionP2PTestClient, IceRestart) {
EXPECT_NE(receiver_candidate, receiver_candidate_restart);
}
// This test sets up a call between two parties with audio, and video.
// It then renegotiates setting the video m-line to "port 0", then later
// renegotiates again, enabling video.
// Regression test for webrtc:2136: a video m-line that was rejected
// (port set to 0) in one negotiation must be revivable in a later one.
TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestVideoDisableEnable) {
ASSERT_TRUE(CreateTestClients());
// Do initial negotiation. Will result in video and audio sendonly m-lines.
// The receiving client deliberately adds no stream of its own here.
receiving_client()->set_auto_add_stream(false);
initializing_client()->AddMediaStream(true, true);
initializing_client()->Negotiate();
// Negotiate again, disabling the video m-line (receiving client will
// set port to 0 due to mandatory "OfferToReceiveVideo: false" constraint).
receiving_client()->SetReceiveVideo(false);
initializing_client()->Negotiate();
// Enable video and do negotiation again, making sure video is received
// end-to-end. The receiver now also adds a stream of its own, so media
// flows in both directions during the final LocalP2PTest() verification.
receiving_client()->SetReceiveVideo(true);
receiving_client()->AddMediaStream(true, true);
LocalP2PTest();
}
// This test sets up a Jsep call between two parties with external
// VideoDecoderFactory.
// TODO(holmer): Disabled due to sometimes crashing on buildbots.