Remove RTP data implementation

Bug: webrtc:6625
Change-Id: Ie68d7a938d8b7be95a01cca74a176104e4e44e1b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/215321
Reviewed-by: Henrik Boström <hbos@webrtc.org>
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#33759}
Harald Alvestrand 2021-04-16 11:12:14 +00:00 committed by Commit Bot
parent f981cb3d2e
commit 7af57c6e48
41 changed files with 87 additions and 3487 deletions

View File

@ -142,8 +142,6 @@ rtc_library("rtc_media_base") {
"base/media_engine.h",
"base/rid_description.cc",
"base/rid_description.h",
"base/rtp_data_engine.cc",
"base/rtp_data_engine.h",
"base/rtp_utils.cc",
"base/rtp_utils.h",
"base/stream_params.cc",
@ -628,7 +626,6 @@ if (rtc_include_tests) {
sources = [
"base/codec_unittest.cc",
"base/media_engine_unittest.cc",
"base/rtp_data_engine_unittest.cc",
"base/rtp_utils_unittest.cc",
"base/sdp_fmtp_utils_unittest.cc",
"base/stream_params_unittest.cc",

View File

@ -422,93 +422,6 @@ void FakeVideoMediaChannel::ClearRecordableEncodedFrameCallback(uint32_t ssrc) {
void FakeVideoMediaChannel::GenerateKeyFrame(uint32_t ssrc) {}
FakeDataMediaChannel::FakeDataMediaChannel(void* unused,
const DataOptions& options)
: send_blocked_(false), max_bps_(-1) {}
FakeDataMediaChannel::~FakeDataMediaChannel() {}
const std::vector<DataCodec>& FakeDataMediaChannel::recv_codecs() const {
return recv_codecs_;
}
const std::vector<DataCodec>& FakeDataMediaChannel::send_codecs() const {
return send_codecs_;
}
const std::vector<DataCodec>& FakeDataMediaChannel::codecs() const {
return send_codecs();
}
int FakeDataMediaChannel::max_bps() const {
return max_bps_;
}
bool FakeDataMediaChannel::SetSendParameters(const DataSendParameters& params) {
set_send_rtcp_parameters(params.rtcp);
return (SetSendCodecs(params.codecs) &&
SetMaxSendBandwidth(params.max_bandwidth_bps));
}
bool FakeDataMediaChannel::SetRecvParameters(const DataRecvParameters& params) {
set_recv_rtcp_parameters(params.rtcp);
return SetRecvCodecs(params.codecs);
}
bool FakeDataMediaChannel::SetSend(bool send) {
return set_sending(send);
}
bool FakeDataMediaChannel::SetReceive(bool receive) {
set_playout(receive);
return true;
}
bool FakeDataMediaChannel::AddRecvStream(const StreamParams& sp) {
if (!RtpHelper<DataMediaChannel>::AddRecvStream(sp))
return false;
return true;
}
bool FakeDataMediaChannel::RemoveRecvStream(uint32_t ssrc) {
if (!RtpHelper<DataMediaChannel>::RemoveRecvStream(ssrc))
return false;
return true;
}
bool FakeDataMediaChannel::SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result) {
if (send_blocked_) {
*result = SDR_BLOCK;
return false;
} else {
last_sent_data_params_ = params;
last_sent_data_ = std::string(payload.data<char>(), payload.size());
return true;
}
}
SendDataParams FakeDataMediaChannel::last_sent_data_params() {
return last_sent_data_params_;
}
std::string FakeDataMediaChannel::last_sent_data() {
return last_sent_data_;
}
bool FakeDataMediaChannel::is_send_blocked() {
return send_blocked_;
}
void FakeDataMediaChannel::set_send_blocked(bool blocked) {
send_blocked_ = blocked;
}
bool FakeDataMediaChannel::SetRecvCodecs(const std::vector<DataCodec>& codecs) {
if (fail_set_recv_codecs()) {
// Fake the failure in SetRecvCodecs.
return false;
}
recv_codecs_ = codecs;
return true;
}
bool FakeDataMediaChannel::SetSendCodecs(const std::vector<DataCodec>& codecs) {
if (fail_set_send_codecs()) {
// Fake the failure in SetSendCodecs.
return false;
}
send_codecs_ = codecs;
return true;
}
bool FakeDataMediaChannel::SetMaxSendBandwidth(int bps) {
max_bps_ = bps;
return true;
}
FakeVoiceEngine::FakeVoiceEngine() : fail_create_channel_(false) {
// Add a fake audio codec. Note that the name must not be "" as there are
// sanity checks against that.
@ -668,22 +581,4 @@ void FakeMediaEngine::set_fail_create_channel(bool fail) {
video_->fail_create_channel_ = fail;
}
DataMediaChannel* FakeDataEngine::CreateChannel(const MediaConfig& config) {
FakeDataMediaChannel* ch = new FakeDataMediaChannel(this, DataOptions());
channels_.push_back(ch);
return ch;
}
FakeDataMediaChannel* FakeDataEngine::GetChannel(size_t index) {
return (channels_.size() > index) ? channels_[index] : NULL;
}
void FakeDataEngine::UnregisterChannel(DataMediaChannel* channel) {
channels_.erase(absl::c_find(channels_, channel));
}
void FakeDataEngine::SetDataCodecs(const std::vector<DataCodec>& data_codecs) {
data_codecs_ = data_codecs;
}
const std::vector<DataCodec>& FakeDataEngine::data_codecs() {
return data_codecs_;
}
} // namespace cricket
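The fake channel deleted above gave channel tests two hooks: set_send_blocked() to make SendData fail with SDR_BLOCK, and last_sent_data() to inspect the most recent payload. A minimal stand-alone sketch of that contract, using toy types rather than the cricket API:
#include <cassert>
#include <string>
// Toy stand-in for cricket::SendDataResult; only the two outcomes the fake
// channel above actually produced.
enum ToySendResult { kToySuccess, kToyBlock };
// Sketch of the removed FakeDataMediaChannel's send path: when blocked,
// report kToyBlock and return false; otherwise remember the payload so a
// test can assert on it afterwards.
class ToyFakeDataChannel {
 public:
  bool SendData(const std::string& payload, ToySendResult* result) {
    if (send_blocked_) {
      *result = kToyBlock;
      return false;
    }
    last_sent_data_ = payload;
    return true;
  }
  void set_send_blocked(bool blocked) { send_blocked_ = blocked; }
  const std::string& last_sent_data() const { return last_sent_data_; }
 private:
  bool send_blocked_ = false;
  std::string last_sent_data_;
};
int main() {
  ToyFakeDataChannel ch;
  ToySendResult result = kToySuccess;
  ch.set_send_blocked(true);
  assert(!ch.SendData("foo", &result));
  assert(result == kToyBlock);
  ch.set_send_blocked(false);
  assert(ch.SendData("foo", &result));
  assert(ch.last_sent_data() == "foo");
  return 0;
}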

View File

@ -472,48 +472,6 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
int max_bps_;
};
// Dummy option class, needed for the DataTraits abstraction in
// channel_unittest.cc.
class DataOptions {};
class FakeDataMediaChannel : public RtpHelper<DataMediaChannel> {
public:
explicit FakeDataMediaChannel(void* unused, const DataOptions& options);
~FakeDataMediaChannel();
const std::vector<DataCodec>& recv_codecs() const;
const std::vector<DataCodec>& send_codecs() const;
const std::vector<DataCodec>& codecs() const;
int max_bps() const;
bool SetSendParameters(const DataSendParameters& params) override;
bool SetRecvParameters(const DataRecvParameters& params) override;
bool SetSend(bool send) override;
bool SetReceive(bool receive) override;
bool AddRecvStream(const StreamParams& sp) override;
bool RemoveRecvStream(uint32_t ssrc) override;
bool SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result) override;
SendDataParams last_sent_data_params();
std::string last_sent_data();
bool is_send_blocked();
void set_send_blocked(bool blocked);
private:
bool SetRecvCodecs(const std::vector<DataCodec>& codecs);
bool SetSendCodecs(const std::vector<DataCodec>& codecs);
bool SetMaxSendBandwidth(int bps);
std::vector<DataCodec> recv_codecs_;
std::vector<DataCodec> send_codecs_;
SendDataParams last_sent_data_params_;
std::string last_sent_data_;
bool send_blocked_;
int max_bps_;
};
class FakeVoiceEngine : public VoiceEngineInterface {
public:
FakeVoiceEngine();
@ -609,25 +567,6 @@ class FakeMediaEngine : public CompositeMediaEngine {
FakeVideoEngine* const video_;
};
// Have to come afterwards due to declaration order
class FakeDataEngine : public DataEngineInterface {
public:
DataMediaChannel* CreateChannel(const MediaConfig& config) override;
FakeDataMediaChannel* GetChannel(size_t index);
void UnregisterChannel(DataMediaChannel* channel);
void SetDataCodecs(const std::vector<DataCodec>& data_codecs);
const std::vector<DataCodec>& data_codecs() override;
private:
std::vector<FakeDataMediaChannel*> channels_;
std::vector<DataCodec> data_codecs_;
};
} // namespace cricket
#endif // MEDIA_BASE_FAKE_MEDIA_ENGINE_H_

View File

@ -955,11 +955,8 @@ enum DataMessageType {
// signal fires, on up the chain.
struct ReceiveDataParams {
// The in-packet stream identifier.
// RTP data channels use SSRCs, SCTP data channels use SIDs.
union {
uint32_t ssrc;
// SCTP data channels use SIDs.
int sid = 0;
};
// The type of message (binary, text, or control).
DataMessageType type = DMT_TEXT;
// A per-stream value incremented per packet in the stream.
@ -970,11 +967,7 @@ struct ReceiveDataParams {
struct SendDataParams {
// The in-packet stream identifier.
// RTP data channels use SSRCs, SCTP data channels use SIDs.
union {
uint32_t ssrc;
int sid = 0;
};
// The type of message (binary, text, or control).
DataMessageType type = DMT_TEXT;
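With the unions above collapsed, the parameter structs identify a stream only by its SCTP sid. A hedged sketch of the post-change shape, reconstructed from the surviving lines of this hunk; the enumerator set, the seq_num field name, and the defaults are assumptions, and media/base/media_channel.h remains authoritative:
#include <cassert>
// Message types referenced by the hunk above; enumerators other than
// DMT_TEXT are assumed from the "binary, text, or control" comment.
enum DataMessageType { DMT_CONTROL, DMT_BINARY, DMT_TEXT };
struct ReceiveDataParams {
  // The in-packet stream identifier. SCTP data channels use SIDs.
  int sid = 0;
  // The type of message (binary, text, or control).
  DataMessageType type = DMT_TEXT;
  // A per-stream value incremented per packet in the stream (field name assumed).
  int seq_num = 0;
};
struct SendDataParams {
  // The in-packet stream identifier. SCTP data channels use SIDs.
  int sid = 0;
  // The type of message (binary, text, or control).
  DataMessageType type = DMT_TEXT;
};
int main() {
  ReceiveDataParams recv;
  SendDataParams send;
  assert(recv.sid == 0 && recv.type == DMT_TEXT);
  assert(send.sid == 0 && send.type == DMT_TEXT);
  return 0;
}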

View File

@ -158,17 +158,9 @@ class CompositeMediaEngine : public MediaEngineInterface {
enum DataChannelType {
DCT_NONE = 0,
DCT_RTP = 1,
DCT_SCTP = 2,
};
class DataEngineInterface {
public:
virtual ~DataEngineInterface() {}
virtual DataMediaChannel* CreateChannel(const MediaConfig& config) = 0;
virtual const std::vector<DataCodec>& data_codecs() = 0;
};
webrtc::RtpParameters CreateRtpParametersWithOneEncoding();
webrtc::RtpParameters CreateRtpParametersWithEncodings(StreamParams sp);

View File

@ -1,340 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "media/base/rtp_data_engine.h"
#include <map>
#include "absl/strings/match.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/base/rtp_utils.h"
#include "media/base/stream_params.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/data_rate_limiter.h"
#include "rtc_base/helpers.h"
#include "rtc_base/logging.h"
#include "rtc_base/sanitizer.h"
namespace cricket {
// We want to avoid IP fragmentation.
static const size_t kDataMaxRtpPacketLen = 1200U;
// We reserve space after the RTP header for future wiggle room.
static const unsigned char kReservedSpace[] = {0x00, 0x00, 0x00, 0x00};
// Amount of overhead SRTP may take. We need to leave room in the
// buffer for it, otherwise SRTP will fail later. If SRTP ever uses
// more than this, we need to increase this number.
static const size_t kMaxSrtpHmacOverhead = 16;
RtpDataEngine::RtpDataEngine() {
data_codecs_.push_back(
DataCodec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName));
}
DataMediaChannel* RtpDataEngine::CreateChannel(const MediaConfig& config) {
return new RtpDataMediaChannel(config);
}
static const DataCodec* FindCodecByName(const std::vector<DataCodec>& codecs,
const std::string& name) {
for (const DataCodec& codec : codecs) {
if (absl::EqualsIgnoreCase(name, codec.name))
return &codec;
}
return nullptr;
}
RtpDataMediaChannel::RtpDataMediaChannel(const MediaConfig& config)
: DataMediaChannel(config) {
Construct();
SetPreferredDscp(rtc::DSCP_AF41);
}
void RtpDataMediaChannel::Construct() {
sending_ = false;
receiving_ = false;
send_limiter_.reset(new rtc::DataRateLimiter(kRtpDataMaxBandwidth / 8, 1.0));
}
RtpDataMediaChannel::~RtpDataMediaChannel() {
std::map<uint32_t, RtpClock*>::const_iterator iter;
for (iter = rtp_clock_by_send_ssrc_.begin();
iter != rtp_clock_by_send_ssrc_.end(); ++iter) {
delete iter->second;
}
}
void RTC_NO_SANITIZE("float-cast-overflow") // bugs.webrtc.org/8204
RtpClock::Tick(double now, int* seq_num, uint32_t* timestamp) {
*seq_num = ++last_seq_num_;
*timestamp = timestamp_offset_ + static_cast<uint32_t>(now * clockrate_);
// UBSan: 5.92374e+10 is outside the range of representable values of type
// 'unsigned int'
}
const DataCodec* FindUnknownCodec(const std::vector<DataCodec>& codecs) {
DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
std::vector<DataCodec>::const_iterator iter;
for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
if (!iter->Matches(data_codec)) {
return &(*iter);
}
}
return NULL;
}
const DataCodec* FindKnownCodec(const std::vector<DataCodec>& codecs) {
DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
std::vector<DataCodec>::const_iterator iter;
for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
if (iter->Matches(data_codec)) {
return &(*iter);
}
}
return NULL;
}
bool RtpDataMediaChannel::SetRecvCodecs(const std::vector<DataCodec>& codecs) {
const DataCodec* unknown_codec = FindUnknownCodec(codecs);
if (unknown_codec) {
RTC_LOG(LS_WARNING) << "Failed to SetRecvCodecs because of unknown codec: "
<< unknown_codec->ToString();
return false;
}
recv_codecs_ = codecs;
return true;
}
bool RtpDataMediaChannel::SetSendCodecs(const std::vector<DataCodec>& codecs) {
const DataCodec* known_codec = FindKnownCodec(codecs);
if (!known_codec) {
RTC_LOG(LS_WARNING)
<< "Failed to SetSendCodecs because there is no known codec.";
return false;
}
send_codecs_ = codecs;
return true;
}
bool RtpDataMediaChannel::SetSendParameters(const DataSendParameters& params) {
return (SetSendCodecs(params.codecs) &&
SetMaxSendBandwidth(params.max_bandwidth_bps));
}
bool RtpDataMediaChannel::SetRecvParameters(const DataRecvParameters& params) {
return SetRecvCodecs(params.codecs);
}
bool RtpDataMediaChannel::AddSendStream(const StreamParams& stream) {
if (!stream.has_ssrcs()) {
return false;
}
if (GetStreamBySsrc(send_streams_, stream.first_ssrc())) {
RTC_LOG(LS_WARNING) << "Not adding data send stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc()
<< " because stream already exists.";
return false;
}
send_streams_.push_back(stream);
// TODO(pthatcher): This should be per-stream, not per-ssrc.
// And we should probably allow more than one per stream.
rtp_clock_by_send_ssrc_[stream.first_ssrc()] =
new RtpClock(kDataCodecClockrate, rtc::CreateRandomNonZeroId(),
rtc::CreateRandomNonZeroId());
RTC_LOG(LS_INFO) << "Added data send stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc();
return true;
}
bool RtpDataMediaChannel::RemoveSendStream(uint32_t ssrc) {
if (!GetStreamBySsrc(send_streams_, ssrc)) {
return false;
}
RemoveStreamBySsrc(&send_streams_, ssrc);
delete rtp_clock_by_send_ssrc_[ssrc];
rtp_clock_by_send_ssrc_.erase(ssrc);
return true;
}
bool RtpDataMediaChannel::AddRecvStream(const StreamParams& stream) {
if (!stream.has_ssrcs()) {
return false;
}
if (GetStreamBySsrc(recv_streams_, stream.first_ssrc())) {
RTC_LOG(LS_WARNING) << "Not adding data recv stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc()
<< " because stream already exists.";
return false;
}
recv_streams_.push_back(stream);
RTC_LOG(LS_INFO) << "Added data recv stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc();
return true;
}
bool RtpDataMediaChannel::RemoveRecvStream(uint32_t ssrc) {
RemoveStreamBySsrc(&recv_streams_, ssrc);
return true;
}
// Not implemented.
void RtpDataMediaChannel::ResetUnsignaledRecvStream() {}
void RtpDataMediaChannel::OnDemuxerCriteriaUpdatePending() {}
void RtpDataMediaChannel::OnDemuxerCriteriaUpdateComplete() {}
void RtpDataMediaChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t /* packet_time_us */) {
RtpHeader header;
if (!GetRtpHeader(packet.cdata(), packet.size(), &header)) {
return;
}
size_t header_length;
if (!GetRtpHeaderLen(packet.cdata(), packet.size(), &header_length)) {
return;
}
const char* data =
packet.cdata<char>() + header_length + sizeof(kReservedSpace);
size_t data_len = packet.size() - header_length - sizeof(kReservedSpace);
if (!receiving_) {
RTC_LOG(LS_WARNING) << "Not receiving packet " << header.ssrc << ":"
<< header.seq_num << " before SetReceive(true) called.";
return;
}
if (!FindCodecById(recv_codecs_, header.payload_type)) {
return;
}
if (!GetStreamBySsrc(recv_streams_, header.ssrc)) {
RTC_LOG(LS_WARNING) << "Received packet for unknown ssrc: " << header.ssrc;
return;
}
// Uncomment this for easy debugging.
// const auto* found_stream = GetStreamBySsrc(recv_streams_, header.ssrc);
// RTC_LOG(LS_INFO) << "Received packet"
// << " groupid=" << found_stream.groupid
// << ", ssrc=" << header.ssrc
// << ", seqnum=" << header.seq_num
// << ", timestamp=" << header.timestamp
// << ", len=" << data_len;
ReceiveDataParams params;
params.ssrc = header.ssrc;
params.seq_num = header.seq_num;
params.timestamp = header.timestamp;
SignalDataReceived(params, data, data_len);
}
bool RtpDataMediaChannel::SetMaxSendBandwidth(int bps) {
if (bps <= 0) {
bps = kRtpDataMaxBandwidth;
}
send_limiter_.reset(new rtc::DataRateLimiter(bps / 8, 1.0));
RTC_LOG(LS_INFO) << "RtpDataMediaChannel::SetSendBandwidth to " << bps
<< "bps.";
return true;
}
bool RtpDataMediaChannel::SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result) {
if (result) {
// If we return true, we'll set this to SDR_SUCCESS.
*result = SDR_ERROR;
}
if (!sending_) {
RTC_LOG(LS_WARNING) << "Not sending packet with ssrc=" << params.ssrc
<< " len=" << payload.size()
<< " before SetSend(true).";
return false;
}
if (params.type != cricket::DMT_TEXT) {
RTC_LOG(LS_WARNING)
<< "Not sending data because binary type is unsupported.";
return false;
}
const StreamParams* found_stream =
GetStreamBySsrc(send_streams_, params.ssrc);
if (!found_stream) {
RTC_LOG(LS_WARNING) << "Not sending data because ssrc is unknown: "
<< params.ssrc;
return false;
}
const DataCodec* found_codec =
FindCodecByName(send_codecs_, kGoogleRtpDataCodecName);
if (!found_codec) {
RTC_LOG(LS_WARNING) << "Not sending data because codec is unknown: "
<< kGoogleRtpDataCodecName;
return false;
}
size_t packet_len = (kMinRtpPacketLen + sizeof(kReservedSpace) +
payload.size() + kMaxSrtpHmacOverhead);
if (packet_len > kDataMaxRtpPacketLen) {
return false;
}
double now =
rtc::TimeMicros() / static_cast<double>(rtc::kNumMicrosecsPerSec);
if (!send_limiter_->CanUse(packet_len, now)) {
RTC_LOG(LS_VERBOSE) << "Dropped data packet of len=" << packet_len
<< "; already sent " << send_limiter_->used_in_period()
<< "/" << send_limiter_->max_per_period();
return false;
}
RtpHeader header;
header.payload_type = found_codec->id;
header.ssrc = params.ssrc;
rtp_clock_by_send_ssrc_[header.ssrc]->Tick(now, &header.seq_num,
&header.timestamp);
rtc::CopyOnWriteBuffer packet(kMinRtpPacketLen, packet_len);
if (!SetRtpHeader(packet.MutableData(), packet.size(), header)) {
return false;
}
packet.AppendData(kReservedSpace);
packet.AppendData(payload);
RTC_LOG(LS_VERBOSE) << "Sent RTP data packet: "
" stream="
<< found_stream->id << " ssrc=" << header.ssrc
<< ", seqnum=" << header.seq_num
<< ", timestamp=" << header.timestamp
<< ", len=" << payload.size();
rtc::PacketOptions options;
options.info_signaled_after_sent.packet_type = rtc::PacketType::kData;
MediaChannel::SendPacket(&packet, options);
send_limiter_->Use(packet_len, now);
if (result) {
*result = SDR_SUCCESS;
}
return true;
}
} // namespace cricket
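The size check in SendData above is plain arithmetic over the constants at the top of this file: a minimum RTP header, the 4 reserved bytes, the payload, and up to 16 bytes of SRTP HMAC must fit inside the 1200-byte anti-fragmentation budget. A stand-alone restatement of that check; kMinRtpPacketLen is declared in one of the included headers and is assumed here to be 12, matching the "Assume RTP header of length 12" comment in the unit test further down:
#include <cstddef>
#include <cstdio>
// Constants copied from the removed rtp_data_engine.cc; kMinRtpPacketLen is
// assumed to be 12 bytes (the fixed RTP header size used by the unit test).
constexpr size_t kMinRtpPacketLen = 12;
constexpr size_t kReservedSpaceLen = 4;  // sizeof(kReservedSpace) above
constexpr size_t kMaxSrtpHmacOverhead = 16;
constexpr size_t kDataMaxRtpPacketLen = 1200;
// Mirrors the rejection in RtpDataMediaChannel::SendData: anything that would
// exceed kDataMaxRtpPacketLen after header, padding and SRTP overhead is dropped.
bool FitsInOnePacket(size_t payload_len) {
  size_t packet_len = kMinRtpPacketLen + kReservedSpaceLen + payload_len +
                      kMaxSrtpHmacOverhead;
  return packet_len <= kDataMaxRtpPacketLen;
}
int main() {
  printf("4-byte payload fits: %d\n", FitsInOnePacket(4));         // 36 bytes total
  printf("1168-byte payload fits: %d\n", FitsInOnePacket(1168));   // exactly 1200
  printf("10000-byte payload fits: %d\n", FitsInOnePacket(10000)); // rejected, as in the test
  return 0;
}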

View File

@ -1,111 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MEDIA_BASE_RTP_DATA_ENGINE_H_
#define MEDIA_BASE_RTP_DATA_ENGINE_H_
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "media/base/codec.h"
#include "media/base/media_channel.h"
#include "media/base/media_constants.h"
#include "media/base/media_engine.h"
namespace rtc {
class DataRateLimiter;
}
namespace cricket {
class RtpDataEngine : public DataEngineInterface {
public:
RtpDataEngine();
virtual DataMediaChannel* CreateChannel(const MediaConfig& config);
virtual const std::vector<DataCodec>& data_codecs() { return data_codecs_; }
private:
std::vector<DataCodec> data_codecs_;
};
// Keep track of sequence number and timestamp of an RTP stream. The
// sequence number starts with a "random" value and increments. The
// timestamp starts with a "random" value and increases monotonically
// according to the clockrate.
class RtpClock {
public:
RtpClock(int clockrate, uint16_t first_seq_num, uint32_t timestamp_offset)
: clockrate_(clockrate),
last_seq_num_(first_seq_num),
timestamp_offset_(timestamp_offset) {}
// Given the current time (in seconds, which must be monotonically
// increasing), return the next sequence number and timestamp.
void Tick(double now, int* seq_num, uint32_t* timestamp);
private:
int clockrate_;
uint16_t last_seq_num_;
uint32_t timestamp_offset_;
};
class RtpDataMediaChannel : public DataMediaChannel {
public:
explicit RtpDataMediaChannel(const MediaConfig& config);
virtual ~RtpDataMediaChannel();
virtual bool SetSendParameters(const DataSendParameters& params);
virtual bool SetRecvParameters(const DataRecvParameters& params);
virtual bool AddSendStream(const StreamParams& sp);
virtual bool RemoveSendStream(uint32_t ssrc);
virtual bool AddRecvStream(const StreamParams& sp);
virtual bool RemoveRecvStream(uint32_t ssrc);
virtual void ResetUnsignaledRecvStream();
virtual void OnDemuxerCriteriaUpdatePending();
virtual void OnDemuxerCriteriaUpdateComplete();
virtual bool SetSend(bool send) {
sending_ = send;
return true;
}
virtual bool SetReceive(bool receive) {
receiving_ = receive;
return true;
}
virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us);
virtual void OnReadyToSend(bool ready) {}
virtual bool SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result);
private:
void Construct();
bool SetMaxSendBandwidth(int bps);
bool SetSendCodecs(const std::vector<DataCodec>& codecs);
bool SetRecvCodecs(const std::vector<DataCodec>& codecs);
bool sending_;
bool receiving_;
std::vector<DataCodec> send_codecs_;
std::vector<DataCodec> recv_codecs_;
std::vector<StreamParams> send_streams_;
std::vector<StreamParams> recv_streams_;
std::map<uint32_t, RtpClock*> rtp_clock_by_send_ssrc_;
std::unique_ptr<rtc::DataRateLimiter> send_limiter_;
};
} // namespace cricket
#endif // MEDIA_BASE_RTP_DATA_ENGINE_H_
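RtpClock, declared above, is pure arithmetic: each Tick bumps the sequence number by one and computes timestamp = offset + now * clockrate. That is why the unit test further down expects the timestamp to jump by 180000 when the fake clock advances two seconds at the 90 kHz data clock rate; kDataCodecClockrate is assumed here to be 90000. A minimal self-contained restatement:
#include <cassert>
#include <cstdint>
// Same arithmetic as the removed RtpClock::Tick, without the
// float-cast-overflow suppression from bugs.webrtc.org/8204.
class MiniRtpClock {
 public:
  MiniRtpClock(int clockrate, uint16_t first_seq_num, uint32_t timestamp_offset)
      : clockrate_(clockrate),
        last_seq_num_(first_seq_num),
        timestamp_offset_(timestamp_offset) {}
  void Tick(double now, int* seq_num, uint32_t* timestamp) {
    *seq_num = ++last_seq_num_;
    *timestamp = timestamp_offset_ + static_cast<uint32_t>(now * clockrate_);
  }
 private:
  int clockrate_;
  uint16_t last_seq_num_;
  uint32_t timestamp_offset_;
};
int main() {
  // 90 kHz clock, arbitrary "random" starting points.
  MiniRtpClock clock(90000, /*first_seq_num=*/1000, /*timestamp_offset=*/5000);
  int seq = 0;
  uint32_t ts_at_0 = 0, ts_at_2 = 0;
  clock.Tick(0.0, &seq, &ts_at_0);      // seq == 1001
  clock.Tick(2.0, &seq, &ts_at_2);      // seq == 1002
  assert(ts_at_2 - ts_at_0 == 180000);  // 2 s * 90000 Hz
  return 0;
}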

View File

@ -1,362 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "media/base/rtp_data_engine.h"
#include <string.h>
#include <memory>
#include <string>
#include "media/base/fake_network_interface.h"
#include "media/base/media_constants.h"
#include "media/base/rtp_utils.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/time_utils.h"
#include "test/gtest.h"
class FakeDataReceiver : public sigslot::has_slots<> {
public:
FakeDataReceiver() : has_received_data_(false) {}
void OnDataReceived(const cricket::ReceiveDataParams& params,
const char* data,
size_t len) {
has_received_data_ = true;
last_received_data_ = std::string(data, len);
last_received_data_len_ = len;
last_received_data_params_ = params;
}
bool has_received_data() const { return has_received_data_; }
std::string last_received_data() const { return last_received_data_; }
size_t last_received_data_len() const { return last_received_data_len_; }
cricket::ReceiveDataParams last_received_data_params() const {
return last_received_data_params_;
}
private:
bool has_received_data_;
std::string last_received_data_;
size_t last_received_data_len_;
cricket::ReceiveDataParams last_received_data_params_;
};
class RtpDataMediaChannelTest : public ::testing::Test {
protected:
virtual void SetUp() {
// Seed needed for each test to satisfy expectations.
iface_.reset(new cricket::FakeNetworkInterface());
dme_.reset(CreateEngine());
receiver_.reset(new FakeDataReceiver());
}
void SetNow(double now) { clock_.SetTime(webrtc::Timestamp::Seconds(now)); }
cricket::RtpDataEngine* CreateEngine() {
cricket::RtpDataEngine* dme = new cricket::RtpDataEngine();
return dme;
}
cricket::RtpDataMediaChannel* CreateChannel() {
return CreateChannel(dme_.get());
}
cricket::RtpDataMediaChannel* CreateChannel(cricket::RtpDataEngine* dme) {
cricket::MediaConfig config;
cricket::RtpDataMediaChannel* channel =
static_cast<cricket::RtpDataMediaChannel*>(dme->CreateChannel(config));
channel->SetInterface(iface_.get());
channel->SignalDataReceived.connect(receiver_.get(),
&FakeDataReceiver::OnDataReceived);
return channel;
}
FakeDataReceiver* receiver() { return receiver_.get(); }
bool HasReceivedData() { return receiver_->has_received_data(); }
std::string GetReceivedData() { return receiver_->last_received_data(); }
size_t GetReceivedDataLen() { return receiver_->last_received_data_len(); }
cricket::ReceiveDataParams GetReceivedDataParams() {
return receiver_->last_received_data_params();
}
bool HasSentData(int count) { return (iface_->NumRtpPackets() > count); }
std::string GetSentData(int index) {
// Assume RTP header of length 12
std::unique_ptr<const rtc::CopyOnWriteBuffer> packet(
iface_->GetRtpPacket(index));
if (packet->size() > 12) {
return std::string(packet->data<char>() + 12, packet->size() - 12);
} else {
return "";
}
}
cricket::RtpHeader GetSentDataHeader(int index) {
std::unique_ptr<const rtc::CopyOnWriteBuffer> packet(
iface_->GetRtpPacket(index));
cricket::RtpHeader header;
GetRtpHeader(packet->data(), packet->size(), &header);
return header;
}
private:
std::unique_ptr<cricket::RtpDataEngine> dme_;
rtc::ScopedFakeClock clock_;
std::unique_ptr<cricket::FakeNetworkInterface> iface_;
std::unique_ptr<FakeDataReceiver> receiver_;
};
TEST_F(RtpDataMediaChannelTest, SetUnknownCodecs) {
std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
cricket::DataCodec known_codec;
known_codec.id = 103;
known_codec.name = "google-data";
cricket::DataCodec unknown_codec;
unknown_codec.id = 104;
unknown_codec.name = "unknown-data";
cricket::DataSendParameters send_parameters_known;
send_parameters_known.codecs.push_back(known_codec);
cricket::DataRecvParameters recv_parameters_known;
recv_parameters_known.codecs.push_back(known_codec);
cricket::DataSendParameters send_parameters_unknown;
send_parameters_unknown.codecs.push_back(unknown_codec);
cricket::DataRecvParameters recv_parameters_unknown;
recv_parameters_unknown.codecs.push_back(unknown_codec);
cricket::DataSendParameters send_parameters_mixed;
send_parameters_mixed.codecs.push_back(known_codec);
send_parameters_mixed.codecs.push_back(unknown_codec);
cricket::DataRecvParameters recv_parameters_mixed;
recv_parameters_mixed.codecs.push_back(known_codec);
recv_parameters_mixed.codecs.push_back(unknown_codec);
EXPECT_TRUE(dmc->SetSendParameters(send_parameters_known));
EXPECT_FALSE(dmc->SetSendParameters(send_parameters_unknown));
EXPECT_TRUE(dmc->SetSendParameters(send_parameters_mixed));
EXPECT_TRUE(dmc->SetRecvParameters(recv_parameters_known));
EXPECT_FALSE(dmc->SetRecvParameters(recv_parameters_unknown));
EXPECT_FALSE(dmc->SetRecvParameters(recv_parameters_mixed));
}
TEST_F(RtpDataMediaChannelTest, AddRemoveSendStream) {
std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
cricket::StreamParams stream1;
stream1.add_ssrc(41);
EXPECT_TRUE(dmc->AddSendStream(stream1));
cricket::StreamParams stream2;
stream2.add_ssrc(42);
EXPECT_TRUE(dmc->AddSendStream(stream2));
EXPECT_TRUE(dmc->RemoveSendStream(41));
EXPECT_TRUE(dmc->RemoveSendStream(42));
EXPECT_FALSE(dmc->RemoveSendStream(43));
}
TEST_F(RtpDataMediaChannelTest, AddRemoveRecvStream) {
std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
cricket::StreamParams stream1;
stream1.add_ssrc(41);
EXPECT_TRUE(dmc->AddRecvStream(stream1));
cricket::StreamParams stream2;
stream2.add_ssrc(42);
EXPECT_TRUE(dmc->AddRecvStream(stream2));
EXPECT_FALSE(dmc->AddRecvStream(stream2));
EXPECT_TRUE(dmc->RemoveRecvStream(41));
EXPECT_TRUE(dmc->RemoveRecvStream(42));
}
TEST_F(RtpDataMediaChannelTest, SendData) {
std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
cricket::SendDataParams params;
params.ssrc = 42;
unsigned char data[] = "food";
rtc::CopyOnWriteBuffer payload(data, 4);
unsigned char padded_data[] = {
0x00, 0x00, 0x00, 0x00, 'f', 'o', 'o', 'd',
};
cricket::SendDataResult result;
// Not sending
EXPECT_FALSE(dmc->SendData(params, payload, &result));
EXPECT_EQ(cricket::SDR_ERROR, result);
EXPECT_FALSE(HasSentData(0));
ASSERT_TRUE(dmc->SetSend(true));
// Unknown stream name.
EXPECT_FALSE(dmc->SendData(params, payload, &result));
EXPECT_EQ(cricket::SDR_ERROR, result);
EXPECT_FALSE(HasSentData(0));
cricket::StreamParams stream;
stream.add_ssrc(42);
ASSERT_TRUE(dmc->AddSendStream(stream));
// Unknown codec;
EXPECT_FALSE(dmc->SendData(params, payload, &result));
EXPECT_EQ(cricket::SDR_ERROR, result);
EXPECT_FALSE(HasSentData(0));
cricket::DataCodec codec;
codec.id = 103;
codec.name = cricket::kGoogleRtpDataCodecName;
cricket::DataSendParameters parameters;
parameters.codecs.push_back(codec);
ASSERT_TRUE(dmc->SetSendParameters(parameters));
// Length too large;
std::string x10000(10000, 'x');
EXPECT_FALSE(dmc->SendData(
params, rtc::CopyOnWriteBuffer(x10000.data(), x10000.length()), &result));
EXPECT_EQ(cricket::SDR_ERROR, result);
EXPECT_FALSE(HasSentData(0));
// Finally works!
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_EQ(cricket::SDR_SUCCESS, result);
ASSERT_TRUE(HasSentData(0));
EXPECT_EQ(sizeof(padded_data), GetSentData(0).length());
EXPECT_EQ(0, memcmp(padded_data, GetSentData(0).data(), sizeof(padded_data)));
cricket::RtpHeader header0 = GetSentDataHeader(0);
EXPECT_NE(0, header0.seq_num);
EXPECT_NE(0U, header0.timestamp);
EXPECT_EQ(header0.ssrc, 42U);
EXPECT_EQ(header0.payload_type, 103);
// Should bump timestamp by 180000 because the clock rate is 90khz.
SetNow(2);
EXPECT_TRUE(dmc->SendData(params, payload, &result));
ASSERT_TRUE(HasSentData(1));
EXPECT_EQ(sizeof(padded_data), GetSentData(1).length());
EXPECT_EQ(0, memcmp(padded_data, GetSentData(1).data(), sizeof(padded_data)));
cricket::RtpHeader header1 = GetSentDataHeader(1);
EXPECT_EQ(header1.ssrc, 42U);
EXPECT_EQ(header1.payload_type, 103);
EXPECT_EQ(static_cast<uint16_t>(header0.seq_num + 1),
static_cast<uint16_t>(header1.seq_num));
EXPECT_EQ(header0.timestamp + 180000, header1.timestamp);
}
TEST_F(RtpDataMediaChannelTest, SendDataRate) {
std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
ASSERT_TRUE(dmc->SetSend(true));
cricket::DataCodec codec;
codec.id = 103;
codec.name = cricket::kGoogleRtpDataCodecName;
cricket::DataSendParameters parameters;
parameters.codecs.push_back(codec);
ASSERT_TRUE(dmc->SetSendParameters(parameters));
cricket::StreamParams stream;
stream.add_ssrc(42);
ASSERT_TRUE(dmc->AddSendStream(stream));
cricket::SendDataParams params;
params.ssrc = 42;
unsigned char data[] = "food";
rtc::CopyOnWriteBuffer payload(data, 4);
cricket::SendDataResult result;
// With rtp overhead of 32 bytes, each one of our packets is 36
// bytes, or 288 bits. So, a limit of 872bps will allow 3 packets,
// but not four.
parameters.max_bandwidth_bps = 872;
ASSERT_TRUE(dmc->SetSendParameters(parameters));
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_FALSE(dmc->SendData(params, payload, &result));
EXPECT_FALSE(dmc->SendData(params, payload, &result));
SetNow(0.9);
EXPECT_FALSE(dmc->SendData(params, payload, &result));
SetNow(1.1);
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_TRUE(dmc->SendData(params, payload, &result));
SetNow(1.9);
EXPECT_TRUE(dmc->SendData(params, payload, &result));
SetNow(2.2);
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_TRUE(dmc->SendData(params, payload, &result));
EXPECT_FALSE(dmc->SendData(params, payload, &result));
}
TEST_F(RtpDataMediaChannelTest, ReceiveData) {
// PT=103, SN=2, TS=3, SSRC=42, data = "abcde"
unsigned char data[] = {0x80, 0x67, 0x00, 0x02, 0x00, 0x00, 0x00,
0x03, 0x00, 0x00, 0x00, 0x2A, 0x00, 0x00,
0x00, 0x00, 'a', 'b', 'c', 'd', 'e'};
rtc::CopyOnWriteBuffer packet(data, sizeof(data));
std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
// SetReceive(true) not called.
dmc->OnPacketReceived(packet, /* packet_time_us */ -1);
EXPECT_FALSE(HasReceivedData());
dmc->SetReceive(true);
// Unknown payload id
dmc->OnPacketReceived(packet, /* packet_time_us */ -1);
EXPECT_FALSE(HasReceivedData());
cricket::DataCodec codec;
codec.id = 103;
codec.name = cricket::kGoogleRtpDataCodecName;
cricket::DataRecvParameters parameters;
parameters.codecs.push_back(codec);
ASSERT_TRUE(dmc->SetRecvParameters(parameters));
// Unknown stream
dmc->OnPacketReceived(packet, /* packet_time_us */ -1);
EXPECT_FALSE(HasReceivedData());
cricket::StreamParams stream;
stream.add_ssrc(42);
ASSERT_TRUE(dmc->AddRecvStream(stream));
// Finally works!
dmc->OnPacketReceived(packet, /* packet_time_us */ -1);
EXPECT_TRUE(HasReceivedData());
EXPECT_EQ("abcde", GetReceivedData());
EXPECT_EQ(5U, GetReceivedDataLen());
}
TEST_F(RtpDataMediaChannelTest, InvalidRtpPackets) {
unsigned char data[] = {0x80, 0x65, 0x00, 0x02};
rtc::CopyOnWriteBuffer packet(data, sizeof(data));
std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
// Too short
dmc->OnPacketReceived(packet, /* packet_time_us */ -1);
EXPECT_FALSE(HasReceivedData());
}
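The SendDataRate expectations above follow from the per-second byte budget the channel configured: SetMaxSendBandwidth(872) allows 872 / 8 = 109 bytes per one-second period, and each padded "food" packet is 12 + 4 + 4 + 16 = 36 bytes, so three packets (108 bytes) pass and the fourth is dropped. A small stand-alone model of that bookkeeping; this is a simplification of how the channel used rtc::DataRateLimiter, not the limiter itself:
#include <cstdio>
// Simplified per-period budget: CanUse() checks the budget, Use() charges it.
struct PeriodBudget {
  int max_bytes_per_period;
  int used_bytes;
  bool CanUse(int bytes) const { return used_bytes + bytes <= max_bytes_per_period; }
  void Use(int bytes) { used_bytes += bytes; }
};
int main() {
  const int kPacketLen = 12 + 4 + 4 + 16;  // header + reserved + "food" + SRTP = 36
  PeriodBudget budget{872 / 8, 0};         // 872 bps -> 109 bytes per second
  for (int i = 1; i <= 4; ++i) {
    if (budget.CanUse(kPacketLen)) {
      budget.Use(kPacketLen);
      printf("packet %d sent (%d/%d bytes used)\n", i, budget.used_bytes,
             budget.max_bytes_per_period);
    } else {
      printf("packet %d dropped (would exceed %d bytes)\n", i,
             budget.max_bytes_per_period);
    }
  }
  // Output: packets 1-3 sent (36, 72, 108 bytes), packet 4 dropped.
  return 0;
}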

View File

@ -190,8 +190,6 @@ rtc_library("peerconnection") {
"rtc_stats_collector.h",
"rtc_stats_traversal.cc",
"rtc_stats_traversal.h",
"rtp_data_channel.cc",
"rtp_data_channel.h",
"sctp_data_channel.cc",
"sctp_data_channel.h",
"sdp_offer_answer.cc", # TODO: Make separate target when not circular

View File

@ -1281,243 +1281,4 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
return true;
}
RtpDataChannel::RtpDataChannel(rtc::Thread* worker_thread,
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
std::unique_ptr<DataMediaChannel> media_channel,
const std::string& content_name,
bool srtp_required,
webrtc::CryptoOptions crypto_options,
UniqueRandomIdGenerator* ssrc_generator)
: BaseChannel(worker_thread,
network_thread,
signaling_thread,
std::move(media_channel),
content_name,
srtp_required,
crypto_options,
ssrc_generator) {}
RtpDataChannel::~RtpDataChannel() {
TRACE_EVENT0("webrtc", "RtpDataChannel::~RtpDataChannel");
// this can't be done in the base class, since it calls a virtual
DisableMedia_w();
Deinit();
}
void RtpDataChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
BaseChannel::Init_w(rtp_transport);
media_channel()->SignalDataReceived.connect(this,
&RtpDataChannel::OnDataReceived);
media_channel()->SignalReadyToSend.connect(
this, &RtpDataChannel::OnDataChannelReadyToSend);
}
bool RtpDataChannel::SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result) {
DataMediaChannel* mc = media_channel();
return InvokeOnWorker<bool>(RTC_FROM_HERE, [mc, &params, &payload, result] {
return mc->SendData(params, payload, result);
});
}
bool RtpDataChannel::CheckDataChannelTypeFromContent(
const MediaContentDescription* content,
std::string* error_desc) {
if (!content->as_rtp_data()) {
if (content->as_sctp()) {
SafeSetError("Data channel type mismatch. Expected RTP, got SCTP.",
error_desc);
} else {
SafeSetError("Data channel is not RTP or SCTP.", error_desc);
}
return false;
}
return true;
}
bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
SdpType type,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "RtpDataChannel::SetLocalContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
RTC_LOG(LS_INFO) << "Setting local data description for " << ToString();
RTC_DCHECK(content);
if (!content) {
SafeSetError("Can't find data content in local description.", error_desc);
return false;
}
if (!CheckDataChannelTypeFromContent(content, error_desc)) {
return false;
}
const RtpDataContentDescription* data = content->as_rtp_data();
RtpHeaderExtensions rtp_header_extensions =
GetFilteredRtpHeaderExtensions(data->rtp_header_extensions());
DataRecvParameters recv_params = last_recv_params_;
RtpParametersFromMediaDescription(
data, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &recv_params);
if (!media_channel()->SetRecvParameters(recv_params)) {
SafeSetError(
"Failed to set remote data description recv parameters for m-section "
"with mid='" +
content_name() + "'.",
error_desc);
return false;
}
for (const DataCodec& codec : data->codecs()) {
MaybeAddHandledPayloadType(codec.id);
}
// Need to re-register the sink to update the handled payload.
if (!RegisterRtpDemuxerSink_w()) {
RTC_LOG(LS_ERROR) << "Failed to set up data demuxing for " << ToString();
return false;
}
last_recv_params_ = recv_params;
// TODO(pthatcher): Move local streams into DataSendParameters, and
// only give it to the media channel once we have a remote
// description too (without a remote description, we won't be able
// to send them anyway).
if (!UpdateLocalStreams_w(data->streams(), type, error_desc)) {
SafeSetError(
"Failed to set local data description streams for m-section with "
"mid='" +
content_name() + "'.",
error_desc);
return false;
}
set_local_content_direction(content->direction());
UpdateMediaSendRecvState_w();
return true;
}
bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content,
SdpType type,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "RtpDataChannel::SetRemoteContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString();
RTC_DCHECK(content);
if (!content) {
SafeSetError("Can't find data content in remote description.", error_desc);
return false;
}
if (!CheckDataChannelTypeFromContent(content, error_desc)) {
return false;
}
const RtpDataContentDescription* data = content->as_rtp_data();
// If the remote data doesn't have codecs, it must be empty, so ignore it.
if (!data->has_codecs()) {
return true;
}
RtpHeaderExtensions rtp_header_extensions =
GetFilteredRtpHeaderExtensions(data->rtp_header_extensions());
RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString();
DataSendParameters send_params = last_send_params_;
RtpSendParametersFromMediaDescription<DataCodec>(
data, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &send_params);
if (!media_channel()->SetSendParameters(send_params)) {
SafeSetError(
"Failed to set remote data description send parameters for m-section "
"with mid='" +
content_name() + "'.",
error_desc);
return false;
}
last_send_params_ = send_params;
// TODO(pthatcher): Move remote streams into DataRecvParameters,
// and only give it to the media channel once we have a local
// description too (without a local description, we won't be able to
// recv them anyway).
if (!UpdateRemoteStreams_w(data->streams(), type, error_desc)) {
SafeSetError(
"Failed to set remote data description streams for m-section with "
"mid='" +
content_name() + "'.",
error_desc);
return false;
}
set_remote_content_direction(content->direction());
UpdateMediaSendRecvState_w();
return true;
}
void RtpDataChannel::UpdateMediaSendRecvState_w() {
// Render incoming data if we're the active call, and we have the local
// content. We receive data on the default channel and multiplexed streams.
RTC_DCHECK_RUN_ON(worker_thread());
bool recv = IsReadyToReceiveMedia_w();
if (!media_channel()->SetReceive(recv)) {
RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel: " << ToString();
}
// Send outgoing data if we're the active call, we have the remote content,
// and we have had some form of connectivity.
bool send = IsReadyToSendMedia_w();
if (!media_channel()->SetSend(send)) {
RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel: " << ToString();
}
// Trigger SignalReadyToSendData asynchronously.
OnDataChannelReadyToSend(send);
RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send
<< " for " << ToString();
}
void RtpDataChannel::OnMessage(rtc::Message* pmsg) {
switch (pmsg->message_id) {
case MSG_READYTOSENDDATA: {
DataChannelReadyToSendMessageData* data =
static_cast<DataChannelReadyToSendMessageData*>(pmsg->pdata);
ready_to_send_data_ = data->data();
SignalReadyToSendData(ready_to_send_data_);
delete data;
break;
}
case MSG_DATARECEIVED: {
DataReceivedMessageData* data =
static_cast<DataReceivedMessageData*>(pmsg->pdata);
SignalDataReceived(data->params, data->payload);
delete data;
break;
}
default:
BaseChannel::OnMessage(pmsg);
break;
}
}
void RtpDataChannel::OnDataReceived(const ReceiveDataParams& params,
const char* data,
size_t len) {
DataReceivedMessageData* msg = new DataReceivedMessageData(params, data, len);
signaling_thread()->Post(RTC_FROM_HERE, this, MSG_DATARECEIVED, msg);
}
void RtpDataChannel::OnDataChannelReadyToSend(bool writable) {
// This is used for congestion control to indicate that the stream is ready
// to send by the MediaChannel, as opposed to OnReadyToSend, which indicates
// that the transport channel is ready.
signaling_thread()->Post(RTC_FROM_HERE, this, MSG_READYTOSENDDATA,
new DataChannelReadyToSendMessageData(writable));
}
} // namespace cricket

View File

@ -471,104 +471,6 @@ class VideoChannel : public BaseChannel {
VideoRecvParameters last_recv_params_;
};
// RtpDataChannel is a specialization for data.
class RtpDataChannel : public BaseChannel {
public:
RtpDataChannel(rtc::Thread* worker_thread,
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
std::unique_ptr<DataMediaChannel> channel,
const std::string& content_name,
bool srtp_required,
webrtc::CryptoOptions crypto_options,
rtc::UniqueRandomIdGenerator* ssrc_generator);
~RtpDataChannel();
// TODO(zhihuang): Remove this once the RtpTransport can be shared between
// BaseChannels.
void Init_w(DtlsTransportInternal* rtp_dtls_transport,
DtlsTransportInternal* rtcp_dtls_transport,
rtc::PacketTransportInternal* rtp_packet_transport,
rtc::PacketTransportInternal* rtcp_packet_transport);
void Init_w(webrtc::RtpTransportInternal* rtp_transport) override;
virtual bool SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result);
// Should be called on the signaling thread only.
bool ready_to_send_data() const { return ready_to_send_data_; }
sigslot::signal2<const ReceiveDataParams&, const rtc::CopyOnWriteBuffer&>
SignalDataReceived;
// Signal for notifying when the channel becomes ready to send data.
// That occurs when the channel is enabled, the transport is writable,
// both local and remote descriptions are set, and the channel is unblocked.
sigslot::signal1<bool> SignalReadyToSendData;
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_DATA;
}
protected:
// downcasts a MediaChannel.
DataMediaChannel* media_channel() const override {
return static_cast<DataMediaChannel*>(BaseChannel::media_channel());
}
private:
struct SendDataMessageData : public rtc::MessageData {
SendDataMessageData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer* payload,
SendDataResult* result)
: params(params), payload(payload), result(result), succeeded(false) {}
const SendDataParams& params;
const rtc::CopyOnWriteBuffer* payload;
SendDataResult* result;
bool succeeded;
};
struct DataReceivedMessageData : public rtc::MessageData {
// We copy the data because the data will become invalid after we
// handle DataMediaChannel::SignalDataReceived but before we fire
// SignalDataReceived.
DataReceivedMessageData(const ReceiveDataParams& params,
const char* data,
size_t len)
: params(params), payload(data, len) {}
const ReceiveDataParams params;
const rtc::CopyOnWriteBuffer payload;
};
typedef rtc::TypedMessageData<bool> DataChannelReadyToSendMessageData;
// overrides from BaseChannel
// Checks that data channel type is RTP.
bool CheckDataChannelTypeFromContent(const MediaContentDescription* content,
std::string* error_desc);
bool SetLocalContent_w(const MediaContentDescription* content,
webrtc::SdpType type,
std::string* error_desc) override;
bool SetRemoteContent_w(const MediaContentDescription* content,
webrtc::SdpType type,
std::string* error_desc) override;
void UpdateMediaSendRecvState_w() override;
void OnMessage(rtc::Message* pmsg) override;
void OnDataReceived(const ReceiveDataParams& params,
const char* data,
size_t len);
void OnDataChannelReadyToSend(bool writable);
bool ready_to_send_data_ = false;
// Last DataSendParameters sent down to the media_channel() via
// SetSendParameters.
DataSendParameters last_send_params_;
// Last DataRecvParameters sent down to the media_channel() via
// SetRecvParameters.
DataRecvParameters last_recv_params_;
};
} // namespace cricket
#endif // PC_CHANNEL_H_

View File

@ -28,35 +28,29 @@ namespace cricket {
// static
std::unique_ptr<ChannelManager> ChannelManager::Create(
std::unique_ptr<MediaEngineInterface> media_engine,
std::unique_ptr<DataEngineInterface> data_engine,
bool enable_rtx,
rtc::Thread* worker_thread,
rtc::Thread* network_thread) {
RTC_DCHECK_RUN_ON(worker_thread);
RTC_DCHECK(network_thread);
RTC_DCHECK(worker_thread);
RTC_DCHECK(data_engine);
if (media_engine)
media_engine->Init();
return absl::WrapUnique(new ChannelManager(std::move(media_engine),
std::move(data_engine), enable_rtx,
worker_thread, network_thread));
return absl::WrapUnique(new ChannelManager(
std::move(media_engine), enable_rtx, worker_thread, network_thread));
}
ChannelManager::ChannelManager(
std::unique_ptr<MediaEngineInterface> media_engine,
std::unique_ptr<DataEngineInterface> data_engine,
bool enable_rtx,
rtc::Thread* worker_thread,
rtc::Thread* network_thread)
: media_engine_(std::move(media_engine)),
data_engine_(std::move(data_engine)),
worker_thread_(worker_thread),
network_thread_(network_thread),
enable_rtx_(enable_rtx) {
RTC_DCHECK(data_engine_);
RTC_DCHECK(worker_thread_);
RTC_DCHECK(network_thread_);
RTC_DCHECK_RUN_ON(worker_thread_);
@ -116,11 +110,6 @@ void ChannelManager::GetSupportedVideoReceiveCodecs(
}
}
void ChannelManager::GetSupportedDataCodecs(
std::vector<DataCodec>* codecs) const {
*codecs = data_engine_->data_codecs();
}
RtpHeaderExtensions ChannelManager::GetDefaultEnabledAudioRtpHeaderExtensions()
const {
if (!media_engine_)
@ -273,61 +262,6 @@ void ChannelManager::DestroyVideoChannel(VideoChannel* video_channel) {
}));
}
RtpDataChannel* ChannelManager::CreateRtpDataChannel(
const MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
const webrtc::CryptoOptions& crypto_options,
rtc::UniqueRandomIdGenerator* ssrc_generator) {
if (!worker_thread_->IsCurrent()) {
return worker_thread_->Invoke<RtpDataChannel*>(RTC_FROM_HERE, [&] {
return CreateRtpDataChannel(media_config, rtp_transport, signaling_thread,
content_name, srtp_required, crypto_options,
ssrc_generator);
});
}
RTC_DCHECK_RUN_ON(worker_thread_);
// This is ok to alloc from a thread other than the worker thread.
DataMediaChannel* media_channel = data_engine_->CreateChannel(media_config);
if (!media_channel) {
RTC_LOG(LS_WARNING) << "Failed to create RTP data channel.";
return nullptr;
}
auto data_channel = std::make_unique<RtpDataChannel>(
worker_thread_, network_thread_, signaling_thread,
absl::WrapUnique(media_channel), content_name, srtp_required,
crypto_options, ssrc_generator);
// Media Transports are not supported with Rtp Data Channel.
data_channel->Init_w(rtp_transport);
RtpDataChannel* data_channel_ptr = data_channel.get();
data_channels_.push_back(std::move(data_channel));
return data_channel_ptr;
}
void ChannelManager::DestroyRtpDataChannel(RtpDataChannel* data_channel) {
TRACE_EVENT0("webrtc", "ChannelManager::DestroyRtpDataChannel");
RTC_DCHECK(data_channel);
if (!worker_thread_->IsCurrent()) {
worker_thread_->Invoke<void>(
RTC_FROM_HERE, [&] { return DestroyRtpDataChannel(data_channel); });
return;
}
RTC_DCHECK_RUN_ON(worker_thread_);
data_channels_.erase(absl::c_find_if(
data_channels_, [&](const std::unique_ptr<RtpDataChannel>& p) {
return p.get() == data_channel;
}));
}
bool ChannelManager::StartAecDump(webrtc::FileWrapper file,
int64_t max_size_bytes) {
RTC_DCHECK_RUN_ON(worker_thread_);

View File

@ -50,7 +50,6 @@ class ChannelManager final {
// will own that reference and media engine initialization
static std::unique_ptr<ChannelManager> Create(
std::unique_ptr<MediaEngineInterface> media_engine,
std::unique_ptr<DataEngineInterface> data_engine,
bool enable_rtx,
rtc::Thread* worker_thread,
rtc::Thread* network_thread);
@ -110,17 +109,6 @@ class ChannelManager final {
// Destroys a video channel created by CreateVideoChannel.
void DestroyVideoChannel(VideoChannel* video_channel);
RtpDataChannel* CreateRtpDataChannel(
const MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
const webrtc::CryptoOptions& crypto_options,
rtc::UniqueRandomIdGenerator* ssrc_generator);
// Destroys a data channel created by CreateRtpDataChannel.
void DestroyRtpDataChannel(RtpDataChannel* data_channel);
// Starts AEC dump using existing file, with a specified maximum file size in
// bytes. When the limit is reached, logging will stop and the file will be
// closed. If max_size_bytes is set to <= 0, no limit will be used.
@ -131,13 +119,11 @@ class ChannelManager final {
private:
ChannelManager(std::unique_ptr<MediaEngineInterface> media_engine,
std::unique_ptr<DataEngineInterface> data_engine,
bool enable_rtx,
rtc::Thread* worker_thread,
rtc::Thread* network_thread);
const std::unique_ptr<MediaEngineInterface> media_engine_; // Nullable.
const std::unique_ptr<DataEngineInterface> data_engine_; // Non-null.
rtc::Thread* const worker_thread_;
rtc::Thread* const network_thread_;
@ -146,8 +132,6 @@ class ChannelManager final {
RTC_GUARDED_BY(worker_thread_);
std::vector<std::unique_ptr<VideoChannel>> video_channels_
RTC_GUARDED_BY(worker_thread_);
std::vector<std::unique_ptr<RtpDataChannel>> data_channels_
RTC_GUARDED_BY(worker_thread_);
const bool enable_rtx_;
};

View File

@ -58,7 +58,6 @@ class ChannelManagerTest : public ::testing::Test {
video_bitrate_allocator_factory_(
webrtc::CreateBuiltinVideoBitrateAllocatorFactory()),
cm_(cricket::ChannelManager::Create(CreateFakeMediaEngine(),
std::make_unique<FakeDataEngine>(),
false,
worker_,
network_.get())),
@ -80,14 +79,8 @@ class ChannelManagerTest : public ::testing::Test {
webrtc::CryptoOptions(), &ssrc_generator_, VideoOptions(),
video_bitrate_allocator_factory_.get());
EXPECT_TRUE(video_channel != nullptr);
cricket::RtpDataChannel* rtp_data_channel = cm_->CreateRtpDataChannel(
cricket::MediaConfig(), rtp_transport, rtc::Thread::Current(),
cricket::CN_DATA, kDefaultSrtpRequired, webrtc::CryptoOptions(),
&ssrc_generator_);
EXPECT_TRUE(rtp_data_channel != nullptr);
cm_->DestroyVideoChannel(video_channel);
cm_->DestroyVoiceChannel(voice_channel);
cm_->DestroyRtpDataChannel(rtp_data_channel);
}
std::unique_ptr<rtc::Thread> network_;
@ -112,7 +105,6 @@ TEST_F(ChannelManagerTest, SetVideoRtxEnabled) {
// Enable and check.
cm_ = cricket::ChannelManager::Create(CreateFakeMediaEngine(),
std::make_unique<FakeDataEngine>(),
true, worker_, network_.get());
cm_->GetSupportedVideoSendCodecs(&send_codecs);
EXPECT_TRUE(ContainsMatchingCodec(send_codecs, rtx_codec));
@ -121,7 +113,6 @@ TEST_F(ChannelManagerTest, SetVideoRtxEnabled) {
// Disable and check.
cm_ = cricket::ChannelManager::Create(CreateFakeMediaEngine(),
std::make_unique<FakeDataEngine>(),
false, worker_, network_.get());
cm_->GetSupportedVideoSendCodecs(&send_codecs);
EXPECT_FALSE(ContainsMatchingCodec(send_codecs, rtx_codec));

View File

@ -93,13 +93,6 @@ class VideoTraits : public Traits<cricket::VideoChannel,
cricket::VideoMediaInfo,
cricket::VideoOptions> {};
class DataTraits : public Traits<cricket::RtpDataChannel,
cricket::FakeDataMediaChannel,
cricket::RtpDataContentDescription,
cricket::RtpDataCodec,
cricket::DataMediaInfo,
cricket::DataOptions> {};
// Base class for Voice/Video/RtpDataChannel tests
template <class T>
class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
@ -2274,220 +2267,5 @@ TEST_F(VideoChannelDoubleThreadTest, SocketOptionsMergedOnSetTransport) {
Base::SocketOptionsMergedOnSetTransport();
}
// RtpDataChannelSingleThreadTest
class RtpDataChannelSingleThreadTest : public ChannelTest<DataTraits> {
public:
typedef ChannelTest<DataTraits> Base;
RtpDataChannelSingleThreadTest()
: Base(true, kDataPacket, kRtcpReport, NetworkIsWorker::Yes) {}
};
// RtpDataChannelDoubleThreadTest
class RtpDataChannelDoubleThreadTest : public ChannelTest<DataTraits> {
public:
typedef ChannelTest<DataTraits> Base;
RtpDataChannelDoubleThreadTest()
: Base(true, kDataPacket, kRtcpReport, NetworkIsWorker::No) {}
};
// Override to avoid engine channel parameter.
template <>
std::unique_ptr<cricket::RtpDataChannel> ChannelTest<DataTraits>::CreateChannel(
rtc::Thread* worker_thread,
rtc::Thread* network_thread,
std::unique_ptr<cricket::FakeDataMediaChannel> ch,
webrtc::RtpTransportInternal* rtp_transport,
int flags) {
rtc::Thread* signaling_thread = rtc::Thread::Current();
auto channel = std::make_unique<cricket::RtpDataChannel>(
worker_thread, network_thread, signaling_thread, std::move(ch),
cricket::CN_DATA, (flags & DTLS) != 0, webrtc::CryptoOptions(),
&ssrc_generator_);
channel->Init_w(rtp_transport);
return channel;
}
template <>
void ChannelTest<DataTraits>::CreateContent(
int flags,
const cricket::AudioCodec& audio_codec,
const cricket::VideoCodec& video_codec,
cricket::RtpDataContentDescription* data) {
data->AddCodec(kGoogleDataCodec);
data->set_rtcp_mux((flags & RTCP_MUX) != 0);
}
template <>
void ChannelTest<DataTraits>::CopyContent(
const cricket::RtpDataContentDescription& source,
cricket::RtpDataContentDescription* data) {
*data = source;
}
template <>
bool ChannelTest<DataTraits>::CodecMatches(const cricket::DataCodec& c1,
const cricket::DataCodec& c2) {
return c1.name == c2.name;
}
template <>
void ChannelTest<DataTraits>::AddLegacyStreamInContent(
uint32_t ssrc,
int flags,
cricket::RtpDataContentDescription* data) {
data->AddLegacyStream(ssrc);
}
TEST_F(RtpDataChannelSingleThreadTest, TestInit) {
Base::TestInit();
EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
}
TEST_F(RtpDataChannelSingleThreadTest, TestDeinit) {
Base::TestDeinit();
}
TEST_F(RtpDataChannelSingleThreadTest, TestSetContents) {
Base::TestSetContents();
}
TEST_F(RtpDataChannelSingleThreadTest, TestSetContentsNullOffer) {
Base::TestSetContentsNullOffer();
}
TEST_F(RtpDataChannelSingleThreadTest, TestSetContentsRtcpMux) {
Base::TestSetContentsRtcpMux();
}
TEST_F(RtpDataChannelSingleThreadTest, TestChangeStreamParamsInContent) {
Base::TestChangeStreamParamsInContent();
}
TEST_F(RtpDataChannelSingleThreadTest, TestPlayoutAndSendingStates) {
Base::TestPlayoutAndSendingStates();
}
TEST_F(RtpDataChannelSingleThreadTest, TestMediaContentDirection) {
Base::TestMediaContentDirection();
}
TEST_F(RtpDataChannelSingleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
TEST_F(RtpDataChannelSingleThreadTest, TestCallTeardownRtcpMux) {
Base::TestCallTeardownRtcpMux();
}
TEST_F(RtpDataChannelSingleThreadTest, TestOnTransportReadyToSend) {
Base::TestOnTransportReadyToSend();
}
TEST_F(RtpDataChannelSingleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
TEST_F(RtpDataChannelSingleThreadTest, SendRtpToRtpOnThread) {
Base::SendRtpToRtpOnThread();
}
TEST_F(RtpDataChannelSingleThreadTest, SendWithWritabilityLoss) {
Base::SendWithWritabilityLoss();
}
TEST_F(RtpDataChannelSingleThreadTest, SocketOptionsMergedOnSetTransport) {
Base::SocketOptionsMergedOnSetTransport();
}
TEST_F(RtpDataChannelSingleThreadTest, TestSendData) {
CreateChannels(0, 0);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
cricket::SendDataParams params;
params.ssrc = 42;
unsigned char data[] = {'f', 'o', 'o'};
rtc::CopyOnWriteBuffer payload(data, 3);
cricket::SendDataResult result;
ASSERT_TRUE(media_channel1_->SendData(params, payload, &result));
EXPECT_EQ(params.ssrc, media_channel1_->last_sent_data_params().ssrc);
EXPECT_EQ("foo", media_channel1_->last_sent_data());
}
TEST_F(RtpDataChannelDoubleThreadTest, TestInit) {
Base::TestInit();
EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
}
TEST_F(RtpDataChannelDoubleThreadTest, TestDeinit) {
Base::TestDeinit();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestSetContents) {
Base::TestSetContents();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestSetContentsNullOffer) {
Base::TestSetContentsNullOffer();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestSetContentsRtcpMux) {
Base::TestSetContentsRtcpMux();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestChangeStreamParamsInContent) {
Base::TestChangeStreamParamsInContent();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestPlayoutAndSendingStates) {
Base::TestPlayoutAndSendingStates();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestMediaContentDirection) {
Base::TestMediaContentDirection();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestCallTeardownRtcpMux) {
Base::TestCallTeardownRtcpMux();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestOnTransportReadyToSend) {
Base::TestOnTransportReadyToSend();
}
TEST_F(RtpDataChannelDoubleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
TEST_F(RtpDataChannelDoubleThreadTest, SendRtpToRtpOnThread) {
Base::SendRtpToRtpOnThread();
}
TEST_F(RtpDataChannelDoubleThreadTest, SendWithWritabilityLoss) {
Base::SendWithWritabilityLoss();
}
TEST_F(RtpDataChannelDoubleThreadTest, SocketOptionsMergedOnSetTransport) {
Base::SocketOptionsMergedOnSetTransport();
}
TEST_F(RtpDataChannelDoubleThreadTest, TestSendData) {
CreateChannels(0, 0);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
cricket::SendDataParams params;
params.ssrc = 42;
unsigned char data[] = {'f', 'o', 'o'};
rtc::CopyOnWriteBuffer payload(data, 3);
cricket::SendDataResult result;
ASSERT_TRUE(media_channel1_->SendData(params, payload, &result));
EXPECT_EQ(params.ssrc, media_channel1_->last_sent_data_params().ssrc);
EXPECT_EQ("foo", media_channel1_->last_sent_data());
}
// TODO(pthatcher): TestSetReceiver?
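For context on what goes away in this file: the RtpDataChannel test fixtures above exercised the RTP data path, where outgoing payloads were addressed by SSRC through cricket::SendDataParams. A minimal, self-contained sketch of that pattern follows; FakeRtpDataSender and RtpSendDataParams are hypothetical stand-ins for illustration only, not the real FakeDataMediaChannel or cricket types.

#include <cassert>
#include <cstdint>
#include <string>

// Hypothetical stand-in for the ssrc-keyed send API that the tests above
// asserted against; not part of this commit.
struct RtpSendDataParams {
  uint32_t ssrc = 0;
};

class FakeRtpDataSender {
 public:
  bool SendData(const RtpSendDataParams& params, const std::string& payload) {
    last_params_ = params;
    last_payload_ = payload;
    return true;
  }
  uint32_t last_ssrc() const { return last_params_.ssrc; }
  const std::string& last_payload() const { return last_payload_; }

 private:
  RtpSendDataParams last_params_;
  std::string last_payload_;
};

int main() {
  FakeRtpDataSender sender;
  RtpSendDataParams params;
  params.ssrc = 42;  // RTP data was addressed by SSRC, as in TestSendData.
  bool ok = sender.SendData(params, "foo");
  assert(ok);
  assert(sender.last_ssrc() == 42);
  assert(sender.last_payload() == "foo");
  (void)ok;
  return 0;
}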

View File

@ -15,7 +15,6 @@
#include <utility>
#include "api/transport/field_trial_based_config.h"
#include "media/base/rtp_data_engine.h"
#include "media/sctp/sctp_transport_factory.h"
#include "rtc_base/helpers.h"
#include "rtc_base/ref_counted_object.h"
@ -127,8 +126,7 @@ ConnectionContext::ConnectionContext(
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&]() {
channel_manager_ = cricket::ChannelManager::Create(
std::move(dependencies->media_engine),
std::make_unique<cricket::RtpDataEngine>(), /*enable_rtx=*/true,
worker_thread(), network_thread());
/*enable_rtx=*/true, worker_thread(), network_thread());
});
// Set warning levels on the threads, to give warnings when response
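Here the ConnectionContext change amounts to dropping one constructor argument: ChannelManager::Create no longer receives a data engine. Condensed from the hunk above (an excerpt of the worker-thread lambda, not a standalone program):

// Before this commit: an RtpDataEngine was constructed next to the media engine.
channel_manager_ = cricket::ChannelManager::Create(
    std::move(dependencies->media_engine),
    std::make_unique<cricket::RtpDataEngine>(), /*enable_rtx=*/true,
    worker_thread(), network_thread());

// After: only the media engine is passed; data channels no longer go through a
// media-engine-style data engine at all.
channel_manager_ = cricket::ChannelManager::Create(
    std::move(dependencies->media_engine),
    /*enable_rtx=*/true, worker_thread(), network_thread());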

View File

@ -28,7 +28,7 @@ namespace webrtc {
bool DataChannelController::HasDataChannels() const {
RTC_DCHECK_RUN_ON(signaling_thread());
return !rtp_data_channels_.empty() || !sctp_data_channels_.empty();
return !sctp_data_channels_.empty();
}
bool DataChannelController::SendData(const cricket::SendDataParams& params,
@ -36,40 +36,10 @@ bool DataChannelController::SendData(const cricket::SendDataParams& params,
cricket::SendDataResult* result) {
if (data_channel_transport())
return DataChannelSendData(params, payload, result);
if (rtp_data_channel())
return rtp_data_channel()->SendData(params, payload, result);
RTC_LOG(LS_ERROR) << "SendData called before transport is ready";
return false;
}
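With the RTP branch removed, SendData reduces to one question: is the SCTP data channel transport available or not. A self-contained sketch of that routing decision; SctpTransportStub and the free function are illustrative stand-ins, not the real webrtc::DataChannelTransportInterface or controller method.

#include <cstdio>
#include <string>

// Illustrative stand-in for the SCTP data channel transport.
struct SctpTransportStub {
  bool SendPayload(int sid, const std::string& payload) {
    std::printf("sending %zu bytes on sid %d\n", payload.size(), sid);
    return true;
  }
};

// Mirrors the shape of the simplified SendData: no rtp_data_channel() fallback,
// just "transport present -> send, otherwise fail".
bool SendData(SctpTransportStub* transport, int sid, const std::string& payload) {
  if (transport)
    return transport->SendPayload(sid, payload);
  std::printf("SendData called before transport is ready\n");
  return false;
}

int main() {
  SctpTransportStub transport;
  SendData(nullptr, 1, "too early");  // fails: no transport yet
  SendData(&transport, 1, "hello");   // succeeds via the SCTP transport
  return 0;
}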
bool DataChannelController::ConnectDataChannel(
RtpDataChannel* webrtc_data_channel) {
RTC_DCHECK_RUN_ON(signaling_thread());
if (!rtp_data_channel()) {
// Don't log an error here, because DataChannels are expected to call
// ConnectDataChannel in this state. It's the only way to initially tell
// whether or not the underlying transport is ready.
return false;
}
rtp_data_channel()->SignalReadyToSendData.connect(
webrtc_data_channel, &RtpDataChannel::OnChannelReady);
rtp_data_channel()->SignalDataReceived.connect(
webrtc_data_channel, &RtpDataChannel::OnDataReceived);
return true;
}
void DataChannelController::DisconnectDataChannel(
RtpDataChannel* webrtc_data_channel) {
RTC_DCHECK_RUN_ON(signaling_thread());
if (!rtp_data_channel()) {
RTC_LOG(LS_ERROR)
<< "DisconnectDataChannel called when rtp_data_channel_ is NULL.";
return;
}
rtp_data_channel()->SignalReadyToSendData.disconnect(webrtc_data_channel);
rtp_data_channel()->SignalDataReceived.disconnect(webrtc_data_channel);
}
bool DataChannelController::ConnectDataChannel(
SctpDataChannel* webrtc_data_channel) {
RTC_DCHECK_RUN_ON(signaling_thread());
@ -126,8 +96,7 @@ void DataChannelController::RemoveSctpDataStream(int sid) {
bool DataChannelController::ReadyToSendData() const {
RTC_DCHECK_RUN_ON(signaling_thread());
return (rtp_data_channel() && rtp_data_channel()->ready_to_send_data()) ||
(data_channel_transport() && data_channel_transport_ready_to_send_);
return (data_channel_transport() && data_channel_transport_ready_to_send_);
}
void DataChannelController::OnDataReceived(
@ -258,10 +227,10 @@ bool DataChannelController::HandleOpenMessage_s(
// be created.
std::string label;
InternalDataChannelInit config;
config.id = params.ssrc;
config.id = params.sid;
if (!ParseDataChannelOpenMessage(buffer, &label, &config)) {
RTC_LOG(LS_WARNING) << "Failed to parse the OPEN message for ssrc "
<< params.ssrc;
RTC_LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
<< params.sid;
return true;
}
config.open_handshake_role = InternalDataChannelInit::kAcker;
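This hunk is the behavioral core of the controller change: inbound OPEN messages are now keyed by the SCTP stream id (params.sid) rather than an RTP SSRC, and that sid becomes the channel id directly. A small self-contained sketch of that id handling; OpenMessageParams and ChannelConfig are hypothetical stand-ins, not the real cricket::ReceiveDataParams or webrtc::InternalDataChannelInit.

#include <cstdio>

// Hypothetical stand-ins for illustration only.
struct OpenMessageParams {
  int sid = -1;  // SCTP stream id carrying the OPEN message
};

struct ChannelConfig {
  int id = -1;  // the data channel id is simply the stream id
};

bool HandleOpenMessage(const OpenMessageParams& params, ChannelConfig* config) {
  if (params.sid < 0) {
    std::printf("Failed to parse the OPEN message for sid %d\n", params.sid);
    return false;
  }
  // As in the updated controller: the channel id comes straight from the sid,
  // with no SSRC indirection left.
  config->id = params.sid;
  return true;
}

int main() {
  OpenMessageParams params;
  params.sid = 3;
  ChannelConfig config;
  if (HandleOpenMessage(params, &config))
    std::printf("data channel created with id %d\n", config.id);
  return 0;
}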
@ -304,38 +273,11 @@ DataChannelController::InternalCreateDataChannelWithProxy(
if (channel) {
return SctpDataChannel::CreateProxy(channel);
}
} else if (data_channel_type() == cricket::DCT_RTP) {
rtc::scoped_refptr<RtpDataChannel> channel =
InternalCreateRtpDataChannel(label, config);
if (channel) {
return RtpDataChannel::CreateProxy(channel);
}
}
return nullptr;
}
rtc::scoped_refptr<RtpDataChannel>
DataChannelController::InternalCreateRtpDataChannel(
const std::string& label,
const DataChannelInit* config) {
RTC_DCHECK_RUN_ON(signaling_thread());
DataChannelInit new_config = config ? (*config) : DataChannelInit();
rtc::scoped_refptr<RtpDataChannel> channel(
RtpDataChannel::Create(this, label, new_config, signaling_thread()));
if (!channel) {
return nullptr;
}
if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) {
RTC_LOG(LS_ERROR) << "DataChannel with label " << channel->label()
<< " already exists.";
return nullptr;
}
rtp_data_channels_[channel->label()] = channel;
SignalRtpDataChannelCreated_(channel.get());
return channel;
}
rtc::scoped_refptr<SctpDataChannel>
DataChannelController::InternalCreateSctpDataChannel(
const std::string& label,
@ -416,14 +358,8 @@ void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) {
void DataChannelController::OnTransportChannelClosed() {
RTC_DCHECK_RUN_ON(signaling_thread());
// Use a temporary copy of the RTP/SCTP DataChannel list because the
// Use a temporary copy of the SCTP DataChannel list because the
// DataChannel may callback to us and try to modify the list.
std::map<std::string, rtc::scoped_refptr<RtpDataChannel>> temp_rtp_dcs;
temp_rtp_dcs.swap(rtp_data_channels_);
for (const auto& kv : temp_rtp_dcs) {
kv.second->OnTransportChannelClosed();
}
std::vector<rtc::scoped_refptr<SctpDataChannel>> temp_sctp_dcs;
temp_sctp_dcs.swap(sctp_data_channels_);
for (const auto& channel : temp_sctp_dcs) {
@ -441,58 +377,6 @@ SctpDataChannel* DataChannelController::FindDataChannelBySid(int sid) const {
return nullptr;
}
void DataChannelController::UpdateLocalRtpDataChannels(
const cricket::StreamParamsVec& streams) {
std::vector<std::string> existing_channels;
RTC_DCHECK_RUN_ON(signaling_thread());
// Find new and active data channels.
for (const cricket::StreamParams& params : streams) {
// |it->sync_label| is actually the data channel label. The reason is that
// we use the same naming of data channels as we do for
// MediaStreams and Tracks.
// For MediaStreams, the sync_label is the MediaStream label and the
// track label is the same as |streamid|.
const std::string& channel_label = params.first_stream_id();
auto data_channel_it = rtp_data_channels()->find(channel_label);
if (data_channel_it == rtp_data_channels()->end()) {
RTC_LOG(LS_ERROR) << "channel label not found";
continue;
}
// Set the SSRC the data channel should use for sending.
data_channel_it->second->SetSendSsrc(params.first_ssrc());
existing_channels.push_back(data_channel_it->first);
}
UpdateClosingRtpDataChannels(existing_channels, true);
}
void DataChannelController::UpdateRemoteRtpDataChannels(
const cricket::StreamParamsVec& streams) {
RTC_DCHECK_RUN_ON(signaling_thread());
std::vector<std::string> existing_channels;
// Find new and active data channels.
for (const cricket::StreamParams& params : streams) {
// The data channel label is either the mslabel or the SSRC if the mslabel
// does not exist. Ex a=ssrc:444330170 mslabel:test1.
std::string label = params.first_stream_id().empty()
? rtc::ToString(params.first_ssrc())
: params.first_stream_id();
auto data_channel_it = rtp_data_channels()->find(label);
if (data_channel_it == rtp_data_channels()->end()) {
// This is a new data channel.
CreateRemoteRtpDataChannel(label, params.first_ssrc());
} else {
data_channel_it->second->SetReceiveSsrc(params.first_ssrc());
}
existing_channels.push_back(label);
}
UpdateClosingRtpDataChannels(existing_channels, false);
}
cricket::DataChannelType DataChannelController::data_channel_type() const {
// TODO(bugs.webrtc.org/9987): Should be restricted to the signaling thread.
// RTC_DCHECK_RUN_ON(signaling_thread());
@ -505,19 +389,6 @@ void DataChannelController::set_data_channel_type(
data_channel_type_ = type;
}
cricket::RtpDataChannel* DataChannelController::rtp_data_channel() const {
// TODO(bugs.webrtc.org/9987): Only allow this accessor to be called on the
// network thread.
// RTC_DCHECK_RUN_ON(network_thread());
return rtp_data_channel_;
}
void DataChannelController::set_rtp_data_channel(
cricket::RtpDataChannel* channel) {
RTC_DCHECK_RUN_ON(network_thread());
rtp_data_channel_ = channel;
}
DataChannelTransportInterface* DataChannelController::data_channel_transport()
const {
// TODO(bugs.webrtc.org/11547): Only allow this accessor to be called on the
@ -532,56 +403,6 @@ void DataChannelController::set_data_channel_transport(
data_channel_transport_ = transport;
}
const std::map<std::string, rtc::scoped_refptr<RtpDataChannel>>*
DataChannelController::rtp_data_channels() const {
RTC_DCHECK_RUN_ON(signaling_thread());
return &rtp_data_channels_;
}
void DataChannelController::UpdateClosingRtpDataChannels(
const std::vector<std::string>& active_channels,
bool is_local_update) {
auto it = rtp_data_channels_.begin();
while (it != rtp_data_channels_.end()) {
RtpDataChannel* data_channel = it->second;
if (absl::c_linear_search(active_channels, data_channel->label())) {
++it;
continue;
}
if (is_local_update) {
data_channel->SetSendSsrc(0);
} else {
data_channel->RemotePeerRequestClose();
}
if (data_channel->state() == RtpDataChannel::kClosed) {
rtp_data_channels_.erase(it);
it = rtp_data_channels_.begin();
} else {
++it;
}
}
}
void DataChannelController::CreateRemoteRtpDataChannel(const std::string& label,
uint32_t remote_ssrc) {
if (data_channel_type() != cricket::DCT_RTP) {
return;
}
rtc::scoped_refptr<RtpDataChannel> channel(
InternalCreateRtpDataChannel(label, nullptr));
if (!channel.get()) {
RTC_LOG(LS_WARNING) << "Remote peer requested a DataChannel but"
"CreateDataChannel failed.";
return;
}
channel->SetReceiveSsrc(remote_ssrc);
rtc::scoped_refptr<DataChannelInterface> proxy_channel =
RtpDataChannel::CreateProxy(std::move(channel));
pc_->Observer()->OnDataChannel(std::move(proxy_channel));
}
bool DataChannelController::DataChannelSendData(
const cricket::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,

View File

@ -27,7 +27,6 @@
#include "media/base/stream_params.h"
#include "pc/channel.h"
#include "pc/data_channel_utils.h"
#include "pc/rtp_data_channel.h"
#include "pc/sctp_data_channel.h"
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
@ -41,8 +40,7 @@ namespace webrtc {
class PeerConnection;
class DataChannelController : public RtpDataChannelProviderInterface,
public SctpDataChannelProviderInterface,
class DataChannelController : public SctpDataChannelProviderInterface,
public DataChannelSink {
public:
explicit DataChannelController(PeerConnection* pc) : pc_(pc) {}
@ -53,13 +51,11 @@ class DataChannelController : public RtpDataChannelProviderInterface,
DataChannelController(DataChannelController&&) = delete;
DataChannelController& operator=(DataChannelController&& other) = delete;
// Implements RtpDataChannelProviderInterface/
// Implements
// SctpDataChannelProviderInterface.
bool SendData(const cricket::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) override;
bool ConnectDataChannel(RtpDataChannel* webrtc_data_channel) override;
void DisconnectDataChannel(RtpDataChannel* webrtc_data_channel) override;
bool ConnectDataChannel(SctpDataChannel* webrtc_data_channel) override;
void DisconnectDataChannel(SctpDataChannel* webrtc_data_channel) override;
void AddSctpDataStream(int sid) override;
@ -104,28 +100,13 @@ class DataChannelController : public RtpDataChannelProviderInterface,
RTC_DCHECK_RUN_ON(signaling_thread());
return !sctp_data_channels_.empty();
}
bool HasRtpDataChannels() const {
RTC_DCHECK_RUN_ON(signaling_thread());
return !rtp_data_channels_.empty();
}
void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams);
void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
// Accessors
cricket::DataChannelType data_channel_type() const;
void set_data_channel_type(cricket::DataChannelType type);
cricket::RtpDataChannel* rtp_data_channel() const;
void set_rtp_data_channel(cricket::RtpDataChannel* channel);
DataChannelTransportInterface* data_channel_transport() const;
void set_data_channel_transport(DataChannelTransportInterface* transport);
const std::map<std::string, rtc::scoped_refptr<RtpDataChannel>>*
rtp_data_channels() const;
sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() {
RTC_DCHECK_RUN_ON(signaling_thread());
return SignalRtpDataChannelCreated_;
}
sigslot::signal1<SctpDataChannel*>& SignalSctpDataChannelCreated() {
RTC_DCHECK_RUN_ON(signaling_thread());
return SignalSctpDataChannelCreated_;
@ -136,10 +117,6 @@ class DataChannelController : public RtpDataChannelProviderInterface,
void OnSctpDataChannelClosed(SctpDataChannel* channel);
private:
rtc::scoped_refptr<RtpDataChannel> InternalCreateRtpDataChannel(
const std::string& label,
const DataChannelInit* config) /* RTC_RUN_ON(signaling_thread()) */;
rtc::scoped_refptr<SctpDataChannel> InternalCreateSctpDataChannel(
const std::string& label,
const InternalDataChannelInit*
@ -155,14 +132,6 @@ class DataChannelController : public RtpDataChannelProviderInterface,
const InternalDataChannelInit& config)
RTC_RUN_ON(signaling_thread());
void CreateRemoteRtpDataChannel(const std::string& label,
uint32_t remote_ssrc)
RTC_RUN_ON(signaling_thread());
void UpdateClosingRtpDataChannels(
const std::vector<std::string>& active_channels,
bool is_local_update) RTC_RUN_ON(signaling_thread());
// Called from SendData when data_channel_transport() is true.
bool DataChannelSendData(const cricket::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
@ -175,13 +144,7 @@ class DataChannelController : public RtpDataChannelProviderInterface,
rtc::Thread* network_thread() const;
rtc::Thread* signaling_thread() const;
// Specifies which kind of data channel is allowed. This is controlled
// by the chrome command-line flag and constraints:
// 1. If chrome command-line switch 'enable-sctp-data-channels' is enabled,
// constraint kEnableDtlsSrtp is true, and constraint kEnableRtpDataChannels is
// not set or false, SCTP is allowed (DCT_SCTP);
// 2. If constraint kEnableRtpDataChannels is true, RTP is allowed (DCT_RTP);
// 3. If both 1&2 are false, data channel is not allowed (DCT_NONE).
// Specifies whether or not SCTP data channels are allowed.
cricket::DataChannelType data_channel_type_ =
cricket::DCT_NONE; // TODO(bugs.webrtc.org/9987): Accessed on both
// signaling and network thread.
@ -197,22 +160,12 @@ class DataChannelController : public RtpDataChannelProviderInterface,
bool data_channel_transport_ready_to_send_
RTC_GUARDED_BY(signaling_thread()) = false;
// |rtp_data_channel_| is used if in RTP data channel mode,
// |data_channel_transport_| when using SCTP.
// TODO(bugs.webrtc.org/9987): Accessed on both signaling and network
// thread.
cricket::RtpDataChannel* rtp_data_channel_ = nullptr;
SctpSidAllocator sid_allocator_ /* RTC_GUARDED_BY(signaling_thread()) */;
std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_
RTC_GUARDED_BY(signaling_thread());
std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_to_free_
RTC_GUARDED_BY(signaling_thread());
// Map of label -> DataChannel
std::map<std::string, rtc::scoped_refptr<RtpDataChannel>> rtp_data_channels_
RTC_GUARDED_BY(signaling_thread());
// Signals from |data_channel_transport_|. These are invoked on the
// signaling thread.
// TODO(bugs.webrtc.org/11547): These '_s' signals likely all belong on the
@ -228,8 +181,6 @@ class DataChannelController : public RtpDataChannelProviderInterface,
sigslot::signal1<int> SignalDataChannelTransportChannelClosed_s
RTC_GUARDED_BY(signaling_thread());
sigslot::signal1<RtpDataChannel*> SignalRtpDataChannelCreated_
RTC_GUARDED_BY(signaling_thread());
sigslot::signal1<SctpDataChannel*> SignalSctpDataChannelCreated_
RTC_GUARDED_BY(signaling_thread());
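Taken together, the header hunks leave an SCTP-only controller. A condensed view of the surviving class shape, assembled from the declarations shown above (an excerpt, not compilable on its own):

class DataChannelController : public SctpDataChannelProviderInterface,
                              public DataChannelSink {
 public:
  // SctpDataChannelProviderInterface.
  bool SendData(const cricket::SendDataParams& params,
                const rtc::CopyOnWriteBuffer& payload,
                cricket::SendDataResult* result) override;
  bool ConnectDataChannel(SctpDataChannel* webrtc_data_channel) override;
  void DisconnectDataChannel(SctpDataChannel* webrtc_data_channel) override;
  void AddSctpDataStream(int sid) override;

 private:
  // Only SCTP channels remain; the label-keyed RtpDataChannel map, its
  // accessors, and SignalRtpDataChannelCreated_ are all gone.
  std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_
      RTC_GUARDED_BY(signaling_thread());
};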

View File

@ -553,11 +553,10 @@ TEST_F(DataChannelIntegrationTestUnifiedPlan,
callee()->AddAudioVideoTracks();
caller()->CreateAndSetAndSignalOffer();
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
network_thread()->Invoke<void>(RTC_FROM_HERE, [this] {
ASSERT_TRUE_WAIT(caller()->pc()->GetSctpTransport(), kDefaultTimeout);
ASSERT_EQ_WAIT(SctpTransportState::kConnected,
caller()->pc()->GetSctpTransport()->Information().state(),
kDefaultTimeout);
});
ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
}

View File

@ -287,8 +287,7 @@ TEST_F(SctpDataChannelTest, OpenMessageSent) {
SetChannelReady();
EXPECT_GE(webrtc_data_channel_->id(), 0);
EXPECT_EQ(cricket::DMT_CONTROL, provider_->last_send_data_params().type);
EXPECT_EQ(provider_->last_send_data_params().ssrc,
static_cast<uint32_t>(webrtc_data_channel_->id()));
EXPECT_EQ(provider_->last_send_data_params().sid, webrtc_data_channel_->id());
}
TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
@ -297,8 +296,7 @@ TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
provider_->set_send_blocked(false);
EXPECT_EQ(cricket::DMT_CONTROL, provider_->last_send_data_params().type);
EXPECT_EQ(provider_->last_send_data_params().ssrc,
static_cast<uint32_t>(webrtc_data_channel_->id()));
EXPECT_EQ(provider_->last_send_data_params().sid, webrtc_data_channel_->id());
}
// Tests that the DataChannel created after transport gets ready can enter OPEN
@ -334,7 +332,7 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
// Emulates receiving an OPEN_ACK message.
cricket::ReceiveDataParams params;
params.ssrc = init.id;
params.sid = init.id;
params.type = cricket::DMT_CONTROL;
rtc::CopyOnWriteBuffer payload;
webrtc::WriteDataChannelOpenAckMessage(&payload);
@ -360,7 +358,7 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
// Emulates receiving a DATA message.
cricket::ReceiveDataParams params;
params.ssrc = init.id;
params.sid = init.id;
params.type = cricket::DMT_TEXT;
webrtc::DataBuffer buffer("data");
dc->OnDataReceived(params, buffer.data);
@ -406,39 +404,39 @@ TEST_F(SctpDataChannelTest, QueuedCloseFlushes) {
EXPECT_EQ(cricket::DMT_TEXT, provider_->last_send_data_params().type);
}
// Tests that messages are sent with the right ssrc.
TEST_F(SctpDataChannelTest, SendDataSsrc) {
// Tests that messages are sent with the right id.
TEST_F(SctpDataChannelTest, SendDataId) {
webrtc_data_channel_->SetSctpSid(1);
SetChannelReady();
webrtc::DataBuffer buffer("data");
EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
EXPECT_EQ(1U, provider_->last_send_data_params().ssrc);
EXPECT_EQ(1, provider_->last_send_data_params().sid);
}
// Tests that the incoming messages with wrong ssrcs are rejected.
TEST_F(SctpDataChannelTest, ReceiveDataWithInvalidSsrc) {
// Tests that the incoming messages with wrong ids are rejected.
TEST_F(SctpDataChannelTest, ReceiveDataWithInvalidId) {
webrtc_data_channel_->SetSctpSid(1);
SetChannelReady();
AddObserver();
cricket::ReceiveDataParams params;
params.ssrc = 0;
params.sid = 0;
webrtc::DataBuffer buffer("abcd");
webrtc_data_channel_->OnDataReceived(params, buffer.data);
EXPECT_EQ(0U, observer_->messages_received());
}
// Tests that the incoming messages with right ssrcs are acceted.
TEST_F(SctpDataChannelTest, ReceiveDataWithValidSsrc) {
// Tests that the incoming messages with right ids are accepted.
TEST_F(SctpDataChannelTest, ReceiveDataWithValidId) {
webrtc_data_channel_->SetSctpSid(1);
SetChannelReady();
AddObserver();
cricket::ReceiveDataParams params;
params.ssrc = 1;
params.sid = 1;
webrtc::DataBuffer buffer("abcd");
webrtc_data_channel_->OnDataReceived(params, buffer.data);
@ -459,7 +457,7 @@ TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
EXPECT_EQ(0U, provider_->last_send_data_params().ssrc);
EXPECT_EQ(0, provider_->last_send_data_params().sid);
}
// Tests that DataChannel::messages_received() and DataChannel::bytes_received()
@ -477,7 +475,7 @@ TEST_F(SctpDataChannelTest, VerifyMessagesAndBytesReceived) {
webrtc_data_channel_->SetSctpSid(1);
cricket::ReceiveDataParams params;
params.ssrc = 1;
params.sid = 1;
// Default values.
EXPECT_EQ(0U, webrtc_data_channel_->messages_received());
@ -524,8 +522,7 @@ TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
EXPECT_EQ(static_cast<unsigned int>(config.id),
provider_->last_send_data_params().ssrc);
EXPECT_EQ(config.id, provider_->last_send_data_params().sid);
EXPECT_EQ(cricket::DMT_CONTROL, provider_->last_send_data_params().type);
}
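The updated assertions compare sid, a plain int equal to the channel id, so the static_cast<uint32_t> that the old ssrc comparisons required disappears. A tiny self-contained illustration with a stand-in struct; it is not the real cricket::SendDataParams, which carries only one of these fields after this change.

#include <cassert>
#include <cstdint>

// Stand-in showing both styles side by side for comparison.
struct LastSendParams {
  uint32_t ssrc = 0;  // old field: comparing against the int id needed a cast
  int sid = -1;       // new field: compared against the id directly
};

int main() {
  const int channel_id = 7;
  LastSendParams params;
  params.ssrc = static_cast<uint32_t>(channel_id);
  params.sid = channel_id;
  assert(params.ssrc == static_cast<uint32_t>(channel_id));  // old-style check
  assert(params.sid == channel_id);                          // no cast needed
  return 0;
}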
@ -584,7 +581,7 @@ TEST_F(SctpDataChannelTest, ClosedWhenReceivedBufferFull) {
memset(buffer.MutableData(), 0, buffer.size());
cricket::ReceiveDataParams params;
params.ssrc = 0;
params.sid = 0;
// Receiving data without having an observer will overflow the buffer.
for (size_t i = 0; i < 16 * 1024 + 1; ++i) {

View File

@ -1380,14 +1380,6 @@ void MediaDescriptionOptions::AddVideoSender(
num_sim_layers);
}
void MediaDescriptionOptions::AddRtpDataChannel(const std::string& track_id,
const std::string& stream_id) {
RTC_DCHECK(type == MEDIA_TYPE_DATA);
// TODO(steveanton): Is it the case that RtpDataChannel will never have more
// than one stream?
AddSenderInternal(track_id, {stream_id}, {}, SimulcastLayerList(), 1);
}
void MediaDescriptionOptions::AddSenderInternal(
const std::string& track_id,
const std::vector<std::string>& stream_ids,
@ -1428,7 +1420,6 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory(
channel_manager->GetSupportedAudioReceiveCodecs(&audio_recv_codecs_);
channel_manager->GetSupportedVideoSendCodecs(&video_send_codecs_);
channel_manager->GetSupportedVideoReceiveCodecs(&video_recv_codecs_);
channel_manager->GetSupportedDataCodecs(&rtp_data_codecs_);
ComputeAudioCodecsIntersectionAndUnion();
ComputeVideoCodecsIntersectionAndUnion();
}
@ -1521,12 +1512,8 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
AudioCodecs offer_audio_codecs;
VideoCodecs offer_video_codecs;
RtpDataCodecs offer_rtp_data_codecs;
GetCodecsForOffer(
current_active_contents, &offer_audio_codecs, &offer_video_codecs,
session_options.data_channel_type == DataChannelType::DCT_SCTP
? nullptr
: &offer_rtp_data_codecs);
GetCodecsForOffer(current_active_contents, &offer_audio_codecs,
&offer_video_codecs);
if (!session_options.vad_enabled) {
// If application doesn't want CN codecs in offer.
StripCNCodecs(&offer_audio_codecs);
@ -1574,8 +1561,8 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
case MEDIA_TYPE_DATA:
if (!AddDataContentForOffer(media_description_options, session_options,
current_content, current_description,
offer_rtp_data_codecs, &current_streams,
offer.get(), &ice_credentials)) {
&current_streams, offer.get(),
&ice_credentials)) {
return nullptr;
}
break;
@ -1673,9 +1660,8 @@ MediaSessionDescriptionFactory::CreateAnswer(
// sections.
AudioCodecs answer_audio_codecs;
VideoCodecs answer_video_codecs;
RtpDataCodecs answer_rtp_data_codecs;
GetCodecsForAnswer(current_active_contents, *offer, &answer_audio_codecs,
&answer_video_codecs, &answer_rtp_data_codecs);
&answer_video_codecs);
if (!session_options.vad_enabled) {
// If application doesn't want CN codecs in answer.
@ -1732,11 +1718,11 @@ MediaSessionDescriptionFactory::CreateAnswer(
}
break;
case MEDIA_TYPE_DATA:
if (!AddDataContentForAnswer(
media_description_options, session_options, offer_content,
offer, current_content, current_description,
bundle_transport.get(), answer_rtp_data_codecs,
&current_streams, answer.get(), &ice_credentials)) {
if (!AddDataContentForAnswer(media_description_options, session_options,
offer_content, offer, current_content,
current_description,
bundle_transport.get(), &current_streams,
answer.get(), &ice_credentials)) {
return nullptr;
}
break;
@ -1899,7 +1885,6 @@ void MergeCodecsFromDescription(
const std::vector<const ContentInfo*>& current_active_contents,
AudioCodecs* audio_codecs,
VideoCodecs* video_codecs,
RtpDataCodecs* rtp_data_codecs,
UsedPayloadTypes* used_pltypes) {
for (const ContentInfo* content : current_active_contents) {
if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) {
@ -1910,14 +1895,6 @@ void MergeCodecsFromDescription(
const VideoContentDescription* video =
content->media_description()->as_video();
MergeCodecs<VideoCodec>(video->codecs(), video_codecs, used_pltypes);
} else if (IsMediaContentOfType(content, MEDIA_TYPE_DATA)) {
const RtpDataContentDescription* data =
content->media_description()->as_rtp_data();
if (data) {
// Only relevant for RTP datachannels
MergeCodecs<RtpDataCodec>(data->codecs(), rtp_data_codecs,
used_pltypes);
}
}
}
}
@ -1931,22 +1908,17 @@ void MergeCodecsFromDescription(
void MediaSessionDescriptionFactory::GetCodecsForOffer(
const std::vector<const ContentInfo*>& current_active_contents,
AudioCodecs* audio_codecs,
VideoCodecs* video_codecs,
RtpDataCodecs* rtp_data_codecs) const {
VideoCodecs* video_codecs) const {
// First - get all codecs from the current description if the media type
// is used. Add them to |used_pltypes| so the payload type is not reused if a
// new media type is added.
UsedPayloadTypes used_pltypes;
MergeCodecsFromDescription(current_active_contents, audio_codecs,
video_codecs, rtp_data_codecs, &used_pltypes);
video_codecs, &used_pltypes);
// Add our codecs that are not in the current description.
MergeCodecs<AudioCodec>(all_audio_codecs_, audio_codecs, &used_pltypes);
MergeCodecs<VideoCodec>(all_video_codecs_, video_codecs, &used_pltypes);
// Only allocate a payload type for rtp datachannels when using rtp data
// channels.
if (rtp_data_codecs)
MergeCodecs<DataCodec>(rtp_data_codecs_, rtp_data_codecs, &used_pltypes);
}
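Payload-type numbers are a shared space across the m= sections of an offer, which is why RTP data codecs used to take part in this merge (and why the bundle test further below checked for clashing ids). A simplified, self-contained model of that used-payload-type bookkeeping; it is not the real cricket::UsedPayloadTypes or MergeCodecs code, and the renumbering rule here is a toy.

#include <cstdio>
#include <set>
#include <string>
#include <vector>

struct Codec {
  int id;
  std::string name;
};

// Toy merge: keep each codec, but bump its payload type if the id is taken.
void MergeCodecs(const std::vector<Codec>& new_codecs,
                 std::vector<Codec>* merged,
                 std::set<int>* used_payload_types) {
  for (Codec codec : new_codecs) {
    while (used_payload_types->count(codec.id))
      ++codec.id;  // the real code reassigns within the dynamic payload range
    used_payload_types->insert(codec.id);
    merged->push_back(codec);
  }
}

int main() {
  std::set<int> used;
  std::vector<Codec> offer;
  MergeCodecs({{111, "opus"}}, &offer, &used);  // audio
  MergeCodecs({{96, "VP8"}}, &offer, &used);    // video
  // Before this commit, RTP data codecs such as {98, "binary-data"} would have
  // been merged here as well and could collide with audio/video payload types.
  for (const Codec& c : offer)
    std::printf("%d %s\n", c.id, c.name.c_str());
  return 0;
}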
// Getting codecs for an answer involves these steps:
@ -1960,19 +1932,17 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer(
const std::vector<const ContentInfo*>& current_active_contents,
const SessionDescription& remote_offer,
AudioCodecs* audio_codecs,
VideoCodecs* video_codecs,
RtpDataCodecs* rtp_data_codecs) const {
VideoCodecs* video_codecs) const {
// First - get all codecs from the current description if the media type
// is used. Add them to |used_pltypes| so the payload type is not reused if a
// new media type is added.
UsedPayloadTypes used_pltypes;
MergeCodecsFromDescription(current_active_contents, audio_codecs,
video_codecs, rtp_data_codecs, &used_pltypes);
video_codecs, &used_pltypes);
// Second - filter out codecs that we don't support at all and should ignore.
AudioCodecs filtered_offered_audio_codecs;
VideoCodecs filtered_offered_video_codecs;
RtpDataCodecs filtered_offered_rtp_data_codecs;
for (const ContentInfo& content : remote_offer.contents()) {
if (IsMediaContentOfType(&content, MEDIA_TYPE_AUDIO)) {
const AudioContentDescription* audio =
@ -1998,22 +1968,6 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer(
filtered_offered_video_codecs.push_back(offered_video_codec);
}
}
} else if (IsMediaContentOfType(&content, MEDIA_TYPE_DATA)) {
const RtpDataContentDescription* data =
content.media_description()->as_rtp_data();
if (data) {
// RTP data. This part is inactive for SCTP data.
for (const RtpDataCodec& offered_rtp_data_codec : data->codecs()) {
if (!FindMatchingCodec<RtpDataCodec>(
data->codecs(), filtered_offered_rtp_data_codecs,
offered_rtp_data_codec, nullptr) &&
FindMatchingCodec<RtpDataCodec>(data->codecs(), rtp_data_codecs_,
offered_rtp_data_codec,
nullptr)) {
filtered_offered_rtp_data_codecs.push_back(offered_rtp_data_codec);
}
}
}
}
}
@ -2023,8 +1977,6 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer(
&used_pltypes);
MergeCodecs<VideoCodec>(filtered_offered_video_codecs, video_codecs,
&used_pltypes);
MergeCodecs<DataCodec>(filtered_offered_rtp_data_codecs, rtp_data_codecs,
&used_pltypes);
}
MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions
@ -2333,7 +2285,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer(
return true;
}
bool MediaSessionDescriptionFactory::AddSctpDataContentForOffer(
bool MediaSessionDescriptionFactory::AddDataContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
@ -2378,73 +2330,6 @@ bool MediaSessionDescriptionFactory::AddSctpDataContentForOffer(
return true;
}
bool MediaSessionDescriptionFactory::AddRtpDataContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
const SessionDescription* current_description,
const RtpDataCodecs& rtp_data_codecs,
StreamParamsVec* current_streams,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const {
auto data = std::make_unique<RtpDataContentDescription>();
bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED);
cricket::SecurePolicy sdes_policy =
IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED
: secure();
std::vector<std::string> crypto_suites;
GetSupportedDataSdesCryptoSuiteNames(session_options.crypto_options,
&crypto_suites);
if (!CreateMediaContentOffer(media_description_options, session_options,
rtp_data_codecs, sdes_policy,
GetCryptos(current_content), crypto_suites,
RtpHeaderExtensions(), ssrc_generator_,
current_streams, data.get())) {
return false;
}
data->set_bandwidth(kRtpDataMaxBandwidth);
SetMediaProtocol(secure_transport, data.get());
desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp,
media_description_options.stopped, std::move(data));
if (!AddTransportOffer(media_description_options.mid,
media_description_options.transport_options,
current_description, desc, ice_credentials)) {
return false;
}
return true;
}
bool MediaSessionDescriptionFactory::AddDataContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
const SessionDescription* current_description,
const RtpDataCodecs& rtp_data_codecs,
StreamParamsVec* current_streams,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const {
bool is_sctp = (session_options.data_channel_type == DCT_SCTP);
// If the DataChannel type is not specified, use the DataChannel type in
// the current description.
if (session_options.data_channel_type == DCT_NONE && current_content) {
RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_DATA));
is_sctp = (current_content->media_description()->protocol() ==
kMediaProtocolSctp);
}
if (is_sctp) {
return AddSctpDataContentForOffer(
media_description_options, session_options, current_content,
current_description, current_streams, desc, ice_credentials);
} else {
return AddRtpDataContentForOffer(media_description_options, session_options,
current_content, current_description,
rtp_data_codecs, current_streams, desc,
ice_credentials);
}
}
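With the dispatch above deleted, AddDataContentForOffer is simply the renamed SCTP path; no protocol switch remains on the offer side. A schematic, self-contained sketch of that collapse; the enum mirrors the DCT_* constants in the diff, but the functions are placeholders rather than the real factory methods, and refusing non-SCTP types with a bare false is a simplification.

#include <cstdio>

enum class DataChannelType { DCT_NONE, DCT_RTP, DCT_SCTP };

bool AddSctpDataSection() {
  std::printf("adding SCTP data m= section\n");
  return true;
}

// Before: the offer path branched between an RTP data section and an SCTP one.
// After: only the SCTP branch exists.
bool AddDataContentForOffer(DataChannelType type) {
  if (type != DataChannelType::DCT_SCTP)
    return false;  // RTP data offers can no longer be produced
  return AddSctpDataSection();
}

int main() {
  AddDataContentForOffer(DataChannelType::DCT_SCTP);  // the only supported path
  AddDataContentForOffer(DataChannelType::DCT_RTP);   // no longer offered
  return 0;
}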
bool MediaSessionDescriptionFactory::AddUnsupportedContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
@ -2718,7 +2603,6 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer(
const ContentInfo* current_content,
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const RtpDataCodecs& rtp_data_codecs,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const {
@ -2766,25 +2650,7 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer(
bool offer_uses_sctpmap = offer_data_description->use_sctpmap();
data_answer->as_sctp()->set_use_sctpmap(offer_uses_sctpmap);
} else {
// RTP offer
data_answer = std::make_unique<RtpDataContentDescription>();
const RtpDataContentDescription* offer_data_description =
offer_content->media_description()->as_rtp_data();
RTC_CHECK(offer_data_description);
if (!SetCodecsInAnswer(offer_data_description, rtp_data_codecs,
media_description_options, session_options,
ssrc_generator_, current_streams,
data_answer->as_rtp_data())) {
return false;
}
if (!CreateMediaContentAnswer(
offer_data_description, media_description_options, session_options,
sdes_policy, GetCryptos(current_content), RtpHeaderExtensions(),
ssrc_generator_, enable_encrypted_rtp_header_extensions_,
current_streams, bundle_enabled, data_answer.get())) {
return false; // Fails the session setup.
}
RTC_NOTREACHED() << "Non-SCTP data content found";
}
bool secure = bundle_transport ? bundle_transport->description.secure()
@ -2800,13 +2666,6 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer(
return false;
}
if (!rejected && session_options.data_channel_type == DCT_RTP) {
data_answer->set_bandwidth(kRtpDataMaxBandwidth);
} else {
// RFC 3264
// The answer MUST contain the same number of m-lines as the offer.
RTC_LOG(LS_INFO) << "Data is not supported in the answer.";
}
answer->AddContent(media_description_options.mid, offer_content->type,
rejected, std::move(data_answer));
return true;
@ -2991,12 +2850,6 @@ const VideoContentDescription* GetFirstVideoContentDescription(
return desc ? desc->as_video() : nullptr;
}
const RtpDataContentDescription* GetFirstRtpDataContentDescription(
const SessionDescription* sdesc) {
auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA);
return desc ? desc->as_rtp_data() : nullptr;
}
const SctpDataContentDescription* GetFirstSctpDataContentDescription(
const SessionDescription* sdesc) {
auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA);
@ -3069,12 +2922,6 @@ VideoContentDescription* GetFirstVideoContentDescription(
return desc ? desc->as_video() : nullptr;
}
RtpDataContentDescription* GetFirstRtpDataContentDescription(
SessionDescription* sdesc) {
auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA);
return desc ? desc->as_rtp_data() : nullptr;
}
SctpDataContentDescription* GetFirstSctpDataContentDescription(
SessionDescription* sdesc) {
auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA);

View File

@ -73,10 +73,6 @@ struct MediaDescriptionOptions {
const SimulcastLayerList& simulcast_layers,
int num_sim_layers);
// Internally just uses sender_options.
void AddRtpDataChannel(const std::string& track_id,
const std::string& stream_id);
MediaType type;
std::string mid;
webrtc::RtpTransceiverDirection direction;
@ -162,10 +158,6 @@ class MediaSessionDescriptionFactory {
const VideoCodecs& recv_codecs);
RtpHeaderExtensions filtered_rtp_header_extensions(
RtpHeaderExtensions extensions) const;
const RtpDataCodecs& rtp_data_codecs() const { return rtp_data_codecs_; }
void set_rtp_data_codecs(const RtpDataCodecs& codecs) {
rtp_data_codecs_ = codecs;
}
SecurePolicy secure() const { return secure_; }
void set_secure(SecurePolicy s) { secure_ = s; }
@ -204,14 +196,12 @@ class MediaSessionDescriptionFactory {
void GetCodecsForOffer(
const std::vector<const ContentInfo*>& current_active_contents,
AudioCodecs* audio_codecs,
VideoCodecs* video_codecs,
RtpDataCodecs* rtp_data_codecs) const;
VideoCodecs* video_codecs) const;
void GetCodecsForAnswer(
const std::vector<const ContentInfo*>& current_active_contents,
const SessionDescription& remote_offer,
AudioCodecs* audio_codecs,
VideoCodecs* video_codecs,
RtpDataCodecs* rtp_data_codecs) const;
VideoCodecs* video_codecs) const;
AudioVideoRtpHeaderExtensions GetOfferedRtpHeaderExtensionsWithIds(
const std::vector<const ContentInfo*>& current_active_contents,
bool extmap_allow_mixed,
@ -261,32 +251,11 @@ class MediaSessionDescriptionFactory {
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const;
bool AddSctpDataContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
const SessionDescription* current_description,
StreamParamsVec* current_streams,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const;
bool AddRtpDataContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
const SessionDescription* current_description,
const RtpDataCodecs& rtp_data_codecs,
StreamParamsVec* current_streams,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const;
// This function calls either AddRtpDataContentForOffer or
// AddSctpDataContentForOffer depending on protocol.
// The codecs argument is ignored for SCTP.
bool AddDataContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
const SessionDescription* current_description,
const RtpDataCodecs& rtp_data_codecs,
StreamParamsVec* current_streams,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const;
@ -335,7 +304,6 @@ class MediaSessionDescriptionFactory {
const ContentInfo* current_content,
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const RtpDataCodecs& rtp_data_codecs,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const;
@ -368,7 +336,6 @@ class MediaSessionDescriptionFactory {
VideoCodecs video_sendrecv_codecs_;
// Union of send and recv.
VideoCodecs all_video_codecs_;
RtpDataCodecs rtp_data_codecs_;
// This object is not owned by the channel so it must outlive it.
rtc::UniqueRandomIdGenerator* const ssrc_generator_;
bool enable_encrypted_rtp_header_extensions_ = false;
@ -398,8 +365,6 @@ const AudioContentDescription* GetFirstAudioContentDescription(
const SessionDescription* sdesc);
const VideoContentDescription* GetFirstVideoContentDescription(
const SessionDescription* sdesc);
const RtpDataContentDescription* GetFirstRtpDataContentDescription(
const SessionDescription* sdesc);
const SctpDataContentDescription* GetFirstSctpDataContentDescription(
const SessionDescription* sdesc);
// Non-const versions of the above functions.
@ -417,8 +382,6 @@ AudioContentDescription* GetFirstAudioContentDescription(
SessionDescription* sdesc);
VideoContentDescription* GetFirstVideoContentDescription(
SessionDescription* sdesc);
RtpDataContentDescription* GetFirstRtpDataContentDescription(
SessionDescription* sdesc);
SctpDataContentDescription* GetFirstSctpDataContentDescription(
SessionDescription* sdesc);

View File

@ -50,7 +50,6 @@ using cricket::CryptoParamsVec;
using cricket::GetFirstAudioContent;
using cricket::GetFirstAudioContentDescription;
using cricket::GetFirstDataContent;
using cricket::GetFirstRtpDataContentDescription;
using cricket::GetFirstVideoContent;
using cricket::GetFirstVideoContentDescription;
using cricket::kAutoBandwidth;
@ -65,8 +64,6 @@ using cricket::MediaSessionOptions;
using cricket::MediaType;
using cricket::RidDescription;
using cricket::RidDirection;
using cricket::RtpDataCodec;
using cricket::RtpDataContentDescription;
using cricket::SctpDataContentDescription;
using cricket::SEC_DISABLED;
using cricket::SEC_ENABLED;
@ -133,15 +130,6 @@ static const VideoCodec kVideoCodecs2[] = {VideoCodec(126, "H264"),
static const VideoCodec kVideoCodecsAnswer[] = {VideoCodec(97, "H264")};
static const RtpDataCodec kDataCodecs1[] = {RtpDataCodec(98, "binary-data"),
RtpDataCodec(99, "utf8-text")};
static const RtpDataCodec kDataCodecs2[] = {RtpDataCodec(126, "binary-data"),
RtpDataCodec(127, "utf8-text")};
static const RtpDataCodec kDataCodecsAnswer[] = {
RtpDataCodec(98, "binary-data"), RtpDataCodec(99, "utf8-text")};
static const RtpExtension kAudioRtpExtension1[] = {
RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8),
RtpExtension("http://google.com/testing/audio_something", 10),
@ -260,9 +248,6 @@ static const char kVideoTrack2[] = "video_2";
static const char kAudioTrack1[] = "audio_1";
static const char kAudioTrack2[] = "audio_2";
static const char kAudioTrack3[] = "audio_3";
static const char kDataTrack1[] = "data_1";
static const char kDataTrack2[] = "data_2";
static const char kDataTrack3[] = "data_3";
static const char* kMediaProtocols[] = {"RTP/AVP", "RTP/SAVP", "RTP/AVPF",
"RTP/SAVPF"};
@ -369,10 +354,6 @@ static void AttachSenderToMediaDescriptionOptions(
it->AddVideoSender(track_id, stream_ids, rids, simulcast_layers,
num_sim_layer);
break;
case MEDIA_TYPE_DATA:
RTC_CHECK(stream_ids.size() == 1U);
it->AddRtpDataChannel(track_id, stream_ids[0]);
break;
default:
RTC_NOTREACHED();
}
@ -437,12 +418,10 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
MAKE_VECTOR(kAudioCodecs1));
f1_.set_video_codecs(MAKE_VECTOR(kVideoCodecs1),
MAKE_VECTOR(kVideoCodecs1));
f1_.set_rtp_data_codecs(MAKE_VECTOR(kDataCodecs1));
f2_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs2),
MAKE_VECTOR(kAudioCodecs2));
f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2),
MAKE_VECTOR(kVideoCodecs2));
f2_.set_rtp_data_codecs(MAKE_VECTOR(kDataCodecs2));
tdf1_.set_certificate(rtc::RTCCertificate::Create(
std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id1"))));
tdf2_.set_certificate(rtc::RTCCertificate::Create(
@ -604,8 +583,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
f1_.set_secure(SEC_ENABLED);
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
std::unique_ptr<SessionDescription> ref_desc;
std::unique_ptr<SessionDescription> desc;
if (offer) {
@ -862,30 +839,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoOffer) {
TEST_F(MediaSessionDescriptionFactoryTest, TestBundleOfferWithSameCodecPlType) {
const VideoCodec& offered_video_codec = f2_.video_sendrecv_codecs()[0];
const AudioCodec& offered_audio_codec = f2_.audio_sendrecv_codecs()[0];
const RtpDataCodec& offered_data_codec = f2_.rtp_data_codecs()[0];
ASSERT_EQ(offered_video_codec.id, offered_audio_codec.id);
ASSERT_EQ(offered_video_codec.id, offered_data_codec.id);
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, &opts);
opts.bundle_enabled = true;
std::unique_ptr<SessionDescription> offer = f2_.CreateOffer(opts, NULL);
const VideoContentDescription* vcd =
GetFirstVideoContentDescription(offer.get());
const AudioContentDescription* acd =
GetFirstAudioContentDescription(offer.get());
const RtpDataContentDescription* dcd =
GetFirstRtpDataContentDescription(offer.get());
ASSERT_TRUE(NULL != vcd);
ASSERT_TRUE(NULL != acd);
ASSERT_TRUE(NULL != dcd);
EXPECT_NE(vcd->codecs()[0].id, acd->codecs()[0].id);
EXPECT_NE(vcd->codecs()[0].id, dcd->codecs()[0].id);
EXPECT_NE(acd->codecs()[0].id, dcd->codecs()[0].id);
EXPECT_EQ(vcd->codecs()[0].name, offered_video_codec.name);
EXPECT_EQ(acd->codecs()[0].name, offered_audio_codec.name);
EXPECT_EQ(dcd->codecs()[0].name, offered_data_codec.name);
}
// Test creating an updated offer with bundle, audio, video and data
@ -909,8 +877,6 @@ TEST_F(MediaSessionDescriptionFactoryTest,
MediaSessionOptions updated_opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &updated_opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&updated_opts);
updated_opts.bundle_enabled = true;
std::unique_ptr<SessionDescription> updated_offer(
f1_.CreateOffer(updated_opts, answer.get()));
@ -919,51 +885,13 @@ TEST_F(MediaSessionDescriptionFactoryTest,
GetFirstAudioContentDescription(updated_offer.get());
const VideoContentDescription* vcd =
GetFirstVideoContentDescription(updated_offer.get());
const RtpDataContentDescription* dcd =
GetFirstRtpDataContentDescription(updated_offer.get());
EXPECT_TRUE(NULL != vcd);
EXPECT_TRUE(NULL != acd);
EXPECT_TRUE(NULL != dcd);
ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol());
ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol());
ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(cricket::kMediaProtocolSavpf, dcd->protocol());
}
// Create a RTP data offer, and ensure it matches what we expect.
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateRtpDataOffer) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, &opts);
f1_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
const ContentInfo* ac = offer->GetContentByName("audio");
const ContentInfo* dc = offer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(dc != NULL);
EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
EXPECT_EQ(MediaProtocolType::kRtp, dc->type);
const AudioContentDescription* acd = ac->media_description()->as_audio();
const RtpDataContentDescription* dcd = dc->media_description()->as_rtp_data();
EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs());
EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached.
EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto)
EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on
ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol());
EXPECT_EQ(MEDIA_TYPE_DATA, dcd->type());
EXPECT_EQ(f1_.rtp_data_codecs(), dcd->codecs());
EXPECT_EQ(0U, dcd->first_ssrc()); // no sender is attached.
EXPECT_EQ(cricket::kRtpDataMaxBandwidth,
dcd->bandwidth()); // default bandwidth (auto)
EXPECT_TRUE(dcd->rtcp_mux()); // rtcp-mux defaults on
ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(cricket::kMediaProtocolSavpf, dcd->protocol());
}
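Since offers can no longer carry an RTP data section, the remaining factory coverage for data is SCTP, as the next test's comment notes. A hypothetical shape of such a check inside this fixture, not quoted from the file (AddDataSection, f1_, and the assertion macros are the fixture's own; MediaProtocolType::kSctp is assumed to be the SCTP counterpart of kRtp used above):

MediaSessionOptions opts;
AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kSendRecv, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
ASSERT_TRUE(offer.get() != nullptr);
const ContentInfo* dc = offer->GetContentByName("data");
ASSERT_TRUE(dc != nullptr);
// SCTP data sections are negotiated at the transport level; unlike the removed
// RTP case there is no codec list or SDES crypto to assert on here.
EXPECT_EQ(MediaProtocolType::kSctp, dc->type);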
// Create an SCTP data offer with bundle without error.
@ -1350,74 +1278,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerGcmAnswer) {
TestVideoGcmCipher(false, true);
}
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswer) {
MediaSessionOptions opts = CreatePlanBMediaSessionOptions();
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, &opts);
f1_.set_secure(SEC_ENABLED);
f2_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
const ContentInfo* dc = answer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(dc != NULL);
EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
EXPECT_EQ(MediaProtocolType::kRtp, dc->type);
const AudioContentDescription* acd = ac->media_description()->as_audio();
const RtpDataContentDescription* dcd = dc->media_description()->as_rtp_data();
EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw
EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached
EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux
ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(MEDIA_TYPE_DATA, dcd->type());
EXPECT_THAT(dcd->codecs(), ElementsAreArray(kDataCodecsAnswer));
EXPECT_EQ(0U, dcd->first_ssrc()); // no sender is attached
EXPECT_TRUE(dcd->rtcp_mux()); // negotiated rtcp-mux
ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(cricket::kMediaProtocolSavpf, dcd->protocol());
}
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerGcm) {
MediaSessionOptions opts = CreatePlanBMediaSessionOptions();
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, &opts);
opts.crypto_options.srtp.enable_gcm_crypto_suites = true;
f1_.set_secure(SEC_ENABLED);
f2_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
for (cricket::ContentInfo& content : offer->contents()) {
auto cryptos = content.media_description()->cryptos();
PreferGcmCryptoParameters(&cryptos);
content.media_description()->set_cryptos(cryptos);
}
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
const ContentInfo* dc = answer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(dc != NULL);
EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
EXPECT_EQ(MediaProtocolType::kRtp, dc->type);
const AudioContentDescription* acd = ac->media_description()->as_audio();
const RtpDataContentDescription* dcd = dc->media_description()->as_rtp_data();
EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw
EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached
EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux
ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuiteGcm);
EXPECT_EQ(MEDIA_TYPE_DATA, dcd->type());
EXPECT_THAT(dcd->codecs(), ElementsAreArray(kDataCodecsAnswer));
EXPECT_EQ(0U, dcd->first_ssrc()); // no sender is attached
EXPECT_TRUE(dcd->rtcp_mux()); // negotiated rtcp-mux
ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuiteGcm);
EXPECT_EQ(cricket::kMediaProtocolSavpf, dcd->protocol());
}
// The use_sctpmap flag should be set in an Sctp DataContentDescription by
// default. The answer's use_sctpmap flag should match the offer's.
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerUsesSctpmap) {
@ -1621,35 +1481,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerToInactiveOffer) {
RtpTransceiverDirection::kInactive);
}
// Test that a data content with an unknown protocol is rejected in an answer.
TEST_F(MediaSessionDescriptionFactoryTest,
CreateDataAnswerToOfferWithUnknownProtocol) {
MediaSessionOptions opts;
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, &opts);
f1_.set_secure(SEC_ENABLED);
f2_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ContentInfo* dc_offer = offer->GetContentByName("data");
ASSERT_TRUE(dc_offer != NULL);
RtpDataContentDescription* dcd_offer =
dc_offer->media_description()->as_rtp_data();
ASSERT_TRUE(dcd_offer != NULL);
// Offer must be acceptable as an RTP protocol in order to be set.
std::string protocol = "RTP/a weird unknown protocol";
dcd_offer->set_protocol(protocol);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
const ContentInfo* dc_answer = answer->GetContentByName("data");
ASSERT_TRUE(dc_answer != NULL);
EXPECT_TRUE(dc_answer->rejected);
const RtpDataContentDescription* dcd_answer =
dc_answer->media_description()->as_rtp_data();
ASSERT_TRUE(dcd_answer != NULL);
EXPECT_EQ(protocol, dcd_answer->protocol());
}
// Test that the media protocol is RTP/AVPF if DTLS and SDES are disabled.
TEST_F(MediaSessionDescriptionFactoryTest, AudioOfferAnswerWithCryptoDisabled) {
MediaSessionOptions opts = CreatePlanBMediaSessionOptions();
@ -2169,36 +2000,28 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TestCreateAnswerWithoutLegacyStreams) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
const ContentInfo* vc = answer->GetContentByName("video");
const ContentInfo* dc = answer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(vc != NULL);
const AudioContentDescription* acd = ac->media_description()->as_audio();
const VideoContentDescription* vcd = vc->media_description()->as_video();
const RtpDataContentDescription* dcd = dc->media_description()->as_rtp_data();
EXPECT_FALSE(acd->has_ssrcs()); // No StreamParams.
EXPECT_FALSE(vcd->has_ssrcs()); // No StreamParams.
EXPECT_FALSE(dcd->has_ssrcs()); // No StreamParams.
}
// Create a typical video answer, and ensure it matches what we expect.
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) {
MediaSessionOptions offer_opts;
AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &offer_opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kSendRecv,
&offer_opts);
MediaSessionOptions answer_opts;
AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &answer_opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kSendRecv,
&answer_opts);
std::unique_ptr<SessionDescription> offer;
std::unique_ptr<SessionDescription> answer;
@ -2209,16 +2032,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) {
answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL);
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(answer.get()));
EXPECT_TRUE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
EXPECT_TRUE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
EXPECT_TRUE(GetFirstRtpDataContentDescription(offer.get())->rtcp_mux());
EXPECT_TRUE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
EXPECT_TRUE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
EXPECT_TRUE(GetFirstRtpDataContentDescription(answer.get())->rtcp_mux());
offer_opts.rtcp_mux_enabled = true;
answer_opts.rtcp_mux_enabled = false;
@ -2226,16 +2045,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) {
answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL);
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(answer.get()));
EXPECT_TRUE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
EXPECT_TRUE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
EXPECT_TRUE(GetFirstRtpDataContentDescription(offer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstRtpDataContentDescription(answer.get())->rtcp_mux());
offer_opts.rtcp_mux_enabled = false;
answer_opts.rtcp_mux_enabled = true;
@ -2243,16 +2058,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) {
answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL);
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(answer.get()));
EXPECT_FALSE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstRtpDataContentDescription(offer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstRtpDataContentDescription(answer.get())->rtcp_mux());
offer_opts.rtcp_mux_enabled = false;
answer_opts.rtcp_mux_enabled = false;
@ -2260,16 +2071,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) {
answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL);
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(offer.get()));
ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get()));
ASSERT_TRUE(NULL != GetFirstRtpDataContentDescription(answer.get()));
EXPECT_FALSE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstRtpDataContentDescription(offer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
EXPECT_FALSE(GetFirstRtpDataContentDescription(answer.get())->rtcp_mux());
}
// Create an audio-only answer to a video offer.
@ -2295,55 +2102,27 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerToVideo) {
EXPECT_TRUE(vc->rejected);
}
// Create an audio-only answer to an offer with data.
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateNoDataAnswerToDataOffer) {
MediaSessionOptions opts = CreatePlanBMediaSessionOptions();
opts.data_channel_type = cricket::DCT_RTP;
AddMediaDescriptionOptions(MEDIA_TYPE_DATA, "data",
RtpTransceiverDirection::kRecvOnly, kActive,
&opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
opts.media_description_options[1].stopped = true;
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
const ContentInfo* dc = answer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(dc != NULL);
ASSERT_TRUE(dc->media_description() != NULL);
EXPECT_TRUE(dc->rejected);
}
// Create an answer that rejects the contents which are rejected in the offer.
TEST_F(MediaSessionDescriptionFactoryTest,
CreateAnswerToOfferWithRejectedMedia) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
ContentInfo* ac = offer->GetContentByName("audio");
ContentInfo* vc = offer->GetContentByName("video");
ContentInfo* dc = offer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(vc != NULL);
ASSERT_TRUE(dc != NULL);
ac->rejected = true;
vc->rejected = true;
dc->rejected = true;
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
ac = answer->GetContentByName("audio");
vc = answer->GetContentByName("video");
dc = answer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(vc != NULL);
ASSERT_TRUE(dc != NULL);
EXPECT_TRUE(ac->rejected);
EXPECT_TRUE(vc->rejected);
EXPECT_TRUE(dc->rejected);
}
TEST_F(MediaSessionDescriptionFactoryTest,
@ -2458,7 +2237,6 @@ TEST_F(MediaSessionDescriptionFactoryTest,
// Create an audio and video offer with:
// - one video track
// - two audio tracks
// - two data tracks
// and ensure it matches what we expect. Also updates the initial offer by
// adding a new video track and replaces one of the audio tracks.
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
@ -2471,25 +2249,16 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack2,
{kMediaStream1}, 1, &opts);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kSendRecv, &opts);
AttachSenderToMediaDescriptionOptions("data", MEDIA_TYPE_DATA, kDataTrack1,
{kMediaStream1}, 1, &opts);
AttachSenderToMediaDescriptionOptions("data", MEDIA_TYPE_DATA, kDataTrack2,
{kMediaStream1}, 1, &opts);
f1_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
const ContentInfo* ac = offer->GetContentByName("audio");
const ContentInfo* vc = offer->GetContentByName("video");
const ContentInfo* dc = offer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(vc != NULL);
ASSERT_TRUE(dc != NULL);
const AudioContentDescription* acd = ac->media_description()->as_audio();
const VideoContentDescription* vcd = vc->media_description()->as_video();
const RtpDataContentDescription* dcd = dc->media_description()->as_rtp_data();
EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs());
@ -2518,25 +2287,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto)
EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on
EXPECT_EQ(MEDIA_TYPE_DATA, dcd->type());
EXPECT_EQ(f1_.rtp_data_codecs(), dcd->codecs());
ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite);
const StreamParamsVec& data_streams = dcd->streams();
ASSERT_EQ(2U, data_streams.size());
EXPECT_EQ(data_streams[0].cname, data_streams[1].cname);
EXPECT_EQ(kDataTrack1, data_streams[0].id);
ASSERT_EQ(1U, data_streams[0].ssrcs.size());
EXPECT_NE(0U, data_streams[0].ssrcs[0]);
EXPECT_EQ(kDataTrack2, data_streams[1].id);
ASSERT_EQ(1U, data_streams[1].ssrcs.size());
EXPECT_NE(0U, data_streams[1].ssrcs[0]);
EXPECT_EQ(cricket::kRtpDataMaxBandwidth,
dcd->bandwidth()); // default bandwidth (auto)
EXPECT_TRUE(dcd->rtcp_mux()); // rtcp-mux defaults on
ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite);
// Update the offer. Add a new video track that is not synched to the
// other tracks and replace audio track 2 with audio track 3.
AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack2,
@ -2544,38 +2294,27 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
DetachSenderFromMediaSection("audio", kAudioTrack2, &opts);
AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack3,
{kMediaStream1}, 1, &opts);
DetachSenderFromMediaSection("data", kDataTrack2, &opts);
AttachSenderToMediaDescriptionOptions("data", MEDIA_TYPE_DATA, kDataTrack3,
{kMediaStream1}, 1, &opts);
std::unique_ptr<SessionDescription> updated_offer(
f1_.CreateOffer(opts, offer.get()));
ASSERT_TRUE(updated_offer.get() != NULL);
ac = updated_offer->GetContentByName("audio");
vc = updated_offer->GetContentByName("video");
dc = updated_offer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(vc != NULL);
ASSERT_TRUE(dc != NULL);
const AudioContentDescription* updated_acd =
ac->media_description()->as_audio();
const VideoContentDescription* updated_vcd =
vc->media_description()->as_video();
const RtpDataContentDescription* updated_dcd =
dc->media_description()->as_rtp_data();
EXPECT_EQ(acd->type(), updated_acd->type());
EXPECT_EQ(acd->codecs(), updated_acd->codecs());
EXPECT_EQ(vcd->type(), updated_vcd->type());
EXPECT_EQ(vcd->codecs(), updated_vcd->codecs());
EXPECT_EQ(dcd->type(), updated_dcd->type());
EXPECT_EQ(dcd->codecs(), updated_dcd->codecs());
ASSERT_CRYPTO(updated_acd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_TRUE(CompareCryptoParams(acd->cryptos(), updated_acd->cryptos()));
ASSERT_CRYPTO(updated_vcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_TRUE(CompareCryptoParams(vcd->cryptos(), updated_vcd->cryptos()));
ASSERT_CRYPTO(updated_dcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_TRUE(CompareCryptoParams(dcd->cryptos(), updated_dcd->cryptos()));
const StreamParamsVec& updated_audio_streams = updated_acd->streams();
ASSERT_EQ(2U, updated_audio_streams.size());
@ -2591,18 +2330,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
EXPECT_EQ(kVideoTrack2, updated_video_streams[1].id);
// All the media streams in one PeerConnection share one RTCP CNAME.
EXPECT_EQ(updated_video_streams[1].cname, updated_video_streams[0].cname);
const StreamParamsVec& updated_data_streams = updated_dcd->streams();
ASSERT_EQ(2U, updated_data_streams.size());
EXPECT_EQ(data_streams[0], updated_data_streams[0]);
EXPECT_EQ(kDataTrack3, updated_data_streams[1].id); // New data track.
ASSERT_EQ(1U, updated_data_streams[1].ssrcs.size());
EXPECT_NE(0U, updated_data_streams[1].ssrcs[0]);
EXPECT_EQ(updated_data_streams[0].cname, updated_data_streams[1].cname);
// The stream correctly got the CNAME from the MediaSessionOptions.
// The expected RTCP CNAME is the default one as we are using the default
// MediaSessionOptions.
EXPECT_EQ(updated_data_streams[0].cname, cricket::kDefaultRtcpCname);
}
// Create an offer with simulcast video stream.
@ -2805,10 +2532,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) {
AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
RtpTransceiverDirection::kRecvOnly, kActive,
&offer_opts);
offer_opts.data_channel_type = cricket::DCT_RTP;
AddMediaDescriptionOptions(MEDIA_TYPE_DATA, "data",
RtpTransceiverDirection::kRecvOnly, kActive,
&offer_opts);
f1_.set_secure(SEC_ENABLED);
f2_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(offer_opts, NULL);
@ -2827,31 +2550,18 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) {
AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack2,
{kMediaStream1}, 1, &answer_opts);
AddMediaDescriptionOptions(MEDIA_TYPE_DATA, "data",
RtpTransceiverDirection::kSendRecv, kActive,
&answer_opts);
AttachSenderToMediaDescriptionOptions("data", MEDIA_TYPE_DATA, kDataTrack1,
{kMediaStream1}, 1, &answer_opts);
AttachSenderToMediaDescriptionOptions("data", MEDIA_TYPE_DATA, kDataTrack2,
{kMediaStream1}, 1, &answer_opts);
answer_opts.data_channel_type = cricket::DCT_RTP;
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), answer_opts, NULL);
ASSERT_TRUE(answer.get() != NULL);
const ContentInfo* ac = answer->GetContentByName("audio");
const ContentInfo* vc = answer->GetContentByName("video");
const ContentInfo* dc = answer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(vc != NULL);
ASSERT_TRUE(dc != NULL);
const AudioContentDescription* acd = ac->media_description()->as_audio();
const VideoContentDescription* vcd = vc->media_description()->as_video();
const RtpDataContentDescription* dcd = dc->media_description()->as_rtp_data();
ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite);
ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
@ -2879,59 +2589,33 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) {
EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto)
EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on
EXPECT_EQ(MEDIA_TYPE_DATA, dcd->type());
EXPECT_THAT(dcd->codecs(), ElementsAreArray(kDataCodecsAnswer));
const StreamParamsVec& data_streams = dcd->streams();
ASSERT_EQ(2U, data_streams.size());
EXPECT_TRUE(data_streams[0].cname == data_streams[1].cname);
EXPECT_EQ(kDataTrack1, data_streams[0].id);
ASSERT_EQ(1U, data_streams[0].ssrcs.size());
EXPECT_NE(0U, data_streams[0].ssrcs[0]);
EXPECT_EQ(kDataTrack2, data_streams[1].id);
ASSERT_EQ(1U, data_streams[1].ssrcs.size());
EXPECT_NE(0U, data_streams[1].ssrcs[0]);
EXPECT_EQ(cricket::kRtpDataMaxBandwidth,
dcd->bandwidth()); // default bandwidth (auto)
EXPECT_TRUE(dcd->rtcp_mux()); // rtcp-mux defaults on
// Update the answer. Add a new video track that is not synched to the
// other tracks and remove 1 audio track.
AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack2,
{kMediaStream2}, 1, &answer_opts);
DetachSenderFromMediaSection("audio", kAudioTrack2, &answer_opts);
DetachSenderFromMediaSection("data", kDataTrack2, &answer_opts);
std::unique_ptr<SessionDescription> updated_answer(
f2_.CreateAnswer(offer.get(), answer_opts, answer.get()));
ASSERT_TRUE(updated_answer.get() != NULL);
ac = updated_answer->GetContentByName("audio");
vc = updated_answer->GetContentByName("video");
dc = updated_answer->GetContentByName("data");
ASSERT_TRUE(ac != NULL);
ASSERT_TRUE(vc != NULL);
ASSERT_TRUE(dc != NULL);
const AudioContentDescription* updated_acd =
ac->media_description()->as_audio();
const VideoContentDescription* updated_vcd =
vc->media_description()->as_video();
const RtpDataContentDescription* updated_dcd =
dc->media_description()->as_rtp_data();
ASSERT_CRYPTO(updated_acd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_TRUE(CompareCryptoParams(acd->cryptos(), updated_acd->cryptos()));
ASSERT_CRYPTO(updated_vcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_TRUE(CompareCryptoParams(vcd->cryptos(), updated_vcd->cryptos()));
ASSERT_CRYPTO(updated_dcd, 1U, kDefaultSrtpCryptoSuite);
EXPECT_TRUE(CompareCryptoParams(dcd->cryptos(), updated_dcd->cryptos()));
EXPECT_EQ(acd->type(), updated_acd->type());
EXPECT_EQ(acd->codecs(), updated_acd->codecs());
EXPECT_EQ(vcd->type(), updated_vcd->type());
EXPECT_EQ(vcd->codecs(), updated_vcd->codecs());
EXPECT_EQ(dcd->type(), updated_dcd->type());
EXPECT_EQ(dcd->codecs(), updated_dcd->codecs());
const StreamParamsVec& updated_audio_streams = updated_acd->streams();
ASSERT_EQ(1U, updated_audio_streams.size());
@ -2943,10 +2627,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) {
EXPECT_EQ(kVideoTrack2, updated_video_streams[1].id);
// All media streams in one PeerConnection share one CNAME.
EXPECT_EQ(updated_video_streams[1].cname, updated_video_streams[0].cname);
const StreamParamsVec& updated_data_streams = updated_dcd->streams();
ASSERT_EQ(1U, updated_data_streams.size());
EXPECT_TRUE(data_streams[0] == updated_data_streams[0]);
}
// Create an updated offer after creating an answer to the original offer and
@ -3790,8 +3470,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferAudioCurrent) {
TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferMultimedia) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
TestTransportInfo(true, options, false);
}
@ -3799,16 +3477,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TestTransportInfoOfferMultimediaCurrent) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
TestTransportInfo(true, options, true);
}
TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferBundle) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
options.bundle_enabled = true;
TestTransportInfo(true, options, false);
}
@ -3817,8 +3491,6 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TestTransportInfoOfferBundleCurrent) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
options.bundle_enabled = true;
TestTransportInfo(true, options, true);
}
@ -3854,8 +3526,6 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerMultimedia) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
TestTransportInfo(false, options, false);
}
@ -3863,16 +3533,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TestTransportInfoAnswerMultimediaCurrent) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
TestTransportInfo(false, options, true);
}
TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerBundle) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
options.bundle_enabled = true;
TestTransportInfo(false, options, false);
}
@ -3881,8 +3547,6 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TestTransportInfoAnswerBundleCurrent) {
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
options.bundle_enabled = true;
TestTransportInfo(false, options, true);
}
@ -4072,8 +3736,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoOfferDtlsButNotSdes) {
tdf2_.set_secure(SEC_ENABLED);
MediaSessionOptions options;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
&options);
// Generate an offer with DTLS but without SDES.
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, NULL);
@ -4085,9 +3747,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoOfferDtlsButNotSdes) {
const VideoContentDescription* video_offer =
GetFirstVideoContentDescription(offer.get());
ASSERT_TRUE(video_offer->cryptos().empty());
const RtpDataContentDescription* data_offer =
GetFirstRtpDataContentDescription(offer.get());
ASSERT_TRUE(data_offer->cryptos().empty());
const cricket::TransportDescription* audio_offer_trans_desc =
offer->GetTransportDescriptionByName("audio");
@ -4095,9 +3754,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoOfferDtlsButNotSdes) {
const cricket::TransportDescription* video_offer_trans_desc =
offer->GetTransportDescriptionByName("video");
ASSERT_TRUE(video_offer_trans_desc->identity_fingerprint.get() != NULL);
const cricket::TransportDescription* data_offer_trans_desc =
offer->GetTransportDescriptionByName("data");
ASSERT_TRUE(data_offer_trans_desc->identity_fingerprint.get() != NULL);
// Generate an answer with DTLS.
std::unique_ptr<SessionDescription> answer =
@ -4110,9 +3766,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoOfferDtlsButNotSdes) {
const cricket::TransportDescription* video_answer_trans_desc =
answer->GetTransportDescriptionByName("video");
EXPECT_TRUE(video_answer_trans_desc->identity_fingerprint.get() != NULL);
const cricket::TransportDescription* data_answer_trans_desc =
answer->GetTransportDescriptionByName("data");
EXPECT_TRUE(data_answer_trans_desc->identity_fingerprint.get() != NULL);
}
// Verifies if vad_enabled option is set to false, CN codecs are not present in
@ -4618,12 +4271,10 @@ class MediaProtocolTest : public ::testing::TestWithParam<const char*> {
MAKE_VECTOR(kAudioCodecs1));
f1_.set_video_codecs(MAKE_VECTOR(kVideoCodecs1),
MAKE_VECTOR(kVideoCodecs1));
f1_.set_rtp_data_codecs(MAKE_VECTOR(kDataCodecs1));
f2_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs2),
MAKE_VECTOR(kAudioCodecs2));
f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2),
MAKE_VECTOR(kVideoCodecs2));
f2_.set_rtp_data_codecs(MAKE_VECTOR(kDataCodecs2));
f1_.set_secure(SEC_ENABLED);
f2_.set_secure(SEC_ENABLED);
tdf1_.set_certificate(rtc::RTCCertificate::Create(

View File

@ -1302,7 +1302,7 @@ rtc::scoped_refptr<DataChannelInterface> PeerConnection::CreateDataChannel(
// Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or
// the first SCTP DataChannel.
if (data_channel_type() == cricket::DCT_RTP || first_datachannel) {
if (first_datachannel) {
sdp_handler_->UpdateNegotiationNeeded();
}
NoteUsageEvent(UsageEvent::DATA_ADDED);
@ -1768,7 +1768,6 @@ void PeerConnection::Close() {
// TODO(tommi): ^^ That's not exactly optimal since this is yet another
// blocking hop to the network thread during Close(). Further still, the
// voice/video/data channels will be cleared on the worker thread.
RTC_DCHECK(!data_channel_controller_.rtp_data_channel());
transport_controller_.reset();
port_allocator_->DiscardCandidatePool();
if (network_thread_safety_) {
@ -1945,11 +1944,6 @@ void PeerConnection::OnSelectedCandidatePairChanged(
absl::optional<std::string> PeerConnection::GetDataMid() const {
RTC_DCHECK_RUN_ON(signaling_thread());
switch (data_channel_type()) {
case cricket::DCT_RTP:
if (!data_channel_controller_.rtp_data_channel()) {
return absl::nullopt;
}
return data_channel_controller_.rtp_data_channel()->content_name();
case cricket::DCT_SCTP:
return sctp_mid_s_;
default:
@ -2106,10 +2100,6 @@ cricket::ChannelInterface* PeerConnection::GetChannel(
return channel;
}
}
if (rtp_data_channel() &&
rtp_data_channel()->content_name() == content_name) {
return rtp_data_channel();
}
return nullptr;
}
@ -2211,11 +2201,6 @@ std::map<std::string, std::string> PeerConnection::GetTransportNamesByMid()
channel->transport_name();
}
}
if (data_channel_controller_.rtp_data_channel()) {
transport_names_by_mid[data_channel_controller_.rtp_data_channel()
->content_name()] =
data_channel_controller_.rtp_data_channel()->transport_name();
}
if (sctp_mid_n_) {
cricket::DtlsTransportInternal* dtls_transport =
transport_controller_->GetDtlsTransport(*sctp_mid_n_);
@ -2448,22 +2433,7 @@ bool PeerConnection::SetupDataChannelTransport_n(const std::string& mid) {
return true;
}
void PeerConnection::SetupRtpDataChannelTransport_n(
cricket::RtpDataChannel* data_channel) {
data_channel_controller_.set_rtp_data_channel(data_channel);
if (!data_channel)
return;
// TODO(bugs.webrtc.org/9987): OnSentPacket_w needs to be changed to
// OnSentPacket_n (and be called on the network thread).
data_channel->SignalSentPacket().connect(this,
&PeerConnection::OnSentPacket_w);
}
void PeerConnection::TeardownDataChannelTransport_n() {
// Clear the RTP data channel if any.
data_channel_controller_.set_rtp_data_channel(nullptr);
if (sctp_mid_n_) {
// |sctp_mid_| may still be active through an SCTP transport. If not, unset
// it.
@ -2707,11 +2677,6 @@ void PeerConnection::ReportTransportStats() {
}
}
if (rtp_data_channel()) {
media_types_by_transport_name[rtp_data_channel()->transport_name()].insert(
cricket::MEDIA_TYPE_DATA);
}
if (sctp_mid_n_) {
cricket::DtlsTransportInternal* dtls_transport =
transport_controller_->GetDtlsTransport(*sctp_mid_n_);

View File

@ -72,7 +72,6 @@
#include "pc/peer_connection_internal.h"
#include "pc/peer_connection_message_handler.h"
#include "pc/rtc_stats_collector.h"
#include "pc/rtp_data_channel.h"
#include "pc/rtp_receiver.h"
#include "pc/rtp_sender.h"
#include "pc/rtp_transceiver.h"
@ -287,18 +286,10 @@ class PeerConnection : public PeerConnectionInternal,
return rtp_manager()->transceivers()->List();
}
sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() override {
return data_channel_controller_.SignalRtpDataChannelCreated();
}
sigslot::signal1<SctpDataChannel*>& SignalSctpDataChannelCreated() override {
return data_channel_controller_.SignalSctpDataChannelCreated();
}
cricket::RtpDataChannel* rtp_data_channel() const override {
return data_channel_controller_.rtp_data_channel();
}
std::vector<DataChannelStats> GetDataChannelStats() const override;
absl::optional<std::string> sctp_transport_name() const override;
@ -404,8 +395,8 @@ class PeerConnection : public PeerConnectionInternal,
}
bool ValidateBundleSettings(const cricket::SessionDescription* desc);
// Returns the MID for the data section associated with either the
// RtpDataChannel or SCTP data channel, if it has been set. If no data
// Returns the MID for the data section associated with the
// SCTP data channel, if it has been set. If no data
// channels are configured this will return nullopt.
absl::optional<std::string> GetDataMid() const;
@ -443,8 +434,6 @@ class PeerConnection : public PeerConnectionInternal,
bool SetupDataChannelTransport_n(const std::string& mid)
RTC_RUN_ON(network_thread());
void SetupRtpDataChannelTransport_n(cricket::RtpDataChannel* data_channel)
RTC_RUN_ON(network_thread());
void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread());
cricket::ChannelInterface* GetChannel(const std::string& content_name);
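The rewritten comment above, together with the GetDataMid() hunk in peer_connection.cc earlier in this change, means the method now only ever resolves an SCTP MID. A minimal sketch of how the simplified method could read after this change; the body of the remaining default branch is not visible in the diff and is assumed here to return nullopt:
// Illustrative sketch, not the verbatim post-change source.
absl::optional<std::string> PeerConnection::GetDataMid() const {
  RTC_DCHECK_RUN_ON(signaling_thread());
  switch (data_channel_type()) {
    case cricket::DCT_SCTP:
      return sctp_mid_s_;  // MID of the negotiated SCTP data section.
    default:
      return absl::nullopt;  // Assumed: no data section is configured.
  }
}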

View File

@ -330,8 +330,9 @@ TEST_P(PeerConnectionDataChannelTest, SctpPortPropagatedFromSdpToTransport) {
auto answer = callee->CreateAnswer();
ChangeSctpPortOnDescription(answer->description(), kNewRecvPort);
std::string sdp;
answer->ToString(&sdp);
ASSERT_TRUE(callee->SetLocalDescription(std::move(answer)));
auto* callee_transport =
callee->sctp_transport_factory()->last_fake_sctp_transport();
ASSERT_TRUE(callee_transport);

View File

@ -19,7 +19,6 @@
#include "api/peer_connection_interface.h"
#include "call/call.h"
#include "pc/rtp_data_channel.h"
#include "pc/rtp_transceiver.h"
#include "pc/sctp_data_channel.h"
@ -41,13 +40,9 @@ class PeerConnectionInternal : public PeerConnectionInterface {
rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
GetTransceiversInternal() const = 0;
virtual sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() = 0;
virtual sigslot::signal1<SctpDataChannel*>&
SignalSctpDataChannelCreated() = 0;
// Only valid when using deprecated RTP data channels.
virtual cricket::RtpDataChannel* rtp_data_channel() const = 0;
// Call on the network thread to fetch stats for all the data channels.
// TODO(tommi): Make pure virtual after downstream updates.
virtual std::vector<DataChannelStats> GetDataChannelStats() const {

View File

@ -1118,8 +1118,6 @@ RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc,
RTC_DCHECK(worker_thread_);
RTC_DCHECK(network_thread_);
RTC_DCHECK_GE(cache_lifetime_us_, 0);
pc_->SignalRtpDataChannelCreated().connect(
this, &RTCStatsCollector::OnRtpDataChannelCreated);
pc_->SignalSctpDataChannelCreated().connect(
this, &RTCStatsCollector::OnSctpDataChannelCreated);
}
@ -2150,20 +2148,12 @@ std::set<std::string> RTCStatsCollector::PrepareTransportNames_s() const {
transceiver->internal()->channel()->transport_name());
}
}
if (pc_->rtp_data_channel()) {
transport_names.insert(pc_->rtp_data_channel()->transport_name());
}
if (pc_->sctp_transport_name()) {
transport_names.insert(*pc_->sctp_transport_name());
}
return transport_names;
}
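With the rtp_data_channel() branch removed, stats transport-name collection reduces to the transceivers' channels plus an optional SCTP transport. A minimal sketch of the resulting helper, reconstructed from the surviving context lines; the loop header is assumed, since it falls outside the hunk:
// Illustrative reconstruction, not the verbatim post-change source.
std::set<std::string> RTCStatsCollector::PrepareTransportNames_s() const {
  std::set<std::string> transport_names;
  // Assumed loop shape: iterate the PeerConnection's transceivers.
  for (const auto& transceiver : pc_->GetTransceiversInternal()) {
    if (transceiver->internal()->channel()) {
      transport_names.insert(
          transceiver->internal()->channel()->transport_name());
    }
  }
  if (pc_->sctp_transport_name()) {
    transport_names.insert(*pc_->sctp_transport_name());
  }
  return transport_names;
}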
void RTCStatsCollector::OnRtpDataChannelCreated(RtpDataChannel* channel) {
channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened);
channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed);
}
void RTCStatsCollector::OnSctpDataChannelCreated(SctpDataChannel* channel) {
channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened);
channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed);

View File

@ -29,7 +29,6 @@
#include "media/base/media_channel.h"
#include "pc/data_channel_utils.h"
#include "pc/peer_connection_internal.h"
#include "pc/rtp_data_channel.h"
#include "pc/rtp_receiver.h"
#include "pc/rtp_sender.h"
#include "pc/rtp_transceiver.h"
@ -239,7 +238,6 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface,
void MergeNetworkReport_s();
// Slots for signals (sigslot) that are wired up to |pc_|.
void OnRtpDataChannelCreated(RtpDataChannel* channel);
void OnSctpDataChannelCreated(SctpDataChannel* channel);
// Slots for signals (sigslot) that are wired up to |channel|.
void OnDataChannelOpened(DataChannelInterface* channel);

View File

@ -1,394 +0,0 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "pc/rtp_data_channel.h"
#include <memory>
#include <string>
#include <utility>
#include "api/proxy.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/thread.h"
namespace webrtc {
namespace {
static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024;
static std::atomic<int> g_unique_id{0};
int GenerateUniqueId() {
return ++g_unique_id;
}
// Define proxy for DataChannelInterface.
BEGIN_PRIMARY_PROXY_MAP(DataChannel)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
PROXY_METHOD0(void, UnregisterObserver)
BYPASS_PROXY_CONSTMETHOD0(std::string, label)
BYPASS_PROXY_CONSTMETHOD0(bool, reliable)
BYPASS_PROXY_CONSTMETHOD0(bool, ordered)
BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxRetransmitsOpt)
BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxPacketLifeTime)
BYPASS_PROXY_CONSTMETHOD0(std::string, protocol)
BYPASS_PROXY_CONSTMETHOD0(bool, negotiated)
// Can't bypass the proxy since the id may change.
PROXY_CONSTMETHOD0(int, id)
BYPASS_PROXY_CONSTMETHOD0(Priority, priority)
PROXY_CONSTMETHOD0(DataState, state)
PROXY_CONSTMETHOD0(RTCError, error)
PROXY_CONSTMETHOD0(uint32_t, messages_sent)
PROXY_CONSTMETHOD0(uint64_t, bytes_sent)
PROXY_CONSTMETHOD0(uint32_t, messages_received)
PROXY_CONSTMETHOD0(uint64_t, bytes_received)
PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
PROXY_METHOD0(void, Close)
// TODO(bugs.webrtc.org/11547): Change to run on the network thread.
PROXY_METHOD1(bool, Send, const DataBuffer&)
END_PROXY_MAP()
} // namespace
rtc::scoped_refptr<RtpDataChannel> RtpDataChannel::Create(
RtpDataChannelProviderInterface* provider,
const std::string& label,
const DataChannelInit& config,
rtc::Thread* signaling_thread) {
rtc::scoped_refptr<RtpDataChannel> channel(
new rtc::RefCountedObject<RtpDataChannel>(config, provider, label,
signaling_thread));
if (!channel->Init()) {
return nullptr;
}
return channel;
}
// static
rtc::scoped_refptr<DataChannelInterface> RtpDataChannel::CreateProxy(
rtc::scoped_refptr<RtpDataChannel> channel) {
return DataChannelProxy::Create(channel->signaling_thread_, channel.get());
}
RtpDataChannel::RtpDataChannel(const DataChannelInit& config,
RtpDataChannelProviderInterface* provider,
const std::string& label,
rtc::Thread* signaling_thread)
: signaling_thread_(signaling_thread),
internal_id_(GenerateUniqueId()),
label_(label),
config_(config),
provider_(provider) {
RTC_DCHECK_RUN_ON(signaling_thread_);
}
bool RtpDataChannel::Init() {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (config_.reliable || config_.id != -1 || config_.maxRetransmits ||
config_.maxRetransmitTime) {
RTC_LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
"invalid DataChannelInit.";
return false;
}
return true;
}
RtpDataChannel::~RtpDataChannel() {
RTC_DCHECK_RUN_ON(signaling_thread_);
}
void RtpDataChannel::RegisterObserver(DataChannelObserver* observer) {
RTC_DCHECK_RUN_ON(signaling_thread_);
observer_ = observer;
DeliverQueuedReceivedData();
}
void RtpDataChannel::UnregisterObserver() {
RTC_DCHECK_RUN_ON(signaling_thread_);
observer_ = nullptr;
}
void RtpDataChannel::Close() {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ == kClosed)
return;
send_ssrc_ = 0;
send_ssrc_set_ = false;
SetState(kClosing);
UpdateState();
}
RtpDataChannel::DataState RtpDataChannel::state() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
return state_;
}
RTCError RtpDataChannel::error() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
return error_;
}
uint32_t RtpDataChannel::messages_sent() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
return messages_sent_;
}
uint64_t RtpDataChannel::bytes_sent() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
return bytes_sent_;
}
uint32_t RtpDataChannel::messages_received() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
return messages_received_;
}
uint64_t RtpDataChannel::bytes_received() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
return bytes_received_;
}
bool RtpDataChannel::Send(const DataBuffer& buffer) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ != kOpen) {
return false;
}
// TODO(jiayl): the spec is unclear about whether the remote side should get
// the onmessage event. We need to figure out the expected behavior and change
// the code accordingly.
if (buffer.size() == 0) {
return true;
}
return SendDataMessage(buffer);
}
void RtpDataChannel::SetReceiveSsrc(uint32_t receive_ssrc) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (receive_ssrc_set_) {
return;
}
receive_ssrc_ = receive_ssrc;
receive_ssrc_set_ = true;
UpdateState();
}
void RtpDataChannel::OnTransportChannelClosed() {
RTCError error = RTCError(RTCErrorType::OPERATION_ERROR_WITH_DATA,
"Transport channel closed");
CloseAbruptlyWithError(std::move(error));
}
DataChannelStats RtpDataChannel::GetStats() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
DataChannelStats stats{internal_id_, id(), label(),
protocol(), state(), messages_sent(),
messages_received(), bytes_sent(), bytes_received()};
return stats;
}
// The remote peer requested that this channel be closed.
void RtpDataChannel::RemotePeerRequestClose() {
// Close with error code explicitly set to OK.
CloseAbruptlyWithError(RTCError());
}
void RtpDataChannel::SetSendSsrc(uint32_t send_ssrc) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (send_ssrc_set_) {
return;
}
send_ssrc_ = send_ssrc;
send_ssrc_set_ = true;
UpdateState();
}
void RtpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& payload) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (params.ssrc != receive_ssrc_) {
return;
}
RTC_DCHECK(params.type == cricket::DMT_BINARY ||
params.type == cricket::DMT_TEXT);
RTC_LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = "
<< params.sid;
bool binary = (params.type == cricket::DMT_BINARY);
auto buffer = std::make_unique<DataBuffer>(payload, binary);
if (state_ == kOpen && observer_) {
++messages_received_;
bytes_received_ += buffer->size();
observer_->OnMessage(*buffer.get());
} else {
if (queued_received_data_.byte_count() + payload.size() >
kMaxQueuedReceivedDataBytes) {
RTC_LOG(LS_ERROR) << "Queued received data exceeds the max buffer size.";
queued_received_data_.Clear();
CloseAbruptlyWithError(
RTCError(RTCErrorType::RESOURCE_EXHAUSTED,
"Queued received data exceeds the max buffer size."));
return;
}
queued_received_data_.PushBack(std::move(buffer));
}
}
void RtpDataChannel::OnChannelReady(bool writable) {
RTC_DCHECK_RUN_ON(signaling_thread_);
writable_ = writable;
if (!writable) {
return;
}
UpdateState();
}
void RtpDataChannel::CloseAbruptlyWithError(RTCError error) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ == kClosed) {
return;
}
if (connected_to_provider_) {
DisconnectFromProvider();
}
// Still go to "kClosing" before "kClosed", since observers may be expecting
// that.
SetState(kClosing);
error_ = std::move(error);
SetState(kClosed);
}
void RtpDataChannel::UpdateState() {
RTC_DCHECK_RUN_ON(signaling_thread_);
// UpdateState determines what to do from a few state variables. Include
// all conditions required for each state transition here for
// clarity.
switch (state_) {
case kConnecting: {
if (send_ssrc_set_ == receive_ssrc_set_) {
if (!connected_to_provider_) {
connected_to_provider_ = provider_->ConnectDataChannel(this);
}
if (connected_to_provider_ && writable_) {
SetState(kOpen);
// If we received buffers before the channel became writable,
// deliver them now.
DeliverQueuedReceivedData();
}
}
break;
}
case kOpen: {
break;
}
case kClosing: {
// For RTP data channels, we can go to "closed" after we finish
// sending data and the send/recv SSRCs are unset.
if (connected_to_provider_) {
DisconnectFromProvider();
}
if (!send_ssrc_set_ && !receive_ssrc_set_) {
SetState(kClosed);
}
break;
}
case kClosed:
break;
}
}
void RtpDataChannel::SetState(DataState state) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ == state) {
return;
}
state_ = state;
if (observer_) {
observer_->OnStateChange();
}
if (state_ == kOpen) {
SignalOpened(this);
} else if (state_ == kClosed) {
SignalClosed(this);
}
}
void RtpDataChannel::DisconnectFromProvider() {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (!connected_to_provider_)
return;
provider_->DisconnectDataChannel(this);
connected_to_provider_ = false;
}
void RtpDataChannel::DeliverQueuedReceivedData() {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (!observer_) {
return;
}
while (!queued_received_data_.Empty()) {
std::unique_ptr<DataBuffer> buffer = queued_received_data_.PopFront();
++messages_received_;
bytes_received_ += buffer->size();
observer_->OnMessage(*buffer);
}
}
bool RtpDataChannel::SendDataMessage(const DataBuffer& buffer) {
RTC_DCHECK_RUN_ON(signaling_thread_);
cricket::SendDataParams send_params;
send_params.ssrc = send_ssrc_;
send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT;
cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
bool success = provider_->SendData(send_params, buffer.data, &send_result);
if (success) {
++messages_sent_;
bytes_sent_ += buffer.size();
if (observer_ && buffer.size() > 0) {
observer_->OnBufferedAmountChange(buffer.size());
}
return true;
}
return false;
}
// static
void RtpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) {
g_unique_id = new_value;
}
} // namespace webrtc
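For readers who need the behavior the deleted file implemented, the UpdateState()/SetState() logic above boils down to a small state machine. The following self-contained sketch mirrors those transitions; the enum and struct names are invented for illustration and do not exist in the codebase:
// Illustrative summary of the removed RtpDataChannel state machine.
#include <cassert>

enum class RtpDcState { kConnecting, kOpen, kClosing, kClosed };

struct RtpDcFlags {
  bool send_ssrc_set = false;
  bool receive_ssrc_set = false;
  bool connected_to_provider = false;
  bool writable = false;
};

// Mirrors the decisions made in the deleted RtpDataChannel::UpdateState().
RtpDcState NextState(RtpDcState state, const RtpDcFlags& f) {
  switch (state) {
    case RtpDcState::kConnecting:
      // Open once negotiation is symmetric, the provider is attached and the
      // transport has become writable.
      if (f.send_ssrc_set == f.receive_ssrc_set && f.connected_to_provider &&
          f.writable) {
        return RtpDcState::kOpen;
      }
      return RtpDcState::kConnecting;
    case RtpDcState::kOpen:
      // Leaves kOpen only via Close() or CloseAbruptlyWithError().
      return RtpDcState::kOpen;
    case RtpDcState::kClosing:
      // Fully closed once both SSRCs have been unset.
      if (!f.send_ssrc_set && !f.receive_ssrc_set) {
        return RtpDcState::kClosed;
      }
      return RtpDcState::kClosing;
    case RtpDcState::kClosed:
      return RtpDcState::kClosed;
  }
  assert(false);
  return state;
}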

View File

@ -1,204 +0,0 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef PC_RTP_DATA_CHANNEL_H_
#define PC_RTP_DATA_CHANNEL_H_
#include <stdint.h>
#include <memory>
#include <string>
#include "absl/types/optional.h"
#include "api/data_channel_interface.h"
#include "api/priority.h"
#include "api/rtc_error.h"
#include "api/scoped_refptr.h"
#include "api/transport/data_channel_transport_interface.h"
#include "media/base/media_channel.h"
#include "pc/channel.h"
#include "pc/data_channel_utils.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
class RtpDataChannel;
// TODO(deadbeef): Once RTP data channels go away, get rid of this and have
// DataChannel depend on SctpTransportInternal (pure virtual SctpTransport
// interface) instead.
class RtpDataChannelProviderInterface {
public:
// Sends the data to the transport.
virtual bool SendData(const cricket::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) = 0;
// Connects to the transport signals.
virtual bool ConnectDataChannel(RtpDataChannel* data_channel) = 0;
// Disconnects from the transport signals.
virtual void DisconnectDataChannel(RtpDataChannel* data_channel) = 0;
// Returns true if the transport channel is ready to send data.
virtual bool ReadyToSendData() const = 0;
protected:
virtual ~RtpDataChannelProviderInterface() {}
};
// RtpDataChannel is an implementation of the DataChannelInterface based on
// libjingle's data engine. It provides an implementation of unreliable data
// channels.
// DataChannel states:
// kConnecting: The channel has been created but the transport might not yet
// be ready.
// kOpen: The channel has a local SSRC set by a call to UpdateSendSsrc
// and a remote SSRC set by a call to UpdateReceiveSsrc, and the transport
// has been writable once.
// kClosing: DataChannelInterface::Close has been called, or UpdateReceiveSsrc
// has been called with SSRC==0.
// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc have been called with
// SSRC==0.
class RtpDataChannel : public DataChannelInterface,
public sigslot::has_slots<> {
public:
static rtc::scoped_refptr<RtpDataChannel> Create(
RtpDataChannelProviderInterface* provider,
const std::string& label,
const DataChannelInit& config,
rtc::Thread* signaling_thread);
// Instantiates an API proxy for a DataChannel instance that will be handed
// out to external callers.
static rtc::scoped_refptr<DataChannelInterface> CreateProxy(
rtc::scoped_refptr<RtpDataChannel> channel);
void RegisterObserver(DataChannelObserver* observer) override;
void UnregisterObserver() override;
std::string label() const override { return label_; }
bool reliable() const override { return false; }
bool ordered() const override { return config_.ordered; }
// Backwards compatible accessors
uint16_t maxRetransmitTime() const override {
return config_.maxRetransmitTime ? *config_.maxRetransmitTime
: static_cast<uint16_t>(-1);
}
uint16_t maxRetransmits() const override {
return config_.maxRetransmits ? *config_.maxRetransmits
: static_cast<uint16_t>(-1);
}
absl::optional<int> maxPacketLifeTime() const override {
return config_.maxRetransmitTime;
}
absl::optional<int> maxRetransmitsOpt() const override {
return config_.maxRetransmits;
}
std::string protocol() const override { return config_.protocol; }
bool negotiated() const override { return config_.negotiated; }
int id() const override { return config_.id; }
Priority priority() const override {
return config_.priority ? *config_.priority : Priority::kLow;
}
virtual int internal_id() const { return internal_id_; }
uint64_t buffered_amount() const override { return 0; }
void Close() override;
DataState state() const override;
RTCError error() const override;
uint32_t messages_sent() const override;
uint64_t bytes_sent() const override;
uint32_t messages_received() const override;
uint64_t bytes_received() const override;
bool Send(const DataBuffer& buffer) override;
// Close immediately, ignoring any queued data or closing procedure.
// This is called when SDP indicates a channel should be removed.
void CloseAbruptlyWithError(RTCError error);
// Called when the channel's ready to use. That can happen when the
// underlying DataMediaChannel becomes ready, or when this channel is a new
// stream on an existing DataMediaChannel, and we've finished negotiation.
void OnChannelReady(bool writable);
// Slots for provider to connect signals to.
void OnDataReceived(const cricket::ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& payload);
// Called when the transport channel is unusable.
// This method makes sure the DataChannel is disconnected and changes state
// to kClosed.
void OnTransportChannelClosed();
DataChannelStats GetStats() const;
// The remote peer requested that this channel should be closed.
void RemotePeerRequestClose();
// Set the SSRC this channel should use to send data on the
// underlying data engine. |send_ssrc| == 0 means that the channel is no
// longer part of the session negotiation.
void SetSendSsrc(uint32_t send_ssrc);
// Set the SSRC this channel should use to receive data from the
// underlying data engine.
void SetReceiveSsrc(uint32_t receive_ssrc);
// Emitted when state transitions to kOpen.
sigslot::signal1<DataChannelInterface*> SignalOpened;
// Emitted when state transitions to kClosed.
sigslot::signal1<DataChannelInterface*> SignalClosed;
// Reset the allocator for internal ID values for testing, so that
// the internal IDs generated are predictable. Test only.
static void ResetInternalIdAllocatorForTesting(int new_value);
protected:
RtpDataChannel(const DataChannelInit& config,
RtpDataChannelProviderInterface* client,
const std::string& label,
rtc::Thread* signaling_thread);
~RtpDataChannel() override;
private:
bool Init();
void UpdateState();
void SetState(DataState state);
void DisconnectFromProvider();
void DeliverQueuedReceivedData();
bool SendDataMessage(const DataBuffer& buffer);
rtc::Thread* const signaling_thread_;
const int internal_id_;
const std::string label_;
const DataChannelInit config_;
DataChannelObserver* observer_ RTC_GUARDED_BY(signaling_thread_) = nullptr;
DataState state_ RTC_GUARDED_BY(signaling_thread_) = kConnecting;
RTCError error_ RTC_GUARDED_BY(signaling_thread_);
uint32_t messages_sent_ RTC_GUARDED_BY(signaling_thread_) = 0;
uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_) = 0;
uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_) = 0;
uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_) = 0;
RtpDataChannelProviderInterface* const provider_;
bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_) = false;
bool send_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false;
bool receive_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false;
bool writable_ RTC_GUARDED_BY(signaling_thread_) = false;
uint32_t send_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0;
uint32_t receive_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0;
PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_);
};
} // namespace webrtc
#endif // PC_RTP_DATA_CHANNEL_H_
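The deleted RtpDataChannelProviderInterface above was the seam through which the channel talked to its transport. A hypothetical minimal test double of the kind a unit test might have used; it is illustrative only, since the header it depends on is removed by this very change:
// Hypothetical fake for the now-removed provider interface.
#include "pc/rtp_data_channel.h"  // Deleted by this change; shown for context.

namespace webrtc {

class FakeRtpDataChannelProvider : public RtpDataChannelProviderInterface {
 public:
  bool SendData(const cricket::SendDataParams& params,
                const rtc::CopyOnWriteBuffer& payload,
                cricket::SendDataResult* result) override {
    // Pretend the transport accepted every message.
    if (result)
      *result = cricket::SDR_SUCCESS;
    last_payload_size_ = payload.size();
    return true;
  }
  bool ConnectDataChannel(RtpDataChannel* data_channel) override {
    connected_ = true;
    return true;
  }
  void DisconnectDataChannel(RtpDataChannel* data_channel) override {
    connected_ = false;
  }
  bool ReadyToSendData() const override { return connected_; }

 private:
  bool connected_ = false;
  size_t last_payload_size_ = 0;
};

}  // namespace webrtc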

View File

@ -37,7 +37,6 @@
#include "media/base/media_channel.h"
#include "media/base/media_config.h"
#include "media/base/media_engine.h"
#include "media/base/rtp_data_engine.h"
#include "media/base/stream_params.h"
#include "media/base/test_utils.h"
#include "media/engine/fake_webrtc_call.h"
@ -112,8 +111,7 @@ class RtpSenderReceiverTest
local_stream_(MediaStream::Create(kStreamId1)) {
worker_thread_->Invoke<void>(RTC_FROM_HERE, [&]() {
channel_manager_ = cricket::ChannelManager::Create(
absl::WrapUnique(media_engine_),
std::make_unique<cricket::RtpDataEngine>(), false, worker_thread_,
absl::WrapUnique(media_engine_), false, worker_thread_,
network_thread_);
});

View File

@ -84,7 +84,6 @@ class RtpTransceiverUnifiedPlanTest : public ::testing::Test {
RtpTransceiverUnifiedPlanTest()
: channel_manager_(cricket::ChannelManager::Create(
std::make_unique<cricket::FakeMediaEngine>(),
std::make_unique<cricket::FakeDataEngine>(),
false,
rtc::Thread::Current(),
rtc::Thread::Current())),
@ -121,7 +120,6 @@ class RtpTransceiverTestForHeaderExtensions : public ::testing::Test {
RtpTransceiverTestForHeaderExtensions()
: channel_manager_(cricket::ChannelManager::Create(
std::make_unique<cricket::FakeMediaEngine>(),
std::make_unique<cricket::FakeDataEngine>(),
false,
rtc::Thread::Current(),
rtc::Thread::Current())),

View File

@ -43,7 +43,6 @@
#include "pc/media_stream.h"
#include "pc/peer_connection.h"
#include "pc/peer_connection_message_handler.h"
#include "pc/rtp_data_channel.h"
#include "pc/rtp_media_utils.h"
#include "pc/rtp_sender.h"
#include "pc/rtp_transport_internal.h"
@ -693,27 +692,6 @@ std::string GenerateRtcpCname() {
return cname;
}
// Add options to |session_options| from |rtp_data_channels|.
void AddRtpDataChannelOptions(
const std::map<std::string, rtc::scoped_refptr<RtpDataChannel>>&
rtp_data_channels,
cricket::MediaDescriptionOptions* data_media_description_options) {
if (!data_media_description_options) {
return;
}
// Check for data channels.
for (const auto& kv : rtp_data_channels) {
const RtpDataChannel* channel = kv.second;
if (channel->state() == RtpDataChannel::kConnecting ||
channel->state() == RtpDataChannel::kOpen) {
// Legacy RTP data channels are signaled with the track/stream ID set to
// the data channel's label.
data_media_description_options->AddRtpDataChannel(channel->label(),
channel->label());
}
}
}
// Check if we can send |new_stream| on a PeerConnection.
bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams,
webrtc::MediaStreamInterface* new_stream) {
@ -1464,17 +1442,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription(
}
}
const cricket::ContentInfo* data_content =
GetFirstDataContent(local_description()->description());
if (data_content) {
const cricket::RtpDataContentDescription* rtp_data_desc =
data_content->media_description()->as_rtp_data();
// rtp_data_desc will be null if this is an SCTP description.
if (rtp_data_desc) {
data_channel_controller()->UpdateLocalRtpDataChannels(
rtp_data_desc->streams());
}
}
// This function does nothing with data content.
if (type == SdpType::kAnswer &&
local_ice_credentials_to_replace_->SatisfiesIceRestart(
@ -1802,8 +1770,6 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription(
GetFirstAudioContentDescription(remote_description()->description());
const cricket::VideoContentDescription* video_desc =
GetFirstVideoContentDescription(remote_description()->description());
const cricket::RtpDataContentDescription* rtp_data_desc =
GetFirstRtpDataContentDescription(remote_description()->description());
// Check if the descriptions include streams, just in case the peer supports
// MSID, but doesn't indicate so with "a=msid-semantic".
@ -1856,13 +1822,6 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription(
}
}
// If this is an RTP data transport, update the DataChannels with the
// information from the remote peer.
if (rtp_data_desc) {
data_channel_controller()->UpdateRemoteRtpDataChannels(
GetActiveStreams(rtp_data_desc));
}
// Iterate new_streams and notify the observer about new MediaStreams.
auto observer = pc_->Observer();
for (size_t i = 0; i < new_streams->count(); ++i) {
@ -2515,7 +2474,6 @@ RTCError SdpOfferAnswerHandler::UpdateSessionState(
RTC_DCHECK(type == SdpType::kAnswer);
ChangeSignalingState(PeerConnectionInterface::kStable);
transceivers()->DiscardStableStates();
have_pending_rtp_data_channel_ = false;
}
// Update internal objects according to the session description's media
@ -2739,10 +2697,6 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) {
transceiver->internal()->set_mline_index(state.mline_index());
}
transport_controller()->RollbackTransports();
if (have_pending_rtp_data_channel_) {
DestroyDataChannelTransport();
have_pending_rtp_data_channel_ = false;
}
transceivers()->DiscardStableStates();
pending_local_description_.reset();
pending_remote_description_.reset();
@ -3407,21 +3361,13 @@ RTCError SdpOfferAnswerHandler::UpdateDataChannel(
RTC_LOG(LS_INFO) << "Rejected data channel, mid=" << content.mid();
DestroyDataChannelTransport();
} else {
if (!data_channel_controller()->rtp_data_channel() &&
!data_channel_controller()->data_channel_transport()) {
if (!data_channel_controller()->data_channel_transport()) {
RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid();
if (!CreateDataChannel(content.name)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to create data channel.");
}
}
if (source == cricket::CS_REMOTE) {
const MediaContentDescription* data_desc = content.media_description();
if (data_desc && cricket::IsRtpProtocol(data_desc->protocol())) {
data_channel_controller()->UpdateRemoteRtpDataChannels(
GetActiveStreams(data_desc));
}
}
}
return RTCError::OK();
}
@ -3548,15 +3494,7 @@ void SdpOfferAnswerHandler::GetOptionsForOffer(
GetOptionsForPlanBOffer(offer_answer_options, session_options);
}
// Intentionally unset the data channel type for RTP data channel with the
// second condition. Otherwise the RTP data channels would be successfully
// negotiated by default and the unit tests in WebRtcDataBrowserTest will fail
// when building with chromium. We want to leave RTP data channels broken, so
// people won't try to use them.
if (data_channel_controller()->HasRtpDataChannels() ||
pc_->data_channel_type() != cricket::DCT_RTP) {
session_options->data_channel_type = pc_->data_channel_type();
}
// Apply ICE restart flag and renomination flag.
bool ice_restart = offer_answer_options.ice_restart || HasNewIceCredentials();
@ -3815,14 +3753,7 @@ void SdpOfferAnswerHandler::GetOptionsForAnswer(
GetOptionsForPlanBAnswer(offer_answer_options, session_options);
}
// Intentionally unset the data channel type for RTP data channel. Otherwise
// the RTP data channels would be successfully negotiated by default and the
// unit tests in WebRtcDataBrowserTest will fail when building with chromium.
// We want to leave RTP data channels broken, so people won't try to use them.
if (data_channel_controller()->HasRtpDataChannels() ||
pc_->data_channel_type() != cricket::DCT_RTP) {
session_options->data_channel_type = pc_->data_channel_type();
}
// Apply ICE renomination flag.
for (auto& options : session_options->media_description_options) {
@ -4234,11 +4165,6 @@ void SdpOfferAnswerHandler::EnableSending() {
channel->Enable(true);
}
}
if (data_channel_controller()->rtp_data_channel() &&
!data_channel_controller()->rtp_data_channel()->enabled()) {
data_channel_controller()->rtp_data_channel()->Enable(true);
}
}
RTCError SdpOfferAnswerHandler::PushdownMediaDescription(
@ -4280,29 +4206,6 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription(
}
}
// If using the RtpDataChannel, push down the new SDP section for it too.
if (data_channel_controller()->rtp_data_channel()) {
const ContentInfo* data_content =
cricket::GetFirstDataContent(sdesc->description());
if (data_content && !data_content->rejected) {
const MediaContentDescription* data_desc =
data_content->media_description();
if (data_desc) {
std::string error;
bool success = (source == cricket::CS_LOCAL)
? data_channel_controller()
->rtp_data_channel()
->SetLocalContent(data_desc, type, &error)
: data_channel_controller()
->rtp_data_channel()
->SetRemoteContent(data_desc, type, &error);
if (!success) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error);
}
}
}
}
// Need complete offer/answer with an SCTP m= section before starting SCTP,
// according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19
if (pc_->sctp_mid() && local_description() && remote_description()) {
@ -4599,8 +4502,7 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) {
const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc);
if (pc_->data_channel_type() != cricket::DCT_NONE && data &&
!data->rejected && !data_channel_controller()->rtp_data_channel() &&
!data_channel_controller()->data_channel_transport()) {
!data->rejected && !data_channel_controller()->data_channel_transport()) {
if (!CreateDataChannel(data->name)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to create data channel.");
@ -4678,23 +4580,10 @@ bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) {
// out of sync (transport name not set while the mid is set).
pc_->SetSctpDataMid(mid);
break;
case cricket::DCT_RTP:
default:
RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid);
cricket::RtpDataChannel* data_channel =
channel_manager()->CreateRtpDataChannel(
pc_->configuration()->media_config, rtp_transport,
signaling_thread(), mid, pc_->SrtpRequired(),
pc_->GetCryptoOptions(), &ssrc_generator_);
if (!data_channel)
case cricket::DCT_NONE:
// User error.
RTC_NOTREACHED();
return false;
pc_->network_thread()->Invoke<void>(RTC_FROM_HERE, [this, data_channel] {
RTC_DCHECK_RUN_ON(pc_->network_thread());
pc_->SetupRtpDataChannelTransport_n(data_channel);
});
have_pending_rtp_data_channel_ = true;
break;
}
return true;
}
@ -4716,9 +4605,9 @@ void SdpOfferAnswerHandler::DestroyTransceiverChannel(
RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
if (channel) {
// TODO(tommi): VideoRtpReceiver::SetMediaChannel blocks and jumps to the
// worker thread. When being set to nullptr, there are additional blocking
// calls to e.g. ClearRecordableEncodedFrameCallback which triggers another
// blocking call or Stop() for video channels.
// worker thread. When being set to nullptr, there are additional
// blocking calls to e.g. ClearRecordableEncodedFrameCallback which triggers
// another blocking call or Stop() for video channels.
transceiver->internal()->SetChannel(nullptr);
RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2);
// TODO(tommi): All channel objects end up getting deleted on the
@ -4731,9 +4620,8 @@ void SdpOfferAnswerHandler::DestroyTransceiverChannel(
void SdpOfferAnswerHandler::DestroyDataChannelTransport() {
RTC_DCHECK_RUN_ON(signaling_thread());
const bool has_sctp = pc_->sctp_mid().has_value();
auto* rtp_data_channel = data_channel_controller()->rtp_data_channel();
if (has_sctp || rtp_data_channel)
if (has_sctp)
data_channel_controller()->OnTransportChannelClosed();
pc_->network_thread()->Invoke<void>(RTC_FROM_HERE, [this] {
@ -4743,9 +4631,6 @@ void SdpOfferAnswerHandler::DestroyDataChannelTransport() {
if (has_sctp)
pc_->ResetSctpDataMid();
if (rtp_data_channel)
DestroyChannelInterface(rtp_data_channel);
}
void SdpOfferAnswerHandler::DestroyChannelInterface(
@ -4770,8 +4655,8 @@ void SdpOfferAnswerHandler::DestroyChannelInterface(
static_cast<cricket::VideoChannel*>(channel));
break;
case cricket::MEDIA_TYPE_DATA:
channel_manager()->DestroyRtpDataChannel(
static_cast<cricket::RtpDataChannel*>(channel));
RTC_NOTREACHED()
<< "Trying to destroy datachannel through DestroyChannelInterface";
break;
default:
RTC_NOTREACHED() << "Unknown media type: " << channel->media_type();
@ -4887,8 +4772,6 @@ SdpOfferAnswerHandler::GetMediaDescriptionOptionsForActiveData(
cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid,
RtpTransceiverDirection::kSendRecv,
/*stopped=*/false);
AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()),
&options);
return options;
}
@ -4899,8 +4782,6 @@ SdpOfferAnswerHandler::GetMediaDescriptionOptionsForRejectedData(
cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid,
RtpTransceiverDirection::kInactive,
/*stopped=*/true);
AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()),
&options);
return options;
}
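With AddRtpDataChannelOptions() deleted, the two data-section helpers above now build MediaDescriptionOptions without attaching any RTP data channel streams. A minimal sketch of the active-data helper as it reads after this change, assembled from the surviving context lines; the exact signature is assumed, since it is not fully visible in the hunk:
// Illustrative post-change shape of the active-data helper.
cricket::MediaDescriptionOptions
SdpOfferAnswerHandler::GetMediaDescriptionOptionsForActiveData(
    const std::string& mid) const {
  cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid,
                                           RtpTransceiverDirection::kSendRecv,
                                           /*stopped=*/false);
  return options;  // No per-channel streams; SCTP channels need none.
}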

View File

@ -632,10 +632,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider,
rtc::scoped_refptr<MediaStreamInterface> missing_msid_default_stream_
RTC_GUARDED_BY(signaling_thread());
// Used when rolling back RTP data channels.
bool have_pending_rtp_data_channel_ RTC_GUARDED_BY(signaling_thread()) =
false;
// Updates the error state, signaling if necessary.
void SetSessionError(SessionError error, const std::string& error_desc);

View File

@ -60,7 +60,6 @@ const int kAutoBandwidth = -1;
class AudioContentDescription;
class VideoContentDescription;
class RtpDataContentDescription;
class SctpDataContentDescription;
class UnsupportedContentDescription;
@ -83,11 +82,6 @@ class MediaContentDescription {
virtual VideoContentDescription* as_video() { return nullptr; }
virtual const VideoContentDescription* as_video() const { return nullptr; }
virtual RtpDataContentDescription* as_rtp_data() { return nullptr; }
virtual const RtpDataContentDescription* as_rtp_data() const {
return nullptr;
}
virtual SctpDataContentDescription* as_sctp() { return nullptr; }
virtual const SctpDataContentDescription* as_sctp() const { return nullptr; }
@ -361,20 +355,6 @@ class VideoContentDescription : public MediaContentDescriptionImpl<VideoCodec> {
}
};
class RtpDataContentDescription
: public MediaContentDescriptionImpl<RtpDataCodec> {
public:
RtpDataContentDescription() {}
MediaType type() const override { return MEDIA_TYPE_DATA; }
RtpDataContentDescription* as_rtp_data() override { return this; }
const RtpDataContentDescription* as_rtp_data() const override { return this; }
private:
RtpDataContentDescription* CloneInternal() const override {
return new RtpDataContentDescription(*this);
}
};
class SctpDataContentDescription : public MediaContentDescription {
public:
SctpDataContentDescription() {}

View File

@ -130,16 +130,6 @@ TEST(SessionDescriptionTest, AddContentTransfersExtmapAllowMixedSetting) {
EXPECT_EQ(MediaContentDescription::kSession,
session_desc.GetContentDescriptionByName("video")
->extmap_allow_mixed_enum());
// Session level setting overrides media level when new content is added.
std::unique_ptr<MediaContentDescription> data_desc =
std::make_unique<RtpDataContentDescription>();
data_desc->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
session_desc.AddContent("data", MediaProtocolType::kRtp,
std::move(data_desc));
EXPECT_EQ(MediaContentDescription::kSession,
session_desc.GetContentDescriptionByName("data")
->extmap_allow_mixed_enum());
}
} // namespace cricket

View File

@ -248,16 +248,10 @@ class FakePeerConnectionBase : public PeerConnectionInternal {
return {};
}
sigslot::signal1<RtpDataChannel*>& SignalRtpDataChannelCreated() override {
return SignalRtpDataChannelCreated_;
}
sigslot::signal1<SctpDataChannel*>& SignalSctpDataChannelCreated() override {
return SignalSctpDataChannelCreated_;
}
cricket::RtpDataChannel* rtp_data_channel() const override { return nullptr; }
absl::optional<std::string> sctp_transport_name() const override {
return absl::nullopt;
}
@ -298,7 +292,6 @@ class FakePeerConnectionBase : public PeerConnectionInternal {
}
protected:
sigslot::signal1<RtpDataChannel*> SignalRtpDataChannelCreated_;
sigslot::signal1<SctpDataChannel*> SignalSctpDataChannelCreated_;
};

View File

@ -82,7 +82,6 @@ using cricket::MediaContentDescription;
using cricket::MediaProtocolType;
using cricket::MediaType;
using cricket::RidDescription;
using cricket::RtpDataContentDescription;
using cricket::RtpHeaderExtensions;
using cricket::SctpDataContentDescription;
using cricket::SimulcastDescription;
@ -1415,12 +1414,7 @@ void BuildMediaDescription(const ContentInfo* content_info,
fmt.append(kDefaultSctpmapProtocol);
}
} else {
const RtpDataContentDescription* rtp_data_desc =
media_desc->as_rtp_data();
for (const cricket::RtpDataCodec& codec : rtp_data_desc->codecs()) {
fmt.append(" ");
fmt.append(rtc::ToString(codec.id));
}
RTC_NOTREACHED() << "Data description without SCTP";
}
} else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
const UnsupportedContentDescription* unsupported_desc =
@ -1972,19 +1966,6 @@ void BuildRtpMap(const MediaContentDescription* media_desc,
ptime = std::max(ptime, max_minptime);
AddAttributeLine(kCodecParamPTime, ptime, message);
}
} else if (media_type == cricket::MEDIA_TYPE_DATA) {
if (media_desc->as_rtp_data()) {
for (const cricket::RtpDataCodec& codec :
media_desc->as_rtp_data()->codecs()) {
// RFC 4566
// a=rtpmap:<payload type> <encoding name>/<clock rate>
// [/<encodingparameters>]
InitAttrLine(kAttributeRtpmap, &os);
os << kSdpDelimiterColon << codec.id << " " << codec.name << "/"
<< codec.clockrate;
AddLine(os.str(), message);
}
}
}
}
@ -2739,14 +2720,6 @@ bool ParseMediaDescription(
}
data_desc->set_protocol(protocol);
content = std::move(data_desc);
} else if (cricket::IsRtpProtocol(protocol)) {
// RTP
std::unique_ptr<RtpDataContentDescription> data_desc =
ParseContentDescription<RtpDataContentDescription>(
message, cricket::MEDIA_TYPE_DATA, mline_index, protocol,
payload_types, pos, &content_name, &bundle_only,
&section_msid_signaling, &transport, candidates, error);
content = std::move(data_desc);
} else {
return ParseFailed(line, "Unsupported protocol for media type", error);
}
@ -3671,11 +3644,6 @@ bool ParseRtpmapAttribute(const std::string& line,
AudioContentDescription* audio_desc = media_desc->as_audio();
UpdateCodec(payload_type, encoding_name, clock_rate, 0, channels,
audio_desc);
} else if (media_type == cricket::MEDIA_TYPE_DATA) {
RtpDataContentDescription* data_desc = media_desc->as_rtp_data();
if (data_desc) {
data_desc->AddCodec(cricket::RtpDataCodec(payload_type, encoding_name));
}
}
return true;
}
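Note: the removed BuildRtpMap branch above cited the RFC 4566 grammar
a=rtpmap:<payload type> <encoding name>/<clock rate>[/<encodingparameters>].
A minimal standalone sketch of that formatting step, using only the standard
library, is shown below; the helper name is hypothetical and not part of
webrtc_sdp.cc.

#include <sstream>
#include <string>

// Hypothetical helper: builds one "a=rtpmap:" attribute line per RFC 4566.
// The removed RTP-data branch emitted such lines for data codecs,
// e.g. "a=rtpmap:101 google-data/90000"; after this change the equivalent
// logic in webrtc_sdp.cc runs only for audio and video codecs.
std::string BuildRtpmapLine(int payload_type,
                            const std::string& encoding_name,
                            int clock_rate) {
  std::ostringstream os;
  os << "a=rtpmap:" << payload_type << ' ' << encoding_name << '/'
     << clock_rate;
  return os.str();
}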

View File

@ -65,7 +65,6 @@ using cricket::MediaProtocolType;
using cricket::RELAY_PORT_TYPE;
using cricket::RidDescription;
using cricket::RidDirection;
using cricket::RtpDataContentDescription;
using cricket::SctpDataContentDescription;
using cricket::SessionDescription;
using cricket::SimulcastDescription;
@ -263,22 +262,6 @@ static const char kSdpString[] =
"a=ssrc:3 mslabel:local_stream_1\r\n"
"a=ssrc:3 label:video_track_id_1\r\n";
static const char kSdpRtpDataChannelString[] =
"m=application 9 RTP/SAVPF 101\r\n"
"c=IN IP4 0.0.0.0\r\n"
"a=rtcp:9 IN IP4 0.0.0.0\r\n"
"a=ice-ufrag:ufrag_data\r\n"
"a=ice-pwd:pwd_data\r\n"
"a=mid:data_content_name\r\n"
"a=sendrecv\r\n"
"a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
"inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5\r\n"
"a=rtpmap:101 google-data/90000\r\n"
"a=ssrc:10 cname:data_channel_cname\r\n"
"a=ssrc:10 msid:data_channel data_channeld0\r\n"
"a=ssrc:10 mslabel:data_channel\r\n"
"a=ssrc:10 label:data_channeld0\r\n";
// draft-ietf-mmusic-sctp-sdp-03
static const char kSdpSctpDataChannelString[] =
"m=application 9 UDP/DTLS/SCTP 5000\r\n"
@ -906,12 +889,6 @@ static const uint32_t kVideoTrack3Ssrc = 6;
static const char kAudioTrackId3[] = "audio_track_id_3";
static const uint32_t kAudioTrack3Ssrc = 7;
// DataChannel
static const char kDataChannelLabel[] = "data_channel";
static const char kDataChannelMsid[] = "data_channeld0";
static const char kDataChannelCname[] = "data_channel_cname";
static const uint32_t kDataChannelSsrc = 10;
// Candidate
static const char kDummyMid[] = "dummy_mid";
static const int kDummyIndex = 123;
@ -1466,11 +1443,6 @@ class WebRtcSdpTest : public ::testing::Test {
simulcast2.receive_layers().size());
}
void CompareRtpDataContentDescription(const RtpDataContentDescription* dcd1,
const RtpDataContentDescription* dcd2) {
CompareMediaContentDescription<RtpDataContentDescription>(dcd1, dcd2);
}
void CompareSctpDataContentDescription(
const SctpDataContentDescription* dcd1,
const SctpDataContentDescription* dcd2) {
@ -1521,14 +1493,6 @@ class WebRtcSdpTest : public ::testing::Test {
const SctpDataContentDescription* scd2 =
c2.media_description()->as_sctp();
CompareSctpDataContentDescription(scd1, scd2);
} else {
if (IsDataContent(&c1)) {
const RtpDataContentDescription* dcd1 =
c1.media_description()->as_rtp_data();
const RtpDataContentDescription* dcd2 =
c2.media_description()->as_rtp_data();
CompareRtpDataContentDescription(dcd1, dcd2);
}
}
CompareSimulcastDescription(
@ -1816,28 +1780,6 @@ class WebRtcSdpTest : public ::testing::Test {
kDataContentName, TransportDescription(kUfragData, kPwdData)));
}
void AddRtpDataChannel() {
std::unique_ptr<RtpDataContentDescription> data(
new RtpDataContentDescription());
data_desc_ = data.get();
data_desc_->AddCodec(DataCodec(101, "google-data"));
StreamParams data_stream;
data_stream.id = kDataChannelMsid;
data_stream.cname = kDataChannelCname;
data_stream.set_stream_ids({kDataChannelLabel});
data_stream.ssrcs.push_back(kDataChannelSsrc);
data_desc_->AddStream(data_stream);
data_desc_->AddCrypto(
CryptoParams(1, "AES_CM_128_HMAC_SHA1_80",
"inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5", ""));
data_desc_->set_protocol(cricket::kMediaProtocolSavpf);
desc_.AddContent(kDataContentName, MediaProtocolType::kRtp,
std::move(data));
desc_.AddTransportInfo(TransportInfo(
kDataContentName, TransportDescription(kUfragData, kPwdData)));
}
bool TestDeserializeDirection(RtpTransceiverDirection direction) {
std::string new_sdp = kSdpFullString;
ReplaceDirection(direction, &new_sdp);
@ -2103,7 +2045,6 @@ class WebRtcSdpTest : public ::testing::Test {
SessionDescription desc_;
AudioContentDescription* audio_desc_;
VideoContentDescription* video_desc_;
RtpDataContentDescription* data_desc_;
SctpDataContentDescription* sctp_desc_;
Candidates candidates_;
std::unique_ptr<IceCandidateInterface> jcandidate_;
@ -2269,18 +2210,6 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAudioVideoRejected) {
EXPECT_TRUE(TestSerializeRejected(true, true));
}
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRtpDataChannel) {
AddRtpDataChannel();
JsepSessionDescription jsep_desc(kDummyType);
MakeDescriptionWithoutCandidates(&jsep_desc);
std::string message = webrtc::SdpSerialize(jsep_desc);
std::string expected_sdp = kSdpString;
expected_sdp.append(kSdpRtpDataChannelString);
EXPECT_EQ(expected_sdp, message);
}
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSctpDataChannel) {
bool use_sctpmap = true;
AddSctpDataChannel(use_sctpmap);
@ -2327,22 +2256,6 @@ TEST_F(WebRtcSdpTest, SerializeWithSctpDataChannelAndNewPort) {
EXPECT_EQ(expected_sdp, message);
}
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithDataChannelAndBandwidth) {
JsepSessionDescription jsep_desc(kDummyType);
AddRtpDataChannel();
data_desc_->set_bandwidth(100 * 1000);
data_desc_->set_bandwidth_type("AS");
MakeDescriptionWithoutCandidates(&jsep_desc);
std::string message = webrtc::SdpSerialize(jsep_desc);
std::string expected_sdp = kSdpString;
expected_sdp.append(kSdpRtpDataChannelString);
// Serializing data content shouldn't ignore bandwidth settings.
InjectAfter("m=application 9 RTP/SAVPF 101\r\nc=IN IP4 0.0.0.0\r\n",
"b=AS:100\r\n", &expected_sdp);
EXPECT_EQ(expected_sdp, message);
}
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmapAllowMixed) {
jdesc_.description()->set_extmap_allow_mixed(true);
TestSerialize(jdesc_);
@ -2913,21 +2826,6 @@ TEST_F(WebRtcSdpTest, DeserializeInvalidCandidiate) {
EXPECT_FALSE(SdpDeserializeCandidate(kSdpTcpInvalidCandidate, &jcandidate));
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannels) {
AddRtpDataChannel();
JsepSessionDescription jdesc(kDummyType);
ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
std::string sdp_with_data = kSdpString;
sdp_with_data.append(kSdpRtpDataChannelString);
JsepSessionDescription jdesc_output(kDummyType);
// Deserialize
EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
// Verify
EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannels) {
bool use_sctpmap = true;
AddSctpDataChannel(use_sctpmap);
@ -3088,8 +2986,9 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpmapAttribute) {
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithStrangeApplicationProtocolNames) {
static const char* bad_strings[] = {"DTLS/SCTPRTP/", "obviously-bogus",
"UDP/TL/RTSP/SAVPF", "UDP/TL/RTSP/S"};
static const char* bad_strings[] = {
"DTLS/SCTPRTP/", "obviously-bogus", "UDP/TL/RTSP/SAVPF",
"UDP/TL/RTSP/S", "DTLS/SCTP/RTP/FOO", "obviously-bogus/RTP/"};
for (auto proto : bad_strings) {
std::string sdp_with_data = kSdpString;
sdp_with_data.append("m=application 9 ");
@ -3099,18 +2998,6 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithStrangeApplicationProtocolNames) {
EXPECT_FALSE(SdpDeserialize(sdp_with_data, &jdesc_output))
<< "Parsing should have failed on " << proto;
}
// The following strings are strange, but acceptable as RTP.
static const char* weird_strings[] = {"DTLS/SCTP/RTP/FOO",
"obviously-bogus/RTP/"};
for (auto proto : weird_strings) {
std::string sdp_with_data = kSdpString;
sdp_with_data.append("m=application 9 ");
sdp_with_data.append(proto);
sdp_with_data.append(" 47\r\n");
JsepSessionDescription jdesc_output(kDummyType);
EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output))
<< "Parsing should have succeeded on " << proto;
}
}
// For crbug/344475.
@ -3168,21 +3055,6 @@ TEST_F(WebRtcSdpTest,
EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannelsAndBandwidth) {
// We want to test that deserializing data content limits bandwidth
// settings (it should never be greater than the default).
// This should prevent someone from using unlimited data bandwidth through
// JS and "breaking the Internet".
// See: https://code.google.com/p/chromium/issues/detail?id=280726
std::string sdp_with_bandwidth = kSdpString;
sdp_with_bandwidth.append(kSdpRtpDataChannelString);
InjectAfter("a=mid:data_content_name\r\n", "b=AS:100\r\n",
&sdp_with_bandwidth);
JsepSessionDescription jdesc_with_bandwidth(kDummyType);
EXPECT_FALSE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsAndBandwidth) {
bool use_sctpmap = true;
AddSctpDataChannel(use_sctpmap);

View File

@ -40,8 +40,14 @@ class FakeSctpTransport : public cricket::SctpTransportInternal {
int max_message_size() const { return max_message_size_; }
absl::optional<int> max_outbound_streams() const { return absl::nullopt; }
absl::optional<int> max_inbound_streams() const { return absl::nullopt; }
int local_port() const { return *local_port_; }
int remote_port() const { return *remote_port_; }
int local_port() const {
RTC_DCHECK(local_port_);
return *local_port_;
}
int remote_port() const {
RTC_DCHECK(remote_port_);
return *remote_port_;
}
private:
absl::optional<int> local_port_;
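Note: the accessor change above adds RTC_DCHECK before dereferencing the
optional ports, since reading an empty absl::optional is undefined behavior.
A minimal sketch of the same guarded-accessor pattern, assuming only
absl::optional (the class name here is hypothetical, not WebRTC code):

#include <cassert>
#include "absl/types/optional.h"

// Hypothetical illustration of the pattern used in FakeSctpTransport:
// assert (RTC_DCHECK in the diff) that a value is present before reading it.
class PortHolder {
 public:
  void set_local_port(int port) { local_port_ = port; }
  int local_port() const {
    assert(local_port_.has_value());  // RTC_DCHECK(local_port_) in the diff.
    return *local_port_;
  }

 private:
  absl::optional<int> local_port_;
};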