introduce an unsupported content description type

This carries unsupported content descriptions (i.e., m= sections
whose media type or protocol WebRTC does not understand) in a
dedicated data type so that a rejected media section is added to
the answer SDP instead of being dropped.
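
Illustrative sketch of the resulting behavior, mirroring the new
PeerConnectionSignalingTest.UnsupportedContentType test below (|caller| is
the PeerConnectionWrapper helper used in that test; the SDP values are just
examples):

  std::string sdp =
      "v=0\r\n"
      "o=- 1 1 IN IP4 127.0.0.1\r\n"
      "s=-\r\n"
      "t=0 0\r\n"
      "m=bogus 9 FOO 0 8\r\n"  // media type "bogus" and protocol "FOO" are unknown
      "c=IN IP4 0.0.0.0\r\n"
      "a=mid:bogusmid\r\n";
  auto offer = webrtc::CreateSessionDescription(webrtc::SdpType::kOffer, sdp, nullptr);
  EXPECT_TRUE(caller->SetRemoteDescription(std::move(offer)));
  auto answer = caller->CreateAnswer();
  // The answer now contains one rejected m= section whose
  // UnsupportedContentDescription keeps media type "bogus", protocol "FOO"
  // and mid "bogusmid" instead of the section being dropped.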

BUG=webrtc:3513

Change-Id: Ifc4168eae11e899f2504649de5e1eecb6801a9fb
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/179082
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Philipp Hancke <philipp.hancke@googlemail.com>
Cr-Commit-Position: refs/heads/master@{#32410}
Philipp Hancke 2020-10-13 12:43:15 +02:00 committed by Commit Bot
parent a2b31c35ff
commit 239f92ecf7
14 changed files with 291 additions and 29 deletions

View File

@@ -26,6 +26,10 @@ std::string MediaTypeToString(MediaType type) {
return kMediaTypeVideo;
case MEDIA_TYPE_DATA:
return kMediaTypeData;
case MEDIA_TYPE_UNSUPPORTED:
// Unsupported media stores the m=<mediatype> differently.
RTC_NOTREACHED();
return "";
}
FATAL();
// Not reachable; avoids compile warning.

View File

@@ -20,7 +20,12 @@
namespace cricket {
enum MediaType { MEDIA_TYPE_AUDIO, MEDIA_TYPE_VIDEO, MEDIA_TYPE_DATA };
enum MediaType {
MEDIA_TYPE_AUDIO,
MEDIA_TYPE_VIDEO,
MEDIA_TYPE_DATA,
MEDIA_TYPE_UNSUPPORTED
};
extern const char kMediaTypeAudio[];
extern const char kMediaTypeVideo[];

View File

@@ -1559,6 +1559,13 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
return nullptr;
}
break;
case MEDIA_TYPE_UNSUPPORTED:
if (!AddUnsupportedContentForOffer(
media_description_options, session_options, current_content,
current_description, offer.get(), &ice_credentials)) {
return nullptr;
}
break;
default:
RTC_NOTREACHED();
}
@@ -1713,6 +1720,14 @@ MediaSessionDescriptionFactory::CreateAnswer(
return nullptr;
}
break;
case MEDIA_TYPE_UNSUPPORTED:
if (!AddUnsupportedContentForAnswer(
media_description_options, session_options, offer_content,
offer, current_content, current_description,
bundle_transport.get(), answer.get(), &ice_credentials)) {
return nullptr;
}
break;
default:
RTC_NOTREACHED();
}
@@ -2403,6 +2418,31 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer(
}
}
bool MediaSessionDescriptionFactory::AddUnsupportedContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
const SessionDescription* current_description,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const {
RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_UNSUPPORTED));
const UnsupportedContentDescription* current_unsupported_description =
current_content->media_description()->as_unsupported();
auto unsupported = std::make_unique<UnsupportedContentDescription>(
current_unsupported_description->media_type());
unsupported->set_protocol(current_content->media_description()->protocol());
desc->AddContent(media_description_options.mid, MediaProtocolType::kOther,
/*rejected=*/true, std::move(unsupported));
if (!AddTransportOffer(media_description_options.mid,
media_description_options.transport_options,
current_description, desc, ice_credentials)) {
return false;
}
return true;
}
// |audio_codecs| = set of all possible codecs that can be used, with correct
// payload type mappings
//
@@ -2745,6 +2785,42 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer(
return true;
}
bool MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* offer_content,
const SessionDescription* offer_description,
const ContentInfo* current_content,
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const {
std::unique_ptr<TransportDescription> unsupported_transport =
CreateTransportAnswer(media_description_options.mid, offer_description,
media_description_options.transport_options,
current_description, bundle_transport != nullptr,
ice_credentials);
if (!unsupported_transport) {
return false;
}
RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_UNSUPPORTED));
const UnsupportedContentDescription* offer_unsupported_description =
offer_content->media_description()->as_unsupported();
std::unique_ptr<MediaContentDescription> unsupported_answer =
std::make_unique<UnsupportedContentDescription>(
offer_unsupported_description->media_type());
unsupported_answer->set_protocol(offer_unsupported_description->protocol());
if (!AddTransportAnswer(media_description_options.mid,
*(unsupported_transport.get()), answer)) {
return false;
}
answer->AddContent(media_description_options.mid, offer_content->type,
/*rejected=*/true, std::move(unsupported_answer));
return true;
}
void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() {
audio_sendrecv_codecs_.clear();
all_audio_codecs_.clear();
@@ -2822,6 +2898,10 @@ bool IsDataContent(const ContentInfo* content) {
return IsMediaContentOfType(content, MEDIA_TYPE_DATA);
}
bool IsUnsupportedContent(const ContentInfo* content) {
return IsMediaContentOfType(content, MEDIA_TYPE_UNSUPPORTED);
}
const ContentInfo* GetFirstMediaContent(const ContentInfos& contents,
MediaType media_type) {
for (const ContentInfo& content : contents) {

View File

@@ -283,6 +283,14 @@ class MediaSessionDescriptionFactory {
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const;
bool AddUnsupportedContentForOffer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* current_content,
const SessionDescription* current_description,
SessionDescription* desc,
IceCredentialsIterator* ice_credentials) const;
bool AddAudioContentForAnswer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
@@ -324,6 +332,17 @@ class MediaSessionDescriptionFactory {
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const;
bool AddUnsupportedContentForAnswer(
const MediaDescriptionOptions& media_description_options,
const MediaSessionOptions& session_options,
const ContentInfo* offer_content,
const SessionDescription* offer_description,
const ContentInfo* current_content,
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const;
void ComputeAudioCodecsIntersectionAndUnion();
void ComputeVideoCodecsIntersectionAndUnion();
@@ -356,6 +375,7 @@ bool IsMediaContent(const ContentInfo* content);
bool IsAudioContent(const ContentInfo* content);
bool IsVideoContent(const ContentInfo* content);
bool IsDataContent(const ContentInfo* content);
bool IsUnsupportedContent(const ContentInfo* content);
const ContentInfo* GetFirstMediaContent(const ContentInfos& contents,
MediaType media_type);
const ContentInfo* GetFirstAudioContent(const ContentInfos& contents);

View File

@@ -190,6 +190,8 @@ RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities(
}
case cricket::MEDIA_TYPE_DATA:
return RtpCapabilities();
case cricket::MEDIA_TYPE_UNSUPPORTED:
return RtpCapabilities();
}
// Not reached; avoids compile warning.
FATAL();
@@ -215,6 +217,8 @@ RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities(
}
case cricket::MEDIA_TYPE_DATA:
return RtpCapabilities();
case cricket::MEDIA_TYPE_UNSUPPORTED:
return RtpCapabilities();
}
// Not reached; avoids compile warning.
FATAL();

View File

@@ -17,12 +17,14 @@
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
#include "api/jsep_session_description.h"
#include "api/peer_connection_proxy.h" #include "api/peer_connection_proxy.h"
#include "api/video_codecs/builtin_video_decoder_factory.h" #include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h" #include "api/video_codecs/builtin_video_encoder_factory.h"
#include "pc/peer_connection.h" #include "pc/peer_connection.h"
#include "pc/peer_connection_wrapper.h" #include "pc/peer_connection_wrapper.h"
#include "pc/sdp_utils.h" #include "pc/sdp_utils.h"
#include "pc/webrtc_sdp.h"
#ifdef WEBRTC_ANDROID
#include "pc/test/android_test_initializer.h"
#endif
@@ -841,6 +843,64 @@ TEST_P(PeerConnectionSignalingTest,
observer->error());
}
TEST_P(PeerConnectionSignalingTest, UnsupportedContentType) {
auto caller = CreatePeerConnection();
// Call setRemoteDescription with an m= line we don't understand.
std::string sdp =
"v=0\r\n"
"o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
"s=-\r\n"
"t=0 0\r\n"
"m=bogus 9 FOO 0 8\r\n"
"c=IN IP4 0.0.0.0\r\n"
"a=mid:bogusmid\r\n";
std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description)));
// Assert we respond back with something meaningful.
auto answer = caller->CreateAnswer();
ASSERT_EQ(answer->description()->contents().size(), 1u);
EXPECT_NE(answer->description()
->contents()[0]
.media_description()
->as_unsupported(),
nullptr);
EXPECT_EQ(answer->description()
->contents()[0]
.media_description()
->as_unsupported()
->media_type(),
"bogus");
EXPECT_TRUE(answer->description()->contents()[0].rejected);
EXPECT_EQ(answer->description()->contents()[0].mid(), "bogusmid");
EXPECT_EQ(
answer->description()->contents()[0].media_description()->protocol(),
"FOO");
EXPECT_FALSE(
answer->description()->contents()[0].media_description()->has_codecs());
EXPECT_TRUE(caller->SetLocalDescription(std::move(answer)));
// Assert we keep this in subsequent offers.
auto offer = caller->CreateOffer();
EXPECT_EQ(offer->description()
->contents()[0]
.media_description()
->as_unsupported()
->media_type(),
"bogus");
EXPECT_TRUE(offer->description()->contents()[0].rejected);
EXPECT_EQ(offer->description()->contents()[0].media_description()->protocol(),
"FOO");
EXPECT_EQ(offer->description()->contents()[0].mid(), "bogusmid");
EXPECT_FALSE(
offer->description()->contents()[0].media_description()->has_codecs());
EXPECT_TRUE(caller->SetLocalDescription(std::move(offer)));
}
INSTANTIATE_TEST_SUITE_P(PeerConnectionSignalingTest,
PeerConnectionSignalingTest,
Values(SdpSemantics::kPlanB,

View File

@@ -2597,6 +2597,7 @@ class RTCStatsCollectorTestWithParamKind
case cricket::MEDIA_TYPE_VIDEO:
return "Video";
case cricket::MEDIA_TYPE_DATA:
case cricket::MEDIA_TYPE_UNSUPPORTED:
RTC_NOTREACHED();
return "";
}
@@ -2655,6 +2656,7 @@ class RTCStatsCollectorTestWithParamKind
return;
}
case cricket::MEDIA_TYPE_DATA:
case cricket::MEDIA_TYPE_UNSUPPORTED:
RTC_NOTREACHED();
}
}

View File

@@ -523,6 +523,8 @@ static absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) {
return cricket::CN_VIDEO;
case cricket::MEDIA_TYPE_DATA:
return cricket::CN_DATA;
case cricket::MEDIA_TYPE_UNSUPPORTED:
return "not supported";
}
RTC_NOTREACHED();
return "";
@@ -2999,6 +3001,8 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels(
if (!error.ok()) {
return error;
}
} else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
RTC_LOG(LS_INFO) << "Ignoring unsupported media type";
} else {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Unknown section type.");
@@ -3538,6 +3542,12 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer(
transceiver->internal()->set_mline_index(i);
}
}
} else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
RTC_DCHECK(local_content->rejected);
session_options->media_description_options.push_back(
cricket::MediaDescriptionOptions(media_type, mid,
RtpTransceiverDirection::kInactive,
/*stopped=*/true));
} else {
RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type);
if (had_been_rejected) {
@@ -3701,6 +3711,12 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer(
RtpTransceiverDirection::kInactive,
/*stopped=*/true));
}
} else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
RTC_DCHECK(content.rejected);
session_options->media_description_options.push_back(
cricket::MediaDescriptionOptions(media_type, content.name,
RtpTransceiverDirection::kInactive,
/*stopped=*/true));
} else {
RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type);
// Reject all data sections if data channels are disabled.
@@ -4642,6 +4658,12 @@ void SdpOfferAnswerHandler::GenerateMediaDescriptionOptions(
}
session_options->media_description_options.back().header_extensions =
channel_manager()->GetSupportedVideoRtpHeaderExtensions();
} else if (IsUnsupportedContent(&content)) {
session_options->media_description_options.push_back(
cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_UNSUPPORTED,
content.name,
RtpTransceiverDirection::kInactive,
/*stopped=*/true));
} else {
RTC_DCHECK(IsDataContent(&content));
// If we already have an data m= section, reject this extra one.

View File

@@ -58,6 +58,7 @@ class AudioContentDescription;
class VideoContentDescription;
class RtpDataContentDescription;
class SctpDataContentDescription;
class UnsupportedContentDescription;
// Describes a session description media section. There are subclasses for each
// media type (audio, video, data) that will have additional information.
@@ -86,6 +87,11 @@ class MediaContentDescription {
virtual SctpDataContentDescription* as_sctp() { return nullptr; }
virtual const SctpDataContentDescription* as_sctp() const { return nullptr; }
virtual UnsupportedContentDescription* as_unsupported() { return nullptr; }
virtual const UnsupportedContentDescription* as_unsupported() const {
return nullptr;
}
virtual bool has_codecs() const = 0;
// Copy operator that returns an unique_ptr.
@@ -406,13 +412,37 @@ class SctpDataContentDescription : public MediaContentDescription {
int max_message_size_ = 64 * 1024;
};
class UnsupportedContentDescription : public MediaContentDescription {
public:
explicit UnsupportedContentDescription(const std::string& media_type)
: media_type_(media_type) {}
MediaType type() const override { return MEDIA_TYPE_UNSUPPORTED; }
UnsupportedContentDescription* as_unsupported() override { return this; }
const UnsupportedContentDescription* as_unsupported() const override {
return this;
}
bool has_codecs() const override { return false; }
const std::string& media_type() const { return media_type_; }
private:
UnsupportedContentDescription* CloneInternal() const override {
return new UnsupportedContentDescription(*this);
}
std::string media_type_;
};
// Protocol used for encoding media. This is the "top level" protocol that may
// be wrapped by zero or many transport protocols (UDP, ICE, etc.).
enum class MediaProtocolType {
kRtp, // Section will use the RTP protocol (e.g., for audio or video).
// https://tools.ietf.org/html/rfc3550
kSctp, // Section will use the SCTP protocol (e.g., for a data channel).
// https://tools.ietf.org/html/rfc4960
kOther // Section will use another top protocol which is not
// explicitly supported.
};
// Represents a session description section. Most information about the section
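
A minimal usage sketch of the new description type (illustrative only; it
relies on nothing beyond the class definition and the MediaContentDescription
accessors shown above):

  // Hypothetical example: build and inspect an UnsupportedContentDescription
  // for an unknown "m=bogus ... FOO ..." section.
  auto unsupported =
      std::make_unique<cricket::UnsupportedContentDescription>("bogus");
  unsupported->set_protocol("FOO");
  RTC_DCHECK(unsupported->type() == cricket::MEDIA_TYPE_UNSUPPORTED);
  RTC_DCHECK(!unsupported->has_codecs());  // never carries codecs
  RTC_DCHECK(unsupported->as_unsupported()->media_type() == "bogus");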

View File

@@ -75,6 +75,7 @@ using cricket::StreamParams;
using cricket::StreamParamsVec;
using cricket::TransportDescription;
using cricket::TransportInfo;
using cricket::UnsupportedContentDescription;
using cricket::VideoContentDescription;
using rtc::SocketAddress;
@@ -276,9 +277,6 @@ static bool ParseSessionDescription(const std::string& message,
rtc::SocketAddress* connection_addr,
cricket::SessionDescription* desc,
SdpParseError* error);
static bool ParseGroupAttribute(const std::string& line,
cricket::SessionDescription* desc,
SdpParseError* error);
static bool ParseMediaDescription(
const std::string& message,
const TransportDescription& session_td,
@@ -302,6 +300,9 @@ static bool ParseContent(
TransportDescription* transport,
std::vector<std::unique_ptr<JsepIceCandidate>>* candidates,
SdpParseError* error);
static bool ParseGroupAttribute(const std::string& line,
cricket::SessionDescription* desc,
SdpParseError* error);
static bool ParseSsrcAttribute(const std::string& line,
SsrcInfoVec* ssrc_infos,
int* msid_signaling,
@@ -1346,30 +1347,24 @@ void BuildMediaDescription(const ContentInfo* content_info,
// RFC 4566
// m=<media> <port> <proto> <fmt>
// fmt is a list of payload type numbers that MAY be used in the session.
const char* type = NULL;
std::string type;
if (media_type == cricket::MEDIA_TYPE_AUDIO)
type = kMediaTypeAudio;
else if (media_type == cricket::MEDIA_TYPE_VIDEO)
type = kMediaTypeVideo;
else if (media_type == cricket::MEDIA_TYPE_DATA)
type = kMediaTypeData;
else
RTC_NOTREACHED();
std::string fmt;
if (media_type == cricket::MEDIA_TYPE_VIDEO) {
type = kMediaTypeVideo;
const VideoContentDescription* video_desc = media_desc->as_video();
for (const cricket::VideoCodec& codec : video_desc->codecs()) {
fmt.append(" ");
fmt.append(rtc::ToString(codec.id));
}
} else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
type = kMediaTypeAudio;
const AudioContentDescription* audio_desc = media_desc->as_audio();
for (const cricket::AudioCodec& codec : audio_desc->codecs()) {
fmt.append(" ");
fmt.append(rtc::ToString(codec.id));
}
} else if (media_type == cricket::MEDIA_TYPE_DATA) {
type = kMediaTypeData;
const cricket::SctpDataContentDescription* sctp_data_desc =
media_desc->as_sctp();
if (sctp_data_desc) {
@@ -1388,6 +1383,12 @@ void BuildMediaDescription(const ContentInfo* content_info,
fmt.append(rtc::ToString(codec.id));
}
}
} else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
const UnsupportedContentDescription* unsupported_desc =
media_desc->as_unsupported();
type = unsupported_desc->media_type();
} else {
RTC_NOTREACHED();
}
// The fmt must never be empty. If no codecs are found, set the fmt attribute
// to 0.
@@ -2711,7 +2712,17 @@ bool ParseMediaDescription(
}
} else {
RTC_LOG(LS_WARNING) << "Unsupported media type: " << line;
continue;
auto unsupported_desc =
std::make_unique<UnsupportedContentDescription>(media_type);
if (!ParseContent(message, cricket::MEDIA_TYPE_UNSUPPORTED, mline_index,
protocol, payload_types, pos, &content_name,
&bundle_only, &section_msid_signaling,
unsupported_desc.get(), &transport, candidates,
error)) {
return false;
}
unsupported_desc->set_protocol(protocol);
content = std::move(unsupported_desc);
}
if (!content.get()) {
// ParseContentDescription returns NULL if failed.
@@ -2739,7 +2750,9 @@ bool ParseMediaDescription(
content_rejected = port_rejected;
}
if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) {
if (content->as_unsupported()) {
content_rejected = true;
} else if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) {
content->set_protocol(protocol);
// Set the extmap.
if (!session_extmaps.empty() &&

View File

@@ -2991,7 +2991,8 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsButWrongMediaType) {
JsepSessionDescription jdesc_output(kDummyType);
EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
EXPECT_EQ(0u, jdesc_output.description()->contents().size());
EXPECT_EQ(1u, jdesc_output.description()->contents().size());
EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected);
}
// Helper function to set the max-message-size parameter in the
@@ -4725,21 +4726,32 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCname) {
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithUnsupportedMediaType) {
bool use_sctpmap = true;
AddSctpDataChannel(use_sctpmap);
JsepSessionDescription jdesc(kDummyType);
ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
std::string sdp = kSdpSessionString;
sdp +=
"m=bogus 9 RTP/SAVPF 0 8\r\n"
"c=IN IP4 0.0.0.0\r\n"
"a=mid:bogusmid\r\n";
sdp +=
"m=audio/something 9 RTP/SAVPF 0 8\r\n"
"c=IN IP4 0.0.0.0\r\n"
"a=mid:somethingmid\r\n";
JsepSessionDescription jdesc_output(kDummyType);
EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
EXPECT_EQ(0u, jdesc_output.description()->contents().size());
ASSERT_EQ(2u, jdesc_output.description()->contents().size());
ASSERT_NE(nullptr, jdesc_output.description()
->contents()[0]
.media_description()
->as_unsupported());
ASSERT_NE(nullptr, jdesc_output.description()
->contents()[1]
.media_description()
->as_unsupported());
EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected);
EXPECT_TRUE(jdesc_output.description()->contents()[1].rejected);
EXPECT_EQ(jdesc_output.description()->contents()[0].name, "bogusmid");
EXPECT_EQ(jdesc_output.description()->contents()[1].name, "somethingmid");
}

View File

@@ -62,6 +62,9 @@ const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
break;
case cricket::MEDIA_TYPE_UNSUPPORTED:
RTC_NOTREACHED();
break;
}
if (nativeParameters.clock_rate) {
_clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate];

View File

@@ -21,6 +21,7 @@ typedef NS_ENUM(NSInteger, RTCRtpMediaType) {
RTCRtpMediaTypeAudio,
RTCRtpMediaTypeVideo,
RTCRtpMediaTypeData,
RTCRtpMediaTypeUnsupported,
};
@class RTC_OBJC_TYPE(RTCRtpReceiver);

View File

@@ -125,6 +125,8 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
return RTCRtpMediaTypeVideo;
case cricket::MEDIA_TYPE_DATA:
return RTCRtpMediaTypeData;
case cricket::MEDIA_TYPE_UNSUPPORTED:
return RTCRtpMediaTypeUnsupported;
}
}
@@ -136,6 +138,8 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
return cricket::MEDIA_TYPE_VIDEO;
case RTCRtpMediaTypeData:
return cricket::MEDIA_TYPE_DATA;
case RTCRtpMediaTypeUnsupported:
return cricket::MEDIA_TYPE_UNSUPPORTED;
}
}
@@ -147,6 +151,8 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
return @"VIDEO";
case RTCRtpMediaTypeData:
return @"DATA";
case RTCRtpMediaTypeUnsupported:
return @"UNSUPPORTED";
}
}