Split audio and video channels into Send and Receive APIs.

The implementation here contains a number of changes that force callers
of the "channel" functions to go through specific interfaces (for
example AsSendChannel() and AsReceiveChannel()) rather than letting C++
resolve the calls implicitly; this should go away once there is no
longer a common implementation class behind those interfaces.

Bug: webrtc:13931
Change-Id: Ic4e279528a341bc0a0e88d2e1e76c90bc43a1035
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/287640
Reviewed-by: Florent Castelli <orphis@webrtc.org>
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#38888}
Author: Harald Alvestrand, 2022-12-13 12:57:24 +00:00
Committed by: WebRTC LUCI CQ
Parent: 1c1ff7293b
Commit: c0d44d9d63
30 changed files with 1306 additions and 749 deletions
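
The caller-side effect of the split, as seen in the test changes further down,
is that code which used to call send- or receive-side functions directly on the
shared channel object now has to pick an interface first. The sketch below is
illustrative only: the helper name ConfigureStreamsSketch is hypothetical,
while AsSendChannel(), AsReceiveChannel(), AddSendStream(), AddRecvStream() and
StreamParams::CreateLegacy() are the calls that appear in this CL.

    #include "media/base/media_channel_impl.h"
    #include "media/base/stream_params.h"

    // Illustrative helper, not part of the CL.
    void ConfigureStreamsSketch(cricket::VoiceMediaChannel* channel,
                                const cricket::StreamParams& send_sp,
                                uint32_t recv_ssrc) {
      // Previously: channel->AddSendStream(send_sp);
      // Now the send-side interface is selected explicitly.
      channel->AsSendChannel()->AddSendStream(send_sp);
      // Receive-side operations go through the receive interface.
      channel->AsReceiveChannel()->AddRecvStream(
          cricket::StreamParams::CreateLegacy(recv_ssrc));
    }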

View File

@@ -60,11 +60,13 @@ rtc_library("rtc_media_base") {
     "../api:rtp_sender_interface",
     "../api:scoped_refptr",
     "../api:sequence_checker",
+    "../api:transport_api",
     "../api/audio:audio_frame_processor",
     "../api/audio_codecs:audio_codecs_api",
     "../api/crypto:frame_decryptor_interface",
     "../api/crypto:frame_encryptor_interface",
     "../api/crypto:options",
+    "../api/task_queue",
     "../api/task_queue:pending_task_safety_flag",
     "../api/transport:datagram_transport_interface",
     "../api/transport:stun_types",
@@ -94,6 +96,7 @@ rtc_library("rtc_media_base") {
     "../rtc_base:socket",
     "../rtc_base:stringutils",
     "../rtc_base:timeutils",
+    "../rtc_base/network:sent_packet",
     "../rtc_base/synchronization:mutex",
     "../rtc_base/system:file_wrapper",
     "../rtc_base/system:no_unique_address",
@@ -105,6 +108,7 @@ rtc_library("rtc_media_base") {
   absl_deps = [
     "//third_party/abseil-cpp/absl/algorithm:container",
     "//third_party/abseil-cpp/absl/container:inlined_vector",
+    "//third_party/abseil-cpp/absl/functional:any_invocable",
     "//third_party/abseil-cpp/absl/strings",
     "//third_party/abseil-cpp/absl/types:optional",
   ]
@@ -115,8 +119,9 @@ rtc_library("rtc_media_base") {
     "base/codec.cc",
     "base/codec.h",
     "base/delayable.h",
-    "base/media_channel.cc",
     "base/media_channel.h",
+    "base/media_channel_impl.cc",
+    "base/media_channel_impl.h",
     "base/media_constants.cc",
     "base/media_constants.h",
     "base/media_engine.cc",

View File

@@ -238,6 +238,24 @@ class RtpHelper : public Base {
     rtcp_packets_.push_back(std::string(packet->cdata<char>(), packet->size()));
   }
 
+  // Stuff that deals with encryptors, transformers and the like
+  void SetFrameEncryptor(uint32_t ssrc,
+                         rtc::scoped_refptr<webrtc::FrameEncryptorInterface>
+                             frame_encryptor) override {}
+  void SetEncoderToPacketizerFrameTransformer(
+      uint32_t ssrc,
+      rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+      override {}
+  void SetFrameDecryptor(uint32_t ssrc,
+                         rtc::scoped_refptr<webrtc::FrameDecryptorInterface>
+                             frame_decryptor) override {}
+  void SetDepacketizerToDecoderFrameTransformer(
+      uint32_t ssrc,
+      rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+      override {}
+
  protected:
   bool MuteStream(uint32_t ssrc, bool mute) {
     if (!HasSendStream(ssrc) && ssrc != 0) {

View File

@@ -31,7 +31,7 @@
 namespace cricket {
 
 // Fake NetworkInterface that sends/receives RTP/RTCP packets.
-class FakeNetworkInterface : public MediaChannel::NetworkInterface {
+class FakeNetworkInterface : public MediaChannelNetworkInterface {
  public:
   FakeNetworkInterface()
       : thread_(rtc::Thread::Current()),

View File

@@ -75,6 +75,10 @@ class AudioSource;
 class VideoCapturer;
 struct RtpHeader;
 struct VideoFormat;
+class VideoMediaSendChannelInterface;
+class VideoMediaReceiveChannelInterface;
+class VoiceMediaSendChannelInterface;
+class VoiceMediaReceiveChannelInterface;
 
 const int kScreencastDefaultFps = 5;
@@ -163,6 +167,23 @@ struct VideoOptions {
   }
 };
 
+class MediaChannelNetworkInterface {
+ public:
+  enum SocketType { ST_RTP, ST_RTCP };
+  virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+                          const rtc::PacketOptions& options) = 0;
+  virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+                        const rtc::PacketOptions& options) = 0;
+  virtual int SetOption(SocketType type,
+                        rtc::Socket::Option opt,
+                        int option) = 0;
+  virtual ~MediaChannelNetworkInterface() {}
+};
+
+// Functions shared across all MediaChannel interfaces.
+// Because there are implementation types that implement multiple
+// interfaces, this is not a base class (no diamond inheritance).
+template <class T>
 class MediaBaseChannelInterface {
  public:
   virtual ~MediaBaseChannelInterface() = default;
@@ -184,13 +205,29 @@ class MediaBaseChannelInterface {
   virtual void OnNetworkRouteChanged(
       absl::string_view transport_name,
       const rtc::NetworkRoute& network_route) = 0;
+
+  // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285.
+  // Set to true if it's allowed to mix one- and two-byte RTP header extensions
+  // in the same stream. The setter and getter must only be called from
+  // worker_thread.
+  virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0;
+  virtual bool ExtmapAllowMixed() const = 0;
 };
 
-class MediaSendChannelInterface : virtual public MediaBaseChannelInterface {
+class MediaSendChannelInterface
+    : public MediaBaseChannelInterface<MediaSendChannelInterface> {
  public:
   virtual ~MediaSendChannelInterface() = default;
-  virtual cricket::MediaType media_type() const = 0;
+
+  virtual VideoMediaSendChannelInterface* AsVideoSendChannel() {
+    RTC_CHECK_NOTREACHED();
+    return nullptr;
+  }
+  virtual VoiceMediaSendChannelInterface* AsVoiceSendChannel() {
+    RTC_CHECK_NOTREACHED();
+    return nullptr;
+  }
+
   // Creates a new outgoing media stream with SSRCs and CNAME as described
   // by sp.
   virtual bool AddSendStream(const StreamParams& sp) = 0;
@@ -225,10 +262,20 @@ class MediaSendChannelInterface : virtual public MediaBaseChannelInterface {
   virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
 };
 
-class MediaReceiveChannelInterface : virtual public MediaBaseChannelInterface {
+class MediaReceiveChannelInterface
+    : public MediaBaseChannelInterface<MediaReceiveChannelInterface>,
+      public Delayable {
  public:
   virtual ~MediaReceiveChannelInterface() = default;
+
+  virtual VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() {
+    RTC_CHECK_NOTREACHED();
+    return nullptr;
+  }
+  virtual VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() {
+    RTC_CHECK_NOTREACHED();
+    return nullptr;
+  }
+
   // Creates a new incoming media stream with SSRCs, CNAME as described
   // by sp. In the case of a sp without SSRCs, the unsignaled sp is cached
   // to be used later for unsignaled streams received.
@@ -267,113 +314,6 @@ class MediaReceiveChannelInterface : virtual public MediaBaseChannelInterface {
       frame_transformer) = 0;
 };
 
-class MediaChannel : public MediaSendChannelInterface,
-                     public MediaReceiveChannelInterface {
- public:
-  class NetworkInterface {
-   public:
-    enum SocketType { ST_RTP, ST_RTCP };
-    virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
-                            const rtc::PacketOptions& options) = 0;
-    virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
-                          const rtc::PacketOptions& options) = 0;
-    virtual int SetOption(SocketType type,
-                          rtc::Socket::Option opt,
-                          int option) = 0;
-    virtual ~NetworkInterface() {}
-  };
-
-  explicit MediaChannel(webrtc::TaskQueueBase* network_thread,
-                        bool enable_dscp = false);
-  virtual ~MediaChannel();
-
-  // Sets the abstract interface class for sending RTP/RTCP data.
-  virtual void SetInterface(NetworkInterface* iface);
-  // Returns the absolute sendtime extension id value from media channel.
-  virtual int GetRtpSendTimeExtnId() const;
-  // Enable network condition based codec switching.
-  virtual void SetVideoCodecSwitchingEnabled(bool enabled);
-
-  // Base method to send packet using NetworkInterface.
-  bool SendPacket(rtc::CopyOnWriteBuffer* packet,
-                  const rtc::PacketOptions& options);
-  bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
-                const rtc::PacketOptions& options);
-  int SetOption(NetworkInterface::SocketType type,
-                rtc::Socket::Option opt,
-                int option);
-
-  // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285.
-  // Set to true if it's allowed to mix one- and two-byte RTP header extensions
-  // in the same stream. The setter and getter must only be called from
-  // worker_thread.
-  void SetExtmapAllowMixed(bool extmap_allow_mixed);
-  bool ExtmapAllowMixed() const;
-
-  // Returns `true` if a non-null NetworkInterface pointer is held.
-  // Must be called on the network thread.
-  bool HasNetworkInterface() const;
-
-  void SetFrameEncryptor(uint32_t ssrc,
-                         rtc::scoped_refptr<webrtc::FrameEncryptorInterface>
-                             frame_encryptor) override;
-  void SetFrameDecryptor(uint32_t ssrc,
-                         rtc::scoped_refptr<webrtc::FrameDecryptorInterface>
-                             frame_decryptor) override;
-
-  void SetEncoderToPacketizerFrameTransformer(
-      uint32_t ssrc,
-      rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
-      override;
-  void SetDepacketizerToDecoderFrameTransformer(
-      uint32_t ssrc,
-      rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
-      override;
-
- protected:
-  int SetOptionLocked(NetworkInterface::SocketType type,
-                      rtc::Socket::Option opt,
-                      int option) RTC_RUN_ON(network_thread_);
-
-  bool DscpEnabled() const;
-
-  // This is the DSCP value used for both RTP and RTCP channels if DSCP is
-  // enabled. It can be changed at any time via `SetPreferredDscp`.
-  rtc::DiffServCodePoint PreferredDscp() const;
-  void SetPreferredDscp(rtc::DiffServCodePoint new_dscp);
-
-  rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_safety();
-
-  // Utility implementation for derived classes (video/voice) that applies
-  // the packet options and passes the data onwards to `SendPacket`.
-  void SendRtp(const uint8_t* data,
-               size_t len,
-               const webrtc::PacketOptions& options);
-  void SendRtcp(const uint8_t* data, size_t len);
-
- private:
-  // Apply the preferred DSCP setting to the underlying network interface RTP
-  // and RTCP channels. If DSCP is disabled, then apply the default DSCP value.
-  void UpdateDscp() RTC_RUN_ON(network_thread_);
-
-  bool DoSendPacket(rtc::CopyOnWriteBuffer* packet,
-                    bool rtcp,
-                    const rtc::PacketOptions& options);
-
-  const bool enable_dscp_;
-  const rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_safety_
-      RTC_PT_GUARDED_BY(network_thread_);
-  webrtc::TaskQueueBase* const network_thread_;
-  NetworkInterface* network_interface_ RTC_GUARDED_BY(network_thread_) =
-      nullptr;
-  rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) =
-      rtc::DSCP_DEFAULT;
-  bool extmap_allow_mixed_ = false;
-};
-
 // The stats information is structured as follows:
 // Media are represented by either MediaSenderInfo or MediaReceiverInfo.
 // Media contains a vector of SSRC infos that are exclusively used by this
@@ -458,6 +398,7 @@ struct MediaSenderInfo {
 struct MediaReceiverInfo {
   MediaReceiverInfo();
   ~MediaReceiverInfo();
+
   void add_ssrc(const SsrcReceiverInfo& stat) { local_stats.push_back(stat); }
   // Temporary utility function for call sites that only provide SSRC.
   // As more info is added into SsrcSenderInfo, this function should go away.
@@ -850,24 +791,9 @@ struct AudioSendParameters : RtpSendParameters<AudioCodec> {
 struct AudioRecvParameters : RtpParameters<AudioCodec> {};
 
-class VoiceMediaChannel : public MediaChannel, public Delayable {
+class VoiceMediaSendChannelInterface : public MediaSendChannelInterface {
  public:
-  VoiceMediaChannel(webrtc::TaskQueueBase* network_thread,
-                    bool enable_dscp = false)
-      : MediaChannel(network_thread, enable_dscp) {}
-  ~VoiceMediaChannel() override {}
-
-  cricket::MediaType media_type() const override;
   virtual bool SetSendParameters(const AudioSendParameters& params) = 0;
-  virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0;
-  // Get the receive parameters for the incoming stream identified by `ssrc`.
-  virtual webrtc::RtpParameters GetRtpReceiveParameters(
-      uint32_t ssrc) const = 0;
-  // Retrieve the receive parameters for the default receive
-  // stream, which is used when SSRCs are not signaled.
-  virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0;
-  // Starts or stops playout of received audio.
-  virtual void SetPlayout(bool playout) = 0;
   // Starts or stops sending (and potentially capture) of local audio.
   virtual void SetSend(bool send) = 0;
   // Configure stream for sending.
@@ -875,10 +801,6 @@ class VoiceMediaChannel : public MediaChannel, public Delayable {
                                bool enable,
                                const AudioOptions* options,
                                AudioSource* source) = 0;
-  // Set speaker output volume of the specified ssrc.
-  virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0;
-  // Set speaker output volume for future unsignaled streams.
-  virtual bool SetDefaultOutputVolume(double volume) = 0;
   // Returns if the telephone-event has been negotiated.
   virtual bool CanInsertDtmf() = 0;
   // Send a DTMF `event`. The DTMF out-of-band signal will be used.
@@ -886,17 +808,29 @@ class VoiceMediaChannel : public MediaChannel, public Delayable {
   // The valid value for the `event` are 0 to 15 which corresponding to
   // DTMF event 0-9, *, #, A-D.
   virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0;
-  // Gets quality stats for the channel.
-  virtual bool GetStats(VoiceMediaInfo* info,
-                        bool get_and_clear_legacy_stats) = 0;
+};
+
+class VoiceMediaReceiveChannelInterface : public MediaReceiveChannelInterface {
+ public:
+  virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0;
+  // Get the receive parameters for the incoming stream identified by `ssrc`.
+  virtual webrtc::RtpParameters GetRtpReceiveParameters(
+      uint32_t ssrc) const = 0;
+  virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
+  // Retrieve the receive parameters for the default receive
+  // stream, which is used when SSRCs are not signaled.
+  virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0;
+  // Starts or stops playout of received audio.
+  virtual void SetPlayout(bool playout) = 0;
+  // Set speaker output volume of the specified ssrc.
+  virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0;
+  // Set speaker output volume for future unsignaled streams.
+  virtual bool SetDefaultOutputVolume(double volume) = 0;
   virtual void SetRawAudioSink(
       uint32_t ssrc,
       std::unique_ptr<webrtc::AudioSinkInterface> sink) = 0;
   virtual void SetDefaultRawAudioSink(
       std::unique_ptr<webrtc::AudioSinkInterface> sink) = 0;
-  virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
 };
 
 // TODO(deadbeef): Rename to VideoSenderParameters, since they're intended to
@@ -920,22 +854,9 @@ struct VideoSendParameters : RtpSendParameters<VideoCodec> {
 // encapsulate all the parameters needed for a video RtpReceiver.
 struct VideoRecvParameters : RtpParameters<VideoCodec> {};
 
-class VideoMediaChannel : public MediaChannel, public Delayable {
+class VideoMediaSendChannelInterface : public MediaSendChannelInterface {
  public:
-  explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread,
-                             bool enable_dscp = false)
-      : MediaChannel(network_thread, enable_dscp) {}
-  ~VideoMediaChannel() override {}
-
-  cricket::MediaType media_type() const override;
   virtual bool SetSendParameters(const VideoSendParameters& params) = 0;
-  virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0;
-  // Get the receive parameters for the incoming stream identified by `ssrc`.
-  virtual webrtc::RtpParameters GetRtpReceiveParameters(
-      uint32_t ssrc) const = 0;
-  // Retrieve the receive parameters for the default receive
-  // stream, which is used when SSRCs are not signaled.
-  virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0;
   // Gets the currently set codecs/payload types to be used for outgoing media.
   virtual bool GetSendCodec(VideoCodec* send_codec) = 0;
   // Starts or stops transmission (and potentially capture) of local video.
@@ -946,37 +867,39 @@ class VideoMediaChannel : public MediaChannel, public Delayable {
       uint32_t ssrc,
       const VideoOptions* options,
       rtc::VideoSourceInterface<webrtc::VideoFrame>* source) = 0;
+  // Cause generation of a keyframe for `ssrc` on a sending channel.
+  virtual void GenerateSendKeyFrame(uint32_t ssrc,
+                                    const std::vector<std::string>& rids) = 0;
+  // Enable network condition based codec switching.
+  virtual void SetVideoCodecSwitchingEnabled(bool enabled) = 0;
+};
+
+class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface {
+ public:
+  virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0;
+  // Get the receive parameters for the incoming stream identified by `ssrc`.
+  virtual webrtc::RtpParameters GetRtpReceiveParameters(
+      uint32_t ssrc) const = 0;
+  // Retrieve the receive parameters for the default receive
+  // stream, which is used when SSRCs are not signaled.
+  virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0;
   // Sets the sink object to be used for the specified stream.
   virtual bool SetSink(uint32_t ssrc,
                        rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) = 0;
   // The sink is used for the 'default' stream.
   virtual void SetDefaultSink(
       rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) = 0;
-  // This fills the "bitrate parts" (rtx, video bitrate) of the
-  // BandwidthEstimationInfo, since that part that isn't possible to get
-  // through webrtc::Call::GetStats, as they are statistics of the send
-  // streams.
-  // TODO(holmer): We should change this so that either BWE graphs doesn't
-  // need access to bitrates of the streams, or change the (RTC)StatsCollector
-  // so that it's getting the send stream stats separately by calling
-  // GetStats(), and merges with BandwidthEstimationInfo by itself.
-  virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0;
-  // Gets quality stats for the channel.
-  virtual bool GetStats(VideoMediaInfo* info) = 0;
+  // Request generation of a keyframe for `ssrc` on a receiving channel via
+  // RTCP feedback.
+  virtual void RequestRecvKeyFrame(uint32_t ssrc) = 0;
+  virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
   // Set recordable encoded frame callback for `ssrc`
   virtual void SetRecordableEncodedFrameCallback(
       uint32_t ssrc,
       std::function<void(const webrtc::RecordableEncodedFrame&)> callback) = 0;
   // Clear recordable encoded frame callback for `ssrc`
   virtual void ClearRecordableEncodedFrameCallback(uint32_t ssrc) = 0;
-  // Request generation of a keyframe for `ssrc` on a receiving channel via
-  // RTCP feedback.
-  virtual void RequestRecvKeyFrame(uint32_t ssrc) = 0;
-  // Cause generation of a keyframe for `ssrc` on a sending channel.
-  virtual void GenerateSendKeyFrame(uint32_t ssrc,
-                                    const std::vector<std::string>& rids) = 0;
-  virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
 };
 
 // Info about data received in DataMediaChannel. For use in
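
The `template <class T> MediaBaseChannelInterface` introduced above keeps the
functions shared by the send and receive interfaces in a class template rather
than in a single common base class, so an implementation type that derives from
both interfaces does not pick up the same base twice. The sketch below shows
the shape of that pattern under simplified, hypothetical names (the *Sketch
classes and the single OnReadyToSend method stand in for the real
declarations); it is not the CL's code.

    // Simplified sketch with hypothetical names.
    template <class T>
    class MediaBaseChannelInterfaceSketch {
     public:
      virtual ~MediaBaseChannelInterfaceSketch() = default;
      virtual void OnReadyToSend(bool ready) = 0;  // A "shared" function.
    };

    class SendInterfaceSketch
        : public MediaBaseChannelInterfaceSketch<SendInterfaceSketch> {};
    class ReceiveInterfaceSketch
        : public MediaBaseChannelInterfaceSketch<ReceiveInterfaceSketch> {};

    // One implementation object serves both roles. The two template
    // instantiations are distinct base types, so no virtual inheritance is
    // needed, and a single override satisfies the identically named pure
    // virtual in each base.
    class ChannelImplSketch : public SendInterfaceSketch,
                              public ReceiveInterfaceSketch {
     public:
      void OnReadyToSend(bool ready) override {}
    };

Converting a ChannelImplSketch to either SendInterfaceSketch* or
ReceiveInterfaceSketch* is unambiguous, which is what lets the MediaChannel
implementation class in this CL serve as both MediaSendChannelInterface and
MediaReceiveChannelInterface.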

View File

@@ -8,9 +8,26 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
+
+#include <map>
+#include <string>
+#include <utility>
 
+#include "absl/functional/any_invocable.h"
+#include "api/audio_options.h"
+#include "api/media_stream_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_sender_interface.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_timing.h"
+#include "common_video/include/quality_limitation_reason.h"
+#include "media/base/codec.h"
+#include "media/base/media_channel.h"
 #include "media/base/rtp_utils.h"
+#include "media/base/stream_params.h"
+#include "modules/rtp_rtcp/include/report_block_data.h"
+#include "rtc_base/checks.h"
 
 namespace webrtc {
@@ -47,7 +64,7 @@ MediaChannel::~MediaChannel() {
   RTC_DCHECK(!network_interface_);
 }
 
-void MediaChannel::SetInterface(NetworkInterface* iface) {
+void MediaChannel::SetInterface(MediaChannelNetworkInterface* iface) {
   RTC_DCHECK_RUN_ON(network_thread_);
   iface ? network_safety_->SetAlive() : network_safety_->SetNotAlive();
   network_interface_ = iface;
@@ -70,8 +87,6 @@ void MediaChannel::SetFrameDecryptor(
   // Placeholder should be pure virtual once internal supports it.
 }
 
-void MediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {}
-
 bool MediaChannel::SendPacket(rtc::CopyOnWriteBuffer* packet,
                               const rtc::PacketOptions& options) {
   return DoSendPacket(packet, false, options);
@@ -82,7 +97,7 @@ bool MediaChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet,
   return DoSendPacket(packet, true, options);
 }
 
-int MediaChannel::SetOption(NetworkInterface::SocketType type,
+int MediaChannel::SetOption(MediaChannelNetworkInterface::SocketType type,
                             rtc::Socket::Option opt,
                             int option) {
   RTC_DCHECK_RUN_ON(network_thread_);
@@ -114,7 +129,7 @@ void MediaChannel::SetDepacketizerToDecoderFrameTransformer(
     uint32_t ssrc,
     rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {}
 
-int MediaChannel::SetOptionLocked(NetworkInterface::SocketType type,
+int MediaChannel::SetOptionLocked(MediaChannelNetworkInterface::SocketType type,
                                   rtc::Socket::Option opt,
                                   int option) {
   if (!network_interface_)
@@ -158,10 +173,11 @@ rtc::scoped_refptr<PendingTaskSafetyFlag> MediaChannel::network_safety() {
 void MediaChannel::UpdateDscp() {
   rtc::DiffServCodePoint value =
       enable_dscp_ ? preferred_dscp_ : rtc::DSCP_DEFAULT;
-  int ret =
-      SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value);
+  int ret = SetOptionLocked(MediaChannelNetworkInterface::ST_RTP,
+                            rtc::Socket::OPT_DSCP, value);
   if (ret == 0)
-    SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, value);
+    SetOptionLocked(MediaChannelNetworkInterface::ST_RTCP,
+                    rtc::Socket::OPT_DSCP, value);
 }
 
 bool MediaChannel::DoSendPacket(rtc::CopyOnWriteBuffer* packet,
@@ -273,4 +289,6 @@ cricket::MediaType VideoMediaChannel::media_type() const {
   return cricket::MediaType::MEDIA_TYPE_VIDEO;
 }
 
+void VideoMediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {}
+
 } // namespace cricket

View File

@@ -0,0 +1,242 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_
+#define MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "absl/strings/string_view.h"
+#include "api/call/transport.h"
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/crypto/frame_encryptor_interface.h"
+#include "api/frame_transformer_interface.h"
+#include "api/media_types.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_base.h"
+#include "media/base/media_channel.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/dscp.h"
+#include "rtc_base/network/sent_packet.h"
+#include "rtc_base/network_route.h"
+#include "rtc_base/socket.h"
+#include "rtc_base/thread_annotations.h"
+
+// This file contains the base classes for classes that implement
+// the MediaChannel interfaces.
+// These implementation classes used to be the exposed interface names,
+// but this is in the process of being changed.
+// TODO(bugs.webrtc.org/13931): Consider removing these classes.
+
+namespace cricket {
+
+class VoiceMediaChannel;
+class VideoMediaChannel;
+
+class MediaChannel : public MediaSendChannelInterface,
+                     public MediaReceiveChannelInterface {
+ public:
+  explicit MediaChannel(webrtc::TaskQueueBase* network_thread,
+                        bool enable_dscp = false);
+  virtual ~MediaChannel();
+
+  // Downcasting to the implemented interfaces.
+  MediaSendChannelInterface* AsSendChannel() { return this; }
+  MediaReceiveChannelInterface* AsReceiveChannel() { return this; }
+
+  // Downcasting to the subclasses.
+  virtual VideoMediaChannel* AsVideoChannel() {
+    RTC_CHECK_NOTREACHED();
+    return nullptr;
+  }
+  virtual VoiceMediaChannel* AsVoiceChannel() {
+    RTC_CHECK_NOTREACHED();
+    return nullptr;
+  }
+
+  // Must declare the methods inherited from the base interface template,
+  // even when abstract, to tell the compiler that all instances of the name
+  // referred to by subclasses of this share the same implementation.
+  cricket::MediaType media_type() const override = 0;
+  void OnPacketReceived(rtc::CopyOnWriteBuffer packet,
+                        int64_t packet_time_us) override = 0;
+  void OnPacketSent(const rtc::SentPacket& sent_packet) override = 0;
+  void OnReadyToSend(bool ready) override = 0;
+  void OnNetworkRouteChanged(absl::string_view transport_name,
+                             const rtc::NetworkRoute& network_route) override =
+      0;
+
+  // Sets the abstract interface class for sending RTP/RTCP data.
+  virtual void SetInterface(MediaChannelNetworkInterface* iface);
+  // Returns the absolute sendtime extension id value from media channel.
+  virtual int GetRtpSendTimeExtnId() const;
+
+  // Base method to send packet using MediaChannelNetworkInterface.
+  bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+                  const rtc::PacketOptions& options);
+  bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+                const rtc::PacketOptions& options);
+  int SetOption(MediaChannelNetworkInterface::SocketType type,
+                rtc::Socket::Option opt,
+                int option);
+
+  // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285.
+  // Set to true if it's allowed to mix one- and two-byte RTP header extensions
+  // in the same stream. The setter and getter must only be called from
+  // worker_thread.
+  void SetExtmapAllowMixed(bool extmap_allow_mixed) override;
+  bool ExtmapAllowMixed() const override;
+
+  // Returns `true` if a non-null MediaChannelNetworkInterface pointer is held.
+  // Must be called on the network thread.
+  bool HasNetworkInterface() const;
+
+  void SetFrameEncryptor(uint32_t ssrc,
+                         rtc::scoped_refptr<webrtc::FrameEncryptorInterface>
+                             frame_encryptor) override;
+  void SetFrameDecryptor(uint32_t ssrc,
+                         rtc::scoped_refptr<webrtc::FrameDecryptorInterface>
+                             frame_decryptor) override;
+
+  void SetEncoderToPacketizerFrameTransformer(
+      uint32_t ssrc,
+      rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+      override;
+  void SetDepacketizerToDecoderFrameTransformer(
+      uint32_t ssrc,
+      rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+      override;
+
+ protected:
+  int SetOptionLocked(MediaChannelNetworkInterface::SocketType type,
+                      rtc::Socket::Option opt,
+                      int option) RTC_RUN_ON(network_thread_);
+
+  bool DscpEnabled() const;
+
+  // This is the DSCP value used for both RTP and RTCP channels if DSCP is
+  // enabled. It can be changed at any time via `SetPreferredDscp`.
+  rtc::DiffServCodePoint PreferredDscp() const;
+  void SetPreferredDscp(rtc::DiffServCodePoint new_dscp);
+
+  rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_safety();
+
+  // Utility implementation for derived classes (video/voice) that applies
+  // the packet options and passes the data onwards to `SendPacket`.
+  void SendRtp(const uint8_t* data,
+               size_t len,
+               const webrtc::PacketOptions& options);
+  void SendRtcp(const uint8_t* data, size_t len);
+
+ private:
+  // Apply the preferred DSCP setting to the underlying network interface RTP
+  // and RTCP channels. If DSCP is disabled, then apply the default DSCP value.
+  void UpdateDscp() RTC_RUN_ON(network_thread_);
+
+  bool DoSendPacket(rtc::CopyOnWriteBuffer* packet,
+                    bool rtcp,
+                    const rtc::PacketOptions& options);
+
+  const bool enable_dscp_;
+  const rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_safety_
+      RTC_PT_GUARDED_BY(network_thread_);
+  webrtc::TaskQueueBase* const network_thread_;
+  MediaChannelNetworkInterface* network_interface_
+      RTC_GUARDED_BY(network_thread_) = nullptr;
+  rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) =
+      rtc::DSCP_DEFAULT;
+  bool extmap_allow_mixed_ = false;
+};
+
+// Base class for implementation classes
+class VideoMediaChannel : public MediaChannel,
+                          public VideoMediaSendChannelInterface,
+                          public VideoMediaReceiveChannelInterface {
+ public:
+  explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread,
+                             bool enable_dscp = false)
+      : MediaChannel(network_thread, enable_dscp) {}
+  ~VideoMediaChannel() override {}
+
+  // Downcasting to the implemented interfaces.
+  VideoMediaSendChannelInterface* AsVideoSendChannel() override { return this; }
+  VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override {
+    return this;
+  }
+
+  cricket::MediaType media_type() const override;
+
+  // Downcasting to the subclasses.
+  VideoMediaChannel* AsVideoChannel() override { return this; }
+
+  void SetExtmapAllowMixed(bool mixed) override {
+    MediaChannel::SetExtmapAllowMixed(mixed);
+  }
+  bool ExtmapAllowMixed() const override {
+    return MediaChannel::ExtmapAllowMixed();
+  }
+
+  // This fills the "bitrate parts" (rtx, video bitrate) of the
+  // BandwidthEstimationInfo, since that part that isn't possible to get
+  // through webrtc::Call::GetStats, as they are statistics of the send
+  // streams.
+  // TODO(holmer): We should change this so that either BWE graphs doesn't
+  // need access to bitrates of the streams, or change the (RTC)StatsCollector
+  // so that it's getting the send stream stats separately by calling
+  // GetStats(), and merges with BandwidthEstimationInfo by itself.
+  virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0;
+  // Gets quality stats for the channel.
+  virtual bool GetStats(VideoMediaInfo* info) = 0;
+  // Enable network condition based codec switching.
+  void SetVideoCodecSwitchingEnabled(bool enabled) override;
+};
+
+// Base class for implementation classes
+class VoiceMediaChannel : public MediaChannel,
+                          public VoiceMediaSendChannelInterface,
+                          public VoiceMediaReceiveChannelInterface {
+ public:
+  MediaType media_type() const override;
+  VoiceMediaChannel(webrtc::TaskQueueBase* network_thread,
+                    bool enable_dscp = false)
+      : MediaChannel(network_thread, enable_dscp) {}
+  ~VoiceMediaChannel() override {}
+
+  // Downcasting to the implemented interfaces.
+  VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; }
+  VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override {
+    return this;
+  }
+  VoiceMediaChannel* AsVoiceChannel() override { return this; }
+
+  void SetExtmapAllowMixed(bool mixed) override {
+    MediaChannel::SetExtmapAllowMixed(mixed);
+  }
+  bool ExtmapAllowMixed() const override {
+    return MediaChannel::ExtmapAllowMixed();
+  }
+
+  // Gets quality stats for the channel.
+  virtual bool GetStats(VoiceMediaInfo* info,
+                        bool get_and_clear_legacy_stats) = 0;
+};
+
+}  // namespace cricket
+
+#endif  // MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_
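
With the split, receive-side audio controls such as SetPlayout() and
SetOutputVolume() live on VoiceMediaReceiveChannelInterface, which the
VoiceMediaChannel implementation class above exposes through
AsVoiceReceiveChannel(). A small usage sketch follows; the helper name
EnablePlayoutSketch is hypothetical, the calls are the ones declared in this
CL.

    #include "media/base/media_channel_impl.h"

    // Illustrative helper, not part of the CL.
    void EnablePlayoutSketch(cricket::VoiceMediaChannel* channel,
                             uint32_t ssrc) {
      cricket::VoiceMediaReceiveChannelInterface* receive =
          channel->AsVoiceReceiveChannel();
      receive->SetPlayout(true);            // Start playout of received audio.
      receive->SetOutputVolume(ssrc, 1.0);  // bool result ignored in the sketch.
    }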

View File

@@ -24,6 +24,7 @@
 #include "call/audio_state.h"
 #include "media/base/codec.h"
 #include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
 #include "media/base/media_config.h"
 #include "media/base/video_common.h"
 #include "rtc_base/system/file_wrapper.h"

View File

@@ -1887,12 +1887,12 @@ void WebRtcVideoChannel::OnNetworkRouteChanged(
       }));
 }
 
-void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
+void WebRtcVideoChannel::SetInterface(MediaChannelNetworkInterface* iface) {
   RTC_DCHECK_RUN_ON(&network_thread_checker_);
   MediaChannel::SetInterface(iface);
   // Set the RTP recv/send buffer to a bigger size.
-  MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_RCVBUF,
-                          kVideoRtpRecvBufferSize);
+  MediaChannel::SetOption(MediaChannelNetworkInterface::ST_RTP,
+                          rtc::Socket::OPT_RCVBUF, kVideoRtpRecvBufferSize);
 
   // Speculative change to increase the outbound socket buffer size.
   // In b/15152257, we are seeing a significant number of packets discarded
@@ -1909,8 +1909,8 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
     send_buffer_size = kVideoRtpSendBufferSize;
   }
 
-  MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_SNDBUF,
-                          send_buffer_size);
+  MediaChannel::SetOption(MediaChannelNetworkInterface::ST_RTP,
+                          rtc::Socket::OPT_SNDBUF, send_buffer_size);
 }
 
 void WebRtcVideoChannel::SetFrameDecryptor(
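
The network-facing side of a channel is now typed as the top-level
MediaChannelNetworkInterface (declared in media/base/media_channel.h in this
CL) rather than the old nested MediaChannel::NetworkInterface. A minimal no-op
implementation might look like the sketch below; the class name
NullNetworkInterfaceSketch is illustrative, while the three pure-virtual
methods are the ones shown in the diff above.

    #include "media/base/media_channel.h"

    // Illustrative no-op transport, not part of the CL.
    class NullNetworkInterfaceSketch
        : public cricket::MediaChannelNetworkInterface {
     public:
      bool SendPacket(rtc::CopyOnWriteBuffer* packet,
                      const rtc::PacketOptions& options) override {
        return true;  // Pretend the RTP packet was sent.
      }
      bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
                    const rtc::PacketOptions& options) override {
        return true;  // Pretend the RTCP packet was sent.
      }
      int SetOption(SocketType type,
                    rtc::Socket::Option opt,
                    int option) override {
        return 0;  // Accept every socket option (e.g. the buffer sizes above).
      }
    };

Such an object would be handed to a channel with SetInterface() on the network
thread, after which SetOption() calls like the buffer-size tweaks above reach
it.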

View File

@@ -180,7 +180,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
   void OnReadyToSend(bool ready) override;
   void OnNetworkRouteChanged(absl::string_view transport_name,
                              const rtc::NetworkRoute& network_route) override;
-  void SetInterface(NetworkInterface* iface) override;
+  void SetInterface(MediaChannelNetworkInterface* iface) override;
 
   // E2E Encrypted Video Frame API
   // Set a frame decryptor to a particular ssrc that will intercept all

File diff suppressed because it is too large.

View File

@@ -247,7 +247,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
     if (!SetupChannel()) {
       return false;
     }
-    if (!channel_->AddSendStream(sp)) {
+    if (!channel_->AsSendChannel()->AddSendStream(sp)) {
      return false;
     }
     if (!use_null_apm_) {
@@ -258,21 +258,23 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
   bool AddRecvStream(uint32_t ssrc) {
     EXPECT_TRUE(channel_);
-    return channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc));
+    return channel_->AsReceiveChannel()->AddRecvStream(
+        cricket::StreamParams::CreateLegacy(ssrc));
   }
 
   void SetupForMultiSendStream() {
     EXPECT_TRUE(SetupSendStream());
     // Remove stream added in Setup.
     EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX));
-    EXPECT_TRUE(channel_->RemoveSendStream(kSsrcX));
+    EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(kSsrcX));
     // Verify the channel does not exist.
     EXPECT_FALSE(call_.GetAudioSendStream(kSsrcX));
   }
 
   void DeliverPacket(const void* data, int len) {
     rtc::CopyOnWriteBuffer packet(reinterpret_cast<const uint8_t*>(data), len);
-    channel_->OnPacketReceived(packet, /* packet_time_us */ -1);
+    channel_->AsReceiveChannel()->OnPacketReceived(packet,
+                                                   /* packet_time_us */ -1);
     rtc::Thread::Current()->ProcessMessages(0);
   }
 
@@ -338,8 +340,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
     if (caller) {
       // If this is a caller, local description will be applied and add the
       // send stream.
-      EXPECT_TRUE(
-          channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)));
+      EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+          cricket::StreamParams::CreateLegacy(kSsrcX)));
     }
 
     // Test we can only InsertDtmf when the other side supports telephone-event.
@@ -354,8 +356,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
     if (!caller) {
       // If this is callee, there's no active send channel yet.
       EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123));
-      EXPECT_TRUE(
-          channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)));
+      EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+          cricket::StreamParams::CreateLegacy(kSsrcX)));
     }
 
     // Check we fail if the ssrc is invalid.
@@ -377,8 +379,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
     // For a caller, the answer will be applied in set remote description
     // where SetSendParameters() is called.
     EXPECT_TRUE(SetupChannel());
-    EXPECT_TRUE(
-        channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)));
+    EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+        cricket::StreamParams::CreateLegacy(kSsrcX)));
     send_parameters_.extmap_allow_mixed = extmap_allow_mixed;
     SetSendParameters(send_parameters_);
     const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX);
@@ -390,8 +392,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
     // where SetExtmapAllowMixed() and AddSendStream() are called.
     EXPECT_TRUE(SetupChannel());
     channel_->SetExtmapAllowMixed(extmap_allow_mixed);
-    EXPECT_TRUE(
-        channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)));
+    EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+        cricket::StreamParams::CreateLegacy(kSsrcX)));
     const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX);
 
     EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed);
@@ -419,11 +421,14 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
   // Sets the per-stream maximum bitrate limit for the specified SSRC.
   bool SetMaxBitrateForStream(int32_t ssrc, int bitrate) {
-    webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(ssrc);
+    webrtc::RtpParameters parameters =
+        channel_->AsSendChannel()->GetRtpSendParameters(ssrc);
     EXPECT_EQ(1UL, parameters.encodings.size());
 
     parameters.encodings[0].max_bitrate_bps = bitrate;
-    return channel_->SetRtpSendParameters(ssrc, parameters).ok();
+    return channel_->AsSendChannel()
+        ->SetRtpSendParameters(ssrc, parameters)
+        .ok();
   }
 
   void SetGlobalMaxBitrate(const cricket::AudioCodec& codec, int bitrate) {
@@ -469,7 +474,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
     // Verify that reading back the parameters gives results
     // consistent with the Set() result.
     webrtc::RtpParameters resulting_parameters =
-        channel_->GetRtpSendParameters(kSsrcX);
+        channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
     EXPECT_EQ(1UL, resulting_parameters.encodings.size());
     EXPECT_EQ(expected_result ? stream_max : -1,
               resulting_parameters.encodings[0].max_bitrate_bps);
@@ -530,8 +535,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
     EXPECT_EQ(id, GetSendStreamConfig(kSsrcX).rtp.extensions[0].id);
 
     // Ensure extension is set properly on new stream.
-    EXPECT_TRUE(
-        channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcY)));
+    EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+        cricket::StreamParams::CreateLegacy(kSsrcY)));
     EXPECT_NE(call_.GetAudioSendStream(kSsrcX),
               call_.GetAudioSendStream(kSsrcY));
     EXPECT_EQ(1u, GetSendStreamConfig(kSsrcY).rtp.extensions.size());
@@ -816,8 +821,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateMediaChannel) {
 // Test that we can add a send stream and that it has the correct defaults.
 TEST_P(WebRtcVoiceEngineTestFake, CreateSendStream) {
   EXPECT_TRUE(SetupChannel());
-  EXPECT_TRUE(
-      channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)));
+  EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+      cricket::StreamParams::CreateLegacy(kSsrcX)));
   const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX);
   EXPECT_EQ(kSsrcX, config.rtp.ssrc);
   EXPECT_EQ("", config.rtp.c_name);
@@ -1074,8 +1079,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) {
   parameters.max_bandwidth_bps = kDesiredBitrate;
   SetSendParameters(parameters);
 
-  EXPECT_TRUE(
-      channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)));
+  EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+      cricket::StreamParams::CreateLegacy(kSsrcX)));
 
   EXPECT_EQ(kDesiredBitrate, GetCodecBitrate(kSsrcX));
 }
@@ -1126,12 +1131,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxBitratePerStream) {
 TEST_P(WebRtcVoiceEngineTestFake, CannotSetMaxBitrateForNonexistentStream) {
   EXPECT_TRUE(SetupChannel());
   webrtc::RtpParameters nonexistent_parameters =
-      channel_->GetRtpSendParameters(kSsrcX);
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   EXPECT_EQ(0u, nonexistent_parameters.encodings.size());
 
   nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters());
-  EXPECT_FALSE(
-      channel_->SetRtpSendParameters(kSsrcX, nonexistent_parameters).ok());
+  EXPECT_FALSE(channel_->AsSendChannel()
+                   ->SetRtpSendParameters(kSsrcX, nonexistent_parameters)
+                   .ok());
 }
 
 TEST_P(WebRtcVoiceEngineTestFake,
@@ -1142,21 +1148,26 @@ TEST_P(WebRtcVoiceEngineTestFake,
   // for each encoding individually.
   EXPECT_TRUE(SetupSendStream());
-  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   // Two or more encodings should result in failure.
   parameters.encodings.push_back(webrtc::RtpEncodingParameters());
-  EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_FALSE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
   // Zero encodings should also fail.
   parameters.encodings.clear();
-  EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_FALSE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
 }
 
 // Changing the SSRC through RtpParameters is not allowed.
 TEST_P(WebRtcVoiceEngineTestFake, CannotSetSsrcInRtpSendParameters) {
   EXPECT_TRUE(SetupSendStream());
-  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   parameters.encodings[0].ssrc = 0xdeadbeef;
-  EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_FALSE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
 }
 
 // Test that a stream will not be sending if its encoding is made
@@ -1166,34 +1177,40 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) {
   SetSend(true);
   EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
   // Get current parameters and change "active" to false.
-  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   ASSERT_EQ(1u, parameters.encodings.size());
   ASSERT_TRUE(parameters.encodings[0].active);
   parameters.encodings[0].active = false;
-  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_TRUE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
   EXPECT_FALSE(GetSendStream(kSsrcX).IsSending());
 
   // Now change it back to active and verify we resume sending.
   // This should occur even when other parameters are updated.
   parameters.encodings[0].active = true;
   parameters.encodings[0].max_bitrate_bps = absl::optional<int>(6000);
-  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_TRUE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
   EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
 }
 
 TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersAdaptivePtime) {
   EXPECT_TRUE(SetupSendStream());
   // Get current parameters and change "adaptive_ptime" to true.
-  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   ASSERT_EQ(1u, parameters.encodings.size());
   ASSERT_FALSE(parameters.encodings[0].adaptive_ptime);
   parameters.encodings[0].adaptive_ptime = true;
-  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_TRUE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
   EXPECT_TRUE(GetAudioNetworkAdaptorConfig(kSsrcX));
   EXPECT_EQ(16000, GetSendStreamConfig(kSsrcX).min_bitrate_bps);
 
   parameters.encodings[0].adaptive_ptime = false;
-  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_TRUE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
   EXPECT_FALSE(GetAudioNetworkAdaptorConfig(kSsrcX));
   EXPECT_EQ(32000, GetSendStreamConfig(kSsrcX).min_bitrate_bps);
 }
@@ -1207,9 +1224,11 @@ TEST_P(WebRtcVoiceEngineTestFake,
   EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
             GetAudioNetworkAdaptorConfig(kSsrcX));
 
-  webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   parameters.encodings[0].adaptive_ptime = false;
-  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+  EXPECT_TRUE(
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, parameters).ok());
   EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
             GetAudioNetworkAdaptorConfig(kSsrcX));
 }
@@ -1227,8 +1246,8 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) {
   SetupForMultiSendStream();
   // Create send streams.
   for (uint32_t ssrc : kSsrcs4) {
-    EXPECT_TRUE(
-        channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc)));
+    EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
+        cricket::StreamParams::CreateLegacy(ssrc)));
   }
   // Configure one stream to be limited by the stream config, another to be
   // limited by the global max, and the third one with no per-stream limit
@@ -1258,7 +1277,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) {
   parameters.codecs.push_back(kPcmuCodec);
   SetSendParameters(parameters);
 
-  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters rtp_parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   ASSERT_EQ(2u, rtp_parameters.codecs.size());
   EXPECT_EQ(kOpusCodec.ToCodecParameters(), rtp_parameters.codecs[0]);
   EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]);
@@ -1270,7 +1290,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersRtcpCname) {
   params.cname = "rtcpcname";
   EXPECT_TRUE(SetupSendStream(params));
 
-  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters rtp_parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   EXPECT_STREQ("rtcpcname", rtp_parameters.rtcp.cname.c_str());
 }
 
@@ -1278,20 +1299,22 @@ TEST_P(WebRtcVoiceEngineTestFake,
        DetectRtpSendParameterHeaderExtensionsChange) {
   EXPECT_TRUE(SetupSendStream());
 
-  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters rtp_parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   rtp_parameters.header_extensions.emplace_back();
 
   EXPECT_NE(0u, rtp_parameters.header_extensions.size());
 
   webrtc::RTCError result =
-      channel_->SetRtpSendParameters(kSsrcX, rtp_parameters);
+      channel_->AsSendChannel()->SetRtpSendParameters(kSsrcX, rtp_parameters);
   EXPECT_EQ(webrtc::RTCErrorType::INVALID_MODIFICATION, result.type());
 }
 
 // Test that GetRtpSendParameters returns an SSRC.
 TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) {
   EXPECT_TRUE(SetupSendStream());
-  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters rtp_parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   ASSERT_EQ(1u, rtp_parameters.encodings.size());
   EXPECT_EQ(kSsrcX, rtp_parameters.encodings[0].ssrc);
 }
@@ -1304,14 +1327,19 @@ TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) {
   parameters.codecs.push_back(kPcmuCodec);
   SetSendParameters(parameters);
 
-  webrtc::RtpParameters initial_params = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters initial_params =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
 
   // We should be able to set the params we just got.
-  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, initial_params).ok());
+  EXPECT_TRUE(channel_->AsSendChannel()
+                  ->SetRtpSendParameters(kSsrcX, initial_params)
+                  .ok());
 
   // ... And this shouldn't change the params returned by GetRtpSendParameters.
-  webrtc::RtpParameters new_params = channel_->GetRtpSendParameters(kSsrcX);
-  EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(kSsrcX));
+  webrtc::RtpParameters new_params =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
+  EXPECT_EQ(initial_params,
+            channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX));
 }
 
 // Test that max_bitrate_bps in send stream config gets updated correctly when
@@ -1322,13 +1350,16 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) {
   send_parameters.codecs.push_back(kOpusCodec);
   SetSendParameters(send_parameters);
 
-  webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX);
+  webrtc::RtpParameters rtp_parameters =
+      channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
   // Expect empty on parameters.encodings[0].max_bitrate_bps;
   EXPECT_FALSE(rtp_parameters.encodings[0].max_bitrate_bps);
 
   constexpr int kMaxBitrateBps = 6000;
   rtp_parameters.encodings[0].max_bitrate_bps = kMaxBitrateBps;
-  EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok());
+  EXPECT_TRUE(channel_->AsSendChannel()
+                  ->SetRtpSendParameters(kSsrcX, rtp_parameters)
+                  .ok());
 
   const int max_bitrate = GetSendStreamConfig(kSsrcX).max_bitrate_bps;
   EXPECT_EQ(max_bitrate, kMaxBitrateBps);
@@ -1338,35 +1369,44 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) {
// a value <= 0, setting the parameters returns false. // a value <= 0, setting the parameters returns false.
TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterInvalidBitratePriority) { TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterInvalidBitratePriority) {
EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(SetupSendStream());
webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); webrtc::RtpParameters rtp_parameters =
channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
EXPECT_EQ(1UL, rtp_parameters.encodings.size()); EXPECT_EQ(1UL, rtp_parameters.encodings.size());
EXPECT_EQ(webrtc::kDefaultBitratePriority, EXPECT_EQ(webrtc::kDefaultBitratePriority,
rtp_parameters.encodings[0].bitrate_priority); rtp_parameters.encodings[0].bitrate_priority);
rtp_parameters.encodings[0].bitrate_priority = 0; rtp_parameters.encodings[0].bitrate_priority = 0;
EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); EXPECT_FALSE(channel_->AsSendChannel()
->SetRtpSendParameters(kSsrcX, rtp_parameters)
.ok());
rtp_parameters.encodings[0].bitrate_priority = -1.0; rtp_parameters.encodings[0].bitrate_priority = -1.0;
EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); EXPECT_FALSE(channel_->AsSendChannel()
->SetRtpSendParameters(kSsrcX, rtp_parameters)
.ok());
} }
// Test that the bitrate_priority in the send stream config gets updated when // Test that the bitrate_priority in the send stream config gets updated when
// SetRtpSendParameters is set for the VoiceMediaChannel. // SetRtpSendParameters is set for the VoiceMediaChannel.
TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) { TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) {
EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(SetupSendStream());
webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrcX); webrtc::RtpParameters rtp_parameters =
channel_->AsSendChannel()->GetRtpSendParameters(kSsrcX);
EXPECT_EQ(1UL, rtp_parameters.encodings.size()); EXPECT_EQ(1UL, rtp_parameters.encodings.size());
EXPECT_EQ(webrtc::kDefaultBitratePriority, EXPECT_EQ(webrtc::kDefaultBitratePriority,
rtp_parameters.encodings[0].bitrate_priority); rtp_parameters.encodings[0].bitrate_priority);
double new_bitrate_priority = 2.0; double new_bitrate_priority = 2.0;
rtp_parameters.encodings[0].bitrate_priority = new_bitrate_priority; rtp_parameters.encodings[0].bitrate_priority = new_bitrate_priority;
EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, rtp_parameters).ok()); EXPECT_TRUE(channel_->AsSendChannel()
->SetRtpSendParameters(kSsrcX, rtp_parameters)
.ok());
// The priority should get set for both the audio channel's rtp parameters // The priority should get set for both the audio channel's rtp parameters
// and the audio send stream's audio config. // and the audio send stream's audio config.
EXPECT_EQ( EXPECT_EQ(new_bitrate_priority, channel_->AsSendChannel()
new_bitrate_priority, ->GetRtpSendParameters(kSsrcX)
channel_->GetRtpSendParameters(kSsrcX).encodings[0].bitrate_priority); .encodings[0]
.bitrate_priority);
EXPECT_EQ(new_bitrate_priority, GetSendStreamConfig(kSsrcX).bitrate_priority); EXPECT_EQ(new_bitrate_priority, GetSendStreamConfig(kSsrcX).bitrate_priority);
} }
@ -1792,8 +1832,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) {
// NACK should be enabled even with no send stream. // NACK should be enabled even with no send stream.
EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms); EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms);
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); cricket::StreamParams::CreateLegacy(kSsrcX)));
} }
// Test that we can enable NACK on receive streams. // Test that we can enable NACK on receive streams.
@ -2038,8 +2078,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) {
parameters.codecs[2].id = 97; // narrowband CN parameters.codecs[2].id = 97; // narrowband CN
parameters.codecs[3].id = 98; // DTMF parameters.codecs[3].id = 98; // DTMF
SetSendParameters(parameters); SetSendParameters(parameters);
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); cricket::StreamParams::CreateLegacy(kSsrcX)));
const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
EXPECT_EQ(96, send_codec_spec.payload_type); EXPECT_EQ(96, send_codec_spec.payload_type);
@ -2212,8 +2252,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) {
SetSend(true); SetSend(true);
for (uint32_t ssrc : kSsrcs4) { for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); cricket::StreamParams::CreateLegacy(ssrc)));
SetAudioSend(ssrc, true, &fake_source_); SetAudioSend(ssrc, true, &fake_source_);
// Verify that we are in a sending state for all the created streams. // Verify that we are in a sending state for all the created streams.
EXPECT_TRUE(GetSendStream(ssrc).IsSending()); EXPECT_TRUE(GetSendStream(ssrc).IsSending());
@ -2222,9 +2262,9 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) {
// Delete the send streams. // Delete the send streams.
for (uint32_t ssrc : kSsrcs4) { for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE(channel_->RemoveSendStream(ssrc)); EXPECT_TRUE(channel_->AsSendChannel()->RemoveSendStream(ssrc));
EXPECT_FALSE(call_.GetAudioSendStream(ssrc)); EXPECT_FALSE(call_.GetAudioSendStream(ssrc));
EXPECT_FALSE(channel_->RemoveSendStream(ssrc)); EXPECT_FALSE(channel_->AsSendChannel()->RemoveSendStream(ssrc));
} }
EXPECT_EQ(0u, call_.GetAudioSendStreams().size()); EXPECT_EQ(0u, call_.GetAudioSendStreams().size());
} }
@ -2235,8 +2275,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) {
// Create send streams. // Create send streams.
for (uint32_t ssrc : kSsrcs4) { for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); cricket::StreamParams::CreateLegacy(ssrc)));
} }
cricket::AudioSendParameters parameters; cricket::AudioSendParameters parameters;
@ -2275,8 +2315,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) {
// Create the send channels; they should be in a "not sending" state. // Create the send channels; they should be in a "not sending" state.
for (uint32_t ssrc : kSsrcs4) { for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); cricket::StreamParams::CreateLegacy(ssrc)));
SetAudioSend(ssrc, true, &fake_source_); SetAudioSend(ssrc, true, &fake_source_);
EXPECT_FALSE(GetSendStream(ssrc).IsSending()); EXPECT_FALSE(GetSendStream(ssrc).IsSending());
} }
@ -2302,8 +2342,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
// Create send streams. // Create send streams.
for (uint32_t ssrc : kSsrcs4) { for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc))); cricket::StreamParams::CreateLegacy(ssrc)));
} }
// Create a receive stream to check that none of the send streams end up in // Create a receive stream to check that none of the send streams end up in
@ -2337,7 +2377,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
// Remove the kSsrcY stream. No receiver stats. // Remove the kSsrcY stream. No receiver stats.
{ {
cricket::VoiceMediaInfo info; cricket::VoiceMediaInfo info;
EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY));
EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0));
EXPECT_EQ(true, EXPECT_EQ(true,
channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true)); channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true));
@ -2398,8 +2438,8 @@ TEST_P(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) {
EXPECT_TRUE(GetRecvStream(kSsrcZ).started()); EXPECT_TRUE(GetRecvStream(kSsrcZ).started());
// Now remove the recv streams. // Now remove the recv streams.
EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcZ)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcZ));
EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY));
} }
TEST_P(WebRtcVoiceEngineTestFake, SetAudioNetworkAdaptorViaOptions) { TEST_P(WebRtcVoiceEngineTestFake, SetAudioNetworkAdaptorViaOptions) {
@ -2489,7 +2529,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) {
// Remove the kSsrcY stream. No receiver stats. // Remove the kSsrcY stream. No receiver stats.
{ {
cricket::VoiceMediaInfo info; cricket::VoiceMediaInfo info;
EXPECT_TRUE(channel_->RemoveRecvStream(kSsrcY)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrcY));
EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0));
EXPECT_EQ(true, EXPECT_EQ(true,
channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true)); channel_->GetStats(&info, /*get_and_clear_legacy_stats=*/true));
@ -2527,8 +2567,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcWithMultipleStreams) {
TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) { TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) {
EXPECT_TRUE(SetupChannel()); EXPECT_TRUE(SetupChannel());
EXPECT_TRUE(AddRecvStream(kSsrcY)); EXPECT_TRUE(AddRecvStream(kSsrcY));
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcX))); cricket::StreamParams::CreateLegacy(kSsrcX)));
EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX)); EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX));
EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc);
} }
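
A small sketch of the stream-management calls that the tests now route through the send interface. Assumptions: the AsSendChannel() accessor used above; AddLegacySendStreams is a hypothetical helper, not part of the patch.

#include <cstdint>
#include <vector>
#include "media/base/media_channel.h"
#include "media/base/stream_params.h"

// Hypothetical helper: registers one legacy send stream per SSRC through the
// send half of the channel, mirroring the updated test call sites above.
bool AddLegacySendStreams(cricket::VoiceMediaChannel* channel,
                          const std::vector<uint32_t>& ssrcs) {
  for (uint32_t ssrc : ssrcs) {
    if (!channel->AsSendChannel()->AddSendStream(
            cricket::StreamParams::CreateLegacy(ssrc))) {
      return false;
    }
  }
  return true;
}
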
@ -2590,9 +2630,9 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) {
EXPECT_EQ(s3.received_packets(), 1); EXPECT_EQ(s3.received_packets(), 1);
EXPECT_TRUE(s3.VerifyLastPacket(packets[3], sizeof(packets[3]))); EXPECT_TRUE(s3.VerifyLastPacket(packets[3], sizeof(packets[3])));
EXPECT_TRUE(channel_->RemoveRecvStream(ssrc3)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc3));
EXPECT_TRUE(channel_->RemoveRecvStream(ssrc2)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc2));
EXPECT_TRUE(channel_->RemoveRecvStream(ssrc1)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(ssrc1));
} }
// Test that receiving on an unsignaled stream works (a stream is created). // Test that receiving on an unsignaled stream works (a stream is created).
@ -2615,7 +2655,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledSsrcWithSignaledStreamId) {
EXPECT_TRUE(SetupChannel()); EXPECT_TRUE(SetupChannel());
cricket::StreamParams unsignaled_stream; cricket::StreamParams unsignaled_stream;
unsignaled_stream.set_stream_ids({kSyncLabel}); unsignaled_stream.set_stream_ids({kSyncLabel});
ASSERT_TRUE(channel_->AddRecvStream(unsignaled_stream)); ASSERT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(unsignaled_stream));
// The stream shouldn't have been created at this point because it doesn't // The stream shouldn't have been created at this point because it doesn't
// have any SSRCs. // have any SSRCs.
EXPECT_EQ(0u, call_.GetAudioReceiveStreams().size()); EXPECT_EQ(0u, call_.GetAudioReceiveStreams().size());
@ -2629,8 +2669,8 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledSsrcWithSignaledStreamId) {
// Reset the unsignaled stream to clear the cached parameters. If a new // Reset the unsignaled stream to clear the cached parameters. If a new
// default unsignaled receive stream is created it will not have a sync group. // default unsignaled receive stream is created it will not have a sync group.
channel_->ResetUnsignaledRecvStream(); channel_->AsReceiveChannel()->ResetUnsignaledRecvStream();
channel_->RemoveRecvStream(kSsrc1); channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1);
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
@ -2659,7 +2699,7 @@ TEST_P(WebRtcVoiceEngineTestFake,
ASSERT_EQ(receivers1.size(), 2u); ASSERT_EQ(receivers1.size(), 2u);
// Should remove all default streams. // Should remove all default streams.
channel_->ResetUnsignaledRecvStream(); channel_->AsReceiveChannel()->ResetUnsignaledRecvStream();
const auto& receivers2 = call_.GetAudioReceiveStreams(); const auto& receivers2 = call_.GetAudioReceiveStreams();
EXPECT_EQ(0u, receivers2.size()); EXPECT_EQ(0u, receivers2.size());
} }
@ -2788,7 +2828,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Updates) {
stream_params.ssrcs.push_back(1); stream_params.ssrcs.push_back(1);
stream_params.set_stream_ids({new_stream_id}); stream_params.set_stream_ids({new_stream_id});
EXPECT_TRUE(channel_->AddRecvStream(stream_params)); EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream_params));
EXPECT_EQ(1u, streams.size()); EXPECT_EQ(1u, streams.size());
// The audio receive stream should not have been recreated. // The audio receive stream should not have been recreated.
EXPECT_EQ(audio_receive_stream_id, streams.front()->id()); EXPECT_EQ(audio_receive_stream_id, streams.front()->id());
@ -3186,7 +3226,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolume) {
EXPECT_FALSE(channel_->SetOutputVolume(kSsrcY, 0.5)); EXPECT_FALSE(channel_->SetOutputVolume(kSsrcY, 0.5));
cricket::StreamParams stream; cricket::StreamParams stream;
stream.ssrcs.push_back(kSsrcY); stream.ssrcs.push_back(kSsrcY);
EXPECT_TRUE(channel_->AddRecvStream(stream)); EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream));
EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrcY).gain()); EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrcY).gain());
EXPECT_TRUE(channel_->SetOutputVolume(kSsrcY, 3)); EXPECT_TRUE(channel_->SetOutputVolume(kSsrcY, 3));
EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcY).gain()); EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcY).gain());
@ -3228,14 +3268,18 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolumeUnsignaledRecvStream) {
TEST_P(WebRtcVoiceEngineTestFake, BaseMinimumPlayoutDelayMs) { TEST_P(WebRtcVoiceEngineTestFake, BaseMinimumPlayoutDelayMs) {
EXPECT_TRUE(SetupChannel()); EXPECT_TRUE(SetupChannel());
EXPECT_FALSE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcY, 200)); EXPECT_FALSE(
EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcY, 200));
EXPECT_FALSE(channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrcY)
.has_value());
cricket::StreamParams stream; cricket::StreamParams stream;
stream.ssrcs.push_back(kSsrcY); stream.ssrcs.push_back(kSsrcY);
EXPECT_TRUE(channel_->AddRecvStream(stream)); EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(stream));
EXPECT_EQ(0, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms()); EXPECT_EQ(0, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms());
EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcY, 300)); EXPECT_TRUE(
channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcY, 300));
EXPECT_EQ(300, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms()); EXPECT_EQ(300, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms());
} }
@ -3246,43 +3290,70 @@ TEST_P(WebRtcVoiceEngineTestFake,
// Spawn an unsignaled stream by sending a packet - delay should be 0. // Spawn an unsignaled stream by sending a packet - delay should be 0.
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
EXPECT_EQ(0, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); EXPECT_EQ(0, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrc1)
.value_or(-1));
// Check that it doesn't provide default values for unknown ssrc. // Check that it doesn't provide default values for unknown ssrc.
EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); EXPECT_FALSE(channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrcY)
.has_value());
// Check that default value for unsignaled streams is 0. // Check that default value for unsignaled streams is 0.
EXPECT_EQ(0, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); EXPECT_EQ(0, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrc0)
.value_or(-1));
// Should remember the delay 100 which will be set on new unsignaled streams, // Should remember the delay 100 which will be set on new unsignaled streams,
// and also set the delay to 100 on existing unsignaled streams. // and also set the delay to 100 on existing unsignaled streams.
EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc0, 100)); EXPECT_TRUE(
EXPECT_EQ(100, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrc0, 100));
EXPECT_EQ(100, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrc0)
.value_or(-1));
// Check that it doesn't provide default values for unknown ssrc. // Check that it doesn't provide default values for unknown ssrc.
EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); EXPECT_FALSE(channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrcY)
.has_value());
// Spawn an unsignaled stream by sending a packet - delay should be 100. // Spawn an unsignaled stream by sending a packet - delay should be 100.
unsigned char pcmuFrame2[sizeof(kPcmuFrame)]; unsigned char pcmuFrame2[sizeof(kPcmuFrame)];
memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame)); memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame));
rtc::SetBE32(&pcmuFrame2[8], kSsrcX); rtc::SetBE32(&pcmuFrame2[8], kSsrcX);
DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2)); DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2));
EXPECT_EQ(100, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); EXPECT_EQ(100, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrcX)
.value_or(-1));
// Setting delay with SSRC=0 should affect all unsignaled streams. // Setting delay with SSRC=0 should affect all unsignaled streams.
EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrc0, 300)); EXPECT_TRUE(
channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrc0, 300));
if (kMaxUnsignaledRecvStreams > 1) { if (kMaxUnsignaledRecvStreams > 1) {
EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); EXPECT_EQ(300, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrc1)
.value_or(-1));
} }
EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); EXPECT_EQ(300, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrcX)
.value_or(-1));
// Setting delay on an individual stream affects only that. // Setting delay on an individual stream affects only that.
EXPECT_TRUE(channel_->SetBaseMinimumPlayoutDelayMs(kSsrcX, 400)); EXPECT_TRUE(
channel_->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(kSsrcX, 400));
if (kMaxUnsignaledRecvStreams > 1) { if (kMaxUnsignaledRecvStreams > 1) {
EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc1).value_or(-1)); EXPECT_EQ(300, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrc1)
.value_or(-1));
} }
EXPECT_EQ(400, channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); EXPECT_EQ(400, channel_->AsReceiveChannel()
EXPECT_EQ(300, channel_->GetBaseMinimumPlayoutDelayMs(kSsrc0).value_or(-1)); ->GetBaseMinimumPlayoutDelayMs(kSsrcX)
.value_or(-1));
EXPECT_EQ(300, channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrc0)
.value_or(-1));
// Check that it doesn't provide default values for unknown ssrc. // Check that it doesn't provide default values for unknown ssrc.
EXPECT_FALSE(channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); EXPECT_FALSE(channel_->AsReceiveChannel()
->GetBaseMinimumPlayoutDelayMs(kSsrcY)
.has_value());
} }
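
The receive-side counterpart, sketched under the same assumptions (the AsReceiveChannel() accessor and the playout-delay methods exercised above; ApplyPlayoutDelay is a hypothetical helper).

#include <cstdint>
#include "media/base/media_channel.h"

// Hypothetical helper: sets a base minimum playout delay on one stream and
// reads back what the receive channel actually stored (-1 if ssrc unknown).
int ApplyPlayoutDelay(cricket::VoiceMediaChannel* channel,
                      uint32_t ssrc,
                      int delay_ms) {
  if (!channel->AsReceiveChannel()->SetBaseMinimumPlayoutDelayMs(ssrc,
                                                                 delay_ms)) {
    return -1;
  }
  return channel->AsReceiveChannel()
      ->GetBaseMinimumPlayoutDelayMs(ssrc)
      .value_or(-1);
}
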
TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) { TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) {
@ -3294,9 +3365,9 @@ TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) {
sp.set_stream_ids({kStreamId}); sp.set_stream_ids({kStreamId});
// Creating two channels to make sure that sync label is set properly for both // Creating two channels to make sure that sync label is set properly for both
// the default voice channel and following ones. // the default voice channel and following ones.
EXPECT_TRUE(channel_->AddRecvStream(sp)); EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp));
sp.ssrcs[0] += 1; sp.ssrcs[0] += 1;
EXPECT_TRUE(channel_->AddRecvStream(sp)); EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(sp));
ASSERT_EQ(2u, call_.GetAudioReceiveStreams().size()); ASSERT_EQ(2u, call_.GetAudioReceiveStreams().size());
EXPECT_EQ(kStreamId, EXPECT_EQ(kStreamId,
@ -3319,8 +3390,8 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) {
EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(SetupSendStream());
SetSendParameters(send_parameters_); SetSendParameters(send_parameters_);
for (uint32_t ssrc : ssrcs) { for (uint32_t ssrc : ssrcs) {
EXPECT_TRUE( EXPECT_TRUE(channel_->AsReceiveChannel()->AddRecvStream(
channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc))); cricket::StreamParams::CreateLegacy(ssrc)));
} }
EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size());
@ -3381,7 +3452,8 @@ TEST_P(WebRtcVoiceEngineTestFake, DeliverAudioPacket_Call) {
const cricket::FakeAudioReceiveStream* s = const cricket::FakeAudioReceiveStream* s =
call_.GetAudioReceiveStream(kAudioSsrc); call_.GetAudioReceiveStream(kAudioSsrc);
EXPECT_EQ(0, s->received_packets()); EXPECT_EQ(0, s->received_packets());
channel_->OnPacketReceived(kPcmuPacket, /* packet_time_us */ -1); channel_->AsReceiveChannel()->OnPacketReceived(kPcmuPacket,
/* packet_time_us */ -1);
rtc::Thread::Current()->ProcessMessages(0); rtc::Thread::Current()->ProcessMessages(0);
EXPECT_EQ(1, s->received_packets()); EXPECT_EQ(1, s->received_packets());
@ -3393,8 +3465,8 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) {
EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(SetupSendStream());
EXPECT_TRUE(AddRecvStream(kSsrcY)); EXPECT_TRUE(AddRecvStream(kSsrcY));
EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc);
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcZ))); cricket::StreamParams::CreateLegacy(kSsrcZ)));
EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc);
EXPECT_TRUE(AddRecvStream(kSsrcW)); EXPECT_TRUE(AddRecvStream(kSsrcW));
EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcW).rtp.local_ssrc); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcW).rtp.local_ssrc);
@ -3403,13 +3475,13 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) {
TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_RecvCreatedFirst) { TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_RecvCreatedFirst) {
EXPECT_TRUE(SetupRecvStream()); EXPECT_TRUE(SetupRecvStream());
EXPECT_EQ(0xFA17FA17u, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_EQ(0xFA17FA17u, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc);
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcY))); cricket::StreamParams::CreateLegacy(kSsrcY)));
EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc);
EXPECT_TRUE(AddRecvStream(kSsrcZ)); EXPECT_TRUE(AddRecvStream(kSsrcZ));
EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc);
EXPECT_TRUE( EXPECT_TRUE(channel_->AsSendChannel()->AddSendStream(
channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcW))); cricket::StreamParams::CreateLegacy(kSsrcW)));
EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc);
EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc);
} }
@ -3457,7 +3529,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) {
EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink());
// If we remove and add a default stream, it should get the same sink. // If we remove and add a default stream, it should get the same sink.
EXPECT_TRUE(channel_->RemoveRecvStream(kSsrc1)); EXPECT_TRUE(channel_->AsReceiveChannel()->RemoveRecvStream(kSsrc1));
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink());
@ -3507,13 +3579,13 @@ TEST_P(WebRtcVoiceEngineTestFake, OnReadyToSendSignalsNetworkState) {
EXPECT_EQ(webrtc::kNetworkUp, EXPECT_EQ(webrtc::kNetworkUp,
call_.GetNetworkState(webrtc::MediaType::VIDEO)); call_.GetNetworkState(webrtc::MediaType::VIDEO));
channel_->OnReadyToSend(false); channel_->AsSendChannel()->OnReadyToSend(false);
EXPECT_EQ(webrtc::kNetworkDown, EXPECT_EQ(webrtc::kNetworkDown,
call_.GetNetworkState(webrtc::MediaType::AUDIO)); call_.GetNetworkState(webrtc::MediaType::AUDIO));
EXPECT_EQ(webrtc::kNetworkUp, EXPECT_EQ(webrtc::kNetworkUp,
call_.GetNetworkState(webrtc::MediaType::VIDEO)); call_.GetNetworkState(webrtc::MediaType::VIDEO));
channel_->OnReadyToSend(true); channel_->AsSendChannel()->OnReadyToSend(true);
EXPECT_EQ(webrtc::kNetworkUp, EXPECT_EQ(webrtc::kNetworkUp,
call_.GetNetworkState(webrtc::MediaType::AUDIO)); call_.GetNetworkState(webrtc::MediaType::AUDIO));
EXPECT_EQ(webrtc::kNetworkUp, EXPECT_EQ(webrtc::kNetworkUp,

View File

@ -28,7 +28,7 @@ AudioRtpReceiver::AudioRtpReceiver(
std::string receiver_id, std::string receiver_id,
std::vector<std::string> stream_ids, std::vector<std::string> stream_ids,
bool is_unified_plan, bool is_unified_plan,
cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/)
: AudioRtpReceiver(worker_thread, : AudioRtpReceiver(worker_thread,
receiver_id, receiver_id,
CreateStreamsFromIds(std::move(stream_ids)), CreateStreamsFromIds(std::move(stream_ids)),
@ -40,7 +40,7 @@ AudioRtpReceiver::AudioRtpReceiver(
const std::string& receiver_id, const std::string& receiver_id,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams, const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
bool is_unified_plan, bool is_unified_plan,
cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/)
: worker_thread_(worker_thread), : worker_thread_(worker_thread),
id_(receiver_id), id_(receiver_id),
source_(rtc::make_ref_counted<RemoteAudioSource>( source_(rtc::make_ref_counted<RemoteAudioSource>(
@ -324,7 +324,8 @@ void AudioRtpReceiver::SetMediaChannel(
media_channel ? worker_thread_safety_->SetAlive() media_channel ? worker_thread_safety_->SetAlive()
: worker_thread_safety_->SetNotAlive(); : worker_thread_safety_->SetNotAlive();
media_channel_ = static_cast<cricket::VoiceMediaChannel*>(media_channel); media_channel_ =
static_cast<cricket::VoiceMediaReceiveChannelInterface*>(media_channel);
} }
void AudioRtpReceiver::NotifyFirstPacketReceived() { void AudioRtpReceiver::NotifyFirstPacketReceived() {

View File

@ -50,18 +50,19 @@ class AudioRtpReceiver : public ObserverInterface,
// However, when using that, the assumption is that right after construction, // However, when using that, the assumption is that right after construction,
// a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel` // a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel`
// will be made, which will internally start the source on the worker thread. // will be made, which will internally start the source on the worker thread.
AudioRtpReceiver(rtc::Thread* worker_thread, AudioRtpReceiver(
rtc::Thread* worker_thread,
std::string receiver_id, std::string receiver_id,
std::vector<std::string> stream_ids, std::vector<std::string> stream_ids,
bool is_unified_plan, bool is_unified_plan,
cricket::VoiceMediaChannel* voice_channel = nullptr); cricket::VoiceMediaReceiveChannelInterface* voice_channel = nullptr);
// TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed. // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed.
AudioRtpReceiver( AudioRtpReceiver(
rtc::Thread* worker_thread, rtc::Thread* worker_thread,
const std::string& receiver_id, const std::string& receiver_id,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams, const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
bool is_unified_plan, bool is_unified_plan,
cricket::VoiceMediaChannel* media_channel = nullptr); cricket::VoiceMediaReceiveChannelInterface* media_channel = nullptr);
virtual ~AudioRtpReceiver(); virtual ~AudioRtpReceiver();
// ObserverInterface implementation // ObserverInterface implementation
@ -135,8 +136,8 @@ class AudioRtpReceiver : public ObserverInterface,
const std::string id_; const std::string id_;
const rtc::scoped_refptr<RemoteAudioSource> source_; const rtc::scoped_refptr<RemoteAudioSource> source_;
const rtc::scoped_refptr<AudioTrackProxyWithInternal<AudioTrack>> track_; const rtc::scoped_refptr<AudioTrackProxyWithInternal<AudioTrack>> track_;
cricket::VoiceMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = cricket::VoiceMediaReceiveChannelInterface* media_channel_
nullptr; RTC_GUARDED_BY(worker_thread_) = nullptr;
absl::optional<uint32_t> ssrc_ RTC_GUARDED_BY(worker_thread_); absl::optional<uint32_t> ssrc_ RTC_GUARDED_BY(worker_thread_);
std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_
RTC_GUARDED_BY(&signaling_thread_checker_); RTC_GUARDED_BY(&signaling_thread_checker_);

View File

@ -66,7 +66,7 @@ TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) {
receiver_->track(); receiver_->track();
receiver_->track()->set_enabled(true); receiver_->track()->set_enabled(true);
receiver_->SetMediaChannel(&media_channel_); receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel());
EXPECT_CALL(media_channel_, SetDefaultRawAudioSink(_)).Times(0); EXPECT_CALL(media_channel_, SetDefaultRawAudioSink(_)).Times(0);
receiver_->SetupMediaChannel(kSsrc); receiver_->SetupMediaChannel(kSsrc);
@ -86,7 +86,7 @@ TEST_F(AudioRtpReceiverTest, VolumesSetBeforeStartingAreRespected) {
receiver_->OnSetVolume(kVolume); receiver_->OnSetVolume(kVolume);
receiver_->track()->set_enabled(true); receiver_->track()->set_enabled(true);
receiver_->SetMediaChannel(&media_channel_); receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel());
// The previously set initial volume should be propagated to the provided // The previously set initial volume should be propagated to the provided
// media_channel_ as soon as SetupMediaChannel is called. // media_channel_ as soon as SetupMediaChannel is called.

View File

@ -869,7 +869,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
webrtc::RtpTransceiverDirectionHasRecv(content->direction()), webrtc::RtpTransceiverDirectionHasRecv(content->direction()),
&recv_params); &recv_params);
if (!media_send_channel()->SetRecvParameters(recv_params)) { if (!media_receive_channel()->SetRecvParameters(recv_params)) {
error_desc = StringFormat( error_desc = StringFormat(
"Failed to set local audio description recv parameters for m-section " "Failed to set local audio description recv parameters for m-section "
"with mid='%s'.", "with mid='%s'.",
@ -1008,7 +1008,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
} }
} }
if (!media_send_channel()->SetRecvParameters(recv_params)) { if (!media_receive_channel()->SetRecvParameters(recv_params)) {
error_desc = StringFormat( error_desc = StringFormat(
"Failed to set local video description recv parameters for m-section " "Failed to set local video description recv parameters for m-section "
"with mid='%s'.", "with mid='%s'.",
@ -1103,7 +1103,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
last_send_params_ = send_params; last_send_params_ = send_params;
if (needs_recv_params_update) { if (needs_recv_params_update) {
if (!media_send_channel()->SetRecvParameters(recv_params)) { if (!media_receive_channel()->SetRecvParameters(recv_params)) {
error_desc = StringFormat( error_desc = StringFormat(
"Failed to set recv parameters for m-section with mid='%s'.", "Failed to set recv parameters for m-section with mid='%s'.",
mid().c_str()); mid().c_str());

View File

@ -32,6 +32,7 @@
#include "call/rtp_demuxer.h" #include "call/rtp_demuxer.h"
#include "call/rtp_packet_sink_interface.h" #include "call/rtp_packet_sink_interface.h"
#include "media/base/media_channel.h" #include "media/base/media_channel.h"
#include "media/base/media_channel_impl.h"
#include "media/base/stream_params.h" #include "media/base/stream_params.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "pc/channel_interface.h" #include "pc/channel_interface.h"
@ -70,7 +71,7 @@ class BaseChannel : public ChannelInterface,
public sigslot::has_slots<>, public sigslot::has_slots<>,
// TODO(tommi): Consider implementing these interfaces // TODO(tommi): Consider implementing these interfaces
// via composition. // via composition.
public MediaChannel::NetworkInterface, public MediaChannelNetworkInterface,
public webrtc::RtpPacketSinkInterface { public webrtc::RtpPacketSinkInterface {
public: public:
// If `srtp_required` is true, the channel will not send or receive any // If `srtp_required` is true, the channel will not send or receive any
@ -155,25 +156,29 @@ class BaseChannel : public ChannelInterface,
// RtpPacketSinkInterface overrides. // RtpPacketSinkInterface overrides.
void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override;
MediaChannel* media_channel() const override { return media_channel_.get(); }
MediaSendChannelInterface* media_send_channel() const override { MediaSendChannelInterface* media_send_channel() const override {
return media_channel_.get(); return media_channel_->AsSendChannel();
} }
VideoMediaChannel* video_media_send_channel() const override { VideoMediaSendChannelInterface* video_media_send_channel() const override {
RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; RTC_CHECK(false) << "Attempt to fetch video channel from non-video";
return nullptr; return nullptr;
} }
VoiceMediaChannel* voice_media_send_channel() const override { VoiceMediaSendChannelInterface* voice_media_send_channel() const override {
RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice"; RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice";
return nullptr; return nullptr;
} }
MediaReceiveChannelInterface* media_receive_channel() const override { MediaReceiveChannelInterface* media_receive_channel() const override {
return media_channel_.get(); return media_channel_->AsReceiveChannel();
} }
VideoMediaChannel* video_media_receive_channel() const override { VideoMediaReceiveChannelInterface* video_media_receive_channel()
const override {
RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; RTC_CHECK(false) << "Attempt to fetch video channel from non-video";
return nullptr; return nullptr;
} }
VoiceMediaChannel* voice_media_receive_channel() const override { VoiceMediaReceiveChannelInterface* voice_media_receive_channel()
const override {
RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice"; RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice";
return nullptr; return nullptr;
} }
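
To show how callers are expected to consume these accessors after the split, a sketch assuming only the interfaces declared above; LogSendChannelKind is a hypothetical function, not part of the patch.

#include "pc/channel_interface.h"
#include "rtc_base/logging.h"

// Hypothetical example: pick the typed send interface that matches the
// channel's media type; the typed getters RTC_CHECK on a mismatch.
void LogSendChannelKind(cricket::ChannelInterface* channel) {
  if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) {
    cricket::VoiceMediaSendChannelInterface* voice =
        channel->voice_media_send_channel();
    RTC_LOG(LS_INFO) << "voice send channel present: " << (voice != nullptr);
  } else if (channel->media_type() == cricket::MEDIA_TYPE_VIDEO) {
    cricket::VideoMediaSendChannelInterface* video =
        channel->video_media_send_channel();
    RTC_LOG(LS_INFO) << "video send channel present: " << (video != nullptr);
  }
}
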
@ -379,22 +384,22 @@ class VoiceChannel : public BaseChannel {
~VoiceChannel(); ~VoiceChannel();
// downcasts a MediaChannel // downcasts a MediaChannel
VoiceMediaChannel* media_send_channel() const override { VoiceMediaSendChannelInterface* media_send_channel() const override {
return static_cast<VoiceMediaChannel*>(BaseChannel::media_send_channel()); return media_channel()->AsVoiceChannel()->AsVoiceSendChannel();
} }
VoiceMediaChannel* voice_media_send_channel() const override { VoiceMediaSendChannelInterface* voice_media_send_channel() const override {
return static_cast<VoiceMediaChannel*>(media_send_channel()); return media_send_channel();
} }
// downcasts a MediaChannel // downcasts a MediaChannel
VoiceMediaChannel* media_receive_channel() const override { VoiceMediaReceiveChannelInterface* media_receive_channel() const override {
return static_cast<VoiceMediaChannel*>( return media_channel()->AsVoiceChannel()->AsVoiceReceiveChannel();
BaseChannel::media_receive_channel());
} }
VoiceMediaChannel* voice_media_receive_channel() const override { VoiceMediaReceiveChannelInterface* voice_media_receive_channel()
return static_cast<VoiceMediaChannel*>(media_receive_channel()); const override {
return media_receive_channel();
} }
cricket::MediaType media_type() const override { cricket::MediaType media_type() const override {
@ -435,22 +440,22 @@ class VideoChannel : public BaseChannel {
~VideoChannel(); ~VideoChannel();
// downcasts a MediaChannel // downcasts a MediaChannel
VideoMediaChannel* media_send_channel() const override { VideoMediaSendChannelInterface* media_send_channel() const override {
return static_cast<VideoMediaChannel*>(BaseChannel::media_send_channel()); return media_channel()->AsVideoChannel()->AsVideoSendChannel();
} }
VideoMediaChannel* video_media_send_channel() const override { VideoMediaSendChannelInterface* video_media_send_channel() const override {
return static_cast<cricket::VideoMediaChannel*>(media_send_channel()); return media_send_channel();
} }
// downcasts a MediaChannel // downcasts a MediaChannel
VideoMediaChannel* media_receive_channel() const override { VideoMediaReceiveChannelInterface* media_receive_channel() const override {
return static_cast<VideoMediaChannel*>( return media_channel()->AsVideoChannel()->AsVideoReceiveChannel();
BaseChannel::media_receive_channel());
} }
VideoMediaChannel* video_media_receive_channel() const override { VideoMediaReceiveChannelInterface* video_media_receive_channel()
return static_cast<cricket::VideoMediaChannel*>(media_receive_channel()); const override {
return media_receive_channel();
} }
cricket::MediaType media_type() const override { cricket::MediaType media_type() const override {

View File

@ -28,6 +28,7 @@ class VideoBitrateAllocatorFactory;
namespace cricket { namespace cricket {
class MediaChannel;
class MediaContentDescription; class MediaContentDescription;
struct MediaConfig; struct MediaConfig;
@ -47,16 +48,20 @@ class ChannelInterface {
virtual ~ChannelInterface() = default; virtual ~ChannelInterface() = default;
virtual cricket::MediaType media_type() const = 0; virtual cricket::MediaType media_type() const = 0;
// Temporary fix while MediaChannel is being reconstructed
virtual MediaChannel* media_channel() const = 0;
virtual MediaSendChannelInterface* media_send_channel() const = 0; virtual MediaSendChannelInterface* media_send_channel() const = 0;
// Typecasts of media_channel(). Will cause an exception if the // Typecasts of media_channel(). Will cause an exception if the
// channel is of the wrong type. // channel is of the wrong type.
virtual VideoMediaChannel* video_media_send_channel() const = 0; virtual VideoMediaSendChannelInterface* video_media_send_channel() const = 0;
virtual VoiceMediaChannel* voice_media_send_channel() const = 0; virtual VoiceMediaSendChannelInterface* voice_media_send_channel() const = 0;
virtual MediaReceiveChannelInterface* media_receive_channel() const = 0; virtual MediaReceiveChannelInterface* media_receive_channel() const = 0;
// Typecasts of media_channel(). Will cause an exception if the // Typecasts of media_channel(). Will cause an exception if the
// channel is of the wrong type. // channel is of the wrong type.
virtual VideoMediaChannel* video_media_receive_channel() const = 0; virtual VideoMediaReceiveChannelInterface* video_media_receive_channel()
virtual VoiceMediaChannel* voice_media_receive_channel() const = 0; const = 0;
virtual VoiceMediaReceiveChannelInterface* voice_media_receive_channel()
const = 0;
// Returns a string view for the transport name. Fetching the transport name // Returns a string view for the transport name. Fetching the transport name
// must be done on the network thread only and note that the lifetime of // must be done on the network thread only and note that the lifetime of

View File

@ -34,6 +34,7 @@
#include "api/video/video_timing.h" #include "api/video/video_timing.h"
#include "call/call.h" #include "call/call.h"
#include "media/base/media_channel.h" #include "media/base/media_channel.h"
#include "media/base/media_channel_impl.h"
#include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/audio_processing/include/audio_processing_statistics.h"
#include "p2p/base/ice_transport_internal.h" #include "p2p/base/ice_transport_internal.h"
#include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_constants.h"
@ -1042,7 +1043,8 @@ void LegacyStatsCollector::ExtractBweInfo() {
} }
auto* video_channel = transceiver->internal()->channel(); auto* video_channel = transceiver->internal()->channel();
if (video_channel) { if (video_channel) {
video_media_channels.push_back(video_channel->video_media_send_channel()); video_media_channels.push_back(static_cast<cricket::VideoMediaChannel*>(
video_channel->video_media_send_channel()));
} }
} }
@ -1150,15 +1152,15 @@ class VideoMediaChannelStatsGatherer final : public MediaChannelStatsGatherer {
}; };
std::unique_ptr<MediaChannelStatsGatherer> CreateMediaChannelStatsGatherer( std::unique_ptr<MediaChannelStatsGatherer> CreateMediaChannelStatsGatherer(
cricket::MediaSendChannelInterface* channel) { cricket::MediaChannel* channel) {
RTC_DCHECK(channel); RTC_DCHECK(channel);
if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) { if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) {
return std::make_unique<VoiceMediaChannelStatsGatherer>( return std::make_unique<VoiceMediaChannelStatsGatherer>(
static_cast<cricket::VoiceMediaChannel*>(channel)); channel->AsVoiceChannel());
} else { } else {
RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO); RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO);
return std::make_unique<VideoMediaChannelStatsGatherer>( return std::make_unique<VideoMediaChannelStatsGatherer>(
static_cast<cricket::VideoMediaChannel*>(channel)); channel->AsVideoChannel());
} }
} }
@ -1179,7 +1181,7 @@ void LegacyStatsCollector::ExtractMediaInfo(
continue; continue;
} }
std::unique_ptr<MediaChannelStatsGatherer> gatherer = std::unique_ptr<MediaChannelStatsGatherer> gatherer =
CreateMediaChannelStatsGatherer(channel->media_send_channel()); CreateMediaChannelStatsGatherer(channel->media_channel());
gatherer->mid = channel->mid(); gatherer->mid = channel->mid();
gatherer->transport_name = transport_names_by_mid.at(gatherer->mid); gatherer->transport_name = transport_names_by_mid.at(gatherer->mid);

View File

@ -1174,14 +1174,14 @@ rtc::scoped_refptr<RtpSenderInterface> PeerConnection::CreateSender(
auto audio_sender = auto audio_sender =
AudioRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(), AudioRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(),
legacy_stats_.get(), rtp_manager()); legacy_stats_.get(), rtp_manager());
audio_sender->SetMediaChannel(rtp_manager()->voice_media_channel()); audio_sender->SetMediaChannel(rtp_manager()->voice_media_send_channel());
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create( new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(), audio_sender); signaling_thread(), audio_sender);
rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender); rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender);
} else if (kind == MediaStreamTrackInterface::kVideoKind) { } else if (kind == MediaStreamTrackInterface::kVideoKind) {
auto video_sender = VideoRtpSender::Create( auto video_sender = VideoRtpSender::Create(
worker_thread(), rtc::CreateRandomUuid(), rtp_manager()); worker_thread(), rtc::CreateRandomUuid(), rtp_manager());
video_sender->SetMediaChannel(rtp_manager()->video_media_channel()); video_sender->SetMediaChannel(rtp_manager()->video_media_send_channel());
new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create( new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
signaling_thread(), video_sender); signaling_thread(), video_sender);
rtp_manager()->GetVideoTransceiver()->internal()->AddSender(new_sender); rtp_manager()->GetVideoTransceiver()->internal()->AddSender(new_sender);
@ -1629,14 +1629,15 @@ RTCError PeerConnection::SetConfiguration(
} }
if (modified_config.allow_codec_switching.has_value()) { if (modified_config.allow_codec_switching.has_value()) {
std::vector<cricket::VideoMediaChannel*> channels; std::vector<cricket::VideoMediaSendChannelInterface*> channels;
for (const auto& transceiver : rtp_manager()->transceivers()->List()) { for (const auto& transceiver : rtp_manager()->transceivers()->List()) {
if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO)
continue; continue;
auto* video_channel = transceiver->internal()->channel(); auto* video_channel = transceiver->internal()->channel();
if (video_channel) if (video_channel)
channels.push_back(static_cast<cricket::VideoMediaChannel*>( channels.push_back(
static_cast<cricket::VideoMediaSendChannelInterface*>(
video_channel->media_send_channel())); video_channel->media_send_channel()));
} }

View File

@ -70,7 +70,8 @@ RemoteAudioSource::~RemoteAudioSource() {
} }
} }
void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel, void RemoteAudioSource::Start(
cricket::VoiceMediaReceiveChannelInterface* media_channel,
absl::optional<uint32_t> ssrc) { absl::optional<uint32_t> ssrc) {
RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK_RUN_ON(worker_thread_);
@ -84,7 +85,8 @@ void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel,
std::make_unique<AudioDataProxy>(this)); std::make_unique<AudioDataProxy>(this));
} }
void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel, void RemoteAudioSource::Stop(
cricket::VoiceMediaReceiveChannelInterface* media_channel,
absl::optional<uint32_t> ssrc) { absl::optional<uint32_t> ssrc) {
RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(media_channel); RTC_DCHECK(media_channel);
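
A sketch of the caller-side pairing implied by the new signatures. Assumptions: RemoteAudioSource as declared in pc/remote_audio_source.h; RestartRemoteSource is a hypothetical helper and, like Start/Stop above, must run on the worker thread.

#include <cstdint>
#include "absl/types/optional.h"
#include "pc/remote_audio_source.h"

// Hypothetical helper: re-registers a remote audio source against the receive
// half of the media channel. Both calls expect the worker thread.
void RestartRemoteSource(webrtc::RemoteAudioSource* source,
                         cricket::VoiceMediaReceiveChannelInterface* channel,
                         absl::optional<uint32_t> ssrc) {
  source->Stop(channel, ssrc);
  source->Start(channel, ssrc);
}
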

View File

@ -49,9 +49,9 @@ class RemoteAudioSource : public Notifier<AudioSourceInterface> {
// Register and unregister remote audio source with the underlying media // Register and unregister remote audio source with the underlying media
// engine. // engine.
void Start(cricket::VoiceMediaChannel* media_channel, void Start(cricket::VoiceMediaReceiveChannelInterface* media_channel,
absl::optional<uint32_t> ssrc); absl::optional<uint32_t> ssrc);
void Stop(cricket::VoiceMediaChannel* media_channel, void Stop(cricket::VoiceMediaReceiveChannelInterface* media_channel,
absl::optional<uint32_t> ssrc); absl::optional<uint32_t> ssrc);
void SetState(SourceState new_state); void SetState(SourceState new_state);

View File

@ -36,6 +36,7 @@
#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/scalability_mode.h"
#include "common_video/include/quality_limitation_reason.h" #include "common_video/include/quality_limitation_reason.h"
#include "media/base/media_channel.h" #include "media/base/media_channel.h"
#include "media/base/media_channel_impl.h"
#include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/audio_processing/include/audio_processing_statistics.h"
#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/report_block_data.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@ -2365,13 +2366,15 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() {
if (media_type == cricket::MEDIA_TYPE_AUDIO) { if (media_type == cricket::MEDIA_TYPE_AUDIO) {
cricket::VoiceMediaChannel* voice_channel = cricket::VoiceMediaChannel* voice_channel =
channel->voice_media_send_channel(); static_cast<cricket::VoiceMediaChannel*>(
channel->voice_media_send_channel());
RTC_DCHECK(voice_stats.find(voice_channel) == voice_stats.end()); RTC_DCHECK(voice_stats.find(voice_channel) == voice_stats.end());
voice_stats.insert( voice_stats.insert(
std::make_pair(voice_channel, cricket::VoiceMediaInfo())); std::make_pair(voice_channel, cricket::VoiceMediaInfo()));
} else if (media_type == cricket::MEDIA_TYPE_VIDEO) { } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
cricket::VideoMediaChannel* video_channel = cricket::VideoMediaChannel* video_channel =
channel->video_media_send_channel(); static_cast<cricket::VideoMediaChannel*>(
channel->video_media_send_channel());
RTC_DCHECK(video_stats.find(video_channel) == video_stats.end()); RTC_DCHECK(video_stats.find(video_channel) == video_stats.end());
video_stats.insert( video_stats.insert(
std::make_pair(video_channel, cricket::VideoMediaInfo())); std::make_pair(video_channel, cricket::VideoMediaInfo()));
@ -2410,12 +2413,14 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() {
cricket::MediaType media_type = transceiver->media_type(); cricket::MediaType media_type = transceiver->media_type();
if (media_type == cricket::MEDIA_TYPE_AUDIO) { if (media_type == cricket::MEDIA_TYPE_AUDIO) {
cricket::VoiceMediaChannel* voice_channel = cricket::VoiceMediaChannel* voice_channel =
channel->voice_media_send_channel(); static_cast<cricket::VoiceMediaChannel*>(
channel->voice_media_send_channel());
RTC_DCHECK(voice_stats.find(voice_channel) != voice_stats.end()); RTC_DCHECK(voice_stats.find(voice_channel) != voice_stats.end());
voice_media_info = std::move(voice_stats[voice_channel]); voice_media_info = std::move(voice_stats[voice_channel]);
} else if (media_type == cricket::MEDIA_TYPE_VIDEO) { } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
cricket::VideoMediaChannel* video_channel = cricket::VideoMediaChannel* video_channel =
channel->video_media_send_channel(); static_cast<cricket::VideoMediaChannel*>(
channel->video_media_send_channel());
RTC_DCHECK(video_stats.find(video_channel) != video_stats.end()); RTC_DCHECK(video_stats.find(video_channel) != video_stats.end());
video_media_info = std::move(video_stats[video_channel]); video_media_info = std::move(video_stats[video_channel]);
} }

View File

@ -378,8 +378,8 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase {
void RemoveTrackFromStats() override; void RemoveTrackFromStats() override;
private: private:
cricket::VoiceMediaChannel* voice_media_channel() { cricket::VoiceMediaSendChannelInterface* voice_media_channel() {
return static_cast<cricket::VoiceMediaChannel*>(media_channel_); return media_channel_->AsVoiceSendChannel();
} }
rtc::scoped_refptr<AudioTrackInterface> audio_track() const { rtc::scoped_refptr<AudioTrackInterface> audio_track() const {
return rtc::scoped_refptr<AudioTrackInterface>( return rtc::scoped_refptr<AudioTrackInterface>(
@ -436,8 +436,8 @@ class VideoRtpSender : public RtpSenderBase {
void AttachTrack() override; void AttachTrack() override;
private: private:
cricket::VideoMediaChannel* video_media_channel() { cricket::VideoMediaSendChannelInterface* video_media_channel() {
return static_cast<cricket::VideoMediaChannel*>(media_channel_); return media_channel_->AsVideoSendChannel();
} }
rtc::scoped_refptr<VideoTrackInterface> video_track() const { rtc::scoped_refptr<VideoTrackInterface> video_track() const {
return rtc::scoped_refptr<VideoTrackInterface>( return rtc::scoped_refptr<VideoTrackInterface>(

View File

@@ -204,7 +204,7 @@ class RtpSenderReceiverTest
     ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get()));
     EXPECT_CALL(*set_streams_observer, OnSetStreams());
     audio_rtp_sender_->SetStreams({local_stream_->id()});
-    audio_rtp_sender_->SetMediaChannel(voice_media_channel());
+    audio_rtp_sender_->SetMediaChannel(voice_media_channel()->AsSendChannel());
     audio_rtp_sender_->SetSsrc(kAudioSsrc);
     VerifyVoiceChannelInput();
   }
@@ -212,7 +212,8 @@ class RtpSenderReceiverTest
   void CreateAudioRtpSenderWithNoTrack() {
     audio_rtp_sender_ =
         AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr);
-    audio_rtp_sender_->SetMediaChannel(voice_media_channel());
+    audio_rtp_sender_->SetMediaChannel(
+        voice_media_channel()->AsVoiceSendChannel());
   }
   void CreateVideoRtpSender(uint32_t ssrc) {
@@ -264,14 +265,16 @@ class RtpSenderReceiverTest
     ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
     EXPECT_CALL(*set_streams_observer, OnSetStreams());
     video_rtp_sender_->SetStreams({local_stream_->id()});
-    video_rtp_sender_->SetMediaChannel(video_media_channel());
+    video_rtp_sender_->SetMediaChannel(
+        video_media_channel()->AsVideoSendChannel());
     video_rtp_sender_->SetSsrc(ssrc);
     VerifyVideoChannelInput(ssrc);
   }
   void CreateVideoRtpSenderWithNoTrack() {
     video_rtp_sender_ =
         VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
-    video_rtp_sender_->SetMediaChannel(video_media_channel());
+    video_rtp_sender_->SetMediaChannel(
+        video_media_channel()->AsVideoSendChannel());
   }
   void DestroyAudioRtpSender() {
@@ -289,7 +292,8 @@ class RtpSenderReceiverTest
     audio_rtp_receiver_ = rtc::make_ref_counted<AudioRtpReceiver>(
         rtc::Thread::Current(), kAudioTrackId, streams,
         /*is_unified_plan=*/true);
-    audio_rtp_receiver_->SetMediaChannel(voice_media_channel());
+    audio_rtp_receiver_->SetMediaChannel(
+        voice_media_channel()->AsVoiceReceiveChannel());
     audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc);
     audio_track_ = audio_rtp_receiver_->audio_track();
     VerifyVoiceChannelOutput();
@@ -299,7 +303,8 @@ class RtpSenderReceiverTest
       std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
     video_rtp_receiver_ = rtc::make_ref_counted<VideoRtpReceiver>(
         rtc::Thread::Current(), kVideoTrackId, streams);
-    video_rtp_receiver_->SetMediaChannel(video_media_channel());
+    video_rtp_receiver_->SetMediaChannel(
+        video_media_channel()->AsVideoReceiveChannel());
     video_rtp_receiver_->SetupMediaChannel(kVideoSsrc);
     video_track_ = video_rtp_receiver_->video_track();
     VerifyVideoChannelOutput();
@@ -319,7 +324,8 @@ class RtpSenderReceiverTest
     video_rtp_receiver_ = rtc::make_ref_counted<VideoRtpReceiver>(
         rtc::Thread::Current(), kVideoTrackId, streams);
-    video_rtp_receiver_->SetMediaChannel(video_media_channel());
+    video_rtp_receiver_->SetMediaChannel(
+        video_media_channel()->AsVideoReceiveChannel());
     video_rtp_receiver_->SetupMediaChannel(primary_ssrc);
     video_track_ = video_rtp_receiver_->video_track();
   }
@@ -689,15 +695,17 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
 TEST_F(RtpSenderReceiverTest, AudioRtpReceiverDelay) {
   CreateAudioRtpReceiver();
-  VerifyRtpReceiverDelayBehaviour(voice_media_channel(),
-                                  audio_rtp_receiver_.get(), kAudioSsrc);
+  VerifyRtpReceiverDelayBehaviour(
+      voice_media_channel()->AsVoiceReceiveChannel(), audio_rtp_receiver_.get(),
+      kAudioSsrc);
   DestroyAudioRtpReceiver();
 }

 TEST_F(RtpSenderReceiverTest, VideoRtpReceiverDelay) {
   CreateVideoRtpReceiver();
-  VerifyRtpReceiverDelayBehaviour(video_media_channel(),
-                                  video_rtp_receiver_.get(), kVideoSsrc);
+  VerifyRtpReceiverDelayBehaviour(
+      video_media_channel()->AsVideoReceiveChannel(), video_rtp_receiver_.get(),
+      kVideoSsrc);
   DestroyVideoRtpReceiver();
 }
@@ -936,7 +944,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) {
   cricket::StreamParams stream_params =
       cricket::CreateSimStreamParams("cname", ssrcs);
   voice_media_channel()->AddSendStream(stream_params);
-  audio_rtp_sender_->SetMediaChannel(voice_media_channel());
+  audio_rtp_sender_->SetMediaChannel(
+      voice_media_channel()->AsVoiceSendChannel());
   audio_rtp_sender_->SetSsrc(1);
   params = audio_rtp_sender_->GetParameters();
@@ -1189,7 +1198,8 @@ TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) {
   cricket::StreamParams stream_params =
       cricket::CreateSimStreamParams("cname", ssrcs);
   video_media_channel()->AddSendStream(stream_params);
-  video_rtp_sender_->SetMediaChannel(video_media_channel());
+  video_rtp_sender_->SetMediaChannel(
+      video_media_channel()->AsVideoSendChannel());
   video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast);
   params = video_rtp_sender_->GetParameters();
@@ -1229,7 +1239,8 @@ TEST_F(RtpSenderReceiverTest,
   cricket::StreamParams stream_params =
       cricket::CreateSimStreamParams("cname", ssrcs);
   video_media_channel()->AddSendStream(stream_params);
-  video_rtp_sender_->SetMediaChannel(video_media_channel());
+  video_rtp_sender_->SetMediaChannel(
+      video_media_channel()->AsVideoSendChannel());
   video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast);
   params = video_rtp_sender_->GetParameters();
@@ -1272,7 +1283,8 @@ TEST_F(RtpSenderReceiverDeathTest,
   cricket::StreamParams stream_params =
       cricket::StreamParams::CreateLegacy(kVideoSsrc);
   video_media_channel()->AddSendStream(stream_params);
-  video_rtp_sender_->SetMediaChannel(video_media_channel());
+  video_rtp_sender_->SetMediaChannel(
+      video_media_channel()->AsVideoSendChannel());
   EXPECT_DEATH(video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast), "");
 }
 #endif
@@ -1687,7 +1699,8 @@ TEST_F(RtpSenderReceiverTest,
   ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
   EXPECT_CALL(*set_streams_observer, OnSetStreams());
   video_rtp_sender_->SetStreams({local_stream_->id()});
-  video_rtp_sender_->SetMediaChannel(video_media_channel());
+  video_rtp_sender_->SetMediaChannel(
+      video_media_channel()->AsVideoSendChannel());
   video_track_->set_enabled(true);
   // Sender is not ready to send (no SSRC) so no option should have been set.
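Note: every fixture above now hands the sender or receiver the narrow half of the channel rather than the channel object itself. A hypothetical fixture fragment summarizing the wiring follows; the member and helper names match the tests above, but the fragment is a sketch and not a compilable excerpt.

  // Sketch of the post-split wiring: senders get the send half, receivers
  // get the receive half of the same underlying channel.
  audio_rtp_sender_->SetMediaChannel(
      voice_media_channel()->AsVoiceSendChannel());
  audio_rtp_receiver_->SetMediaChannel(
      voice_media_channel()->AsVoiceReceiveChannel());
  video_rtp_sender_->SetMediaChannel(
      video_media_channel()->AsVideoSendChannel());
  video_rtp_receiver_->SetMediaChannel(
      video_media_channel()->AsVideoReceiveChannel());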
@@ -72,8 +72,8 @@ PeerConnectionObserver* RtpTransmissionManager::Observer() const {
   return observer_;
 }

-cricket::VoiceMediaChannel* RtpTransmissionManager::voice_media_channel()
-    const {
+cricket::VoiceMediaSendChannelInterface*
+RtpTransmissionManager::voice_media_send_channel() const {
   RTC_DCHECK_RUN_ON(signaling_thread());
   RTC_DCHECK(!IsUnifiedPlan());
   auto* voice_channel = GetAudioTransceiver()->internal()->channel();
@@ -84,8 +84,8 @@ cricket::VoiceMediaChannel* RtpTransmissionManager::voice_media_channel()
   }
 }

-cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel()
-    const {
+cricket::VideoMediaSendChannelInterface*
+RtpTransmissionManager::video_media_send_channel() const {
   RTC_DCHECK_RUN_ON(signaling_thread());
   RTC_DCHECK(!IsUnifiedPlan());
   auto* video_channel = GetVideoTransceiver()->internal()->channel();
@@ -95,6 +95,29 @@ cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel()
     return nullptr;
   }
 }
+
+cricket::VoiceMediaReceiveChannelInterface*
+RtpTransmissionManager::voice_media_receive_channel() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(!IsUnifiedPlan());
+  auto* voice_channel = GetAudioTransceiver()->internal()->channel();
+  if (voice_channel) {
+    return voice_channel->voice_media_receive_channel();
+  } else {
+    return nullptr;
+  }
+}
+
+cricket::VideoMediaReceiveChannelInterface*
+RtpTransmissionManager::video_media_receive_channel() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(!IsUnifiedPlan());
+  auto* video_channel = GetVideoTransceiver()->internal()->channel();
+  if (video_channel) {
+    return video_channel->video_media_receive_channel();
+  } else {
+    return nullptr;
+  }
+}

 RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>
 RtpTransmissionManager::AddTrack(
@@ -132,7 +155,7 @@ RtpTransmissionManager::AddTrackPlanB(
           init_send_encodings ? *init_send_encodings
                               : std::vector<RtpEncodingParameters>());
   if (track->kind() == MediaStreamTrackInterface::kAudioKind) {
-    new_sender->internal()->SetMediaChannel(voice_media_channel());
+    new_sender->internal()->SetMediaChannel(voice_media_send_channel());
     GetAudioTransceiver()->internal()->AddSender(new_sender);
     const RtpSenderInfo* sender_info =
         FindSenderInfo(local_audio_sender_infos_,
@@ -142,7 +165,7 @@ RtpTransmissionManager::AddTrackPlanB(
     }
   } else {
     RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind());
-    new_sender->internal()->SetMediaChannel(video_media_channel());
+    new_sender->internal()->SetMediaChannel(video_media_send_channel());
     GetVideoTransceiver()->internal()->AddSender(new_sender);
     const RtpSenderInfo* sender_info =
         FindSenderInfo(local_video_sender_infos_,
@@ -389,7 +412,7 @@ void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track,
   auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(),
                                  rtc::scoped_refptr<AudioTrackInterface>(track),
                                  {stream->id()}, {});
-  new_sender->internal()->SetMediaChannel(voice_media_channel());
+  new_sender->internal()->SetMediaChannel(voice_media_send_channel());
   GetAudioTransceiver()->internal()->AddSender(new_sender);
   // If the sender has already been configured in SDP, we call SetSsrc,
   // which will connect the sender to the underlying transport. This can
@@ -436,7 +459,7 @@ void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track,
   auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(),
                                  rtc::scoped_refptr<VideoTrackInterface>(track),
                                  {stream->id()}, {});
-  new_sender->internal()->SetMediaChannel(video_media_channel());
+  new_sender->internal()->SetMediaChannel(video_media_send_channel());
   GetVideoTransceiver()->internal()->AddSender(new_sender);
   const RtpSenderInfo* sender_info =
       FindSenderInfo(local_video_sender_infos_, stream->id(), track->id());
@@ -468,7 +491,7 @@ void RtpTransmissionManager::CreateAudioReceiver(
   // the constructor taking stream IDs instead.
   auto audio_receiver = rtc::make_ref_counted<AudioRtpReceiver>(
       worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan(),
-      voice_media_channel());
+      voice_media_receive_channel());
   if (remote_sender_info.sender_id == kDefaultAudioSenderId) {
     audio_receiver->SetupUnsignaledMediaChannel();
   } else {
@@ -497,7 +520,7 @@ void RtpTransmissionManager::CreateVideoReceiver(
       remote_sender_info.sender_id == kDefaultVideoSenderId
           ? absl::nullopt
           : absl::optional<uint32_t>(remote_sender_info.first_ssrc),
-      video_media_channel());
+      video_media_receive_channel());
   auto receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
       signaling_thread(), worker_thread(), std::move(video_receiver));
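Note: the split Plan B helpers let each caller pick the half it needs, and they still return nullptr when the transceiver has no channel. A hypothetical caller fragment illustrating that contract follows; the guard and type names are taken from the helpers added above, but the receiver call itself is only a sketch, not code from this CL.

  // Sketch: a Plan B path choosing the receive half and tolerating a missing
  // channel, mirroring the nullptr contract of voice_media_receive_channel().
  cricket::VoiceMediaReceiveChannelInterface* receive_channel =
      voice_media_receive_channel();
  if (!receive_channel) {
    RTC_LOG(LS_WARNING) << "No voice channel yet; receiver left unwired.";
    return;
  }
  audio_receiver->SetMediaChannel(receive_channel);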
@@ -204,8 +204,12 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver {
   // Plan B helpers for getting the voice/video media channels for the single
   // audio/video transceiver, if it exists.
-  cricket::VoiceMediaChannel* voice_media_channel() const;
-  cricket::VideoMediaChannel* video_media_channel() const;
+  cricket::VoiceMediaSendChannelInterface* voice_media_send_channel() const;
+  cricket::VideoMediaSendChannelInterface* video_media_send_channel() const;
+  cricket::VoiceMediaReceiveChannelInterface* voice_media_receive_channel()
+      const;
+  cricket::VideoMediaReceiveChannelInterface* video_media_receive_channel()
+      const;

  private:
   rtc::Thread* signaling_thread() const { return context_->signaling_thread(); }
@@ -25,6 +25,7 @@ namespace cricket {
 class MockChannelInterface : public cricket::ChannelInterface {
  public:
   MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+  MOCK_METHOD(MediaChannel*, media_channel, (), (const, override));
   MOCK_METHOD(MediaChannel*, media_send_channel, (), (const, override));
   MOCK_METHOD(VoiceMediaChannel*,
               voice_media_send_channel,
@@ -16,6 +16,7 @@
 #include "api/call/audio_sink.h"
 #include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
 #include "rtc_base/gunit.h"
 #include "test/gmock.h"
 #include "test/gtest.h"
@@ -29,7 +30,10 @@ class MockVoiceMediaChannel : public VoiceMediaChannel {
   explicit MockVoiceMediaChannel(webrtc::TaskQueueBase* network_thread)
       : VoiceMediaChannel(network_thread) {}
-  MOCK_METHOD(void, SetInterface, (NetworkInterface * iface), (override));
+  MOCK_METHOD(void,
+              SetInterface,
+              (MediaChannelNetworkInterface * iface),
+              (override));
   MOCK_METHOD(void,
               OnPacketReceived,
               (rtc::CopyOnWriteBuffer packet, int64_t packet_time_us),
@@ -64,7 +68,6 @@ class MockVoiceMediaChannel : public VoiceMediaChannel {
       (uint32_t ssrc,
        rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor),
       (override));
-  MOCK_METHOD(void, SetVideoCodecSwitchingEnabled, (bool enabled), (override));
   MOCK_METHOD(webrtc::RtpParameters,
               GetRtpSendParameters,
               (uint32_t ssrc),
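Note: the mock now declares SetInterface with the renamed network-interface type. A hypothetical expectation against it follows, assuming the remaining pure-virtual methods of MockVoiceMediaChannel are mocked as in the full header; `thread` is a placeholder for whatever network thread the fixture already owns, and the fragment is not from this CL.

  // Sketch: exercising the renamed parameter type on the mock.
  MockVoiceMediaChannel channel(/*network_thread=*/thread);
  EXPECT_CALL(channel, SetInterface(::testing::IsNull()));
  channel.SetInterface(nullptr);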
@@ -276,7 +276,11 @@ void VideoRtpReceiver::SetMediaChannel_w(
     SetEncodedSinkEnabled(false);
   }

-  media_channel_ = static_cast<cricket::VideoMediaChannel*>(media_channel);
+  if (media_channel) {
+    media_channel_ = media_channel->AsVideoReceiveChannel();
+  } else {
+    media_channel_ = nullptr;
+  }

   if (media_channel_) {
     if (saved_generate_keyframe_) {
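Note: the receiver now converts through the channel's own accessor instead of a static_cast and explicitly tolerates a null channel. The same null-safe assignment could be written as a single conditional expression; shown only as an illustration of an equivalent form, not the CL's code.

  // Behaviourally equivalent one-liner for the if/else above.
  media_channel_ =
      media_channel ? media_channel->AsVideoReceiveChannel() : nullptr;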
@@ -149,8 +149,8 @@ class VideoRtpReceiver : public RtpReceiverInternal {
   rtc::Thread* const worker_thread_;
   const std::string id_;
-  cricket::VideoMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) =
-      nullptr;
+  cricket::VideoMediaReceiveChannelInterface* media_channel_
+      RTC_GUARDED_BY(worker_thread_) = nullptr;
   absl::optional<uint32_t> ssrc_ RTC_GUARDED_BY(worker_thread_);
   // `source_` is held here to be able to change the state of the source when
   // the VideoRtpReceiver is stopped.