Reformat the WebRTC code base

Running clang-format with Chromium's style guide.

The goal is n-fold:
 * providing consistency and readability (that's what code guidelines are for)
 * preventing noise with presubmit checks and git cl format
 * building on the previous point: making it easier to automatically fix format issues
 * you name it

Please consider using git-hyper-blame to ignore this commit.

Bug: webrtc:9340
Change-Id: I694567c4cdf8cee2860958cfe82bfaf25848bb87
Reviewed-on: https://webrtc-review.googlesource.com/81185
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23660}
Yves Gerey 2018-06-19 15:03:05 +02:00
parent b602123a5a
commit 665174fdbb
1569 changed files with 30495 additions and 30309 deletions
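
For orientation before the per-file hunks, here is a small hypothetical header (not part of this commit; the class and member names are invented) written the way clang-format with Chromium's style would leave it. It illustrates the two rules that account for most of the churn below: short accessors defined inside a class stay on a single line, while a declaration that overflows the 80-column limit gets one parameter per line, aligned with the opening parenthesis.

// Hypothetical example only; it sketches the formatting rules, not WebRTC code.
#include <cstddef>
#include <cstdint>
#include <string>

class ExampleTrack {
 public:
  // Short inline accessors fit within 80 columns, so they remain one-liners.
  const std::string& id() const { return id_; }
  void set_id(const std::string& id) { id_ = id; }

  // A declaration that does not fit on one line is broken with one parameter
  // per line, aligned under the opening parenthesis.
  void UpdateFrame(uint32_t timestamp,
                   const int16_t* data,
                   size_t samples_per_channel,
                   int sample_rate_hz,
                   size_t num_channels = 1);

 private:
  std::string id_;
};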

View File

@@ -11,8 +11,8 @@
 #ifndef API_ARRAY_VIEW_H_
 #define API_ARRAY_VIEW_H_
-#include <array>
 #include <algorithm>
+#include <array>
 #include <type_traits>
 #include "rtc_base/checks.h"

View File

@@ -66,7 +66,8 @@ void AudioFrame::UpdateFrame(uint32_t timestamp,
 }
 void AudioFrame::CopyFrom(const AudioFrame& src) {
-  if (this == &src) return;
+  if (this == &src)
+    return;
   timestamp_ = src.timestamp_;
   elapsed_time_ms_ = src.elapsed_time_ms_;
@@ -116,7 +117,9 @@ void AudioFrame::Mute() {
   muted_ = true;
 }
-bool AudioFrame::muted() const { return muted_; }
+bool AudioFrame::muted() const {
+  return muted_;
+}
 // static
 const int16_t* AudioFrame::empty_data() {

View File

@@ -43,11 +43,7 @@ class AudioFrame {
     kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t),
   };
-  enum VADActivity {
-    kVadActive = 0,
-    kVadPassive = 1,
-    kVadUnknown = 2
-  };
+  enum VADActivity { kVadActive = 0, kVadPassive = 1, kVadUnknown = 2 };
   enum SpeechType {
     kNormalSpeech = 0,
     kPLC = 1,
@@ -66,9 +62,12 @@ class AudioFrame {
   // ResetWithoutMuting() to skip this wasteful zeroing.
   void ResetWithoutMuting();
-  void UpdateFrame(uint32_t timestamp, const int16_t* data,
-                   size_t samples_per_channel, int sample_rate_hz,
-                   SpeechType speech_type, VADActivity vad_activity,
+  void UpdateFrame(uint32_t timestamp,
+                   const int16_t* data,
+                   size_t samples_per_channel,
+                   int sample_rate_hz,
+                   SpeechType speech_type,
+                   VADActivity vad_activity,
                    size_t num_channels = 1);
   void CopyFrom(const AudioFrame& src);

View File

@@ -87,9 +87,8 @@ TEST(AudioFrameTest, CopyFrom) {
   AudioFrame frame2;
   int16_t samples[kNumChannels * kSamplesPerChannel] = {17};
-  frame2.UpdateFrame(kTimestamp, samples, kSamplesPerChannel,
-                     kSampleRateHz, AudioFrame::kPLC, AudioFrame::kVadActive,
-                     kNumChannels);
+  frame2.UpdateFrame(kTimestamp, samples, kSamplesPerChannel, kSampleRateHz,
+                     AudioFrame::kPLC, AudioFrame::kVadActive, kNumChannels);
   frame1.CopyFrom(frame2);
   EXPECT_EQ(frame2.timestamp_, frame1.timestamp_);

View File

@@ -220,9 +220,8 @@ class AudioEncoder {
   // Provides target audio bitrate and corresponding probing interval of
   // the bandwidth estimator to this encoder to allow it to adapt.
-  virtual void OnReceivedUplinkBandwidth(
-      int target_audio_bitrate_bps,
-      rtc::Optional<int64_t> bwe_period_ms);
+  virtual void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps,
+                                         rtc::Optional<int64_t> bwe_period_ms);
   // Provides RTT to this encoder to allow it to adapt.
   virtual void OnReceivedRtt(int rtt_ms);

View File

@@ -112,7 +112,7 @@ struct AudioOptions {
     ost << ToStringIfSet("residual_echo_detector", residual_echo_detector);
     ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov);
     ost << ToStringIfSet("tx_agc_digital_compression_gain",
                          tx_agc_digital_compression_gain);
     ost << ToStringIfSet("tx_agc_limiter", tx_agc_limiter);
     ost << ToStringIfSet("combined_audio_video_bwe", combined_audio_video_bwe);
     ost << ToStringIfSet("audio_network_adaptor", audio_network_adaptor);

View File

@@ -46,14 +46,14 @@ class Candidate {
   Candidate(const Candidate&);
   ~Candidate();
-  const std::string & id() const { return id_; }
-  void set_id(const std::string & id) { id_ = id; }
+  const std::string& id() const { return id_; }
+  void set_id(const std::string& id) { id_ = id; }
   int component() const { return component_; }
   void set_component(int component) { component_ = component; }
-  const std::string & protocol() const { return protocol_; }
-  void set_protocol(const std::string & protocol) { protocol_ = protocol; }
+  const std::string& protocol() const { return protocol_; }
+  void set_protocol(const std::string& protocol) { protocol_ = protocol; }
   // The protocol used to talk to relay.
   const std::string& relay_protocol() const { return relay_protocol_; }
@@ -61,10 +61,8 @@ class Candidate {
     relay_protocol_ = protocol;
   }
-  const rtc::SocketAddress & address() const { return address_; }
-  void set_address(const rtc::SocketAddress & address) {
-    address_ = address;
-  }
+  const rtc::SocketAddress& address() const { return address_; }
+  void set_address(const rtc::SocketAddress& address) { address_ = address; }
   uint32_t priority() const { return priority_; }
   void set_priority(const uint32_t priority) { priority_ = priority; }
@@ -91,17 +89,17 @@ class Candidate {
   }
   // TODO(honghaiz): Change to usernameFragment or ufrag.
-  const std::string & username() const { return username_; }
-  void set_username(const std::string & username) { username_ = username; }
-  const std::string & password() const { return password_; }
-  void set_password(const std::string & password) { password_ = password; }
-  const std::string & type() const { return type_; }
-  void set_type(const std::string & type) { type_ = type; }
-  const std::string & network_name() const { return network_name_; }
-  void set_network_name(const std::string & network_name) {
+  const std::string& username() const { return username_; }
+  void set_username(const std::string& username) { username_ = username; }
+  const std::string& password() const { return password_; }
+  void set_password(const std::string& password) { password_ = password; }
+  const std::string& type() const { return type_; }
+  void set_type(const std::string& type) { type_ = type; }
+  const std::string& network_name() const { return network_name_; }
+  void set_network_name(const std::string& network_name) {
     network_name_ = network_name;
   }
@@ -127,24 +125,17 @@ class Candidate {
   uint16_t network_id() const { return network_id_; }
   void set_network_id(uint16_t network_id) { network_id_ = network_id; }
-  const std::string& foundation() const {
-    return foundation_;
-  }
+  const std::string& foundation() const { return foundation_; }
   void set_foundation(const std::string& foundation) {
     foundation_ = foundation;
   }
-  const rtc::SocketAddress & related_address() const {
-    return related_address_;
-  }
-  void set_related_address(
-      const rtc::SocketAddress & related_address) {
+  const rtc::SocketAddress& related_address() const { return related_address_; }
+  void set_related_address(const rtc::SocketAddress& related_address) {
     related_address_ = related_address;
   }
   const std::string& tcptype() const { return tcptype_; }
-  void set_tcptype(const std::string& tcptype) {
-    tcptype_ = tcptype;
-  }
+  void set_tcptype(const std::string& tcptype) { tcptype_ = tcptype; }
   // The name of the transport channel of this candidate.
   // TODO(phoglund): remove.
@@ -164,13 +155,9 @@ class Candidate {
   // given one when looking for a matching candidate to remove.
   bool MatchesForRemoval(const Candidate& c) const;
-  std::string ToString() const {
-    return ToStringInternal(false);
-  }
+  std::string ToString() const { return ToStringInternal(false); }
-  std::string ToSensitiveString() const {
-    return ToStringInternal(true);
-  }
+  std::string ToSensitiveString() const { return ToStringInternal(true); }
   uint32_t GetPriority(uint32_t type_preference,
                        int network_adapter_preference,

View File

@@ -61,14 +61,10 @@ struct DataChannelInit {
 // as binary or text.
 struct DataBuffer {
   DataBuffer(const rtc::CopyOnWriteBuffer& data, bool binary)
-      : data(data),
-        binary(binary) {
-  }
+      : data(data), binary(binary) {}
   // For convenience for unit tests.
   explicit DataBuffer(const std::string& text)
-      : data(text.data(), text.length()),
-        binary(false) {
-  }
+      : data(text.data(), text.length()), binary(false) {}
   size_t size() const { return data.size(); }
   rtc::CopyOnWriteBuffer data;

View File

@@ -67,7 +67,8 @@ class DtmfSenderInterface : public rtc::RefCountInterface {
   // If InsertDtmf is called on the same object while an existing task for this
   // object to generate DTMF is still running, the previous task is canceled.
   // Returns true on success and false on failure.
-  virtual bool InsertDtmf(const std::string& tones, int duration,
+  virtual bool InsertDtmf(const std::string& tones,
+                          int duration,
                           int inter_tone_gap) = 0;
   // Returns the track given as argument to the constructor. Only exists for

View File

@@ -36,7 +36,7 @@ void FakeMetricsObserver::IncrementEnumCounter(
 }
 void FakeMetricsObserver::AddHistogramSample(PeerConnectionMetricsName type,
                                              int value) {
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   RTC_DCHECK_EQ(histogram_samples_[type], 0);
   histogram_samples_[type] = value;

View File

@@ -28,8 +28,7 @@ class FakeMetricsObserver : public MetricsObserverInterface {
   void IncrementEnumCounter(PeerConnectionEnumCounterType,
                             int counter,
                             int counter_max) override;
-  void AddHistogramSample(PeerConnectionMetricsName type,
-                          int value) override;
+  void AddHistogramSample(PeerConnectionMetricsName type, int value) override;
   // Accessors to be used by the tests.
   int GetEnumCounter(PeerConnectionEnumCounterType type, int counter) const;

View File

@@ -205,9 +205,7 @@ class CreateSessionDescriptionObserver : public rtc::RefCountInterface {
   // is deprecated; in order to let clients remove the old version, it has a
   // default implementation. If both versions are unimplemented, the
   // result will be a runtime error (stack overflow). This is intentional.
-  virtual void OnFailure(RTCError error) {
-    OnFailure(error.message());
-  }
+  virtual void OnFailure(RTCError error) { OnFailure(error.message()); }
   virtual void OnFailure(const std::string& error) {
     OnFailure(RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error)));
   }

View File

@@ -28,7 +28,8 @@ namespace webrtc {
 class JsepIceCandidate : public IceCandidateInterface {
  public:
   JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index);
-  JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index,
+  JsepIceCandidate(const std::string& sdp_mid,
+                   int sdp_mline_index,
                    const cricket::Candidate& candidate);
   ~JsepIceCandidate();
   // |err| may be null.
@@ -39,9 +40,7 @@ class JsepIceCandidate : public IceCandidateInterface {
   virtual std::string sdp_mid() const { return sdp_mid_; }
   virtual int sdp_mline_index() const { return sdp_mline_index_; }
-  virtual const cricket::Candidate& candidate() const {
-    return candidate_;
-  }
+  virtual const cricket::Candidate& candidate() const { return candidate_; }
   virtual std::string server_url() const { return candidate_.url(); }
@@ -64,9 +63,7 @@ class JsepCandidateCollection : public IceCandidateCollection {
   JsepCandidateCollection(JsepCandidateCollection&& o)
       : candidates_(std::move(o.candidates_)) {}
   ~JsepCandidateCollection();
-  virtual size_t count() const {
-    return candidates_.size();
-  }
+  virtual size_t count() const { return candidates_.size(); }
   virtual bool HasCandidate(const IceCandidateInterface* candidate) const;
   // Adds and takes ownership of the JsepIceCandidate.
   // TODO(deadbeef): Make this use an std::unique_ptr<>, so ownership logic is

View File

@@ -41,8 +41,8 @@ class JsepSessionDescription : public SessionDescriptionInterface {
   // TODO(deadbeef): Make this use an std::unique_ptr<>, so ownership logic is
   // more clear.
   bool Initialize(cricket::SessionDescription* description,
                   const std::string& session_id,
                   const std::string& session_version);
   virtual cricket::SessionDescription* description() {
     return description_.get();
@@ -50,12 +50,8 @@ class JsepSessionDescription : public SessionDescriptionInterface {
   virtual const cricket::SessionDescription* description() const {
     return description_.get();
   }
-  virtual std::string session_id() const {
-    return session_id_;
-  }
-  virtual std::string session_version() const {
-    return session_version_;
-  }
+  virtual std::string session_id() const { return session_id_; }
+  virtual std::string session_version() const { return session_version_; }
   virtual SdpType GetType() const { return type_; }
   virtual std::string type() const { return SdpTypeToString(type_); }
   // Allows changing the type. Used for testing.

View File

@@ -89,8 +89,8 @@ const char MediaConstraintsInterface::kMaxFrameRate[] = "maxFrameRate";
 const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
 // Audio constraints.
-const char MediaConstraintsInterface::kEchoCancellation[] =
-    "echoCancellation";
+const char MediaConstraintsInterface::kEchoCancellation[] = "echoCancellation";
 const char MediaConstraintsInterface::kGoogEchoCancellation[] =
     "googEchoCancellation";
 const char MediaConstraintsInterface::kExtendedFilterEchoCancellation[] =
@@ -107,8 +106,7 @@ const char MediaConstraintsInterface::kExperimentalNoiseSuppression[] =
     "googNoiseSuppression2";
 const char MediaConstraintsInterface::kIntelligibilityEnhancer[] =
     "intelligibilityEnhancer";
-const char MediaConstraintsInterface::kHighpassFilter[] =
-    "googHighpassFilter";
+const char MediaConstraintsInterface::kHighpassFilter[] = "googHighpassFilter";
 const char MediaConstraintsInterface::kTypingNoiseDetection[] =
     "googTypingNoiseDetection";
 const char MediaConstraintsInterface::kAudioMirroring[] = "googAudioMirroring";
@@ -125,11 +123,9 @@ const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
     "OfferToReceiveVideo";
 const char MediaConstraintsInterface::kVoiceActivityDetection[] =
     "VoiceActivityDetection";
-const char MediaConstraintsInterface::kIceRestart[] =
-    "IceRestart";
+const char MediaConstraintsInterface::kIceRestart[] = "IceRestart";
 // Google specific constraint for BUNDLE enable/disable.
-const char MediaConstraintsInterface::kUseRtpMux[] =
-    "googUseRtpMUX";
+const char MediaConstraintsInterface::kUseRtpMux[] = "googUseRtpMUX";
 // Below constraints should be used during PeerConnection construction.
 const char MediaConstraintsInterface::kEnableDtlsSrtp[] =
@@ -150,11 +146,11 @@ const char MediaConstraintsInterface::kCpuOveruseDetection[] =
     "googCpuOveruseDetection";
 const char MediaConstraintsInterface::kPayloadPadding[] = "googPayloadPadding";
 // Set |value| to the value associated with the first appearance of |key|, or
 // return false if |key| is not found.
 bool MediaConstraintsInterface::Constraints::FindFirst(
-    const std::string& key, std::string* value) const {
+    const std::string& key,
+    std::string* value) const {
   for (Constraints::const_iterator iter = begin(); iter != end(); ++iter) {
     if (iter->key == key) {
       *value = iter->value;
@@ -165,7 +161,8 @@ bool MediaConstraintsInterface::Constraints::FindFirst(
 }
 bool FindConstraint(const MediaConstraintsInterface* constraints,
-                    const std::string& key, bool* value,
+                    const std::string& key,
+                    bool* value,
                     size_t* mandatory_constraints) {
   return ::FindConstraint<bool>(constraints, key, value, mandatory_constraints);
 }
@@ -192,9 +189,9 @@ void CopyConstraintsIntoRtcConfiguration(
   }
   FindConstraint(constraints, MediaConstraintsInterface::kEnableDscp,
                  &configuration->media_config.enable_dscp, nullptr);
-  FindConstraint(
-      constraints, MediaConstraintsInterface::kCpuOveruseDetection,
-      &configuration->media_config.video.enable_cpu_adaptation, nullptr);
+  FindConstraint(constraints, MediaConstraintsInterface::kCpuOveruseDetection,
+                 &configuration->media_config.video.enable_cpu_adaptation,
+                 nullptr);
   FindConstraint(constraints, MediaConstraintsInterface::kEnableRtpDataChannels,
                  &configuration->enable_rtp_data_channel, nullptr);
   // Find Suspend Below Min Bitrate constraint.

View File

@@ -39,8 +39,7 @@ class MediaConstraintsInterface {
   struct Constraint {
     Constraint() {}
     Constraint(const std::string& key, const std::string value)
-        : key(key), value(value) {
-    }
+        : key(key), value(value) {}
     std::string key;
     std::string value;
   };
@@ -54,12 +53,12 @@ class MediaConstraintsInterface {
   // Specified by draft-alvestrand-constraints-resolution-00b
   static const char kMinAspectRatio[];  // minAspectRatio
   static const char kMaxAspectRatio[];  // maxAspectRatio
   static const char kMaxWidth[];        // maxWidth
   static const char kMinWidth[];        // minWidth
   static const char kMaxHeight[];       // maxHeight
   static const char kMinHeight[];       // minHeight
   static const char kMaxFrameRate[];    // maxFrameRate
   static const char kMinFrameRate[];    // minFrameRate
   // Constraint keys used by a local audio source.
   static const char kEchoCancellation[];  // echoCancellation
@@ -68,15 +67,15 @@ class MediaConstraintsInterface {
   static const char kGoogEchoCancellation[];  // googEchoCancellation
   static const char kExtendedFilterEchoCancellation[];  // googEchoCancellation2
   static const char kDAEchoCancellation[];    // googDAEchoCancellation
   static const char kAutoGainControl[];       // googAutoGainControl
   static const char kExperimentalAutoGainControl[];  // googAutoGainControl2
   static const char kNoiseSuppression[];      // googNoiseSuppression
   static const char kExperimentalNoiseSuppression[];  // googNoiseSuppression2
   static const char kIntelligibilityEnhancer[];  // intelligibilityEnhancer
   static const char kHighpassFilter[];        // googHighpassFilter
   static const char kTypingNoiseDetection[];  // googTypingNoiseDetection
   static const char kAudioMirroring[];        // googAudioMirroring
   static const char
       kAudioNetworkAdaptorConfig[];  // goodAudioNetworkAdaptorConfig
@@ -85,15 +84,15 @@ class MediaConstraintsInterface {
   // Constraint keys for CreateOffer / CreateAnswer
   // Specified by the W3C PeerConnection spec
   static const char kOfferToReceiveVideo[];     // OfferToReceiveVideo
   static const char kOfferToReceiveAudio[];     // OfferToReceiveAudio
   static const char kVoiceActivityDetection[];  // VoiceActivityDetection
   static const char kIceRestart[];              // IceRestart
   // These keys are google specific.
   static const char kUseRtpMux[];  // googUseRtpMUX
   // Constraints values.
   static const char kValueTrue[];   // true
   static const char kValueFalse[];  // false
   // PeerConnection constraint keys.
@@ -108,12 +107,12 @@ class MediaConstraintsInterface {
   static const char kEnableIPv6[];  // googIPv6
   // Temporary constraint to enable suspend below min bitrate feature.
   static const char kEnableVideoSuspendBelowMinBitrate[];
   // googSuspendBelowMinBitrate
   // Constraint to enable combined audio+video bandwidth estimation.
   static const char kCombinedAudioVideoBwe[];  // googCombinedAudioVideoBwe
   static const char kScreencastMinBitrate[];   // googScreencastMinBitrate
   static const char kCpuOveruseDetection[];    // googCpuOveruseDetection
   static const char kPayloadPadding[];         // googPayloadPadding
   // The prefix of internal-only constraints whose JS set values should be
   // stripped by Chrome before passed down to Libjingle.
@@ -126,7 +125,8 @@ class MediaConstraintsInterface {
 };
 bool FindConstraint(const MediaConstraintsInterface* constraints,
-                    const std::string& key, bool* value,
+                    const std::string& key,
+                    bool* value,
                     size_t* mandatory_constraints);
 bool FindConstraint(const MediaConstraintsInterface* constraints,

View File

@@ -60,12 +60,7 @@ class NotifierInterface {
 class MediaSourceInterface : public rtc::RefCountInterface,
                              public NotifierInterface {
  public:
-  enum SourceState {
-    kInitializing,
-    kLive,
-    kEnded,
-    kMuted
-  };
+  enum SourceState { kInitializing, kLive, kEnded, kMuted };
   virtual SourceState state() const = 0;
@@ -116,9 +111,8 @@ class MediaStreamTrackInterface : public rtc::RefCountInterface,
 // on the worker thread via a VideoTrack. A custom implementation of a source
 // can inherit AdaptedVideoTrackSource instead of directly implementing this
 // interface.
-class VideoTrackSourceInterface
-    : public MediaSourceInterface,
-      public rtc::VideoSourceInterface<VideoFrame> {
+class VideoTrackSourceInterface : public MediaSourceInterface,
+                                  public rtc::VideoSourceInterface<VideoFrame> {
  public:
   struct Stats {
     // Original size of captured frame, before video adaptation.
@@ -156,9 +150,8 @@ class VideoTrackSourceInterface
 // PeerConnectionFactory::CreateVideoTrack can be used for creating a VideoTrack
 // that ensures thread safety and that all methods are called on the right
 // thread.
-class VideoTrackInterface
-    : public MediaStreamTrackInterface,
-      public rtc::VideoSourceInterface<VideoFrame> {
+class VideoTrackInterface : public MediaStreamTrackInterface,
+                            public rtc::VideoSourceInterface<VideoFrame> {
  public:
   // Video track content hint, used to override the source is_screencast
   // property.
@@ -276,7 +269,7 @@ class AudioTrackInterface : public MediaStreamTrackInterface {
  public:
   // TODO(deadbeef): Figure out if the following interface should be const or
   // not.
   virtual AudioSourceInterface* GetSource() const = 0;
   // Add/Remove a sink that will receive the audio data from the track.
   virtual void AddSink(AudioTrackSinkInterface* sink) = 0;
@@ -297,10 +290,8 @@ class AudioTrackInterface : public MediaStreamTrackInterface {
   ~AudioTrackInterface() override = default;
 };
-typedef std::vector<rtc::scoped_refptr<AudioTrackInterface> >
-    AudioTrackVector;
-typedef std::vector<rtc::scoped_refptr<VideoTrackInterface> >
-    VideoTrackVector;
+typedef std::vector<rtc::scoped_refptr<AudioTrackInterface> > AudioTrackVector;
+typedef std::vector<rtc::scoped_refptr<VideoTrackInterface> > VideoTrackVector;
 // C++ version of https://www.w3.org/TR/mediacapture-streams/#mediastream.
 //
@@ -317,10 +308,10 @@ class MediaStreamInterface : public rtc::RefCountInterface,
   virtual AudioTrackVector GetAudioTracks() = 0;
   virtual VideoTrackVector GetVideoTracks() = 0;
-  virtual rtc::scoped_refptr<AudioTrackInterface>
-      FindAudioTrack(const std::string& track_id) = 0;
-  virtual rtc::scoped_refptr<VideoTrackInterface>
-      FindVideoTrack(const std::string& track_id) = 0;
+  virtual rtc::scoped_refptr<AudioTrackInterface> FindAudioTrack(
+      const std::string& track_id) = 0;
+  virtual rtc::scoped_refptr<VideoTrackInterface> FindVideoTrack(
+      const std::string& track_id) = 0;
   virtual bool AddTrack(AudioTrackInterface* track) = 0;
   virtual bool AddTrack(VideoTrackInterface* track) = 0;

View File

@@ -21,22 +21,22 @@ namespace webrtc {
 // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
 // are called on is an implementation detail.
 BEGIN_SIGNALING_PROXY_MAP(MediaStream)
 PROXY_SIGNALING_THREAD_DESTRUCTOR()
 PROXY_CONSTMETHOD0(std::string, id)
 PROXY_METHOD0(AudioTrackVector, GetAudioTracks)
 PROXY_METHOD0(VideoTrackVector, GetVideoTracks)
 PROXY_METHOD1(rtc::scoped_refptr<AudioTrackInterface>,
               FindAudioTrack,
               const std::string&)
 PROXY_METHOD1(rtc::scoped_refptr<VideoTrackInterface>,
               FindVideoTrack,
               const std::string&)
 PROXY_METHOD1(bool, AddTrack, AudioTrackInterface*)
 PROXY_METHOD1(bool, AddTrack, VideoTrackInterface*)
 PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*)
 PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*)
 PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
 PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
 END_PROXY_MAP()
 }  // namespace webrtc

View File

@@ -25,39 +25,39 @@ namespace webrtc {
 // are called on is an implementation detail.
 BEGIN_SIGNALING_PROXY_MAP(AudioTrack)
 PROXY_SIGNALING_THREAD_DESTRUCTOR()
 PROXY_CONSTMETHOD0(std::string, kind)
 PROXY_CONSTMETHOD0(std::string, id)
 PROXY_CONSTMETHOD0(TrackState, state)
 PROXY_CONSTMETHOD0(bool, enabled)
 PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource)
 PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*)
 PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*)
 PROXY_METHOD1(bool, GetSignalLevel, int*)
 PROXY_METHOD0(rtc::scoped_refptr<AudioProcessorInterface>, GetAudioProcessor)
 PROXY_METHOD1(bool, set_enabled, bool)
 PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
 PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
 END_PROXY_MAP()
 BEGIN_PROXY_MAP(VideoTrack)
 PROXY_SIGNALING_THREAD_DESTRUCTOR()
 PROXY_CONSTMETHOD0(std::string, kind)
 PROXY_CONSTMETHOD0(std::string, id)
 PROXY_CONSTMETHOD0(TrackState, state)
 PROXY_CONSTMETHOD0(bool, enabled)
 PROXY_METHOD1(bool, set_enabled, bool)
 PROXY_CONSTMETHOD0(ContentHint, content_hint)
 PROXY_METHOD1(void, set_content_hint, ContentHint)
 PROXY_WORKER_METHOD2(void,
                      AddOrUpdateSink,
                      rtc::VideoSinkInterface<VideoFrame>*,
                      const rtc::VideoSinkWants&)
 PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
 PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource)
 PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
 PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
 END_PROXY_MAP()
 }  // namespace webrtc

View File

@@ -15,11 +15,7 @@
 namespace cricket {
-enum MediaType {
-  MEDIA_TYPE_AUDIO,
-  MEDIA_TYPE_VIDEO,
-  MEDIA_TYPE_DATA
-};
+enum MediaType { MEDIA_TYPE_AUDIO, MEDIA_TYPE_VIDEO, MEDIA_TYPE_DATA };
 std::string MediaTypeToString(MediaType type);
 // Aborts on invalid string. Only expected to be used on strings that are

View File

@@ -23,8 +23,7 @@ namespace webrtc {
 template <class T>
 class Notifier : public T {
  public:
-  Notifier() {
-  }
+  Notifier() {}
   virtual void RegisterObserver(ObserverInterface* observer) {
     RTC_DCHECK(observer != nullptr);

View File

@@ -20,4 +20,4 @@ TEST_F(SessionDescriptionTest, CreateSessionDescription) {
   EXPECT_EQ(-1, s.session_id());
   EXPECT_EQ("0", s.session_version());
 }
-}
+}  // namespace webrtc

View File

@@ -11,9 +11,9 @@
 #ifndef API_ORTC_SRTPTRANSPORTINTERFACE_H_
 #define API_ORTC_SRTPTRANSPORTINTERFACE_H_
+#include "api/cryptoparams.h"
 #include "api/ortc/rtptransportinterface.h"
 #include "api/rtcerror.h"
-#include "api/cryptoparams.h"
 namespace webrtc {

View File

@@ -24,50 +24,53 @@ namespace webrtc {
 // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
 // are called on is an implementation detail.
 BEGIN_SIGNALING_PROXY_MAP(PeerConnectionFactory)
 PROXY_SIGNALING_THREAD_DESTRUCTOR()
 // Use the overloads of CreateVideoSource that take raw VideoCapturer
 // pointers from PeerConnectionFactoryInterface.
 // TODO(deadbeef): Remove this using statement once those overloads are
 // removed.
 using PeerConnectionFactoryInterface::CreateVideoSource;
 PROXY_METHOD1(void, SetOptions, const Options&)
 PROXY_METHOD5(rtc::scoped_refptr<PeerConnectionInterface>,
               CreatePeerConnection,
               const PeerConnectionInterface::RTCConfiguration&,
               const MediaConstraintsInterface*,
               std::unique_ptr<cricket::PortAllocator>,
               std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
               PeerConnectionObserver*);
 PROXY_METHOD4(rtc::scoped_refptr<PeerConnectionInterface>,
               CreatePeerConnection,
               const PeerConnectionInterface::RTCConfiguration&,
               std::unique_ptr<cricket::PortAllocator>,
               std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
               PeerConnectionObserver*);
 PROXY_METHOD2(rtc::scoped_refptr<PeerConnectionInterface>,
               CreatePeerConnection,
               const PeerConnectionInterface::RTCConfiguration&,
               PeerConnectionDependencies);
 PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
-              CreateLocalMediaStream, const std::string&)
+              CreateLocalMediaStream,
+              const std::string&)
 PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>,
               CreateAudioSource,
               const cricket::AudioOptions&)
 PROXY_METHOD2(rtc::scoped_refptr<VideoTrackSourceInterface>,
               CreateVideoSource,
               std::unique_ptr<cricket::VideoCapturer>,
               const MediaConstraintsInterface*)
 PROXY_METHOD1(rtc::scoped_refptr<VideoTrackSourceInterface>,
               CreateVideoSource,
               std::unique_ptr<cricket::VideoCapturer>)
 PROXY_METHOD2(rtc::scoped_refptr<VideoTrackInterface>,
               CreateVideoTrack,
               const std::string&,
               VideoTrackSourceInterface*)
 PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
-              CreateAudioTrack, const std::string&, AudioSourceInterface*)
+              CreateAudioTrack,
+              const std::string&,
+              AudioSourceInterface*)
 PROXY_METHOD2(bool, StartAecDump, rtc::PlatformFile, int64_t)
 PROXY_METHOD0(void, StopAecDump)
 END_PROXY_MAP()
 }  // namespace webrtc

View File

@@ -116,13 +116,13 @@
 namespace rtc {
 class SSLIdentity;
 class Thread;
-}
+}  // namespace rtc
 namespace cricket {
 class MediaEngineInterface;
 class WebRtcVideoDecoderFactory;
 class WebRtcVideoEncoderFactory;
-}
+}  // namespace cricket
 namespace webrtc {
 class AudioDeviceModule;
@@ -139,10 +139,8 @@ class StreamCollectionInterface : public rtc::RefCountInterface {
   virtual size_t count() = 0;
   virtual MediaStreamInterface* at(size_t index) = 0;
   virtual MediaStreamInterface* find(const std::string& label) = 0;
-  virtual MediaStreamTrackInterface* FindAudioTrack(
-      const std::string& id) = 0;
-  virtual MediaStreamTrackInterface* FindVideoTrack(
-      const std::string& id) = 0;
+  virtual MediaStreamTrackInterface* FindAudioTrack(const std::string& id) = 0;
+  virtual MediaStreamTrackInterface* FindVideoTrack(const std::string& id) = 0;
  protected:
   // Dtor protected as objects shouldn't be deleted via this interface.
@@ -262,10 +260,7 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
     kCandidateNetworkPolicyLowCost
   };
-  enum ContinualGatheringPolicy {
-    GATHER_ONCE,
-    GATHER_CONTINUALLY
-  };
+  enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY };
   enum class RTCConfigurationType {
     // A configuration that is safer to use, despite not having the best
@@ -634,14 +629,12 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
   // Accessor methods to active local streams.
   // This method is not supported with kUnifiedPlan semantics. Please use
   // GetSenders() instead.
-  virtual rtc::scoped_refptr<StreamCollectionInterface>
-      local_streams() = 0;
+  virtual rtc::scoped_refptr<StreamCollectionInterface> local_streams() = 0;
   // Accessor methods to remote streams.
   // This method is not supported with kUnifiedPlan semantics. Please use
   // GetReceivers() instead.
-  virtual rtc::scoped_refptr<StreamCollectionInterface>
-      remote_streams() = 0;
+  virtual rtc::scoped_refptr<StreamCollectionInterface> remote_streams() = 0;
   // Add a new MediaStream to be sent on this PeerConnection.
   // Note that a SessionDescription negotiation is needed before the
@@ -1374,9 +1367,9 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
       VideoTrackSourceInterface* source) = 0;
   // Creates an new AudioTrack. At the moment |source| can be null.
-  virtual rtc::scoped_refptr<AudioTrackInterface>
-      CreateAudioTrack(const std::string& label,
-                       AudioSourceInterface* source) = 0;
+  virtual rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
+      const std::string& label,
+      AudioSourceInterface* source) = 0;
   // Starts AEC dump using existing file. Takes ownership of |file| and passes
   // it on to VoiceEngine (via other objects) immediately, which will take
@@ -1394,7 +1387,7 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
   // Dtor and ctor protected as objects shouldn't be created or deleted via
   // this interface.
   PeerConnectionFactoryInterface() {}
   ~PeerConnectionFactoryInterface() {}  // NOLINT
 };
 // Create a new instance of PeerConnectionFactoryInterface.

View File

@ -23,132 +23,132 @@ namespace webrtc {
// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
// are called on is an implementation detail. // are called on is an implementation detail.
BEGIN_SIGNALING_PROXY_MAP(PeerConnection) BEGIN_SIGNALING_PROXY_MAP(PeerConnection)
PROXY_SIGNALING_THREAD_DESTRUCTOR() PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, local_streams) PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, local_streams)
PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, remote_streams) PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, remote_streams)
PROXY_METHOD1(bool, AddStream, MediaStreamInterface*) PROXY_METHOD1(bool, AddStream, MediaStreamInterface*)
PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*) PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>, PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
AddTrack, AddTrack,
rtc::scoped_refptr<MediaStreamTrackInterface>, rtc::scoped_refptr<MediaStreamTrackInterface>,
const std::vector<std::string>&); const std::vector<std::string>&);
PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>, PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>,
AddTrack, AddTrack,
MediaStreamTrackInterface*, MediaStreamTrackInterface*,
std::vector<MediaStreamInterface*>) std::vector<MediaStreamInterface*>)
PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*) PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*)
PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>, PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver, AddTransceiver,
rtc::scoped_refptr<MediaStreamTrackInterface>) rtc::scoped_refptr<MediaStreamTrackInterface>)
PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>, PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver, AddTransceiver,
rtc::scoped_refptr<MediaStreamTrackInterface>, rtc::scoped_refptr<MediaStreamTrackInterface>,
const RtpTransceiverInit&) const RtpTransceiverInit&)
PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>, PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver, AddTransceiver,
cricket::MediaType) cricket::MediaType)
PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>, PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver, AddTransceiver,
cricket::MediaType, cricket::MediaType,
const RtpTransceiverInit&) const RtpTransceiverInit&)
PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>, PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>,
CreateDtmfSender, CreateDtmfSender,
AudioTrackInterface*) AudioTrackInterface*)
PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>, PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>,
CreateSender, CreateSender,
const std::string&, const std::string&,
const std::string&) const std::string&)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>, PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
GetSenders) GetSenders)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>, PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
GetReceivers) GetReceivers)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>, PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>,
GetTransceivers) GetTransceivers)
PROXY_METHOD3(bool, PROXY_METHOD3(bool,
GetStats, GetStats,
StatsObserver*, StatsObserver*,
MediaStreamTrackInterface*, MediaStreamTrackInterface*,
StatsOutputLevel) StatsOutputLevel)
PROXY_METHOD1(void, GetStats, RTCStatsCollectorCallback*) PROXY_METHOD1(void, GetStats, RTCStatsCollectorCallback*)
PROXY_METHOD2(void, PROXY_METHOD2(void,
GetStats, GetStats,
rtc::scoped_refptr<RtpSenderInterface>, rtc::scoped_refptr<RtpSenderInterface>,
rtc::scoped_refptr<RTCStatsCollectorCallback>); rtc::scoped_refptr<RTCStatsCollectorCallback>);
PROXY_METHOD2(void, PROXY_METHOD2(void,
GetStats, GetStats,
rtc::scoped_refptr<RtpReceiverInterface>, rtc::scoped_refptr<RtpReceiverInterface>,
rtc::scoped_refptr<RTCStatsCollectorCallback>); rtc::scoped_refptr<RTCStatsCollectorCallback>);
PROXY_METHOD2(rtc::scoped_refptr<DataChannelInterface>, PROXY_METHOD2(rtc::scoped_refptr<DataChannelInterface>,
CreateDataChannel, CreateDataChannel,
const std::string&, const std::string&,
const DataChannelInit*) const DataChannelInit*)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description) PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, remote_description) PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, remote_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
pending_local_description) pending_local_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
pending_remote_description) pending_remote_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
current_local_description) current_local_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
current_remote_description) current_remote_description)
PROXY_METHOD2(void, PROXY_METHOD2(void,
CreateOffer, CreateOffer,
CreateSessionDescriptionObserver*, CreateSessionDescriptionObserver*,
const MediaConstraintsInterface*) const MediaConstraintsInterface*)
PROXY_METHOD2(void, PROXY_METHOD2(void,
CreateAnswer, CreateAnswer,
CreateSessionDescriptionObserver*, CreateSessionDescriptionObserver*,
const MediaConstraintsInterface*) const MediaConstraintsInterface*)
PROXY_METHOD2(void, PROXY_METHOD2(void,
CreateOffer, CreateOffer,
CreateSessionDescriptionObserver*, CreateSessionDescriptionObserver*,
const RTCOfferAnswerOptions&) const RTCOfferAnswerOptions&)
PROXY_METHOD2(void, PROXY_METHOD2(void,
CreateAnswer, CreateAnswer,
CreateSessionDescriptionObserver*, CreateSessionDescriptionObserver*,
const RTCOfferAnswerOptions&) const RTCOfferAnswerOptions&)
PROXY_METHOD2(void, PROXY_METHOD2(void,
SetLocalDescription, SetLocalDescription,
SetSessionDescriptionObserver*, SetSessionDescriptionObserver*,
SessionDescriptionInterface*) SessionDescriptionInterface*)
PROXY_METHOD2(void, PROXY_METHOD2(void,
SetRemoteDescription, SetRemoteDescription,
SetSessionDescriptionObserver*, SetSessionDescriptionObserver*,
SessionDescriptionInterface*) SessionDescriptionInterface*)
PROXY_METHOD2(void, PROXY_METHOD2(void,
SetRemoteDescription, SetRemoteDescription,
std::unique_ptr<SessionDescriptionInterface>, std::unique_ptr<SessionDescriptionInterface>,
rtc::scoped_refptr<SetRemoteDescriptionObserverInterface>); rtc::scoped_refptr<SetRemoteDescriptionObserverInterface>);
PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration); PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration);
PROXY_METHOD2(bool, PROXY_METHOD2(bool,
SetConfiguration, SetConfiguration,
const PeerConnectionInterface::RTCConfiguration&, const PeerConnectionInterface::RTCConfiguration&,
RTCError*); RTCError*);
PROXY_METHOD1(bool, PROXY_METHOD1(bool,
SetConfiguration, SetConfiguration,
const PeerConnectionInterface::RTCConfiguration&); const PeerConnectionInterface::RTCConfiguration&);
PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*) PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*)
PROXY_METHOD1(bool, PROXY_METHOD1(bool,
RemoveIceCandidates, RemoveIceCandidates,
const std::vector<cricket::Candidate>&); const std::vector<cricket::Candidate>&);
PROXY_METHOD1(void, SetAudioPlayout, bool) PROXY_METHOD1(void, SetAudioPlayout, bool)
PROXY_METHOD1(void, SetAudioRecording, bool) PROXY_METHOD1(void, SetAudioRecording, bool)
PROXY_METHOD1(void, RegisterUMAObserver, UMAObserver*) PROXY_METHOD1(void, RegisterUMAObserver, UMAObserver*)
PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&); PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&);
PROXY_METHOD1(void, PROXY_METHOD1(void,
SetBitrateAllocationStrategy, SetBitrateAllocationStrategy,
std::unique_ptr<rtc::BitrateAllocationStrategy>); std::unique_ptr<rtc::BitrateAllocationStrategy>);
PROXY_METHOD0(SignalingState, signaling_state) PROXY_METHOD0(SignalingState, signaling_state)
PROXY_METHOD0(IceConnectionState, ice_connection_state) PROXY_METHOD0(IceConnectionState, ice_connection_state)
PROXY_METHOD0(IceGatheringState, ice_gathering_state) PROXY_METHOD0(IceGatheringState, ice_gathering_state)
PROXY_METHOD2(bool, StartRtcEventLog, rtc::PlatformFile, int64_t) PROXY_METHOD2(bool, StartRtcEventLog, rtc::PlatformFile, int64_t)
PROXY_METHOD2(bool, PROXY_METHOD2(bool,
StartRtcEventLog, StartRtcEventLog,
std::unique_ptr<RtcEventLogOutput>, std::unique_ptr<RtcEventLogOutput>,
int64_t); int64_t);
PROXY_METHOD0(void, StopRtcEventLog) PROXY_METHOD0(void, StopRtcEventLog)
PROXY_METHOD0(void, Close) PROXY_METHOD0(void, Close)
END_PROXY_MAP() END_PROXY_MAP()
} // namespace webrtc } // namespace webrtc
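For orientation, the PROXY_METHOD2 entries above are what application code reaches when it drives offer/answer; a minimal sketch, assuming |pc| is an rtc::scoped_refptr<webrtc::PeerConnectionInterface> backed by this proxy and |observer| is a CreateSessionDescriptionObserver* supplied by the application (both hypothetical here):

  webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
  // Routed through PROXY_METHOD2(void, CreateOffer,
  //                              CreateSessionDescriptionObserver*,
  //                              const RTCOfferAnswerOptions&) above, which
  //                              marshals the call onto the signaling thread.
  pc->CreateOffer(observer, options);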
View File
@ -64,8 +64,10 @@ namespace webrtc {
template <typename R> template <typename R>
class ReturnType { class ReturnType {
public: public:
template<typename C, typename M> template <typename C, typename M>
void Invoke(C* c, M m) { r_ = (c->*m)(); } void Invoke(C* c, M m) {
r_ = (c->*m)();
}
template <typename C, typename M, typename T1> template <typename C, typename M, typename T1>
void Invoke(C* c, M m, T1 a1) { void Invoke(C* c, M m, T1 a1) {
r_ = (c->*m)(std::move(a1)); r_ = (c->*m)(std::move(a1));
@ -78,13 +80,22 @@ class ReturnType {
void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) {
r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3)); r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3));
} }
template<typename C, typename M, typename T1, typename T2, typename T3, template <typename C,
typename T4> typename M,
typename T1,
typename T2,
typename T3,
typename T4>
void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4) { void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4) {
r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4)); r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4));
} }
template<typename C, typename M, typename T1, typename T2, typename T3, template <typename C,
typename T4, typename T5> typename M,
typename T1,
typename T2,
typename T3,
typename T4,
typename T5>
void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) { void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) {
r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4), r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4),
std::move(a5)); std::move(a5));
@ -99,8 +110,10 @@ class ReturnType {
template <> template <>
class ReturnType<void> { class ReturnType<void> {
public: public:
template<typename C, typename M> template <typename C, typename M>
void Invoke(C* c, M m) { (c->*m)(); } void Invoke(C* c, M m) {
(c->*m)();
}
template <typename C, typename M, typename T1> template <typename C, typename M, typename T1>
void Invoke(C* c, M m, T1 a1) { void Invoke(C* c, M m, T1 a1) {
(c->*m)(std::move(a1)); (c->*m)(std::move(a1));
@ -119,9 +132,8 @@ class ReturnType<void> {
namespace internal { namespace internal {
class SynchronousMethodCall class SynchronousMethodCall : public rtc::MessageData,
: public rtc::MessageData, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
explicit SynchronousMethodCall(rtc::MessageHandler* proxy); explicit SynchronousMethodCall(rtc::MessageHandler* proxy);
~SynchronousMethodCall() override; ~SynchronousMethodCall() override;
@ -138,8 +150,7 @@ class SynchronousMethodCall
} // namespace internal } // namespace internal
template <typename C, typename R> template <typename C, typename R>
class MethodCall0 : public rtc::Message, class MethodCall0 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(); typedef R (C::*Method)();
MethodCall0(C* c, Method m) : c_(c), m_(m) {} MethodCall0(C* c, Method m) : c_(c), m_(m) {}
@ -150,7 +161,7 @@ class MethodCall0 : public rtc::Message,
} }
private: private:
void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); } void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); }
C* c_; C* c_;
Method m_; Method m_;
@ -158,8 +169,7 @@ class MethodCall0 : public rtc::Message,
}; };
template <typename C, typename R> template <typename C, typename R>
class ConstMethodCall0 : public rtc::Message, class ConstMethodCall0 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)() const; typedef R (C::*Method)() const;
ConstMethodCall0(C* c, Method m) : c_(c), m_(m) {} ConstMethodCall0(C* c, Method m) : c_(c), m_(m) {}
@ -177,9 +187,8 @@ class ConstMethodCall0 : public rtc::Message,
ReturnType<R> r_; ReturnType<R> r_;
}; };
template <typename C, typename R, typename T1> template <typename C, typename R, typename T1>
class MethodCall1 : public rtc::Message, class MethodCall1 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1); typedef R (C::*Method)(T1 a1);
MethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {} MethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {}
@ -198,9 +207,8 @@ class MethodCall1 : public rtc::Message,
T1 a1_; T1 a1_;
}; };
template <typename C, typename R, typename T1> template <typename C, typename R, typename T1>
class ConstMethodCall1 : public rtc::Message, class ConstMethodCall1 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1) const; typedef R (C::*Method)(T1 a1) const;
ConstMethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {} ConstMethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {}
@ -220,8 +228,7 @@ class ConstMethodCall1 : public rtc::Message,
}; };
template <typename C, typename R, typename T1, typename T2> template <typename C, typename R, typename T1, typename T2>
class MethodCall2 : public rtc::Message, class MethodCall2 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2); typedef R (C::*Method)(T1 a1, T2 a2);
MethodCall2(C* c, Method m, T1 a1, T2 a2) MethodCall2(C* c, Method m, T1 a1, T2 a2)
@ -245,8 +252,7 @@ class MethodCall2 : public rtc::Message,
}; };
template <typename C, typename R, typename T1, typename T2, typename T3> template <typename C, typename R, typename T1, typename T2, typename T3>
class MethodCall3 : public rtc::Message, class MethodCall3 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2, T3 a3); typedef R (C::*Method)(T1 a1, T2 a2, T3 a3);
MethodCall3(C* c, Method m, T1 a1, T2 a2, T3 a3) MethodCall3(C* c, Method m, T1 a1, T2 a2, T3 a3)
@ -274,10 +280,13 @@ class MethodCall3 : public rtc::Message,
T3 a3_; T3 a3_;
}; };
template <typename C, typename R, typename T1, typename T2, typename T3, template <typename C,
typename T4> typename R,
class MethodCall4 : public rtc::Message, typename T1,
public rtc::MessageHandler { typename T2,
typename T3,
typename T4>
class MethodCall4 : public rtc::Message, public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4); typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4);
MethodCall4(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4) MethodCall4(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4)
@ -308,10 +317,14 @@ class MethodCall4 : public rtc::Message,
T4 a4_; T4 a4_;
}; };
template <typename C, typename R, typename T1, typename T2, typename T3, template <typename C,
typename T4, typename T5> typename R,
class MethodCall5 : public rtc::Message, typename T1,
public rtc::MessageHandler { typename T2,
typename T3,
typename T4,
typename T5>
class MethodCall5 : public rtc::Message, public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4, T5 a5); typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4, T5 a5);
MethodCall5(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) MethodCall5(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5)
@ -344,7 +357,6 @@ class MethodCall5 : public rtc::Message,
T5 a5_; T5 a5_;
}; };
// Helper macros to reduce code duplication. // Helper macros to reduce code duplication.
#define PROXY_MAP_BOILERPLATE(c) \ #define PROXY_MAP_BOILERPLATE(c) \
template <class INTERNAL_CLASS> \ template <class INTERNAL_CLASS> \
@ -359,8 +371,12 @@ class MethodCall5 : public rtc::Message,
const INTERNAL_CLASS* internal() const { return c_; } \ const INTERNAL_CLASS* internal() const { return c_; } \
INTERNAL_CLASS* internal() { return c_; } INTERNAL_CLASS* internal() { return c_; }
// clang-format off
// clang-format would put the semicolon alone,
// leading to a presubmit error (cpplint.py)
#define END_PROXY_MAP() \ #define END_PROXY_MAP() \
}; };
// clang-format on
#define SIGNALING_PROXY_MAP_BOILERPLATE(c) \ #define SIGNALING_PROXY_MAP_BOILERPLATE(c) \
protected: \ protected: \
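A note for readers skimming this file: the MethodCallN templates and *_PROXY_MAP_BOILERPLATE macros above are consumed through the PROXY_* entry macros, as in the interface headers elsewhere in this commit. A condensed sketch with a hypothetical FooInterface (GetName and SetActive are made-up methods):

  BEGIN_SIGNALING_PROXY_MAP(Foo)
  PROXY_SIGNALING_THREAD_DESTRUCTOR()
  PROXY_CONSTMETHOD0(std::string, GetName)
  PROXY_METHOD1(bool, SetActive, bool)
  END_PROXY_MAP()

Each PROXY_* entry expands to a method on the generated FooProxy class that builds the corresponding MethodCallN and runs it synchronously on the thread that owns the wrapped object; this commit only re-wraps these macros, it does not change them.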
View File
@ -22,9 +22,9 @@
#include "api/video/video_rotation.h" #include "api/video/video_rotation.h"
#include "api/video/video_timing.h" #include "api/video/video_timing.h"
#include "common_types.h" // NOLINT(build/include)
#include "rtc_base/checks.h" #include "rtc_base/checks.h"
#include "rtc_base/deprecation.h" #include "rtc_base/deprecation.h"
#include "common_types.h" // NOLINT(build/include)
#include "typedefs.h" // NOLINT(build/include) #include "typedefs.h" // NOLINT(build/include)
namespace webrtc { namespace webrtc {
View File
@ -128,17 +128,17 @@ class RtpReceiverInterface : public rtc::RefCountInterface {
// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
// are called on is an implementation detail. // are called on is an implementation detail.
BEGIN_SIGNALING_PROXY_MAP(RtpReceiver) BEGIN_SIGNALING_PROXY_MAP(RtpReceiver)
PROXY_SIGNALING_THREAD_DESTRUCTOR() PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track) PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<MediaStreamInterface>>, PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
streams) streams)
PROXY_CONSTMETHOD0(cricket::MediaType, media_type) PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(std::string, id)
PROXY_CONSTMETHOD0(RtpParameters, GetParameters); PROXY_CONSTMETHOD0(RtpParameters, GetParameters);
PROXY_METHOD1(bool, SetParameters, const RtpParameters&) PROXY_METHOD1(bool, SetParameters, const RtpParameters&)
PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*); PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*);
PROXY_CONSTMETHOD0(std::vector<RtpSource>, GetSources); PROXY_CONSTMETHOD0(std::vector<RtpSource>, GetSources);
END_PROXY_MAP() END_PROXY_MAP()
} // namespace webrtc } // namespace webrtc
View File
@ -70,17 +70,17 @@ class RtpSenderInterface : public rtc::RefCountInterface {
// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
// are called on is an implementation detail. // are called on is an implementation detail.
BEGIN_SIGNALING_PROXY_MAP(RtpSender) BEGIN_SIGNALING_PROXY_MAP(RtpSender)
PROXY_SIGNALING_THREAD_DESTRUCTOR() PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*) PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track) PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
PROXY_CONSTMETHOD0(uint32_t, ssrc) PROXY_CONSTMETHOD0(uint32_t, ssrc)
PROXY_CONSTMETHOD0(cricket::MediaType, media_type) PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(std::string, id)
PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids) PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids)
PROXY_METHOD0(RtpParameters, GetParameters); PROXY_METHOD0(RtpParameters, GetParameters);
PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&) PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtmfSenderInterface>, GetDtmfSender); PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtmfSenderInterface>, GetDtmfSender);
END_PROXY_MAP() END_PROXY_MAP()
} // namespace webrtc } // namespace webrtc
View File
@ -78,7 +78,7 @@ class RTCStats {
// Downcasts the stats object to an |RTCStats| subclass |T|. DCHECKs that the // Downcasts the stats object to an |RTCStats| subclass |T|. DCHECKs that the
// object is of type |T|. // object is of type |T|.
template<typename T> template <typename T>
const T& cast_to() const { const T& cast_to() const {
RTC_DCHECK_EQ(type(), T::kType); RTC_DCHECK_EQ(type(), T::kType);
return static_cast<const T&>(*this); return static_cast<const T&>(*this);
@ -90,8 +90,7 @@ class RTCStats {
// shall be reserved in the vector (so that subclasses can allocate a vector // shall be reserved in the vector (so that subclasses can allocate a vector
// with room for both parent and child members without it having to resize). // with room for both parent and child members without it having to resize).
virtual std::vector<const RTCStatsMemberInterface*> virtual std::vector<const RTCStatsMemberInterface*>
MembersOfThisObjectAndAncestors( MembersOfThisObjectAndAncestors(size_t additional_capacity) const;
size_t additional_capacity) const;
std::string const id_; std::string const id_;
int64_t timestamp_us_; int64_t timestamp_us_;
@ -138,18 +137,18 @@ class RTCStats {
// bar("bar") { // bar("bar") {
// } // }
// //
#define WEBRTC_RTCSTATS_DECL() \ #define WEBRTC_RTCSTATS_DECL() \
public: \ public: \
static const char kType[]; \ static const char kType[]; \
\ \
std::unique_ptr<webrtc::RTCStats> copy() const override; \ std::unique_ptr<webrtc::RTCStats> copy() const override; \
const char* type() const override; \ const char* type() const override; \
\ \
protected: \ protected: \
std::vector<const webrtc::RTCStatsMemberInterface*> \ std::vector<const webrtc::RTCStatsMemberInterface*> \
MembersOfThisObjectAndAncestors( \ MembersOfThisObjectAndAncestors(size_t local_var_additional_capacity) \
size_t local_var_additional_capacity) const override; \ const override; \
\ \
public: public:
#define WEBRTC_RTCSTATS_IMPL(this_class, parent_class, type_str, ...) \ #define WEBRTC_RTCSTATS_IMPL(this_class, parent_class, type_str, ...) \
@ -159,20 +158,17 @@ class RTCStats {
return std::unique_ptr<webrtc::RTCStats>(new this_class(*this)); \ return std::unique_ptr<webrtc::RTCStats>(new this_class(*this)); \
} \ } \
\ \
const char* this_class::type() const { \ const char* this_class::type() const { return this_class::kType; } \
return this_class::kType; \
} \
\ \
std::vector<const webrtc::RTCStatsMemberInterface*> \ std::vector<const webrtc::RTCStatsMemberInterface*> \
this_class::MembersOfThisObjectAndAncestors( \ this_class::MembersOfThisObjectAndAncestors( \
size_t local_var_additional_capacity) const { \ size_t local_var_additional_capacity) const { \
const webrtc::RTCStatsMemberInterface* local_var_members[] = { \ const webrtc::RTCStatsMemberInterface* local_var_members[] = { \
__VA_ARGS__ \ __VA_ARGS__}; \
}; \
size_t local_var_members_count = \ size_t local_var_members_count = \
sizeof(local_var_members) / sizeof(local_var_members[0]); \ sizeof(local_var_members) / sizeof(local_var_members[0]); \
std::vector<const webrtc::RTCStatsMemberInterface*> local_var_members_vec =\ std::vector<const webrtc::RTCStatsMemberInterface*> \
parent_class::MembersOfThisObjectAndAncestors( \ local_var_members_vec = parent_class::MembersOfThisObjectAndAncestors( \
local_var_members_count + local_var_additional_capacity); \ local_var_members_count + local_var_additional_capacity); \
RTC_DCHECK_GE( \ RTC_DCHECK_GE( \
local_var_members_vec.capacity() - local_var_members_vec.size(), \ local_var_members_vec.capacity() - local_var_members_vec.size(), \
@ -191,21 +187,21 @@ class RTCStatsMemberInterface {
public: public:
// Member value types. // Member value types.
enum Type { enum Type {
kBool, // bool kBool, // bool
kInt32, // int32_t kInt32, // int32_t
kUint32, // uint32_t kUint32, // uint32_t
kInt64, // int64_t kInt64, // int64_t
kUint64, // uint64_t kUint64, // uint64_t
kDouble, // double kDouble, // double
kString, // std::string kString, // std::string
kSequenceBool, // std::vector<bool> kSequenceBool, // std::vector<bool>
kSequenceInt32, // std::vector<int32_t> kSequenceInt32, // std::vector<int32_t>
kSequenceUint32, // std::vector<uint32_t> kSequenceUint32, // std::vector<uint32_t>
kSequenceInt64, // std::vector<int64_t> kSequenceInt64, // std::vector<int64_t>
kSequenceUint64, // std::vector<uint64_t> kSequenceUint64, // std::vector<uint64_t>
kSequenceDouble, // std::vector<double> kSequenceDouble, // std::vector<double>
kSequenceString, // std::vector<std::string> kSequenceString, // std::vector<std::string>
}; };
virtual ~RTCStatsMemberInterface() {} virtual ~RTCStatsMemberInterface() {}
@ -229,7 +225,7 @@ class RTCStatsMemberInterface {
// instead. // instead.
virtual std::string ValueToJson() const = 0; virtual std::string ValueToJson() const = 0;
template<typename T> template <typename T>
const T& cast_to() const { const T& cast_to() const {
RTC_DCHECK_EQ(type(), T::kType); RTC_DCHECK_EQ(type(), T::kType);
return static_cast<const T&>(*this); return static_cast<const T&>(*this);
@ -247,20 +243,17 @@ class RTCStatsMemberInterface {
// specialized in rtcstats.cc, using a different |T| results in a linker error // specialized in rtcstats.cc, using a different |T| results in a linker error
// (undefined reference to |kType|). The supported types are the ones described // (undefined reference to |kType|). The supported types are the ones described
// by |RTCStatsMemberInterface::Type|. // by |RTCStatsMemberInterface::Type|.
template<typename T> template <typename T>
class RTCStatsMember : public RTCStatsMemberInterface { class RTCStatsMember : public RTCStatsMemberInterface {
public: public:
static const Type kType; static const Type kType;
explicit RTCStatsMember(const char* name) explicit RTCStatsMember(const char* name)
: RTCStatsMemberInterface(name, false), : RTCStatsMemberInterface(name, false), value_() {}
value_() {}
RTCStatsMember(const char* name, const T& value) RTCStatsMember(const char* name, const T& value)
: RTCStatsMemberInterface(name, true), : RTCStatsMemberInterface(name, true), value_(value) {}
value_(value) {}
RTCStatsMember(const char* name, T&& value) RTCStatsMember(const char* name, T&& value)
: RTCStatsMemberInterface(name, true), : RTCStatsMemberInterface(name, true), value_(std::move(value)) {}
value_(std::move(value)) {}
explicit RTCStatsMember(const RTCStatsMember<T>& other) explicit RTCStatsMember(const RTCStatsMember<T>& other)
: RTCStatsMemberInterface(other.name_, other.is_defined_), : RTCStatsMemberInterface(other.name_, other.is_defined_),
value_(other.value_) {} value_(other.value_) {}
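To make the WEBRTC_RTCSTATS_DECL()/WEBRTC_RTCSTATS_IMPL() re-wrapping above easier to follow, here is the shape of a stats struct built with them, reconstructed in outline from the surrounding header comment (RTCFooStats, foo and bar are illustrative names only):

  // Header (hypothetical):
  class RTCFooStats : public RTCStats {
   public:
    WEBRTC_RTCSTATS_DECL();
    RTCFooStats(const std::string& id, int64_t timestamp_us);
    RTCStatsMember<int32_t> foo;
    RTCStatsMember<std::string> bar;
  };

  // Implementation file (hypothetical):
  WEBRTC_RTCSTATS_IMPL(RTCFooStats, RTCStats, "foo-stats",
                       &foo,
                       &bar);

  RTCFooStats::RTCFooStats(const std::string& id, int64_t timestamp_us)
      : RTCStats(id, timestamp_us), foo("foo"), bar("bar") {}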
View File
@ -209,8 +209,9 @@ class RTCIceCandidateStats : public RTCStats {
RTCStatsMember<bool> deleted; // = false RTCStatsMember<bool> deleted; // = false
protected: protected:
RTCIceCandidateStats( RTCIceCandidateStats(const std::string& id,
const std::string& id, int64_t timestamp_us, bool is_remote); int64_t timestamp_us,
bool is_remote);
RTCIceCandidateStats(std::string&& id, int64_t timestamp_us, bool is_remote); RTCIceCandidateStats(std::string&& id, int64_t timestamp_us, bool is_remote);
}; };
@ -258,9 +259,11 @@ class RTCMediaStreamTrackStats final : public RTCStats {
public: public:
WEBRTC_RTCSTATS_DECL(); WEBRTC_RTCSTATS_DECL();
RTCMediaStreamTrackStats(const std::string& id, int64_t timestamp_us, RTCMediaStreamTrackStats(const std::string& id,
int64_t timestamp_us,
const char* kind); const char* kind);
RTCMediaStreamTrackStats(std::string&& id, int64_t timestamp_us, RTCMediaStreamTrackStats(std::string&& id,
int64_t timestamp_us,
const char* kind); const char* kind);
RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other); RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other);
~RTCMediaStreamTrackStats() override; ~RTCMediaStreamTrackStats() override;
View File
@ -76,7 +76,7 @@ class RTCStatsReport : public rtc::RefCountInterface {
// Gets the subset of stats that are of type |T|, where |T| is any class // Gets the subset of stats that are of type |T|, where |T| is any class
// descending from |RTCStats|. // descending from |RTCStats|.
template<typename T> template <typename T>
std::vector<const T*> GetStatsOfType() const { std::vector<const T*> GetStatsOfType() const {
std::vector<const T*> stats_of_type; std::vector<const T*> stats_of_type;
for (const RTCStats& stats : *this) { for (const RTCStats& stats : *this) {
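A typical call site for the GetStatsOfType<T>() accessor whose template line is reformatted above; a sketch only, with |report| assumed to be an rtc::scoped_refptr<const RTCStatsReport> obtained from the stats collector (not part of this diff):

  std::vector<const RTCMediaStreamTrackStats*> track_stats =
      report->GetStatsOfType<RTCMediaStreamTrackStats>();
  for (const RTCMediaStreamTrackStats* stats : track_stats) {
    // Entries are filtered on T::kType, so each |stats| is already the
    // requested subclass.
  }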
View File
@ -98,8 +98,7 @@ class TypedIntId : public StatsReport::IdBase {
} }
std::string ToString() const override { std::string ToString() const override {
return std::string(InternalTypeToString(type_)) + return std::string(InternalTypeToString(type_)) + kSeparator +
kSeparator +
rtc::ToString<int>(id_); rtc::ToString<int>(id_);
} }
@ -109,7 +108,8 @@ class TypedIntId : public StatsReport::IdBase {
class IdWithDirection : public TypedId { class IdWithDirection : public TypedId {
public: public:
IdWithDirection(StatsReport::StatsType type, const std::string& id, IdWithDirection(StatsReport::StatsType type,
const std::string& id,
StatsReport::Direction direction) StatsReport::Direction direction)
: TypedId(type, id), direction_(direction) {} : TypedId(type, id), direction_(direction) {}
@ -132,39 +132,34 @@ class IdWithDirection : public TypedId {
class CandidateId : public TypedId { class CandidateId : public TypedId {
public: public:
CandidateId(bool local, const std::string& id) CandidateId(bool local, const std::string& id)
: TypedId(local ? : TypedId(local ? StatsReport::kStatsReportTypeIceLocalCandidate
StatsReport::kStatsReportTypeIceLocalCandidate : : StatsReport::kStatsReportTypeIceRemoteCandidate,
StatsReport::kStatsReportTypeIceRemoteCandidate, id) {}
id) {
}
std::string ToString() const override { std::string ToString() const override { return "Cand-" + id_; }
return "Cand-" + id_;
}
}; };
class ComponentId : public StatsReport::IdBase { class ComponentId : public StatsReport::IdBase {
public: public:
ComponentId(const std::string& content_name, int component) ComponentId(const std::string& content_name, int component)
: ComponentId(StatsReport::kStatsReportTypeComponent, content_name, : ComponentId(StatsReport::kStatsReportTypeComponent,
component) {} content_name,
component) {}
bool Equals(const IdBase& other) const override { bool Equals(const IdBase& other) const override {
return IdBase::Equals(other) && return IdBase::Equals(other) &&
static_cast<const ComponentId&>(other).component_ == component_ && static_cast<const ComponentId&>(other).component_ == component_ &&
static_cast<const ComponentId&>(other).content_name_ == content_name_; static_cast<const ComponentId&>(other).content_name_ ==
content_name_;
} }
std::string ToString() const override { std::string ToString() const override { return ToString("Channel-"); }
return ToString("Channel-");
}
protected: protected:
ComponentId(StatsReport::StatsType type, const std::string& content_name, ComponentId(StatsReport::StatsType type,
const std::string& content_name,
int component) int component)
: IdBase(type), : IdBase(type), content_name_(content_name), component_(component) {}
content_name_(content_name),
component_(component) {}
std::string ToString(const char* prefix) const { std::string ToString(const char* prefix) const {
std::string ret(prefix); std::string ret(prefix);
@ -182,13 +177,14 @@ class ComponentId : public StatsReport::IdBase {
class CandidatePairId : public ComponentId { class CandidatePairId : public ComponentId {
public: public:
CandidatePairId(const std::string& content_name, int component, int index) CandidatePairId(const std::string& content_name, int component, int index)
: ComponentId(StatsReport::kStatsReportTypeCandidatePair, content_name, : ComponentId(StatsReport::kStatsReportTypeCandidatePair,
component), content_name,
component),
index_(index) {} index_(index) {}
bool Equals(const IdBase& other) const override { bool Equals(const IdBase& other) const override {
return ComponentId::Equals(other) && return ComponentId::Equals(other) &&
static_cast<const CandidatePairId&>(other).index_ == index_; static_cast<const CandidatePairId&>(other).index_ == index_;
} }
std::string ToString() const override { std::string ToString() const override {
@ -207,7 +203,9 @@ class CandidatePairId : public ComponentId {
StatsReport::IdBase::IdBase(StatsType type) : type_(type) {} StatsReport::IdBase::IdBase(StatsType type) : type_(type) {}
StatsReport::IdBase::~IdBase() {} StatsReport::IdBase::~IdBase() {}
StatsReport::StatsType StatsReport::IdBase::type() const { return type_; } StatsReport::StatsType StatsReport::IdBase::type() const {
return type_;
}
bool StatsReport::IdBase::Equals(const IdBase& other) const { bool StatsReport::IdBase::Equals(const IdBase& other) const {
return other.type_ == type_; return other.type_ == type_;
@ -316,8 +314,8 @@ bool StatsReport::Value::operator==(const char* value) const {
} }
bool StatsReport::Value::operator==(int64_t value) const { bool StatsReport::Value::operator==(int64_t value) const {
return type_ == kInt ? value_.int_ == static_cast<int>(value) : return type_ == kInt ? value_.int_ == static_cast<int>(value)
(type_ == kInt64 ? value_.int64_ == value : false); : (type_ == kInt64 ? value_.int64_ == value : false);
} }
bool StatsReport::Value::operator==(bool value) const { bool StatsReport::Value::operator==(bool value) const {
@ -699,7 +697,9 @@ StatsReport::Id StatsReport::NewTypedIntId(StatsType type, int id) {
// static // static
StatsReport::Id StatsReport::NewIdWithDirection( StatsReport::Id StatsReport::NewIdWithDirection(
StatsType type, const std::string& id, StatsReport::Direction direction) { StatsType type,
const std::string& id,
StatsReport::Direction direction) {
return Id(new RefCountedObject<IdWithDirection>(type, id, direction)); return Id(new RefCountedObject<IdWithDirection>(type, id, direction));
} }
@ -709,16 +709,17 @@ StatsReport::Id StatsReport::NewCandidateId(bool local, const std::string& id) {
} }
// static // static
StatsReport::Id StatsReport::NewComponentId( StatsReport::Id StatsReport::NewComponentId(const std::string& content_name,
const std::string& content_name, int component) { int component) {
return Id(new RefCountedObject<ComponentId>(content_name, component)); return Id(new RefCountedObject<ComponentId>(content_name, component));
} }
// static // static
StatsReport::Id StatsReport::NewCandidatePairId( StatsReport::Id StatsReport::NewCandidatePairId(const std::string& content_name,
const std::string& content_name, int component, int index) { int component,
return Id(new RefCountedObject<CandidatePairId>( int index) {
content_name, component, index)); return Id(
new RefCountedObject<CandidatePairId>(content_name, component, index));
} }
const char* StatsReport::TypeToString() const { const char* StatsReport::TypeToString() const {
@ -763,8 +764,7 @@ void StatsReport::AddBoolean(StatsReport::StatsValueName name, bool value) {
values_[name] = ValuePtr(new Value(name, value)); values_[name] = ValuePtr(new Value(name, value));
} }
void StatsReport::AddId(StatsReport::StatsValueName name, void StatsReport::AddId(StatsReport::StatsValueName name, const Id& value) {
const Id& value) {
const Value* found = FindValue(name); const Value* found = FindValue(name);
if (!found || !(*found == value)) if (!found || !(*found == value))
values_[name] = ValuePtr(new Value(name, value)); values_[name] = ValuePtr(new Value(name, value));
@ -775,8 +775,7 @@ const StatsReport::Value* StatsReport::FindValue(StatsValueName name) const {
return it == values_.end() ? nullptr : it->second.get(); return it == values_.end() ? nullptr : it->second.get();
} }
StatsCollection::StatsCollection() { StatsCollection::StatsCollection() {}
}
StatsCollection::~StatsCollection() { StatsCollection::~StatsCollection() {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
@ -816,8 +815,9 @@ StatsReport* StatsCollection::FindOrAddNew(const StatsReport::Id& id) {
StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) { StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(id.get()); RTC_DCHECK(id.get());
Container::iterator it = std::find_if(list_.begin(), list_.end(), Container::iterator it = std::find_if(
[&id](const StatsReport* r)->bool { return r->id()->Equals(id); }); list_.begin(), list_.end(),
[&id](const StatsReport* r) -> bool { return r->id()->Equals(id); });
if (it != end()) { if (it != end()) {
StatsReport* report = new StatsReport((*it)->id()); StatsReport* report = new StatsReport((*it)->id());
delete *it; delete *it;
@ -831,8 +831,9 @@ StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
// will be returned. // will be returned.
StatsReport* StatsCollection::Find(const StatsReport::Id& id) { StatsReport* StatsCollection::Find(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
Container::iterator it = std::find_if(list_.begin(), list_.end(), Container::iterator it = std::find_if(
[&id](const StatsReport* r)->bool { return r->id()->Equals(id); }); list_.begin(), list_.end(),
[&id](const StatsReport* r) -> bool { return r->id()->Equals(id); });
return it == list_.end() ? nullptr : *it; return it == list_.end() ? nullptr : *it;
} }
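To ground the Id-factory and StatsCollection re-wrapping above, a hedged sketch of how these pieces are typically combined (kStatsValueNameActiveConnection stands in for any StatsValueName; all calls stay on one thread to satisfy the thread checker):

  StatsCollection collection;
  StatsReport::Id id = StatsReport::NewComponentId("audio", 1);
  StatsReport* report = collection.FindOrAddNew(id);
  report->AddBoolean(StatsReport::kStatsValueNameActiveConnection, true);
  RTC_DCHECK(collection.Find(id) == report);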
View File
@ -369,13 +369,14 @@ class StatsReport {
static Id NewBandwidthEstimationId(); static Id NewBandwidthEstimationId();
static Id NewTypedId(StatsType type, const std::string& id); static Id NewTypedId(StatsType type, const std::string& id);
static Id NewTypedIntId(StatsType type, int id); static Id NewTypedIntId(StatsType type, int id);
static Id NewIdWithDirection( static Id NewIdWithDirection(StatsType type,
StatsType type, const std::string& id, Direction direction); const std::string& id,
Direction direction);
static Id NewCandidateId(bool local, const std::string& id); static Id NewCandidateId(bool local, const std::string& id);
static Id NewComponentId( static Id NewComponentId(const std::string& content_name, int component);
const std::string& content_name, int component); static Id NewCandidatePairId(const std::string& content_name,
static Id NewCandidatePairId( int component,
const std::string& content_name, int component, int index); int index);
const Id& id() const { return id_; } const Id& id() const { return id_; }
StatsType type() const { return id_->type(); } StatsType type() const { return id_->type(); }
View File
@ -22,13 +22,12 @@ namespace test {
using Config = VideoCodecTestFixture::Config; using Config = VideoCodecTestFixture::Config;
std::unique_ptr<VideoCodecTestFixture> std::unique_ptr<VideoCodecTestFixture> CreateVideoCodecTestFixture(
CreateVideoCodecTestFixture(const Config& config) { const Config& config) {
return rtc::MakeUnique<VideoCodecTestFixtureImpl>(config); return rtc::MakeUnique<VideoCodecTestFixtureImpl>(config);
} }
std::unique_ptr<VideoCodecTestFixture> std::unique_ptr<VideoCodecTestFixture> CreateVideoCodecTestFixture(
CreateVideoCodecTestFixture(
const Config& config, const Config& config,
std::unique_ptr<VideoDecoderFactory> decoder_factory, std::unique_ptr<VideoDecoderFactory> decoder_factory,
std::unique_ptr<VideoEncoderFactory> encoder_factory) { std::unique_ptr<VideoEncoderFactory> encoder_factory) {
View File
@ -21,16 +21,12 @@ namespace webrtc {
class FakeConstraints : public webrtc::MediaConstraintsInterface { class FakeConstraints : public webrtc::MediaConstraintsInterface {
public: public:
FakeConstraints() { } FakeConstraints() {}
virtual ~FakeConstraints() { } virtual ~FakeConstraints() {}
virtual const Constraints& GetMandatory() const { virtual const Constraints& GetMandatory() const { return mandatory_; }
return mandatory_;
}
virtual const Constraints& GetOptional() const { virtual const Constraints& GetOptional() const { return optional_; }
return optional_;
}
template <class T> template <class T>
void AddMandatory(const std::string& key, const T& value) { void AddMandatory(const std::string& key, const T& value) {
View File
@ -18,7 +18,6 @@ class PortInterface;
class StunMessage; class StunMessage;
} // namespace cricket } // namespace cricket
namespace webrtc { namespace webrtc {
class TurnCustomizer { class TurnCustomizer {
View File
@ -13,9 +13,13 @@
namespace webrtc { namespace webrtc {
namespace video_coding { namespace video_coding {
bool EncodedFrame::delayed_by_retransmission() const { return 0; } bool EncodedFrame::delayed_by_retransmission() const {
return 0;
}
uint32_t EncodedFrame::Timestamp() const { return timestamp; } uint32_t EncodedFrame::Timestamp() const {
return timestamp;
}
} // namespace video_coding } // namespace video_coding
} // namespace webrtc } // namespace webrtc
View File
@ -34,8 +34,7 @@ int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
} // namespace } // namespace
I420Buffer::I420Buffer(int width, int height) I420Buffer::I420Buffer(int width, int height)
: I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) { : I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {}
}
I420Buffer::I420Buffer(int width, I420Buffer::I420Buffer(int width,
int height, int height,
@ -47,9 +46,9 @@ I420Buffer::I420Buffer(int width,
stride_y_(stride_y), stride_y_(stride_y),
stride_u_(stride_u), stride_u_(stride_u),
stride_v_(stride_v), stride_v_(stride_v),
data_(static_cast<uint8_t*>(AlignedMalloc( data_(static_cast<uint8_t*>(
I420DataSize(height, stride_y, stride_u, stride_v), AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v),
kBufferAlignment))) { kBufferAlignment))) {
RTC_DCHECK_GT(width, 0); RTC_DCHECK_GT(width, 0);
RTC_DCHECK_GT(height, 0); RTC_DCHECK_GT(height, 0);
RTC_DCHECK_GE(stride_y, width); RTC_DCHECK_GE(stride_y, width);
@ -57,8 +56,7 @@ I420Buffer::I420Buffer(int width,
RTC_DCHECK_GE(stride_v, (width + 1) / 2); RTC_DCHECK_GE(stride_v, (width + 1) / 2);
} }
I420Buffer::~I420Buffer() { I420Buffer::~I420Buffer() {}
}
// static // static
rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) { rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) {
@ -71,34 +69,34 @@ rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width,
int stride_y, int stride_y,
int stride_u, int stride_u,
int stride_v) { int stride_v) {
return new rtc::RefCountedObject<I420Buffer>( return new rtc::RefCountedObject<I420Buffer>(width, height, stride_y,
width, height, stride_y, stride_u, stride_v); stride_u, stride_v);
} }
// static // static
rtc::scoped_refptr<I420Buffer> I420Buffer::Copy( rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(
const I420BufferInterface& source) { const I420BufferInterface& source) {
return Copy(source.width(), source.height(), return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(),
source.DataU(), source.StrideU(), source.StrideV());
source.DataV(), source.StrideV());
} }
// static // static
rtc::scoped_refptr<I420Buffer> I420Buffer::Copy( rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(int width,
int width, int height, int height,
const uint8_t* data_y, int stride_y, const uint8_t* data_y,
const uint8_t* data_u, int stride_u, int stride_y,
const uint8_t* data_v, int stride_v) { const uint8_t* data_u,
int stride_u,
const uint8_t* data_v,
int stride_v) {
// Note: May use different strides than the input data. // Note: May use different strides than the input data.
rtc::scoped_refptr<I420Buffer> buffer = Create(width, height); rtc::scoped_refptr<I420Buffer> buffer = Create(width, height);
RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y, RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y, data_u, stride_u, data_v,
data_u, stride_u, stride_v, buffer->MutableDataY(),
data_v, stride_v, buffer->StrideY(), buffer->MutableDataU(),
buffer->MutableDataY(), buffer->StrideY(), buffer->StrideU(), buffer->MutableDataV(),
buffer->MutableDataU(), buffer->StrideU(), buffer->StrideV(), width, height));
buffer->MutableDataV(), buffer->StrideV(),
width, height));
return buffer; return buffer;
} }
@ -120,14 +118,13 @@ rtc::scoped_refptr<I420Buffer> I420Buffer::Rotate(
rtc::scoped_refptr<webrtc::I420Buffer> buffer = rtc::scoped_refptr<webrtc::I420Buffer> buffer =
I420Buffer::Create(rotated_width, rotated_height); I420Buffer::Create(rotated_width, rotated_height);
RTC_CHECK_EQ(0, libyuv::I420Rotate( RTC_CHECK_EQ(0,
src.DataY(), src.StrideY(), libyuv::I420Rotate(
src.DataU(), src.StrideU(), src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
src.DataV(), src.StrideV(), src.DataV(), src.StrideV(), buffer->MutableDataY(),
buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataV(), buffer->StrideV(), src.width(),
src.width(), src.height(), src.height(), static_cast<libyuv::RotationMode>(rotation)));
static_cast<libyuv::RotationMode>(rotation)));
return buffer; return buffer;
} }
@ -179,9 +176,9 @@ uint8_t* I420Buffer::MutableDataV() {
void I420Buffer::SetBlack(I420Buffer* buffer) { void I420Buffer::SetBlack(I420Buffer* buffer) {
RTC_CHECK(libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(), RTC_CHECK(libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(),
buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataV(), buffer->StrideV(), 0, 0,
0, 0, buffer->width(), buffer->height(), buffer->width(), buffer->height(), 0, 128,
0, 128, 128) == 0); 128) == 0);
} }
void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src, void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src,
@ -202,20 +199,16 @@ void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src,
offset_x = uv_offset_x * 2; offset_x = uv_offset_x * 2;
offset_y = uv_offset_y * 2; offset_y = uv_offset_y * 2;
const uint8_t* y_plane = const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
src.DataY() + src.StrideY() * offset_y + offset_x;
const uint8_t* u_plane = const uint8_t* u_plane =
src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x; src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
const uint8_t* v_plane = const uint8_t* v_plane =
src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x; src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
int res = libyuv::I420Scale(y_plane, src.StrideY(), int res =
u_plane, src.StrideU(), libyuv::I420Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
v_plane, src.StrideV(), src.StrideV(), crop_width, crop_height, MutableDataY(),
crop_width, crop_height, StrideY(), MutableDataU(), StrideU(), MutableDataV(),
MutableDataY(), StrideY(), StrideV(), width(), height(), libyuv::kFilterBox);
MutableDataU(), StrideU(),
MutableDataV(), StrideV(),
width(), height(), libyuv::kFilterBox);
RTC_DCHECK_EQ(res, 0); RTC_DCHECK_EQ(res, 0);
} }
@ -226,10 +219,8 @@ void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src) {
const int crop_height = const int crop_height =
std::min(src.height(), height() * src.width() / width()); std::min(src.height(), height() * src.width() / width());
CropAndScaleFrom( CropAndScaleFrom(src, (src.width() - crop_width) / 2,
src, (src.height() - crop_height) / 2, crop_width, crop_height);
(src.width() - crop_width) / 2, (src.height() - crop_height) / 2,
crop_width, crop_height);
} }
void I420Buffer::ScaleFrom(const I420BufferInterface& src) { void I420Buffer::ScaleFrom(const I420BufferInterface& src) {
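Since most of the churn in this file is re-wrapped calls into the I420Buffer API, a short usage sketch of the methods touched here (|source| is assumed to be an existing webrtc::I420BufferInterface reference):

  // Allocate a 640x360 buffer with default strides, clear it to black, then
  // fill it from |source|, cropping as needed to preserve aspect ratio.
  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(640, 360);
  webrtc::I420Buffer::SetBlack(buffer.get());
  buffer->CropAndScaleFrom(source);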
View File
@ -36,11 +36,14 @@ class I420Buffer : public I420BufferInterface {
return Copy(*buffer.GetI420()); return Copy(*buffer.GetI420());
} }
static rtc::scoped_refptr<I420Buffer> Copy( static rtc::scoped_refptr<I420Buffer> Copy(int width,
int width, int height, int height,
const uint8_t* data_y, int stride_y, const uint8_t* data_y,
const uint8_t* data_u, int stride_u, int stride_y,
const uint8_t* data_v, int stride_v); const uint8_t* data_u,
int stride_u,
const uint8_t* data_v,
int stride_v);
// Returns a rotated copy of |src|. // Returns a rotated copy of |src|.
static rtc::scoped_refptr<I420Buffer> Rotate(const I420BufferInterface& src, static rtc::scoped_refptr<I420Buffer> Rotate(const I420BufferInterface& src,
View File
@ -68,19 +68,16 @@ bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) {
return true; return true;
} }
uint8_t GetExperimentId( uint8_t GetExperimentId(const VideoContentType& content_type) {
const VideoContentType& content_type) {
return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >> return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >>
kExperimentShift; kExperimentShift;
} }
uint8_t GetSimulcastId( uint8_t GetSimulcastId(const VideoContentType& content_type) {
const VideoContentType& content_type) {
return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >> return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >>
kSimulcastShift; kSimulcastShift;
} }
bool IsScreenshare( bool IsScreenshare(const VideoContentType& content_type) {
const VideoContentType& content_type) {
return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0; return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
} }
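A small illustration of the bit-field helpers reflowed above (enclosing namespace qualifiers are omitted, and SCREENSHARE is assumed to be one of the VideoContentType enumerators):

  VideoContentType content_type = VideoContentType::SCREENSHARE;
  SetSimulcastId(&content_type, 2);  // packs the simulcast id into its bit field
  RTC_DCHECK(IsScreenshare(content_type));
  RTC_DCHECK_EQ(2, GetSimulcastId(content_type));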
View File
@ -13,8 +13,8 @@
#include <stdint.h> #include <stdint.h>
#include "api/video/video_rotation.h"
#include "api/video/video_frame_buffer.h" #include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"
namespace webrtc { namespace webrtc {
View File
@ -46,21 +46,18 @@ bool VideoCodecVP9::operator==(const VideoCodecVP9& other) const {
bool VideoCodecH264::operator==(const VideoCodecH264& other) const { bool VideoCodecH264::operator==(const VideoCodecH264& other) const {
return (frameDroppingOn == other.frameDroppingOn && return (frameDroppingOn == other.frameDroppingOn &&
keyFrameInterval == other.keyFrameInterval && keyFrameInterval == other.keyFrameInterval &&
spsLen == other.spsLen && spsLen == other.spsLen && ppsLen == other.ppsLen &&
ppsLen == other.ppsLen &&
profile == other.profile && profile == other.profile &&
(spsLen == 0 || memcmp(spsData, other.spsData, spsLen) == 0) && (spsLen == 0 || memcmp(spsData, other.spsData, spsLen) == 0) &&
(ppsLen == 0 || memcmp(ppsData, other.ppsData, ppsLen) == 0)); (ppsLen == 0 || memcmp(ppsData, other.ppsData, ppsLen) == 0));
} }
bool SpatialLayer::operator==(const SpatialLayer& other) const { bool SpatialLayer::operator==(const SpatialLayer& other) const {
return (width == other.width && return (width == other.width && height == other.height &&
height == other.height &&
numberOfTemporalLayers == other.numberOfTemporalLayers && numberOfTemporalLayers == other.numberOfTemporalLayers &&
maxBitrate == other.maxBitrate && maxBitrate == other.maxBitrate &&
targetBitrate == other.targetBitrate && targetBitrate == other.targetBitrate &&
minBitrate == other.minBitrate && minBitrate == other.minBitrate && qpMax == other.qpMax &&
qpMax == other.qpMax &&
active == other.active); active == other.active);
} }
View File
@ -11,8 +11,8 @@
#ifndef API_VIDEOSOURCEPROXY_H_ #ifndef API_VIDEOSOURCEPROXY_H_
#define API_VIDEOSOURCEPROXY_H_ #define API_VIDEOSOURCEPROXY_H_
#include "api/proxy.h"
#include "api/mediastreaminterface.h" #include "api/mediastreaminterface.h"
#include "api/proxy.h"
namespace webrtc { namespace webrtc {
@ -21,19 +21,19 @@ namespace webrtc {
// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods
// are called on is an implementation detail. // are called on is an implementation detail.
BEGIN_PROXY_MAP(VideoTrackSource) BEGIN_PROXY_MAP(VideoTrackSource)
PROXY_SIGNALING_THREAD_DESTRUCTOR() PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_CONSTMETHOD0(SourceState, state) PROXY_CONSTMETHOD0(SourceState, state)
PROXY_CONSTMETHOD0(bool, remote) PROXY_CONSTMETHOD0(bool, remote)
PROXY_CONSTMETHOD0(bool, is_screencast) PROXY_CONSTMETHOD0(bool, is_screencast)
PROXY_CONSTMETHOD0(rtc::Optional<bool>, needs_denoising) PROXY_CONSTMETHOD0(rtc::Optional<bool>, needs_denoising)
PROXY_METHOD1(bool, GetStats, Stats*) PROXY_METHOD1(bool, GetStats, Stats*)
PROXY_WORKER_METHOD2(void, PROXY_WORKER_METHOD2(void,
AddOrUpdateSink, AddOrUpdateSink,
rtc::VideoSinkInterface<VideoFrame>*, rtc::VideoSinkInterface<VideoFrame>*,
const rtc::VideoSinkWants&) const rtc::VideoSinkWants&)
PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*) PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY_MAP() END_PROXY_MAP()
} // namespace webrtc } // namespace webrtc
View File
@ -47,10 +47,12 @@ double AudioLevel::TotalDuration() const {
void AudioLevel::ComputeLevel(const AudioFrame& audioFrame, double duration) { void AudioLevel::ComputeLevel(const AudioFrame& audioFrame, double duration) {
// Check speech level (works for 2 channels as well) // Check speech level (works for 2 channels as well)
int16_t abs_value = audioFrame.muted() ? 0 : int16_t abs_value =
WebRtcSpl_MaxAbsValueW16( audioFrame.muted()
audioFrame.data(), ? 0
audioFrame.samples_per_channel_ * audioFrame.num_channels_); : WebRtcSpl_MaxAbsValueW16(
audioFrame.data(),
audioFrame.samples_per_channel_ * audioFrame.num_channels_);
// Protect member access using a lock since this method is called on a // Protect member access using a lock since this method is called on a
// dedicated audio thread in the RecordedDataIsAvailable() callback. // dedicated audio thread in the RecordedDataIsAvailable() callback.
View File
@ -102,8 +102,7 @@ AudioReceiveStream::AudioReceiveStream(
const rtc::scoped_refptr<webrtc::AudioState>& audio_state, const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
webrtc::RtcEventLog* event_log, webrtc::RtcEventLog* event_log,
std::unique_ptr<voe::ChannelProxy> channel_proxy) std::unique_ptr<voe::ChannelProxy> channel_proxy)
: audio_state_(audio_state), : audio_state_(audio_state), channel_proxy_(std::move(channel_proxy)) {
channel_proxy_(std::move(channel_proxy)) {
RTC_LOG(LS_INFO) << "AudioReceiveStream: " << config.rtp.remote_ssrc; RTC_LOG(LS_INFO) << "AudioReceiveStream: " << config.rtp.remote_ssrc;
RTC_DCHECK(receiver_controller); RTC_DCHECK(receiver_controller);
RTC_DCHECK(packet_router); RTC_DCHECK(packet_router);
@ -120,9 +119,8 @@ AudioReceiveStream::AudioReceiveStream(
channel_proxy_->RegisterReceiverCongestionControlObjects(packet_router); channel_proxy_->RegisterReceiverCongestionControlObjects(packet_router);
// Register with transport. // Register with transport.
rtp_stream_receiver_ = rtp_stream_receiver_ = receiver_controller->CreateReceiver(
receiver_controller->CreateReceiver(config.rtp.remote_ssrc, config.rtp.remote_ssrc, channel_proxy_.get());
channel_proxy_.get());
ConfigureStream(this, config, true); ConfigureStream(this, config, true);
} }
@ -273,9 +271,7 @@ absl::optional<Syncable::Info> AudioReceiveStream::GetInfo() const {
return absl::nullopt; return absl::nullopt;
} }
if (rtp_rtcp->RemoteNTP(&info.capture_time_ntp_secs, if (rtp_rtcp->RemoteNTP(&info.capture_time_ntp_secs,
&info.capture_time_ntp_frac, &info.capture_time_ntp_frac, nullptr, nullptr,
nullptr,
nullptr,
&info.capture_time_source_clock) != 0) { &info.capture_time_source_clock) != 0) {
return absl::nullopt; return absl::nullopt;
} }
@ -329,8 +325,8 @@ const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const {
return config_; return config_;
} }
const AudioSendStream* const AudioSendStream* AudioReceiveStream::GetAssociatedSendStreamForTesting()
AudioReceiveStream::GetAssociatedSendStreamForTesting() const { const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK_RUN_ON(&worker_thread_checker_);
return associated_send_stream_; return associated_send_stream_;
} }
View File
@ -59,18 +59,16 @@ const unsigned int kSpeechOutputLevel = 99;
const double kTotalOutputEnergy = 0.25; const double kTotalOutputEnergy = 0.25;
const double kTotalOutputDuration = 0.5; const double kTotalOutputDuration = 0.5;
const CallStatistics kCallStats = { const CallStatistics kCallStats = {345, 678, 901, 234, -12,
345, 678, 901, 234, -12, 3456, 7890, 567, 890, 123}; 3456, 7890, 567, 890, 123};
const CodecInst kCodecInst = { const CodecInst kCodecInst = {123, "codec_name_recv", 96000, -187, 0, -103};
123, "codec_name_recv", 96000, -187, 0, -103};
const NetworkStatistics kNetworkStats = { const NetworkStatistics kNetworkStats = {
123, 456, false, 789012, 3456, 123, 456, 0, {}, 789, 12, 123, 456, false, 789012, 3456, 123, 456, 0, {}, 789, 12,
345, 678, 901, 0, -1, -1, -1, -1, -1, 0}; 345, 678, 901, 0, -1, -1, -1, -1, -1, 0};
const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest(); const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest();
struct ConfigHelper { struct ConfigHelper {
ConfigHelper() ConfigHelper() : ConfigHelper(new rtc::RefCountedObject<MockAudioMixer>()) {}
: ConfigHelper(new rtc::RefCountedObject<MockAudioMixer>()) {}
explicit ConfigHelper(rtc::scoped_refptr<MockAudioMixer> audio_mixer) explicit ConfigHelper(rtc::scoped_refptr<MockAudioMixer> audio_mixer)
: audio_mixer_(audio_mixer) { : audio_mixer_(audio_mixer) {
@ -88,23 +86,21 @@ struct ConfigHelper {
EXPECT_CALL(*channel_proxy_, SetRemoteSSRC(kRemoteSsrc)).Times(1); EXPECT_CALL(*channel_proxy_, SetRemoteSSRC(kRemoteSsrc)).Times(1);
EXPECT_CALL(*channel_proxy_, SetNACKStatus(true, 15)).Times(1); EXPECT_CALL(*channel_proxy_, SetNACKStatus(true, 15)).Times(1);
EXPECT_CALL(*channel_proxy_, EXPECT_CALL(*channel_proxy_,
RegisterReceiverCongestionControlObjects(&packet_router_)) RegisterReceiverCongestionControlObjects(&packet_router_))
.Times(1); .Times(1);
EXPECT_CALL(*channel_proxy_, ResetReceiverCongestionControlObjects()) EXPECT_CALL(*channel_proxy_, ResetReceiverCongestionControlObjects())
.Times(1); .Times(1);
EXPECT_CALL(*channel_proxy_, RegisterTransport(nullptr)).Times(2); EXPECT_CALL(*channel_proxy_, RegisterTransport(nullptr)).Times(2);
testing::Expectation expect_set = testing::Expectation expect_set =
EXPECT_CALL(*channel_proxy_, SetRtcEventLog(&event_log_)) EXPECT_CALL(*channel_proxy_, SetRtcEventLog(&event_log_)).Times(1);
.Times(1);
EXPECT_CALL(*channel_proxy_, SetRtcEventLog(testing::IsNull())) EXPECT_CALL(*channel_proxy_, SetRtcEventLog(testing::IsNull()))
.Times(1) .Times(1)
.After(expect_set); .After(expect_set);
EXPECT_CALL(*channel_proxy_, DisassociateSendChannel()).Times(1); EXPECT_CALL(*channel_proxy_, DisassociateSendChannel()).Times(1);
EXPECT_CALL(*channel_proxy_, SetReceiveCodecs(_)) EXPECT_CALL(*channel_proxy_, SetReceiveCodecs(_))
.WillRepeatedly( .WillRepeatedly(Invoke([](const std::map<int, SdpAudioFormat>& codecs) {
Invoke([](const std::map<int, SdpAudioFormat>& codecs) { EXPECT_THAT(codecs, testing::IsEmpty());
EXPECT_THAT(codecs, testing::IsEmpty()); }));
}));
stream_config_.rtp.local_ssrc = kLocalSsrc; stream_config_.rtp.local_ssrc = kLocalSsrc;
stream_config_.rtp.remote_ssrc = kRemoteSsrc; stream_config_.rtp.remote_ssrc = kRemoteSsrc;
@ -120,11 +116,8 @@ struct ConfigHelper {
std::unique_ptr<internal::AudioReceiveStream> CreateAudioReceiveStream() { std::unique_ptr<internal::AudioReceiveStream> CreateAudioReceiveStream() {
return std::unique_ptr<internal::AudioReceiveStream>( return std::unique_ptr<internal::AudioReceiveStream>(
new internal::AudioReceiveStream( new internal::AudioReceiveStream(
&rtp_stream_receiver_controller_, &rtp_stream_receiver_controller_, &packet_router_, stream_config_,
&packet_router_, audio_state_, &event_log_,
stream_config_,
audio_state_,
&event_log_,
std::unique_ptr<voe::ChannelProxy>(channel_proxy_))); std::unique_ptr<voe::ChannelProxy>(channel_proxy_)));
} }
@ -323,7 +316,7 @@ TEST(AudioReceiveStreamTest, SetGain) {
ConfigHelper helper; ConfigHelper helper;
auto recv_stream = helper.CreateAudioReceiveStream(); auto recv_stream = helper.CreateAudioReceiveStream();
EXPECT_CALL(*helper.channel_proxy(), EXPECT_CALL(*helper.channel_proxy(),
SetChannelOutputVolumeScaling(FloatEq(0.765f))); SetChannelOutputVolumeScaling(FloatEq(0.765f)));
recv_stream->SetGain(0.765f); recv_stream->SetGain(0.765f);
} }
@ -371,10 +364,10 @@ TEST(AudioReceiveStreamTest, ReconfigureWithUpdatedConfig) {
new_config.rtp.nack.rtp_history_ms = 300 + 20; new_config.rtp.nack.rtp_history_ms = 300 + 20;
new_config.rtp.extensions.clear(); new_config.rtp.extensions.clear();
new_config.rtp.extensions.push_back( new_config.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelId + 1)); RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelId + 1));
new_config.rtp.extensions.push_back(RtpExtension( new_config.rtp.extensions.push_back(
RtpExtension::kTransportSequenceNumberUri, RtpExtension(RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberId + 1)); kTransportSequenceNumberId + 1));
new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1)); new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1));
MockVoEChannelProxy& channel_proxy = *helper.channel_proxy(); MockVoEChannelProxy& channel_proxy = *helper.channel_proxy();
View File
@ -218,8 +218,7 @@ void AudioSendStream::ConfigureStream(
new_config.rtp.nack.rtp_history_ms / 20); new_config.rtp.nack.rtp_history_ms / 20);
} }
if (first_time || if (first_time || new_config.send_transport != old_config.send_transport) {
new_config.send_transport != old_config.send_transport) {
if (old_config.send_transport) { if (old_config.send_transport) {
channel_proxy->RegisterTransport(nullptr); channel_proxy->RegisterTransport(nullptr);
} }
@ -326,7 +325,8 @@ void AudioSendStream::SendAudioData(std::unique_ptr<AudioFrame> audio_frame) {
} }
bool AudioSendStream::SendTelephoneEvent(int payload_type, bool AudioSendStream::SendTelephoneEvent(int payload_type,
int payload_frequency, int event, int payload_frequency,
int event,
int duration_ms) { int duration_ms) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
return channel_proxy_->SetSendTelephoneEventPayloadType(payload_type, return channel_proxy_->SetSendTelephoneEventPayloadType(payload_type,
@ -415,8 +415,7 @@ uint32_t AudioSendStream::OnBitrateUpdated(uint32_t bitrate_bps,
if (bitrate_bps == 0) { if (bitrate_bps == 0) {
bitrate_bps = config_.min_bitrate_bps; bitrate_bps = config_.min_bitrate_bps;
} }
RTC_DCHECK_GE(bitrate_bps, RTC_DCHECK_GE(bitrate_bps, static_cast<uint32_t>(config_.min_bitrate_bps));
static_cast<uint32_t>(config_.min_bitrate_bps));
// The bitrate allocator might allocate an higher than max configured bitrate // The bitrate allocator might allocate an higher than max configured bitrate
// if there is room, to allow for, as example, extra FEC. Ignore that for now. // if there is room, to allow for, as example, extra FEC. Ignore that for now.
const uint32_t max_bitrate_bps = config_.max_bitrate_bps; const uint32_t max_bitrate_bps = config_.max_bitrate_bps;
View File
@ -70,7 +70,9 @@ class AudioSendStream final : public webrtc::AudioSendStream,
void Start() override; void Start() override;
void Stop() override; void Stop() override;
void SendAudioData(std::unique_ptr<AudioFrame> audio_frame) override; void SendAudioData(std::unique_ptr<AudioFrame> audio_frame) override;
bool SendTelephoneEvent(int payload_type, int payload_frequency, int event, bool SendTelephoneEvent(int payload_type,
int payload_frequency,
int event,
int duration_ms) override; int duration_ms) override;
void SetMuted(bool muted) override; void SetMuted(bool muted) override;
webrtc::AudioSendStream::Stats GetStats() const override; webrtc::AudioSendStream::Stats GetStats() const override;
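For reference, the re-wrapped SendTelephoneEvent() declaration is exercised with the same four arguments as the unit-test call further down in this diff; the values here are purely illustrative:

  // DTMF event 1 for 100 ms, on a dynamic payload type with 8 kHz event frequency.
  bool ok = send_stream->SendTelephoneEvent(/*payload_type=*/110,
                                            /*payload_frequency=*/8000,
                                            /*event=*/1,
                                            /*duration_ms=*/100);
  RTC_DCHECK(ok);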
View File
@ -20,15 +20,9 @@ class AudioSendTest : public SendTest {
public: public:
AudioSendTest() : SendTest(CallTest::kDefaultTimeoutMs) {} AudioSendTest() : SendTest(CallTest::kDefaultTimeoutMs) {}
size_t GetNumVideoStreams() const override { size_t GetNumVideoStreams() const override { return 0; }
return 0; size_t GetNumAudioStreams() const override { return 1; }
} size_t GetNumFlexfecStreams() const override { return 0; }
size_t GetNumAudioStreams() const override {
return 1;
}
size_t GetNumFlexfecStreams() const override {
return 0;
}
}; };
} // namespace } // namespace
View File
@ -56,8 +56,8 @@ const double kEchoReturnLoss = -65;
const double kEchoReturnLossEnhancement = 101; const double kEchoReturnLossEnhancement = 101;
const double kResidualEchoLikelihood = -1.0f; const double kResidualEchoLikelihood = -1.0f;
const double kResidualEchoLikelihoodMax = 23.0f; const double kResidualEchoLikelihoodMax = 23.0f;
const CallStatistics kCallStats = { const CallStatistics kCallStats = {1345, 1678, 1901, 1234, 112,
1345, 1678, 1901, 1234, 112, 13456, 17890, 1567, -1890, -1123}; 13456, 17890, 1567, -1890, -1123};
const ReportBlock kReportBlock = {456, 780, 123, 567, 890, 132, 143, 13354}; const ReportBlock kReportBlock = {456, 780, 123, 567, 890, 132, 143, 13354};
const int kTelephoneEventPayloadType = 123; const int kTelephoneEventPayloadType = 123;
const int kTelephoneEventPayloadFrequency = 65432; const int kTelephoneEventPayloadFrequency = 65432;
@ -181,9 +181,8 @@ struct ConfigHelper {
TimeInterval* active_lifetime() { return &active_lifetime_; } TimeInterval* active_lifetime() { return &active_lifetime_; }
static void AddBweToConfig(AudioSendStream::Config* config) { static void AddBweToConfig(AudioSendStream::Config* config) {
config->rtp.extensions.push_back( config->rtp.extensions.push_back(RtpExtension(
RtpExtension(RtpExtension::kTransportSequenceNumberUri, RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId));
kTransportSequenceNumberId));
config->send_codec_spec->transport_cc_enabled = true; config->send_codec_spec->transport_cc_enabled = true;
} }
@ -254,13 +253,14 @@ struct ConfigHelper {
void SetupMockForSendTelephoneEvent() { void SetupMockForSendTelephoneEvent() {
EXPECT_TRUE(channel_proxy_); EXPECT_TRUE(channel_proxy_);
EXPECT_CALL(*channel_proxy_, EXPECT_CALL(*channel_proxy_, SetSendTelephoneEventPayloadType(
SetSendTelephoneEventPayloadType(kTelephoneEventPayloadType, kTelephoneEventPayloadType,
kTelephoneEventPayloadFrequency)) kTelephoneEventPayloadFrequency))
.WillOnce(Return(true)); .WillOnce(Return(true));
EXPECT_CALL(*channel_proxy_, EXPECT_CALL(
*channel_proxy_,
SendTelephoneEventOutband(kTelephoneEventCode, kTelephoneEventDuration)) SendTelephoneEventOutband(kTelephoneEventCode, kTelephoneEventDuration))
.WillOnce(Return(true)); .WillOnce(Return(true));
} }
void SetupMockForGetStats() { void SetupMockForGetStats() {
@ -355,9 +355,9 @@ TEST(AudioSendStreamTest, SendTelephoneEvent) {
ConfigHelper helper(false, true); ConfigHelper helper(false, true);
auto send_stream = helper.CreateAudioSendStream(); auto send_stream = helper.CreateAudioSendStream();
helper.SetupMockForSendTelephoneEvent(); helper.SetupMockForSendTelephoneEvent();
EXPECT_TRUE(send_stream->SendTelephoneEvent(kTelephoneEventPayloadType, EXPECT_TRUE(send_stream->SendTelephoneEvent(
kTelephoneEventPayloadFrequency, kTelephoneEventCode, kTelephoneEventPayloadType, kTelephoneEventPayloadFrequency,
kTelephoneEventDuration)); kTelephoneEventCode, kTelephoneEventDuration));
} }
TEST(AudioSendStreamTest, SetMuted) { TEST(AudioSendStreamTest, SetMuted) {
@ -518,7 +518,7 @@ TEST(AudioSendStreamTest, ReconfigureTransportCcResetsFirst) {
EXPECT_CALL(*helper.channel_proxy(), ResetSenderCongestionControlObjects()) EXPECT_CALL(*helper.channel_proxy(), ResetSenderCongestionControlObjects())
.Times(1); .Times(1);
EXPECT_CALL(*helper.channel_proxy(), RegisterSenderCongestionControlObjects( EXPECT_CALL(*helper.channel_proxy(), RegisterSenderCongestionControlObjects(
helper.transport(), Ne(nullptr))) helper.transport(), Ne(nullptr)))
.Times(1); .Times(1);
} }
send_stream->Reconfigure(new_config); send_stream->Reconfigure(new_config);


@ -27,8 +27,7 @@ namespace internal {
AudioState::AudioState(const AudioState::Config& config) AudioState::AudioState(const AudioState::Config& config)
: config_(config), : config_(config),
audio_transport_(config_.audio_mixer, audio_transport_(config_.audio_mixer, config_.audio_processing.get()) {
config_.audio_processing.get()) {
process_thread_checker_.DetachFromThread(); process_thread_checker_.DetachFromThread();
RTC_DCHECK(config_.audio_mixer); RTC_DCHECK(config_.audio_mixer);
RTC_DCHECK(config_.audio_device_module); RTC_DCHECK(config_.audio_device_module);
@ -50,7 +49,7 @@ void AudioState::AddReceivingStream(webrtc::AudioReceiveStream* stream) {
RTC_DCHECK_EQ(0, receiving_streams_.count(stream)); RTC_DCHECK_EQ(0, receiving_streams_.count(stream));
receiving_streams_.insert(stream); receiving_streams_.insert(stream);
if (!config_.audio_mixer->AddSource( if (!config_.audio_mixer->AddSource(
static_cast<internal::AudioReceiveStream*>(stream))) { static_cast<internal::AudioReceiveStream*>(stream))) {
RTC_DLOG(LS_ERROR) << "Failed to add source to mixer."; RTC_DLOG(LS_ERROR) << "Failed to add source to mixer.";
} }
@ -79,7 +78,8 @@ void AudioState::RemoveReceivingStream(webrtc::AudioReceiveStream* stream) {
} }
void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, void AudioState::AddSendingStream(webrtc::AudioSendStream* stream,
int sample_rate_hz, size_t num_channels) { int sample_rate_hz,
size_t num_channels) {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
auto& properties = sending_streams_[stream]; auto& properties = sending_streams_[stream];
properties.sample_rate_hz = sample_rate_hz; properties.sample_rate_hz = sample_rate_hz;
@ -121,8 +121,7 @@ void AudioState::SetPlayout(bool enabled) {
} }
} else { } else {
config_.audio_device_module->StopPlayout(); config_.audio_device_module->StopPlayout();
null_audio_poller_ = null_audio_poller_ = rtc::MakeUnique<NullAudioPoller>(&audio_transport_);
rtc::MakeUnique<NullAudioPoller>(&audio_transport_);
} }
} }
} }


@ -39,9 +39,7 @@ class AudioState final : public webrtc::AudioState {
RTC_DCHECK(config_.audio_processing); RTC_DCHECK(config_.audio_processing);
return config_.audio_processing.get(); return config_.audio_processing.get();
} }
AudioTransport* audio_transport() override { AudioTransport* audio_transport() override { return &audio_transport_; }
return &audio_transport_;
}
void SetPlayout(bool enabled) override; void SetPlayout(bool enabled) override;
void SetRecording(bool enabled) override; void SetRecording(bool enabled) override;
@ -60,7 +58,8 @@ class AudioState final : public webrtc::AudioState {
void RemoveReceivingStream(webrtc::AudioReceiveStream* stream); void RemoveReceivingStream(webrtc::AudioReceiveStream* stream);
void AddSendingStream(webrtc::AudioSendStream* stream, void AddSendingStream(webrtc::AudioSendStream* stream,
int sample_rate_hz, size_t num_channels); int sample_rate_hz,
size_t num_channels);
void RemoveSendingStream(webrtc::AudioSendStream* stream); void RemoveSendingStream(webrtc::AudioSendStream* stream);
private: private:

View File

@ -71,8 +71,7 @@ std::vector<int16_t> Create10msTestData(int sample_rate_hz,
const float inc = (2 * 3.14159265f * 1000) / sample_rate_hz; const float inc = (2 * 3.14159265f * 1000) / sample_rate_hz;
float w = 0.f; float w = 0.f;
for (int i = 0; i < samples_per_channel; ++i) { for (int i = 0; i < samples_per_channel; ++i) {
audio_data[i * num_channels] = audio_data[i * num_channels] = static_cast<int16_t>(32767.f * std::sin(w));
static_cast<int16_t>(32767.f * std::sin(w));
w += inc; w += inc;
} }
return audio_data; return audio_data;
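The hunk above is a pure re-wrap of the tone generator in Create10msTestData; the underlying logic fills 10 ms with a full-scale 1 kHz sine at the requested rate. A minimal standalone sketch of the same idea (Create1kHzToneSketch and the single-channel layout are illustrative assumptions, not code from this change):

#include <cmath>
#include <cstdint>
#include <vector>

// Sketch: 10 ms of a full-scale 1 kHz tone, mirroring the phase-increment
// loop in the hunk above.
std::vector<int16_t> Create1kHzToneSketch(int sample_rate_hz) {
  const int samples_per_channel = sample_rate_hz / 100;  // 10 ms of audio.
  const float inc = (2.f * 3.14159265f * 1000.f) / sample_rate_hz;
  std::vector<int16_t> audio(samples_per_channel);
  float w = 0.f;
  for (int i = 0; i < samples_per_channel; ++i) {
    audio[i] = static_cast<int16_t>(32767.f * std::sin(w));
    w += inc;  // Advance the phase by one sample period.
  }
  return audio;
}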
@ -111,16 +110,18 @@ TEST(AudioStateTest, RecordedAudioArrivesAtSingleStream) {
MockAudioSendStream stream; MockAudioSendStream stream;
audio_state->AddSendingStream(&stream, 8000, 2); audio_state->AddSendingStream(&stream, 8000, 2);
EXPECT_CALL(stream, SendAudioDataForMock(testing::AllOf( EXPECT_CALL(
testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(8000)), stream,
testing::Field(&AudioFrame::num_channels_, testing::Eq(2u))))) SendAudioDataForMock(testing::AllOf(
.WillOnce( testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(8000)),
// Verify that channels are not swapped by default. testing::Field(&AudioFrame::num_channels_, testing::Eq(2u)))))
testing::Invoke([](AudioFrame* audio_frame) { .WillOnce(
auto levels = ComputeChannelLevels(audio_frame); // Verify that channels are not swapped by default.
EXPECT_LT(0u, levels[0]); testing::Invoke([](AudioFrame* audio_frame) {
EXPECT_EQ(0u, levels[1]); auto levels = ComputeChannelLevels(audio_frame);
})); EXPECT_LT(0u, levels[0]);
EXPECT_EQ(0u, levels[1]);
}));
MockAudioProcessing* ap = MockAudioProcessing* ap =
static_cast<MockAudioProcessing*>(audio_state->audio_processing()); static_cast<MockAudioProcessing*>(audio_state->audio_processing());
EXPECT_CALL(*ap, set_stream_delay_ms(0)); EXPECT_CALL(*ap, set_stream_delay_ms(0));
@ -132,8 +133,8 @@ TEST(AudioStateTest, RecordedAudioArrivesAtSingleStream) {
auto audio_data = Create10msTestData(kSampleRate, kNumChannels); auto audio_data = Create10msTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
EXPECT_EQ(667u, new_mic_level); EXPECT_EQ(667u, new_mic_level);
audio_state->RemoveSendingStream(&stream); audio_state->RemoveSendingStream(&stream);
@ -149,24 +150,28 @@ TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) {
audio_state->AddSendingStream(&stream_1, 8001, 2); audio_state->AddSendingStream(&stream_1, 8001, 2);
audio_state->AddSendingStream(&stream_2, 32000, 1); audio_state->AddSendingStream(&stream_2, 32000, 1);
EXPECT_CALL(stream_1, SendAudioDataForMock(testing::AllOf( EXPECT_CALL(
testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)), stream_1,
testing::Field(&AudioFrame::num_channels_, testing::Eq(1u))))) SendAudioDataForMock(testing::AllOf(
.WillOnce( testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)),
// Verify that there is output signal. testing::Field(&AudioFrame::num_channels_, testing::Eq(1u)))))
testing::Invoke([](AudioFrame* audio_frame) { .WillOnce(
auto levels = ComputeChannelLevels(audio_frame); // Verify that there is output signal.
EXPECT_LT(0u, levels[0]); testing::Invoke([](AudioFrame* audio_frame) {
})); auto levels = ComputeChannelLevels(audio_frame);
EXPECT_CALL(stream_2, SendAudioDataForMock(testing::AllOf( EXPECT_LT(0u, levels[0]);
testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)), }));
testing::Field(&AudioFrame::num_channels_, testing::Eq(1u))))) EXPECT_CALL(
.WillOnce( stream_2,
// Verify that there is output signal. SendAudioDataForMock(testing::AllOf(
testing::Invoke([](AudioFrame* audio_frame) { testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)),
auto levels = ComputeChannelLevels(audio_frame); testing::Field(&AudioFrame::num_channels_, testing::Eq(1u)))))
EXPECT_LT(0u, levels[0]); .WillOnce(
})); // Verify that there is output signal.
testing::Invoke([](AudioFrame* audio_frame) {
auto levels = ComputeChannelLevels(audio_frame);
EXPECT_LT(0u, levels[0]);
}));
MockAudioProcessing* ap = MockAudioProcessing* ap =
static_cast<MockAudioProcessing*>(audio_state->audio_processing()); static_cast<MockAudioProcessing*>(audio_state->audio_processing());
EXPECT_CALL(*ap, set_stream_delay_ms(5)); EXPECT_CALL(*ap, set_stream_delay_ms(5));
@ -178,8 +183,8 @@ TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) {
auto audio_data = Create10msTestData(kSampleRate, kNumChannels); auto audio_data = Create10msTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 5, 0, 0, true, new_mic_level); kSampleRate, 5, 0, 0, true, new_mic_level);
EXPECT_EQ(667u, new_mic_level); EXPECT_EQ(667u, new_mic_level);
audio_state->RemoveSendingStream(&stream_1); audio_state->RemoveSendingStream(&stream_1);
@ -210,8 +215,8 @@ TEST(AudioStateTest, EnableChannelSwap) {
auto audio_data = Create10msTestData(kSampleRate, kNumChannels); auto audio_data = Create10msTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
EXPECT_EQ(667u, new_mic_level); EXPECT_EQ(667u, new_mic_level);
audio_state->RemoveSendingStream(&stream); audio_state->RemoveSendingStream(&stream);
@ -230,8 +235,8 @@ TEST(AudioStateTest, InputLevelStats) {
auto audio_data = Create10msSilentTestData(kSampleRate, kNumChannels); auto audio_data = Create10msSilentTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
auto stats = audio_state->GetAudioInputStats(); auto stats = audio_state->GetAudioInputStats();
EXPECT_EQ(0, stats.audio_level); EXPECT_EQ(0, stats.audio_level);
EXPECT_THAT(stats.total_energy, testing::DoubleEq(0.0)); EXPECT_THAT(stats.total_energy, testing::DoubleEq(0.0));
@ -244,8 +249,8 @@ TEST(AudioStateTest, InputLevelStats) {
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
for (int i = 0; i < 10; ++i) { for (int i = 0; i < 10; ++i) {
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
} }
auto stats = audio_state->GetAudioInputStats(); auto stats = audio_state->GetAudioInputStats();
EXPECT_EQ(32767, stats.audio_level); EXPECT_EQ(32767, stats.audio_level);


@ -83,8 +83,7 @@ int Resample(const AudioFrame& frame,
AudioTransportImpl::AudioTransportImpl(AudioMixer* mixer, AudioTransportImpl::AudioTransportImpl(AudioMixer* mixer,
AudioProcessing* audio_processing) AudioProcessing* audio_processing)
: audio_processing_(audio_processing), : audio_processing_(audio_processing), mixer_(mixer) {
mixer_(mixer) {
RTC_DCHECK(mixer); RTC_DCHECK(mixer);
RTC_DCHECK(audio_processing); RTC_DCHECK(audio_processing);
} }
@ -125,9 +124,8 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable(
} }
std::unique_ptr<AudioFrame> audio_frame(new AudioFrame()); std::unique_ptr<AudioFrame> audio_frame(new AudioFrame());
InitializeCaptureFrame(sample_rate, send_sample_rate_hz, InitializeCaptureFrame(sample_rate, send_sample_rate_hz, number_of_channels,
number_of_channels, send_num_channels, send_num_channels, audio_frame.get());
audio_frame.get());
voe::RemixAndResample(static_cast<const int16_t*>(audio_data), voe::RemixAndResample(static_cast<const int16_t*>(audio_data),
number_of_frames, number_of_channels, sample_rate, number_of_frames, number_of_channels, sample_rate,
&capture_resampler_, audio_frame.get()); &capture_resampler_, audio_frame.get());
@ -175,13 +173,13 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable(
// Mix all received streams, feed the result to the AudioProcessing module, then // Mix all received streams, feed the result to the AudioProcessing module, then
// resample the result to the requested output rate. // resample the result to the requested output rate.
int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples, int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples,
const size_t nBytesPerSample, const size_t nBytesPerSample,
const size_t nChannels, const size_t nChannels,
const uint32_t samplesPerSec, const uint32_t samplesPerSec,
void* audioSamples, void* audioSamples,
size_t& nSamplesOut, size_t& nSamplesOut,
int64_t* elapsed_time_ms, int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) { int64_t* ntp_time_ms) {
RTC_DCHECK_EQ(sizeof(int16_t) * nChannels, nBytesPerSample); RTC_DCHECK_EQ(sizeof(int16_t) * nChannels, nBytesPerSample);
RTC_DCHECK_GE(nChannels, 1); RTC_DCHECK_GE(nChannels, 1);
RTC_DCHECK_LE(nChannels, 2); RTC_DCHECK_LE(nChannels, 2);
@ -210,12 +208,12 @@ int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples,
// Used by Chromium - same as NeedMorePlayData() but because Chrome has its // Used by Chromium - same as NeedMorePlayData() but because Chrome has its
// own APM instance, does not call audio_processing_->ProcessReverseStream(). // own APM instance, does not call audio_processing_->ProcessReverseStream().
void AudioTransportImpl::PullRenderData(int bits_per_sample, void AudioTransportImpl::PullRenderData(int bits_per_sample,
int sample_rate, int sample_rate,
size_t number_of_channels, size_t number_of_channels,
size_t number_of_frames, size_t number_of_frames,
void* audio_data, void* audio_data,
int64_t* elapsed_time_ms, int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) { int64_t* ntp_time_ms) {
RTC_DCHECK_EQ(bits_per_sample, 16); RTC_DCHECK_EQ(bits_per_sample, 16);
RTC_DCHECK_GE(number_of_channels, 1); RTC_DCHECK_GE(number_of_channels, 1);
RTC_DCHECK_LE(number_of_channels, 2); RTC_DCHECK_LE(number_of_channels, 2);
@ -237,7 +235,8 @@ void AudioTransportImpl::PullRenderData(int bits_per_sample,
} }
void AudioTransportImpl::UpdateSendingStreams( void AudioTransportImpl::UpdateSendingStreams(
std::vector<AudioSendStream*> streams, int send_sample_rate_hz, std::vector<AudioSendStream*> streams,
int send_sample_rate_hz,
size_t send_num_channels) { size_t send_num_channels) {
rtc::CritScope lock(&capture_lock_); rtc::CritScope lock(&capture_lock_);
sending_streams_ = std::move(streams); sending_streams_ = std::move(streams);


@ -30,8 +30,7 @@ class AudioSendStream;
class AudioTransportImpl : public AudioTransport { class AudioTransportImpl : public AudioTransport {
public: public:
AudioTransportImpl(AudioMixer* mixer, AudioTransportImpl(AudioMixer* mixer, AudioProcessing* audio_processing);
AudioProcessing* audio_processing);
~AudioTransportImpl() override; ~AudioTransportImpl() override;
int32_t RecordedDataIsAvailable(const void* audioSamples, int32_t RecordedDataIsAvailable(const void* audioSamples,
@ -63,12 +62,11 @@ class AudioTransportImpl : public AudioTransport {
int64_t* ntp_time_ms) override; int64_t* ntp_time_ms) override;
void UpdateSendingStreams(std::vector<AudioSendStream*> streams, void UpdateSendingStreams(std::vector<AudioSendStream*> streams,
int send_sample_rate_hz, size_t send_num_channels); int send_sample_rate_hz,
size_t send_num_channels);
void SetStereoChannelSwapping(bool enable); void SetStereoChannelSwapping(bool enable);
bool typing_noise_detected() const; bool typing_noise_detected() const;
const voe::AudioLevel& audio_level() const { const voe::AudioLevel& audio_level() const { return audio_level_; }
return audio_level_;
}
private: private:
// Shared. // Shared.


@ -20,8 +20,8 @@
#include "api/array_view.h" #include "api/array_view.h"
#include "audio/utility/audio_frame_operations.h" #include "audio/utility/audio_frame_operations.h"
#include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_transport_controller_send_interface.h"
#include "logging/rtc_event_log/rtc_event_log.h"
#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h"
#include "logging/rtc_event_log/rtc_event_log.h"
#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h"
#include "modules/audio_coding/codecs/audio_format_conversion.h" #include "modules/audio_coding/codecs/audio_format_conversion.h"
#include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/audio_device.h"
@ -943,9 +943,8 @@ int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
uint32_t ntp_secs = 0; uint32_t ntp_secs = 0;
uint32_t ntp_frac = 0; uint32_t ntp_frac = 0;
uint32_t rtp_timestamp = 0; uint32_t rtp_timestamp = 0;
if (0 != if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
_rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, &rtp_timestamp)) {
&rtp_timestamp)) {
// Waiting for RTCP. // Waiting for RTCP.
return 0; return 0;
} }
@ -993,7 +992,7 @@ int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
return -1; return -1;
} }
if (_rtpRtcpModule->SendTelephoneEventOutband( if (_rtpRtcpModule->SendTelephoneEventOutband(
event, duration_ms, kTelephoneEventAttenuationdB) != 0) { event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
RTC_DLOG(LS_ERROR) << "SendTelephoneEventOutband() failed to send event"; RTC_DLOG(LS_ERROR) << "SendTelephoneEventOutband() failed to send event";
return -1; return -1;
} }


@ -22,8 +22,8 @@ namespace webrtc {
namespace voe { namespace voe {
ChannelProxy::ChannelProxy() {} ChannelProxy::ChannelProxy() {}
ChannelProxy::ChannelProxy(std::unique_ptr<Channel> channel) : ChannelProxy::ChannelProxy(std::unique_ptr<Channel> channel)
channel_(std::move(channel)) { : channel_(std::move(channel)) {
RTC_DCHECK(channel_); RTC_DCHECK(channel_);
module_process_thread_checker_.DetachFromThread(); module_process_thread_checker_.DetachFromThread();
} }
@ -92,7 +92,7 @@ void ChannelProxy::RegisterSenderCongestionControlObjects(
RtcpBandwidthObserver* bandwidth_observer) { RtcpBandwidthObserver* bandwidth_observer) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
channel_->RegisterSenderCongestionControlObjects(transport, channel_->RegisterSenderCongestionControlObjects(transport,
bandwidth_observer); bandwidth_observer);
} }
void ChannelProxy::RegisterReceiverCongestionControlObjects( void ChannelProxy::RegisterReceiverCongestionControlObjects(
@ -172,7 +172,7 @@ bool ChannelProxy::SetSendTelephoneEventPayloadType(int payload_type,
int payload_frequency) { int payload_frequency) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
return channel_->SetSendTelephoneEventPayloadType(payload_type, return channel_->SetSendTelephoneEventPayloadType(payload_type,
payload_frequency) == 0; payload_frequency) == 0;
} }
bool ChannelProxy::SendTelephoneEventOutband(int event, int duration_ms) { bool ChannelProxy::SendTelephoneEventOutband(int event, int duration_ms) {


@ -108,8 +108,7 @@ class ChannelProxy : public RtpPacketSinkInterface {
virtual void SetTransportOverhead(int transport_overhead_per_packet); virtual void SetTransportOverhead(int transport_overhead_per_packet);
virtual void AssociateSendChannel(const ChannelProxy& send_channel_proxy); virtual void AssociateSendChannel(const ChannelProxy& send_channel_proxy);
virtual void DisassociateSendChannel(); virtual void DisassociateSendChannel();
virtual void GetRtpRtcp(RtpRtcp** rtp_rtcp, virtual void GetRtpRtcp(RtpRtcp** rtp_rtcp, RtpReceiver** rtp_receiver) const;
RtpReceiver** rtp_receiver) const;
virtual uint32_t GetPlayoutTimestamp() const; virtual uint32_t GetPlayoutTimestamp() const;
virtual void SetMinimumPlayoutDelay(int delay_ms); virtual void SetMinimumPlayoutDelay(int delay_ms);
virtual bool GetRecCodec(CodecInst* codec_inst) const; virtual bool GetRecCodec(CodecInst* codec_inst) const;


@ -31,8 +31,7 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
return SetEncoderForMock(payload_type, &encoder); return SetEncoderForMock(payload_type, &encoder);
} }
MOCK_METHOD2(SetEncoderForMock, MOCK_METHOD2(SetEncoderForMock,
bool(int payload_type, bool(int payload_type, std::unique_ptr<AudioEncoder>* encoder));
std::unique_ptr<AudioEncoder>* encoder));
MOCK_METHOD1( MOCK_METHOD1(
ModifyEncoder, ModifyEncoder,
void(rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier)); void(rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier));
@ -59,8 +58,8 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
MOCK_CONST_METHOD0(GetTotalOutputEnergy, double()); MOCK_CONST_METHOD0(GetTotalOutputEnergy, double());
MOCK_CONST_METHOD0(GetTotalOutputDuration, double()); MOCK_CONST_METHOD0(GetTotalOutputDuration, double());
MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t()); MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t());
MOCK_METHOD2(SetSendTelephoneEventPayloadType, bool(int payload_type, MOCK_METHOD2(SetSendTelephoneEventPayloadType,
int payload_frequency)); bool(int payload_type, int payload_frequency));
MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms)); MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms));
MOCK_METHOD2(SetBitrate, void(int bitrate_bps, int64_t probing_interval_ms)); MOCK_METHOD2(SetBitrate, void(int bitrate_bps, int64_t probing_interval_ms));
MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink)); MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink));
@ -71,8 +70,8 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
MOCK_METHOD1(SetChannelOutputVolumeScaling, void(float scaling)); MOCK_METHOD1(SetChannelOutputVolumeScaling, void(float scaling));
MOCK_METHOD1(SetRtcEventLog, void(RtcEventLog* event_log)); MOCK_METHOD1(SetRtcEventLog, void(RtcEventLog* event_log));
MOCK_METHOD2(GetAudioFrameWithInfo, MOCK_METHOD2(GetAudioFrameWithInfo,
AudioMixer::Source::AudioFrameInfo(int sample_rate_hz, AudioMixer::Source::AudioFrameInfo(int sample_rate_hz,
AudioFrame* audio_frame)); AudioFrame* audio_frame));
MOCK_CONST_METHOD0(PreferredSampleRate, int()); MOCK_CONST_METHOD0(PreferredSampleRate, int());
// GMock doesn't like move-only types, like std::unique_ptr. // GMock doesn't like move-only types, like std::unique_ptr.
virtual void ProcessAndEncodeAudio(std::unique_ptr<AudioFrame> audio_frame) { virtual void ProcessAndEncodeAudio(std::unique_ptr<AudioFrame> audio_frame) {
@ -84,8 +83,8 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
MOCK_METHOD1(AssociateSendChannel, MOCK_METHOD1(AssociateSendChannel,
void(const ChannelProxy& send_channel_proxy)); void(const ChannelProxy& send_channel_proxy));
MOCK_METHOD0(DisassociateSendChannel, void()); MOCK_METHOD0(DisassociateSendChannel, void());
MOCK_CONST_METHOD2(GetRtpRtcp, void(RtpRtcp** rtp_rtcp, MOCK_CONST_METHOD2(GetRtpRtcp,
RtpReceiver** rtp_receiver)); void(RtpRtcp** rtp_rtcp, RtpReceiver** rtp_receiver));
MOCK_CONST_METHOD0(GetPlayoutTimestamp, uint32_t()); MOCK_CONST_METHOD0(GetPlayoutTimestamp, uint32_t());
MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms)); MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms));
MOCK_CONST_METHOD1(GetRecCodec, bool(CodecInst* codec_inst)); MOCK_CONST_METHOD1(GetRecCodec, bool(CodecInst* codec_inst));


@ -68,9 +68,9 @@ void RemixAndResample(const int16_t* src_data,
// how much to zero here; or 2) make resampler accept a hint that the input is // how much to zero here; or 2) make resampler accept a hint that the input is
// zeroed. // zeroed.
const size_t src_length = samples_per_channel * audio_ptr_num_channels; const size_t src_length = samples_per_channel * audio_ptr_num_channels;
int out_length = resampler->Resample(audio_ptr, src_length, int out_length =
dst_frame->mutable_data(), resampler->Resample(audio_ptr, src_length, dst_frame->mutable_data(),
AudioFrame::kMaxDataSizeSamples); AudioFrame::kMaxDataSizeSamples);
if (out_length == -1) { if (out_length == -1) {
FATAL() << "Resample failed: audio_ptr = " << audio_ptr FATAL() << "Resample failed: audio_ptr = " << audio_ptr
<< ", src_length = " << src_length << ", src_length = " << src_length


@ -113,7 +113,8 @@ void VerifyParams(const AudioFrame& ref_frame, const AudioFrame& test_frame) {
// Computes the best SNR based on the error between |ref_frame| and // Computes the best SNR based on the error between |ref_frame| and
// |test_frame|. It allows for up to a |max_delay| in samples between the // |test_frame|. It allows for up to a |max_delay| in samples between the
// signals to compensate for the resampling delay. // signals to compensate for the resampling delay.
float ComputeSNR(const AudioFrame& ref_frame, const AudioFrame& test_frame, float ComputeSNR(const AudioFrame& ref_frame,
const AudioFrame& test_frame,
size_t max_delay) { size_t max_delay) {
VerifyParams(ref_frame, test_frame); VerifyParams(ref_frame, test_frame);
float best_snr = 0; float best_snr = 0;
@ -123,8 +124,9 @@ float ComputeSNR(const AudioFrame& ref_frame, const AudioFrame& test_frame,
float variance = 0; float variance = 0;
const int16_t* ref_frame_data = ref_frame.data(); const int16_t* ref_frame_data = ref_frame.data();
const int16_t* test_frame_data = test_frame.data(); const int16_t* test_frame_data = test_frame.data();
for (size_t i = 0; i < ref_frame.samples_per_channel_ * for (size_t i = 0;
ref_frame.num_channels_ - delay; i++) { i < ref_frame.samples_per_channel_ * ref_frame.num_channels_ - delay;
i++) {
int error = ref_frame_data[i] - test_frame_data[i + delay]; int error = ref_frame_data[i] - test_frame_data[i + delay];
mse += error * error; mse += error * error;
variance += ref_frame_data[i] * ref_frame_data[i]; variance += ref_frame_data[i] * ref_frame_data[i];
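The ComputeSNR hunk above only re-wraps the accumulation loop. Assuming the conventional definition SNR_dB = 10 * log10(signal variance / MSE), maximized over candidate delays to absorb the resampling offset, the idea can be sketched as follows (BestSnrDb, the float-vector interface, and the 100 dB value for a zero-error match are assumptions for illustration, not the test's actual code):

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Sketch of a delay-compensated SNR search: try each candidate delay,
// accumulate error power and signal power, and keep the best SNR found.
float BestSnrDb(const std::vector<float>& ref,
                const std::vector<float>& test,
                size_t max_delay) {
  float best_snr = 0.f;
  for (size_t delay = 0; delay <= max_delay; ++delay) {
    double mse = 0.0;
    double variance = 0.0;
    for (size_t i = 0; i < ref.size() && i + delay < test.size(); ++i) {
      const double error = ref[i] - test[i + delay];
      mse += error * error;
      variance += ref[i] * ref[i];
    }
    // Treat a perfect match as a very high SNR instead of dividing by zero.
    const float snr =
        mse > 0.0 ? static_cast<float>(10.0 * std::log10(variance / mse))
                  : 100.f;
    best_snr = std::max(best_snr, snr);
  }
  return best_snr;
}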
@ -145,7 +147,7 @@ void VerifyFramesAreEqual(const AudioFrame& ref_frame,
const AudioFrame& test_frame) { const AudioFrame& test_frame) {
VerifyParams(ref_frame, test_frame); VerifyParams(ref_frame, test_frame);
const int16_t* ref_frame_data = ref_frame.data(); const int16_t* ref_frame_data = ref_frame.data();
const int16_t* test_frame_data = test_frame.data(); const int16_t* test_frame_data = test_frame.data();
for (size_t i = 0; for (size_t i = 0;
i < ref_frame.samples_per_channel_ * ref_frame.num_channels_; i++) { i < ref_frame.samples_per_channel_ * ref_frame.num_channels_; i++) {
EXPECT_EQ(ref_frame_data[i], test_frame_data[i]); EXPECT_EQ(ref_frame_data[i], test_frame_data[i]);
@ -161,8 +163,8 @@ void UtilityTest::RunResampleTest(int src_channels,
const int16_t kSrcCh2 = 15; const int16_t kSrcCh2 = 15;
const int16_t kSrcCh3 = 22; const int16_t kSrcCh3 = 22;
const int16_t kSrcCh4 = 8; const int16_t kSrcCh4 = 8;
const float resampling_factor = (1.0 * src_sample_rate_hz) / const float resampling_factor =
dst_sample_rate_hz; (1.0 * src_sample_rate_hz) / dst_sample_rate_hz;
const float dst_ch1 = resampling_factor * kSrcCh1; const float dst_ch1 = resampling_factor * kSrcCh1;
const float dst_ch2 = resampling_factor * kSrcCh2; const float dst_ch2 = resampling_factor * kSrcCh2;
const float dst_ch3 = resampling_factor * kSrcCh3; const float dst_ch3 = resampling_factor * kSrcCh3;
@ -206,7 +208,7 @@ void UtilityTest::RunResampleTest(int src_channels,
static_cast<double>(dst_sample_rate_hz) / src_sample_rate_hz * static_cast<double>(dst_sample_rate_hz) / src_sample_rate_hz *
kInputKernelDelaySamples * dst_channels * 2); kInputKernelDelaySamples * dst_channels * 2);
printf("(%d, %d Hz) -> (%d, %d Hz) ", // SNR reported on the same line later. printf("(%d, %d Hz) -> (%d, %d Hz) ", // SNR reported on the same line later.
src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz); src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
RemixAndResample(src_frame_, &resampler, &dst_frame_); RemixAndResample(src_frame_, &resampler, &dst_frame_);
if (src_sample_rate_hz == 96000 && dst_sample_rate_hz == 8000) { if (src_sample_rate_hz == 96000 && dst_sample_rate_hz == 8000) {
@ -258,8 +260,7 @@ TEST_F(UtilityTest, RemixAndResampleSucceeds) {
for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) { for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) { for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
for (int src_channel = 0; src_channel < kSrcChannelsSize; for (int src_channel = 0; src_channel < kSrcChannelsSize; src_channel++) {
src_channel++) {
for (int dst_channel = 0; dst_channel < kDstChannelsSize; for (int dst_channel = 0; dst_channel < kDstChannelsSize;
dst_channel++) { dst_channel++) {
RunResampleTest(kSrcChannels[src_channel], kSampleRates[src_rate], RunResampleTest(kSrcChannels[src_channel], kSampleRates[src_rate],


@ -67,15 +67,15 @@ test::PacketTransport* AudioEndToEndTest::CreateSendTransport(
} }
test::PacketTransport* AudioEndToEndTest::CreateReceiveTransport( test::PacketTransport* AudioEndToEndTest::CreateReceiveTransport(
SingleThreadedTaskQueueForTesting* task_queue) { SingleThreadedTaskQueueForTesting* task_queue) {
return new test::PacketTransport( return new test::PacketTransport(
task_queue, nullptr, this, test::PacketTransport::kReceiver, task_queue, nullptr, this, test::PacketTransport::kReceiver,
test::CallTest::payload_type_map_, GetNetworkPipeConfig()); test::CallTest::payload_type_map_, GetNetworkPipeConfig());
} }
void AudioEndToEndTest::ModifyAudioConfigs( void AudioEndToEndTest::ModifyAudioConfigs(
AudioSendStream::Config* send_config, AudioSendStream::Config* send_config,
std::vector<AudioReceiveStream::Config>* receive_configs) { std::vector<AudioReceiveStream::Config>* receive_configs) {
// Large bitrate by default. // Large bitrate by default.
const webrtc::SdpAudioFormat kDefaultFormat("opus", 48000, 2, const webrtc::SdpAudioFormat kDefaultFormat("opus", 48000, 2,
{{"stereo", "1"}}); {{"stereo", "1"}});


@ -13,10 +13,12 @@
#include "system_wrappers/include/sleep.h" #include "system_wrappers/include/sleep.h"
#include "test/testsupport/fileutils.h" #include "test/testsupport/fileutils.h"
DEFINE_int(sample_rate_hz, 16000, DEFINE_int(sample_rate_hz,
16000,
"Sample rate (Hz) of the produced audio files."); "Sample rate (Hz) of the produced audio files.");
DEFINE_bool(quick, false, DEFINE_bool(quick,
false,
"Don't do the full audio recording. " "Don't do the full audio recording. "
"Used to quickly check that the test runs without crashing."); "Used to quickly check that the test runs without crashing.");
@ -42,7 +44,7 @@ class AudioQualityTest : public AudioEndToEndTest {
const ::testing::TestInfo* const test_info = const ::testing::TestInfo* const test_info =
::testing::UnitTest::GetInstance()->current_test_info(); ::testing::UnitTest::GetInstance()->current_test_info();
return webrtc::test::OutputPath() + "LowBandwidth_" + test_info->name() + return webrtc::test::OutputPath() + "LowBandwidth_" + test_info->name() +
"_" + FileSampleRateSuffix() + ".wav"; "_" + FileSampleRateSuffix() + ".wav";
} }
std::unique_ptr<TestAudioDeviceModule::Capturer> CreateCapturer() override { std::unique_ptr<TestAudioDeviceModule::Capturer> CreateCapturer() override {
@ -69,22 +71,21 @@ class AudioQualityTest : public AudioEndToEndTest {
// Output information about the input and output audio files so that further // Output information about the input and output audio files so that further
// processing can be done by an external process. // processing can be done by an external process.
printf("TEST %s %s %s\n", test_info->name(), printf("TEST %s %s %s\n", test_info->name(), AudioInputFile().c_str(),
AudioInputFile().c_str(), AudioOutputFile().c_str()); AudioOutputFile().c_str());
} }
}; };
class Mobile2GNetworkTest : public AudioQualityTest { class Mobile2GNetworkTest : public AudioQualityTest {
void ModifyAudioConfigs(AudioSendStream::Config* send_config, void ModifyAudioConfigs(
AudioSendStream::Config* send_config,
std::vector<AudioReceiveStream::Config>* receive_configs) override { std::vector<AudioReceiveStream::Config>* receive_configs) override {
send_config->send_codec_spec = AudioSendStream::Config::SendCodecSpec( send_config->send_codec_spec = AudioSendStream::Config::SendCodecSpec(
test::CallTest::kAudioSendPayloadType, test::CallTest::kAudioSendPayloadType,
{"OPUS", {"OPUS",
48000, 48000,
2, 2,
{{"maxaveragebitrate", "6000"}, {{"maxaveragebitrate", "6000"}, {"ptime", "60"}, {"stereo", "1"}}});
{"ptime", "60"},
{"stereo", "1"}}});
} }
FakeNetworkPipe::Config GetNetworkPipeConfig() const override { FakeNetworkPipe::Config GetNetworkPipeConfig() const override {


@ -412,34 +412,30 @@ TEST_P(TransportFeedbackPacketLossTrackerTest, InsertionCompletesTwoPairs) {
TEST_P(TransportFeedbackPacketLossTrackerTest, SanityGapsInSequenceNumbers) { TEST_P(TransportFeedbackPacketLossTrackerTest, SanityGapsInSequenceNumbers) {
TransportFeedbackPacketLossTracker tracker(50 * kDefaultSendIntervalMs, 5, 1); TransportFeedbackPacketLossTracker tracker(50 * kDefaultSendIntervalMs, 5, 1);
SendPackets(&tracker, SendPackets(
{static_cast<uint16_t>(base_), &tracker,
static_cast<uint16_t>(base_ + 2), {static_cast<uint16_t>(base_), static_cast<uint16_t>(base_ + 2),
static_cast<uint16_t>(base_ + 4), static_cast<uint16_t>(base_ + 4), static_cast<uint16_t>(base_ + 6),
static_cast<uint16_t>(base_ + 6), static_cast<uint16_t>(base_ + 8)},
static_cast<uint16_t>(base_ + 8)}, kDefaultSendIntervalMs);
kDefaultSendIntervalMs);
// Gaps in sequence numbers not considered as gaps in window, because only // Gaps in sequence numbers not considered as gaps in window, because only
// those sequence numbers which were associated with the stream count. // those sequence numbers which were associated with the stream count.
// Expected window contents: [] -> [11011]. // Expected window contents: [] -> [11011].
AddTransportFeedbackAndValidate( AddTransportFeedbackAndValidate(
// Note: Left packets belong to this stream, right ones ignored. // Note: Left packets belong to this stream, right ones ignored.
&tracker, base_, {true, false, &tracker, base_,
true, false, {true, false, true, false, false, false, true, false, true, true});
false, false,
true, false,
true, true});
ValidatePacketLossStatistics(tracker, 1.0f / 5.0f, 1.0f / 4.0f); ValidatePacketLossStatistics(tracker, 1.0f / 5.0f, 1.0f / 4.0f);
// Create gap by sending [base + 10] but not acking it. // Create gap by sending [base + 10] but not acking it.
// Note: Acks for [base + 11] and [base + 13] ignored (other stream). // Note: Acks for [base + 11] and [base + 13] ignored (other stream).
// Expected window contents: [11011] -> [11011-GAP-01]. // Expected window contents: [11011] -> [11011-GAP-01].
SendPackets(&tracker, SendPackets(
{static_cast<uint16_t>(base_ + 10), &tracker,
static_cast<uint16_t>(base_ + 12), {static_cast<uint16_t>(base_ + 10), static_cast<uint16_t>(base_ + 12),
static_cast<uint16_t>(base_ + 14)}, static_cast<uint16_t>(base_ + 14)},
kDefaultSendIntervalMs); kDefaultSendIntervalMs);
AddTransportFeedbackAndValidate(&tracker, base_ + 11, AddTransportFeedbackAndValidate(&tracker, base_ + 11,
{false, false, false, true, true}); {false, false, false, true, true});
ValidatePacketLossStatistics(tracker, 2.0f / 7.0f, 2.0f / 5.0f); ValidatePacketLossStatistics(tracker, 2.0f / 7.0f, 2.0f / 5.0f);


@ -159,7 +159,8 @@ void AudioFrameOperations::QuadToMono(const int16_t* src_audio,
for (size_t i = 0; i < samples_per_channel; i++) { for (size_t i = 0; i < samples_per_channel; i++) {
dst_audio[i] = dst_audio[i] =
(static_cast<int32_t>(src_audio[4 * i]) + src_audio[4 * i + 1] + (static_cast<int32_t>(src_audio[4 * i]) + src_audio[4 * i + 1] +
src_audio[4 * i + 2] + src_audio[4 * i + 3]) >> 2; src_audio[4 * i + 2] + src_audio[4 * i + 3]) >>
2;
} }
} }
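The QuadToMono hunk above only moves the '>> 2' onto its own line; the operation itself averages four interleaved channels into one output sample. A self-contained sketch of that downmix (QuadToMonoSketch is an illustrative name, not the library function):

#include <cstddef>
#include <cstdint>

// Sketch: average four interleaved channels per sample into a mono output.
void QuadToMonoSketch(const int16_t* src_audio, size_t samples_per_channel,
                      int16_t* dst_audio) {
  for (size_t i = 0; i < samples_per_channel; ++i) {
    // Sum the four channels in 32 bits to avoid overflow, then divide by 4.
    const int32_t sum = static_cast<int32_t>(src_audio[4 * i]) +
                        src_audio[4 * i + 1] + src_audio[4 * i + 2] +
                        src_audio[4 * i + 3];
    dst_audio[i] = static_cast<int16_t>(sum >> 2);
  }
}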


@ -50,27 +50,29 @@ void SetFrameData(int16_t left, int16_t right, AudioFrame* frame) {
void SetFrameData(int16_t data, AudioFrame* frame) { void SetFrameData(int16_t data, AudioFrame* frame) {
int16_t* frame_data = frame->mutable_data(); int16_t* frame_data = frame->mutable_data();
for (size_t i = 0; for (size_t i = 0; i < frame->samples_per_channel_ * frame->num_channels_;
i < frame->samples_per_channel_ * frame->num_channels_; i++) { i++) {
frame_data[i] = data; frame_data[i] = data;
} }
} }
void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) { void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
EXPECT_EQ(frame1.num_channels_, frame2.num_channels_); EXPECT_EQ(frame1.num_channels_, frame2.num_channels_);
EXPECT_EQ(frame1.samples_per_channel_, EXPECT_EQ(frame1.samples_per_channel_, frame2.samples_per_channel_);
frame2.samples_per_channel_);
const int16_t* frame1_data = frame1.data(); const int16_t* frame1_data = frame1.data();
const int16_t* frame2_data = frame2.data(); const int16_t* frame2_data = frame2.data();
for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_; for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_;
i++) { i++) {
EXPECT_EQ(frame1_data[i], frame2_data[i]); EXPECT_EQ(frame1_data[i], frame2_data[i]);
} }
EXPECT_EQ(frame1.muted(), frame2.muted()); EXPECT_EQ(frame1.muted(), frame2.muted());
} }
void InitFrame(AudioFrame* frame, size_t channels, size_t samples_per_channel, void InitFrame(AudioFrame* frame,
int16_t left_data, int16_t right_data) { size_t channels,
size_t samples_per_channel,
int16_t left_data,
int16_t right_data) {
RTC_DCHECK(frame); RTC_DCHECK(frame);
RTC_DCHECK_GE(2, channels); RTC_DCHECK_GE(2, channels);
RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples, RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples,
@ -90,7 +92,9 @@ int16_t GetChannelData(const AudioFrame& frame, size_t channel, size_t index) {
return frame.data()[index * frame.num_channels_ + channel]; return frame.data()[index * frame.num_channels_ + channel];
} }
void VerifyFrameDataBounds(const AudioFrame& frame, size_t channel, int16_t max, void VerifyFrameDataBounds(const AudioFrame& frame,
size_t channel,
int16_t max,
int16_t min) { int16_t min) {
for (size_t i = 0; i < frame.samples_per_channel_; ++i) { for (size_t i = 0; i < frame.samples_per_channel_; ++i) {
int16_t s = GetChannelData(frame, channel, i); int16_t s = GetChannelData(frame, channel, i);


@ -150,8 +150,10 @@ class AudioSendStream {
std::unique_ptr<webrtc::AudioFrame> audio_frame) = 0; std::unique_ptr<webrtc::AudioFrame> audio_frame) = 0;
// TODO(solenberg): Make payload_type a config property instead. // TODO(solenberg): Make payload_type a config property instead.
virtual bool SendTelephoneEvent(int payload_type, int payload_frequency, virtual bool SendTelephoneEvent(int payload_type,
int event, int duration_ms) = 0; int payload_frequency,
int event,
int duration_ms) = 0;
virtual void SetMuted(bool muted) = 0; virtual void SetMuted(bool muted) = 0;


@ -42,7 +42,8 @@ class AudioState : public rtc::RefCountInterface {
struct Stats { struct Stats {
// Audio peak level (max(abs())), linearly on the interval [0,32767]. // Audio peak level (max(abs())), linearly on the interval [0,32767].
int32_t audio_level = -1; int32_t audio_level = -1;
// See: https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy // See:
// https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy
double total_energy = 0.0f; double total_energy = 0.0f;
double total_duration = 0.0f; double total_duration = 0.0f;
}; };


@ -110,8 +110,7 @@ void BitrateAllocator::OnNetworkChanged(uint32_t target_bitrate_bps,
for (auto& config : bitrate_observer_configs_) { for (auto& config : bitrate_observer_configs_) {
uint32_t allocated_bitrate = allocation[config.observer]; uint32_t allocated_bitrate = allocation[config.observer];
uint32_t protection_bitrate = config.observer->OnBitrateUpdated( uint32_t protection_bitrate = config.observer->OnBitrateUpdated(
allocated_bitrate, last_fraction_loss_, last_rtt_, allocated_bitrate, last_fraction_loss_, last_rtt_, last_bwe_period_ms_);
last_bwe_period_ms_);
if (allocated_bitrate == 0 && config.allocated_bitrate_bps > 0) { if (allocated_bitrate == 0 && config.allocated_bitrate_bps > 0) {
if (target_bitrate_bps > 0) if (target_bitrate_bps > 0)


@ -205,8 +205,7 @@ class BitrateAllocator : public BitrateAllocatorInterface {
int max_multiplier, int max_multiplier,
ObserverAllocation* allocation) ObserverAllocation* allocation)
RTC_RUN_ON(&sequenced_checker_); RTC_RUN_ON(&sequenced_checker_);
bool EnoughBitrateForAllObservers(uint32_t bitrate, bool EnoughBitrateForAllObservers(uint32_t bitrate, uint32_t sum_min_bitrates)
uint32_t sum_min_bitrates)
RTC_RUN_ON(&sequenced_checker_); RTC_RUN_ON(&sequenced_checker_);
// From the available |bitrate|, each observer will be allocated a // From the available |bitrate|, each observer will be allocated a


@ -78,7 +78,7 @@ class TestBitrateObserver : public BitrateAllocatorObserver {
namespace { namespace {
constexpr int64_t kDefaultProbingIntervalMs = 3000; constexpr int64_t kDefaultProbingIntervalMs = 3000;
const double kDefaultBitratePriority = 1.0; const double kDefaultBitratePriority = 1.0;
} } // namespace
class BitrateAllocatorTest : public ::testing::Test { class BitrateAllocatorTest : public ::testing::Test {
protected: protected:
@ -295,14 +295,14 @@ TEST_F(BitrateAllocatorTestNoEnforceMin, ThreeBitrateObservers) {
allocator_->OnNetworkChanged(300000, 0, 0, kDefaultProbingIntervalMs); allocator_->OnNetworkChanged(300000, 0, 0, kDefaultProbingIntervalMs);
EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_bps_); // Min bitrate. EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_bps_); // Min bitrate.
EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_bps_); // Min bitrate. EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_bps_); // Min bitrate.
EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_bps_); // Nothing. EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_bps_); // Nothing.
// Increased BWE, but still below the sum of configured min bitrates for all // Increased BWE, but still below the sum of configured min bitrates for all
// observers and too little for observer 3. 1 and 2 will share the rest. // observers and too little for observer 3. 1 and 2 will share the rest.
allocator_->OnNetworkChanged(500000, 0, 0, kDefaultProbingIntervalMs); allocator_->OnNetworkChanged(500000, 0, 0, kDefaultProbingIntervalMs);
EXPECT_EQ(200000u, bitrate_observer_1.last_bitrate_bps_); // Min + split. EXPECT_EQ(200000u, bitrate_observer_1.last_bitrate_bps_); // Min + split.
EXPECT_EQ(300000u, bitrate_observer_2.last_bitrate_bps_); // Min + split. EXPECT_EQ(300000u, bitrate_observer_2.last_bitrate_bps_); // Min + split.
EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_bps_); // Nothing. EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_bps_); // Nothing.
// Below min for all. // Below min for all.
allocator_->OnNetworkChanged(10000, 0, 0, kDefaultProbingIntervalMs); allocator_->OnNetworkChanged(10000, 0, 0, kDefaultProbingIntervalMs);


@ -118,7 +118,7 @@ std::unique_ptr<rtclog::StreamConfig> CreateRtcLogStreamConfig(
const int* search = const int* search =
FindKeyByValue(config.rtp.rtx_associated_payload_types, d.payload_type); FindKeyByValue(config.rtp.rtx_associated_payload_types, d.payload_type);
rtclog_config->codecs.emplace_back(d.payload_name, d.payload_type, rtclog_config->codecs.emplace_back(d.payload_name, d.payload_type,
search ? *search : 0); search ? *search : 0);
} }
return rtclog_config; return rtclog_config;
} }
@ -239,7 +239,8 @@ class Call final : public webrtc::Call,
bool has_packet_feedback) override; bool has_packet_feedback) override;
private: private:
DeliveryStatus DeliverRtcp(MediaType media_type, const uint8_t* packet, DeliveryStatus DeliverRtcp(MediaType media_type,
const uint8_t* packet,
size_t length); size_t length);
DeliveryStatus DeliverRtp(MediaType media_type, DeliveryStatus DeliverRtp(MediaType media_type,
rtc::CopyOnWriteBuffer packet, rtc::CopyOnWriteBuffer packet,


@ -30,12 +30,7 @@
namespace webrtc { namespace webrtc {
enum class MediaType { enum class MediaType { ANY, AUDIO, VIDEO, DATA };
ANY,
AUDIO,
VIDEO,
DATA
};
class PacketReceiver { class PacketReceiver {
public: public:


@ -49,12 +49,8 @@ namespace webrtc {
class CallPerfTest : public test::CallTest { class CallPerfTest : public test::CallTest {
protected: protected:
enum class FecMode { enum class FecMode { kOn, kOff };
kOn, kOff enum class CreateOrder { kAudioFirst, kVideoFirst };
};
enum class CreateOrder {
kAudioFirst, kVideoFirst
};
void TestAudioVideoSync(FecMode fec, void TestAudioVideoSync(FecMode fec,
CreateOrder create_first, CreateOrder create_first,
float video_ntp_speed, float video_ntp_speed,
@ -545,8 +541,7 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
void ModifyVideoConfigs( void ModifyVideoConfigs(
VideoSendStream::Config* send_config, VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs, std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override { VideoEncoderConfig* encoder_config) override {}
}
void PerformTest() override { void PerformTest() override {
EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback."; EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback.";
@ -643,7 +638,9 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
RunBaseTest(&test); RunBaseTest(&test);
} }
TEST_F(CallPerfTest, PadsToMinTransmitBitrate) { TestMinTransmitBitrate(true); } TEST_F(CallPerfTest, PadsToMinTransmitBitrate) {
TestMinTransmitBitrate(true);
}
TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) { TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) {
TestMinTransmitBitrate(false); TestMinTransmitBitrate(false);
@ -938,11 +935,9 @@ void CallPerfTest::TestMinAudioVideoBitrate(
// TODO(bugs.webrtc.org/8878) // TODO(bugs.webrtc.org/8878)
#if defined(WEBRTC_MAC) #if defined(WEBRTC_MAC)
#define MAYBE_MinVideoAndAudioBitrate \ #define MAYBE_MinVideoAndAudioBitrate DISABLED_MinVideoAndAudioBitrate
DISABLED_MinVideoAndAudioBitrate
#else #else
#define MAYBE_MinVideoAndAudioBitrate \ #define MAYBE_MinVideoAndAudioBitrate MinVideoAndAudioBitrate
MinVideoAndAudioBitrate
#endif #endif
TEST_F(CallPerfTest, MAYBE_MinVideoAndAudioBitrate) { TEST_F(CallPerfTest, MAYBE_MinVideoAndAudioBitrate) {
TestMinAudioVideoBitrate(false, 110, 40, -10, 10000, 70000, 200000); TestMinAudioVideoBitrate(false, 110, 40, -10, 10000, 70000, 200000);

View File

@ -15,8 +15,8 @@
#include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/test/mock_audio_mixer.h" #include "api/test/mock_audio_mixer.h"
#include "audio/audio_send_stream.h"
#include "audio/audio_receive_stream.h" #include "audio/audio_receive_stream.h"
#include "audio/audio_send_stream.h"
#include "call/audio_state.h" #include "call/audio_state.h"
#include "call/call.h" #include "call/call.h"
#include "logging/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/rtc_event_log.h"
@ -250,7 +250,6 @@ TEST(CallTest, MultipleFlexfecReceiveStreamsProtectingSingleVideoStream) {
} }
} }
TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) { TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) {
constexpr uint32_t kSSRC = 12345; constexpr uint32_t kSSRC = 12345;
CallHelper call; CallHelper call;
@ -277,5 +276,4 @@ TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) {
EXPECT_EQ(rtp_state1.media_has_been_sent, rtp_state2.media_has_been_sent); EXPECT_EQ(rtp_state1.media_has_been_sent, rtp_state2.media_has_been_sent);
} }
} // namespace webrtc } // namespace webrtc


@ -16,8 +16,8 @@
#include <string> #include <string>
#include <vector> #include <vector>
#include "api/rtp_headers.h"
#include "api/call/transport.h" #include "api/call/transport.h"
#include "api/rtp_headers.h"
#include "api/rtpparameters.h" #include "api/rtpparameters.h"
#include "call/rtp_packet_sink_interface.h" #include "call/rtp_packet_sink_interface.h"
#include "common_types.h" // NOLINT(build/include) #include "common_types.h" // NOLINT(build/include)


@ -75,8 +75,7 @@ RampUpTester::RampUpTester(size_t num_video_streams,
EXPECT_LE(num_audio_streams_, 1u); EXPECT_LE(num_audio_streams_, 1u);
} }
RampUpTester::~RampUpTester() { RampUpTester::~RampUpTester() {}
}
Call::Config RampUpTester::GetSenderCallConfig() { Call::Config RampUpTester::GetSenderCallConfig() {
Call::Config call_config(&event_log_); Call::Config call_config(&event_log_);


@ -346,8 +346,8 @@ TEST_F(RtpDemuxerTest, OnRtpPacketCalledOnCorrectSinkByRsid) {
} }
for (size_t i = 0; i < arraysize(rsids); i++) { for (size_t i = 0; i < arraysize(rsids); i++) {
auto packet = CreatePacketWithSsrcRsid(rtc::checked_cast<uint32_t>(i), auto packet =
rsids[i]); CreatePacketWithSsrcRsid(rtc::checked_cast<uint32_t>(i), rsids[i]);
EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1); EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1);
EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
} }
@ -361,8 +361,8 @@ TEST_F(RtpDemuxerTest, OnRtpPacketCalledOnCorrectSinkByMid) {
} }
for (size_t i = 0; i < arraysize(mids); i++) { for (size_t i = 0; i < arraysize(mids); i++) {
auto packet = CreatePacketWithSsrcMid(rtc::checked_cast<uint32_t>(i), auto packet =
mids[i]); CreatePacketWithSsrcMid(rtc::checked_cast<uint32_t>(i), mids[i]);
EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1); EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1);
EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
} }


@ -44,9 +44,8 @@ RtpStreamReceiverController::RtpStreamReceiverController() {
RtpStreamReceiverController::~RtpStreamReceiverController() = default; RtpStreamReceiverController::~RtpStreamReceiverController() = default;
std::unique_ptr<RtpStreamReceiverInterface> std::unique_ptr<RtpStreamReceiverInterface>
RtpStreamReceiverController::CreateReceiver( RtpStreamReceiverController::CreateReceiver(uint32_t ssrc,
uint32_t ssrc, RtpPacketSinkInterface* sink) {
RtpPacketSinkInterface* sink) {
return rtc::MakeUnique<Receiver>(this, ssrc, sink); return rtc::MakeUnique<Receiver>(this, ssrc, sink);
} }


@ -83,8 +83,7 @@ class RtpTransportControllerSend final
void OnSentPacket(const rtc::SentPacket& sent_packet) override; void OnSentPacket(const rtc::SentPacket& sent_packet) override;
void SetSdpBitrateParameters(const BitrateConstraints& constraints) override; void SetSdpBitrateParameters(const BitrateConstraints& constraints) override;
void SetClientBitratePreferences( void SetClientBitratePreferences(const BitrateSettings& preferences) override;
const BitrateSettings& preferences) override;
private: private:
const Clock* const clock_; const Clock* const clock_;


@ -63,8 +63,7 @@ void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) {
media_packet.set_recovered(true); media_packet.set_recovered(true);
// Skip the RTX header. // Skip the RTX header.
rtc::ArrayView<const uint8_t> rtx_payload = rtc::ArrayView<const uint8_t> rtx_payload = payload.subview(kRtxHeaderSize);
payload.subview(kRtxHeaderSize);
uint8_t* media_payload = media_packet.AllocatePayload(rtx_payload.size()); uint8_t* media_payload = media_packet.AllocatePayload(rtx_payload.size());
RTC_DCHECK(media_payload != nullptr); RTC_DCHECK(media_payload != nullptr);
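The hunk above only re-wraps the payload.subview(kRtxHeaderSize) call; the surrounding logic follows RFC 4588, where the first two bytes of an RTX payload carry the original sequence number (matching the 'Orig seqno' bytes in the test packets below) and the remainder is the original media payload. A hedged standalone sketch of that header stripping (StripRtxHeader, RestoredPayload, and the big-endian OSN parsing are assumptions for illustration, not the RtxReceiveStream code):

#include <cstddef>
#include <cstdint>

struct RestoredPayload {
  uint16_t original_seqno;       // Sequence number of the lost media packet.
  const uint8_t* media_payload;  // Original payload, after the RTX header.
  size_t media_size;
};

// Sketch: strip the 2-byte RTX header and recover the original sequence
// number from an RTX payload (RFC 4588 framing assumed).
bool StripRtxHeader(const uint8_t* rtx_payload, size_t size,
                    RestoredPayload* out) {
  constexpr size_t kRtxHeaderSize = 2;  // Original sequence number (OSN).
  if (size < kRtxHeaderSize) {
    return false;
  }
  out->original_seqno =
      static_cast<uint16_t>((rtx_payload[0] << 8) | rtx_payload[1]);
  out->media_payload = rtx_payload + kRtxHeaderSize;
  out->media_size = size - kRtxHeaderSize;
  return true;
}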


@ -36,7 +36,7 @@ constexpr uint8_t kRtxPacket[] = {
0x11, 0x11, 0x11, 0x11, // Timestamp. 0x11, 0x11, 0x11, 0x11, // Timestamp.
0x22, 0x22, 0x22, 0x22, // SSRC. 0x22, 0x22, 0x22, 0x22, // SSRC.
// RTX header. // RTX header.
0x56, 0x57, // Orig seqno. 0x56, 0x57, // Orig seqno.
// Payload. // Payload.
0xee, 0xee,
}; };
@ -50,7 +50,7 @@ constexpr uint8_t kRtxPacketWithCVO[] = {
0xbe, 0xde, 0x00, 0x01, // Extension header. 0xbe, 0xde, 0x00, 0x01, // Extension header.
0x30, 0x01, 0x00, 0x00, // 90 degree rotation. 0x30, 0x01, 0x00, 0x00, // 90 degree rotation.
// RTX header. // RTX header.
0x56, 0x57, // Orig seqno. 0x56, 0x57, // Orig seqno.
// Payload. // Payload.
0xee, 0xee,
}; };
@ -73,8 +73,8 @@ TEST(RtxReceiveStreamTest, RestoresPacketPayload) {
RtpPacketReceived rtx_packet; RtpPacketReceived rtx_packet;
EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView<const uint8_t>(kRtxPacket))); EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView<const uint8_t>(kRtxPacket)));
EXPECT_CALL(media_sink, OnRtpPacket(_)).WillOnce(testing::Invoke( EXPECT_CALL(media_sink, OnRtpPacket(_))
[](const RtpPacketReceived& packet) { .WillOnce(testing::Invoke([](const RtpPacketReceived& packet) {
EXPECT_EQ(packet.SequenceNumber(), kMediaSeqno); EXPECT_EQ(packet.SequenceNumber(), kMediaSeqno);
EXPECT_EQ(packet.Ssrc(), kMediaSSRC); EXPECT_EQ(packet.Ssrc(), kMediaSSRC);
EXPECT_EQ(packet.PayloadType(), kMediaPayloadType); EXPECT_EQ(packet.PayloadType(), kMediaPayloadType);
@ -124,15 +124,15 @@ TEST(RtxReceiveStreamTest, CopiesRtpHeaderExtensions) {
RtpHeaderExtensionMap extension_map; RtpHeaderExtensionMap extension_map;
extension_map.RegisterByType(3, kRtpExtensionVideoRotation); extension_map.RegisterByType(3, kRtpExtensionVideoRotation);
RtpPacketReceived rtx_packet(&extension_map); RtpPacketReceived rtx_packet(&extension_map);
EXPECT_TRUE(rtx_packet.Parse( EXPECT_TRUE(
rtc::ArrayView<const uint8_t>(kRtxPacketWithCVO))); rtx_packet.Parse(rtc::ArrayView<const uint8_t>(kRtxPacketWithCVO)));
VideoRotation rotation = kVideoRotation_0; VideoRotation rotation = kVideoRotation_0;
EXPECT_TRUE(rtx_packet.GetExtension<VideoOrientation>(&rotation)); EXPECT_TRUE(rtx_packet.GetExtension<VideoOrientation>(&rotation));
EXPECT_EQ(kVideoRotation_90, rotation); EXPECT_EQ(kVideoRotation_90, rotation);
EXPECT_CALL(media_sink, OnRtpPacket(_)).WillOnce(testing::Invoke( EXPECT_CALL(media_sink, OnRtpPacket(_))
[](const RtpPacketReceived& packet) { .WillOnce(testing::Invoke([](const RtpPacketReceived& packet) {
EXPECT_EQ(packet.SequenceNumber(), kMediaSeqno); EXPECT_EQ(packet.SequenceNumber(), kMediaSeqno);
EXPECT_EQ(packet.Ssrc(), kMediaSSRC); EXPECT_EQ(packet.Ssrc(), kMediaSSRC);
EXPECT_EQ(packet.PayloadType(), kMediaPayloadType); EXPECT_EQ(packet.PayloadType(), kMediaPayloadType);

View File

@ -26,14 +26,14 @@ class MockAudioSendStream : public AudioSendStream {
MOCK_METHOD0(Start, void()); MOCK_METHOD0(Start, void());
MOCK_METHOD0(Stop, void()); MOCK_METHOD0(Stop, void());
// GMock doesn't like move-only types, such as std::unique_ptr. // GMock doesn't like move-only types, such as std::unique_ptr.
virtual void SendAudioData( virtual void SendAudioData(std::unique_ptr<webrtc::AudioFrame> audio_frame) {
std::unique_ptr<webrtc::AudioFrame> audio_frame) {
SendAudioDataForMock(audio_frame.get()); SendAudioDataForMock(audio_frame.get());
} }
MOCK_METHOD1(SendAudioDataForMock, MOCK_METHOD1(SendAudioDataForMock, void(webrtc::AudioFrame* audio_frame));
void(webrtc::AudioFrame* audio_frame));
MOCK_METHOD4(SendTelephoneEvent, MOCK_METHOD4(SendTelephoneEvent,
bool(int payload_type, int payload_frequency, int event, bool(int payload_type,
int payload_frequency,
int event,
int duration_ms)); int duration_ms));
MOCK_METHOD1(SetMuted, void(bool muted)); MOCK_METHOD1(SetMuted, void(bool muted));
MOCK_CONST_METHOD0(GetStats, Stats()); MOCK_CONST_METHOD0(GetStats, Stats());

View File

@ -50,8 +50,7 @@ class MockRtpTransportControllerSend
MOCK_METHOD1(EnablePeriodicAlrProbing, void(bool)); MOCK_METHOD1(EnablePeriodicAlrProbing, void(bool));
MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket&)); MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket&));
MOCK_METHOD1(SetSdpBitrateParameters, void(const BitrateConstraints&)); MOCK_METHOD1(SetSdpBitrateParameters, void(const BitrateConstraints&));
MOCK_METHOD1(SetClientBitratePreferences, MOCK_METHOD1(SetClientBitratePreferences, void(const BitrateSettings&));
void(const BitrateSettings&));
}; };
} // namespace webrtc } // namespace webrtc
#endif // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_ #endif // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_

View File

@ -16,12 +16,12 @@
#include <string> #include <string>
#include <vector> #include <vector>
#include "api/rtp_headers.h"
#include "api/call/transport.h" #include "api/call/transport.h"
#include "api/rtp_headers.h"
#include "api/rtpparameters.h" #include "api/rtpparameters.h"
#include "api/video/video_content_type.h" #include "api/video/video_content_type.h"
#include "api/video/video_timing.h"
#include "api/video/video_sink_interface.h" #include "api/video/video_sink_interface.h"
#include "api/video/video_timing.h"
#include "call/rtp_config.h" #include "call/rtp_config.h"
#include "common_types.h" // NOLINT(build/include) #include "common_types.h" // NOLINT(build/include)
#include "common_video/include/frame_callback.h" #include "common_video/include/frame_callback.h"

View File

@ -26,12 +26,16 @@ namespace webrtc {
class CopyConverter : public AudioConverter { class CopyConverter : public AudioConverter {
public: public:
CopyConverter(size_t src_channels, size_t src_frames, size_t dst_channels, CopyConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {} : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
~CopyConverter() override {}; ~CopyConverter() override{};
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
if (src != dst) { if (src != dst) {
@ -43,12 +47,16 @@ class CopyConverter : public AudioConverter {
class UpmixConverter : public AudioConverter { class UpmixConverter : public AudioConverter {
public: public:
UpmixConverter(size_t src_channels, size_t src_frames, size_t dst_channels, UpmixConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {} : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
~UpmixConverter() override {}; ~UpmixConverter() override{};
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
for (size_t i = 0; i < dst_frames(); ++i) { for (size_t i = 0; i < dst_frames(); ++i) {
@ -61,13 +69,16 @@ class UpmixConverter : public AudioConverter {
class DownmixConverter : public AudioConverter { class DownmixConverter : public AudioConverter {
public: public:
DownmixConverter(size_t src_channels, size_t src_frames, size_t dst_channels, DownmixConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) { : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
} ~DownmixConverter() override{};
~DownmixConverter() override {};
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
float* dst_mono = dst[0]; float* dst_mono = dst[0];
@ -82,7 +93,9 @@ class DownmixConverter : public AudioConverter {
class ResampleConverter : public AudioConverter { class ResampleConverter : public AudioConverter {
public: public:
ResampleConverter(size_t src_channels, size_t src_frames, size_t dst_channels, ResampleConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) { : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {
resamplers_.reserve(src_channels); resamplers_.reserve(src_channels);
@ -90,9 +103,11 @@ class ResampleConverter : public AudioConverter {
resamplers_.push_back(std::unique_ptr<PushSincResampler>( resamplers_.push_back(std::unique_ptr<PushSincResampler>(
new PushSincResampler(src_frames, dst_frames))); new PushSincResampler(src_frames, dst_frames)));
} }
~ResampleConverter() override {}; ~ResampleConverter() override{};
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
for (size_t i = 0; i < resamplers_.size(); ++i) for (size_t i = 0; i < resamplers_.size(); ++i)
@ -108,7 +123,7 @@ class ResampleConverter : public AudioConverter {
class CompositionConverter : public AudioConverter { class CompositionConverter : public AudioConverter {
public: public:
explicit CompositionConverter( explicit CompositionConverter(
std::vector<std::unique_ptr<AudioConverter>> converters) std::vector<std::unique_ptr<AudioConverter>> converters)
: converters_(std::move(converters)) { : converters_(std::move(converters)) {
RTC_CHECK_GE(converters_.size(), 2); RTC_CHECK_GE(converters_.size(), 2);
// We need an intermediate buffer after every converter. // We need an intermediate buffer after every converter.
@ -117,19 +132,19 @@ class CompositionConverter : public AudioConverter {
std::unique_ptr<ChannelBuffer<float>>(new ChannelBuffer<float>( std::unique_ptr<ChannelBuffer<float>>(new ChannelBuffer<float>(
(*it)->dst_frames(), (*it)->dst_channels()))); (*it)->dst_frames(), (*it)->dst_channels())));
} }
~CompositionConverter() override {}; ~CompositionConverter() override{};
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
converters_.front()->Convert(src, src_size, buffers_.front()->channels(), converters_.front()->Convert(src, src_size, buffers_.front()->channels(),
buffers_.front()->size()); buffers_.front()->size());
for (size_t i = 2; i < converters_.size(); ++i) { for (size_t i = 2; i < converters_.size(); ++i) {
auto& src_buffer = buffers_[i - 2]; auto& src_buffer = buffers_[i - 2];
auto& dst_buffer = buffers_[i - 1]; auto& dst_buffer = buffers_[i - 1];
converters_[i]->Convert(src_buffer->channels(), converters_[i]->Convert(src_buffer->channels(), src_buffer->size(),
src_buffer->size(), dst_buffer->channels(), dst_buffer->size());
dst_buffer->channels(),
dst_buffer->size());
} }
converters_.back()->Convert(buffers_.back()->channels(), converters_.back()->Convert(buffers_.back()->channels(),
buffers_.back()->size(), dst, dst_capacity); buffers_.back()->size(), dst, dst_capacity);
@ -175,8 +190,8 @@ std::unique_ptr<AudioConverter> AudioConverter::Create(size_t src_channels,
sp.reset(new ResampleConverter(src_channels, src_frames, dst_channels, sp.reset(new ResampleConverter(src_channels, src_frames, dst_channels,
dst_frames)); dst_frames));
} else { } else {
sp.reset(new CopyConverter(src_channels, src_frames, dst_channels, sp.reset(
dst_frames)); new CopyConverter(src_channels, src_frames, dst_channels, dst_frames));
} }
return sp; return sp;
@ -184,13 +199,12 @@ std::unique_ptr<AudioConverter> AudioConverter::Create(size_t src_channels,
// For CompositionConverter. // For CompositionConverter.
AudioConverter::AudioConverter() AudioConverter::AudioConverter()
: src_channels_(0), : src_channels_(0), src_frames_(0), dst_channels_(0), dst_frames_(0) {}
src_frames_(0),
dst_channels_(0),
dst_frames_(0) {}
AudioConverter::AudioConverter(size_t src_channels, size_t src_frames, AudioConverter::AudioConverter(size_t src_channels,
size_t dst_channels, size_t dst_frames) size_t src_frames,
size_t dst_channels,
size_t dst_frames)
: src_channels_(src_channels), : src_channels_(src_channels),
src_frames_(src_frames), src_frames_(src_frames),
dst_channels_(dst_channels), dst_channels_(dst_channels),

View File

@ -37,8 +37,10 @@ class AudioConverter {
// capacity of |dst_capacity|. Both point to a series of buffers containing // capacity of |dst_capacity|. Both point to a series of buffers containing
// the samples for each channel. The sizes must correspond to the format // the samples for each channel. The sizes must correspond to the format
// passed to Create(). // passed to Create().
virtual void Convert(const float* const* src, size_t src_size, virtual void Convert(const float* const* src,
float* const* dst, size_t dst_capacity) = 0; size_t src_size,
float* const* dst,
size_t dst_capacity) = 0;
size_t src_channels() const { return src_channels_; } size_t src_channels() const { return src_channels_; }
size_t src_frames() const { return src_frames_; } size_t src_frames() const { return src_frames_; }
@ -47,7 +49,9 @@ class AudioConverter {
protected: protected:
AudioConverter(); AudioConverter();
AudioConverter(size_t src_channels, size_t src_frames, size_t dst_channels, AudioConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames); size_t dst_frames);
// Helper to RTC_CHECK that inputs are correctly sized. // Helper to RTC_CHECK that inputs are correctly sized.
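A minimal usage sketch of the Create()/Convert() API whose signatures are rewrapped above. Hedged assumptions: the include path, and that src_size equals src_channels × src_frames while dst_capacity must be at least dst_channels × dst_frames, as suggested by the CheckSizes() calls in audio_converter.cc:

#include <cstddef>
#include <memory>

#include "common_audio/audio_converter.h"  // Assumed include path.

// Downmixes one stereo chunk of |frames| samples per channel to mono.
void DownmixStereoChunk(const float* left,
                        const float* right,
                        float* mono,
                        size_t frames) {
  std::unique_ptr<webrtc::AudioConverter> converter =
      webrtc::AudioConverter::Create(/*src_channels=*/2, /*src_frames=*/frames,
                                     /*dst_channels=*/1,
                                     /*dst_frames=*/frames);
  const float* const src[] = {left, right};  // One pointer per channel.
  float* const dst[] = {mono};
  converter->Convert(src, /*src_size=*/2 * frames, dst,
                     /*dst_capacity=*/frames);
}

In real code the converter would typically be created once for a fixed format and reused for every chunk rather than rebuilt per call.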

View File

@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include <cmath>
#include <algorithm> #include <algorithm>
#include <cmath>
#include <memory> #include <memory>
#include <vector> #include <vector>
@ -52,8 +52,7 @@ float ComputeSNR(const ChannelBuffer<float>& ref,
// Search within one sample of the expected delay. // Search within one sample of the expected delay.
for (size_t delay = std::max(expected_delay, static_cast<size_t>(1)) - 1; for (size_t delay = std::max(expected_delay, static_cast<size_t>(1)) - 1;
delay <= std::min(expected_delay + 1, ref.num_frames()); delay <= std::min(expected_delay + 1, ref.num_frames()); ++delay) {
++delay) {
float mse = 0; float mse = 0;
float variance = 0; float variance = 0;
float mean = 0; float mean = 0;
@ -92,8 +91,8 @@ void RunAudioConverterTest(size_t src_channels,
int dst_sample_rate_hz) { int dst_sample_rate_hz) {
const float kSrcLeft = 0.0002f; const float kSrcLeft = 0.0002f;
const float kSrcRight = 0.0001f; const float kSrcRight = 0.0001f;
const float resampling_factor = (1.f * src_sample_rate_hz) / const float resampling_factor =
dst_sample_rate_hz; (1.f * src_sample_rate_hz) / dst_sample_rate_hz;
const float dst_left = resampling_factor * kSrcLeft; const float dst_left = resampling_factor * kSrcLeft;
const float dst_right = resampling_factor * kSrcRight; const float dst_right = resampling_factor * kSrcRight;
const float dst_mono = (dst_left + dst_right) / 2; const float dst_mono = (dst_left + dst_right) / 2;
@ -124,13 +123,15 @@ void RunAudioConverterTest(size_t src_channels,
ScopedBuffer ref_buffer = CreateBuffer(ref_data, dst_frames); ScopedBuffer ref_buffer = CreateBuffer(ref_data, dst_frames);
// The sinc resampler has a known delay, which we compute here. // The sinc resampler has a known delay, which we compute here.
const size_t delay_frames = src_sample_rate_hz == dst_sample_rate_hz ? 0 : const size_t delay_frames =
static_cast<size_t>( src_sample_rate_hz == dst_sample_rate_hz
PushSincResampler::AlgorithmicDelaySeconds(src_sample_rate_hz) * ? 0
dst_sample_rate_hz); : static_cast<size_t>(
PushSincResampler::AlgorithmicDelaySeconds(src_sample_rate_hz) *
dst_sample_rate_hz);
// SNR reported on the same line later. // SNR reported on the same line later.
printf("(%" PRIuS ", %d Hz) -> (%" PRIuS ", %d Hz) ", printf("(%" PRIuS ", %d Hz) -> (%" PRIuS ", %d Hz) ", src_channels,
src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz); src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
std::unique_ptr<AudioConverter> converter = AudioConverter::Create( std::unique_ptr<AudioConverter> converter = AudioConverter::Create(
src_channels, src_frames, dst_channels, dst_frames); src_channels, src_frames, dst_channels, dst_frames);

View File

@ -28,7 +28,8 @@ AudioRingBuffer::~AudioRingBuffer() {
WebRtc_FreeBuffer(buf); WebRtc_FreeBuffer(buf);
} }
void AudioRingBuffer::Write(const float* const* data, size_t channels, void AudioRingBuffer::Write(const float* const* data,
size_t channels,
size_t frames) { size_t frames) {
RTC_DCHECK_EQ(buffers_.size(), channels); RTC_DCHECK_EQ(buffers_.size(), channels);
for (size_t i = 0; i < channels; ++i) { for (size_t i = 0; i < channels; ++i) {

View File

@ -17,8 +17,8 @@
namespace webrtc { namespace webrtc {
class AudioRingBufferTest : class AudioRingBufferTest
public ::testing::TestWithParam< ::testing::tuple<int, int, int, int> > { : public ::testing::TestWithParam< ::testing::tuple<int, int, int, int> > {
}; };
void ReadAndWriteTest(const ChannelBuffer<float>& input, void ReadAndWriteTest(const ChannelBuffer<float>& input,
@ -72,10 +72,8 @@ TEST_P(AudioRingBufferTest, ReadDataMatchesWrittenData) {
input.channels()[i][j] = (i + 1) * (j + 1); input.channels()[i][j] = (i + 1) * (j + 1);
ChannelBuffer<float> output(kFrames, static_cast<int>(num_channels)); ChannelBuffer<float> output(kFrames, static_cast<int>(num_channels));
ReadAndWriteTest(input, ReadAndWriteTest(input, ::testing::get<0>(GetParam()),
::testing::get<0>(GetParam()), ::testing::get<1>(GetParam()), ::testing::get<2>(GetParam()),
::testing::get<1>(GetParam()),
::testing::get<2>(GetParam()),
&output); &output);
// Verify the read data matches the input. // Verify the read data matches the input.
@ -85,7 +83,8 @@ TEST_P(AudioRingBufferTest, ReadDataMatchesWrittenData) {
} }
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_CASE_P(
AudioRingBufferTest, AudioRingBufferTest, AudioRingBufferTest,
AudioRingBufferTest,
::testing::Combine(::testing::Values(10, 20, 42), // num_write_chunk_frames ::testing::Combine(::testing::Values(10, 20, 42), // num_write_chunk_frames
::testing::Values(1, 10, 17), // num_read_chunk_frames ::testing::Values(1, 10, 17), // num_read_chunk_frames
::testing::Values(100, 256), // buffer_frames ::testing::Values(100, 256), // buffer_frames

View File

@ -41,8 +41,7 @@ void CopyFrames(const float* const* src,
float* const* dst, float* const* dst,
size_t dst_start_index) { size_t dst_start_index) {
for (size_t i = 0; i < num_channels; ++i) { for (size_t i = 0; i < num_channels; ++i) {
memcpy(&dst[i][dst_start_index], memcpy(&dst[i][dst_start_index], &src[i][src_start_index],
&src[i][src_start_index],
num_frames * sizeof(dst[i][dst_start_index])); num_frames * sizeof(dst[i][dst_start_index]));
} }
} }
@ -55,8 +54,7 @@ void MoveFrames(const float* const* src,
float* const* dst, float* const* dst,
size_t dst_start_index) { size_t dst_start_index) {
for (size_t i = 0; i < num_channels; ++i) { for (size_t i = 0; i < num_channels; ++i) {
memmove(&dst[i][dst_start_index], memmove(&dst[i][dst_start_index], &src[i][src_start_index],
&src[i][src_start_index],
num_frames * sizeof(dst[i][dst_start_index])); num_frames * sizeof(dst[i][dst_start_index]));
} }
} }
@ -87,9 +85,9 @@ void ApplyWindow(const float* window,
size_t gcd(size_t a, size_t b) { size_t gcd(size_t a, size_t b) {
size_t tmp; size_t tmp;
while (b) { while (b) {
tmp = a; tmp = a;
a = b; a = b;
b = tmp % b; b = tmp % b;
} }
return a; return a;
} }
@ -184,51 +182,30 @@ void Blocker::ProcessChunk(const float* const* input,
block_size_); block_size_);
input_buffer_.MoveReadPositionBackward(block_size_ - shift_amount_); input_buffer_.MoveReadPositionBackward(block_size_ - shift_amount_);
ApplyWindow(window_.get(), ApplyWindow(window_.get(), block_size_, num_input_channels_,
block_size_,
num_input_channels_,
input_block_.channels()); input_block_.channels());
callback_->ProcessBlock(input_block_.channels(), callback_->ProcessBlock(input_block_.channels(), block_size_,
block_size_, num_input_channels_, num_output_channels_,
num_input_channels_,
num_output_channels_,
output_block_.channels()); output_block_.channels());
ApplyWindow(window_.get(), ApplyWindow(window_.get(), block_size_, num_output_channels_,
block_size_,
num_output_channels_,
output_block_.channels()); output_block_.channels());
AddFrames(output_buffer_.channels(), AddFrames(output_buffer_.channels(), first_frame_in_block,
first_frame_in_block, output_block_.channels(), 0, block_size_, num_output_channels_,
output_block_.channels(), output_buffer_.channels(), first_frame_in_block);
0,
block_size_,
num_output_channels_,
output_buffer_.channels(),
first_frame_in_block);
first_frame_in_block += shift_amount_; first_frame_in_block += shift_amount_;
} }
// Copy output buffer to output // Copy output buffer to output
CopyFrames(output_buffer_.channels(), CopyFrames(output_buffer_.channels(), 0, chunk_size_, num_output_channels_,
0, output, 0);
chunk_size_,
num_output_channels_,
output,
0);
// Copy output buffer [chunk_size_, chunk_size_ + initial_delay] // Copy output buffer [chunk_size_, chunk_size_ + initial_delay]
// to output buffer [0, initial_delay], zero the rest. // to output buffer [0, initial_delay], zero the rest.
MoveFrames(output_buffer_.channels(), MoveFrames(output_buffer_.channels(), chunk_size, initial_delay_,
chunk_size, num_output_channels_, output_buffer_.channels(), 0);
initial_delay_, ZeroOut(output_buffer_.channels(), initial_delay_, chunk_size_,
num_output_channels_,
output_buffer_.channels(),
0);
ZeroOut(output_buffer_.channels(),
initial_delay_,
chunk_size_,
num_output_channels_); num_output_channels_);
// Calculate new starting frames. // Calculate new starting frames.

View File

@ -71,11 +71,8 @@ class BlockerTest : public ::testing::Test {
size_t end = chunk_size - 1; size_t end = chunk_size - 1;
while (end < num_frames) { while (end < num_frames) {
CopyTo(input_chunk, 0, start, num_input_channels, chunk_size, input); CopyTo(input_chunk, 0, start, num_input_channels, chunk_size, input);
blocker->ProcessChunk(input_chunk, blocker->ProcessChunk(input_chunk, chunk_size, num_input_channels,
chunk_size, num_output_channels, output_chunk);
num_input_channels,
num_output_channels,
output_chunk);
CopyTo(output, start, 0, num_output_channels, chunk_size, output_chunk); CopyTo(output, start, 0, num_output_channels, chunk_size, output_chunk);
start += chunk_size; start += chunk_size;
@ -116,8 +113,7 @@ class BlockerTest : public ::testing::Test {
size_t num_frames, size_t num_frames,
const float* const* src) { const float* const* src) {
for (size_t i = 0; i < num_channels; ++i) { for (size_t i = 0; i < num_channels; ++i) {
memcpy(&dst[i][start_index_dst], memcpy(&dst[i][start_index_dst], &src[i][start_index_src],
&src[i][start_index_src],
num_frames * sizeof(float)); num_frames * sizeof(float));
} }
} }
@ -152,27 +148,15 @@ TEST_F(BlockerTest, TestBlockerMutuallyPrimeChunkandBlockSize) {
ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels); ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels);
PlusThreeBlockerCallback callback; PlusThreeBlockerCallback callback;
Blocker blocker(kChunkSize, Blocker blocker(kChunkSize, kBlockSize, kNumInputChannels, kNumOutputChannels,
kBlockSize, kWindow, kShiftAmount, &callback);
kNumInputChannels,
kNumOutputChannels,
kWindow,
kShiftAmount,
&callback);
RunTest(&blocker, RunTest(&blocker, kChunkSize, kNumFrames, input_cb.channels(),
kChunkSize, input_chunk_cb.channels(), actual_output_cb.channels(),
kNumFrames, output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
input_cb.channels(),
input_chunk_cb.channels(),
actual_output_cb.channels(),
output_chunk_cb.channels(),
kNumInputChannels,
kNumOutputChannels);
ValidateSignalEquality(expected_output_cb.channels(), ValidateSignalEquality(expected_output_cb.channels(),
actual_output_cb.channels(), actual_output_cb.channels(), kNumOutputChannels,
kNumOutputChannels,
kNumFrames); kNumFrames);
} }
@ -205,27 +189,15 @@ TEST_F(BlockerTest, TestBlockerMutuallyPrimeShiftAndBlockSize) {
ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels); ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels);
PlusThreeBlockerCallback callback; PlusThreeBlockerCallback callback;
Blocker blocker(kChunkSize, Blocker blocker(kChunkSize, kBlockSize, kNumInputChannels, kNumOutputChannels,
kBlockSize, kWindow, kShiftAmount, &callback);
kNumInputChannels,
kNumOutputChannels,
kWindow,
kShiftAmount,
&callback);
RunTest(&blocker, RunTest(&blocker, kChunkSize, kNumFrames, input_cb.channels(),
kChunkSize, input_chunk_cb.channels(), actual_output_cb.channels(),
kNumFrames, output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
input_cb.channels(),
input_chunk_cb.channels(),
actual_output_cb.channels(),
output_chunk_cb.channels(),
kNumInputChannels,
kNumOutputChannels);
ValidateSignalEquality(expected_output_cb.channels(), ValidateSignalEquality(expected_output_cb.channels(),
actual_output_cb.channels(), actual_output_cb.channels(), kNumOutputChannels,
kNumOutputChannels,
kNumFrames); kNumFrames);
} }
@ -258,27 +230,15 @@ TEST_F(BlockerTest, TestBlockerNoOverlap) {
ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels); ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels);
PlusThreeBlockerCallback callback; PlusThreeBlockerCallback callback;
Blocker blocker(kChunkSize, Blocker blocker(kChunkSize, kBlockSize, kNumInputChannels, kNumOutputChannels,
kBlockSize, kWindow, kShiftAmount, &callback);
kNumInputChannels,
kNumOutputChannels,
kWindow,
kShiftAmount,
&callback);
RunTest(&blocker, RunTest(&blocker, kChunkSize, kNumFrames, input_cb.channels(),
kChunkSize, input_chunk_cb.channels(), actual_output_cb.channels(),
kNumFrames, output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
input_cb.channels(),
input_chunk_cb.channels(),
actual_output_cb.channels(),
output_chunk_cb.channels(),
kNumInputChannels,
kNumOutputChannels);
ValidateSignalEquality(expected_output_cb.channels(), ValidateSignalEquality(expected_output_cb.channels(),
actual_output_cb.channels(), actual_output_cb.channels(), kNumOutputChannels,
kNumOutputChannels,
kNumFrames); kNumFrames);
} }
@ -286,14 +246,14 @@ TEST_F(BlockerTest, InitialDelaysAreMinimum) {
const size_t kNumInputChannels = 3; const size_t kNumInputChannels = 3;
const size_t kNumOutputChannels = 2; const size_t kNumOutputChannels = 2;
const size_t kNumFrames = 1280; const size_t kNumFrames = 1280;
const size_t kChunkSize[] = const size_t kChunkSize[] = {80, 80, 80, 80, 80, 80,
{80, 80, 80, 80, 80, 80, 160, 160, 160, 160, 160, 160}; 160, 160, 160, 160, 160, 160};
const size_t kBlockSize[] = const size_t kBlockSize[] = {64, 64, 64, 128, 128, 128,
{64, 64, 64, 128, 128, 128, 128, 128, 128, 256, 256, 256}; 128, 128, 128, 256, 256, 256};
const size_t kShiftAmount[] = const size_t kShiftAmount[] = {16, 32, 64, 32, 64, 128,
{16, 32, 64, 32, 64, 128, 32, 64, 128, 64, 128, 256}; 32, 64, 128, 64, 128, 256};
const size_t kInitialDelay[] = const size_t kInitialDelay[] = {48, 48, 48, 112, 112, 112,
{48, 48, 48, 112, 112, 112, 96, 96, 96, 224, 224, 224}; 96, 96, 96, 224, 224, 224};
float input[kNumInputChannels][kNumFrames]; float input[kNumInputChannels][kNumFrames];
for (size_t i = 0; i < kNumInputChannels; ++i) { for (size_t i = 0; i < kNumInputChannels; ++i) {
@ -317,27 +277,15 @@ TEST_F(BlockerTest, InitialDelaysAreMinimum) {
ChannelBuffer<float> input_chunk_cb(kChunkSize[i], kNumInputChannels); ChannelBuffer<float> input_chunk_cb(kChunkSize[i], kNumInputChannels);
ChannelBuffer<float> output_chunk_cb(kChunkSize[i], kNumOutputChannels); ChannelBuffer<float> output_chunk_cb(kChunkSize[i], kNumOutputChannels);
Blocker blocker(kChunkSize[i], Blocker blocker(kChunkSize[i], kBlockSize[i], kNumInputChannels,
kBlockSize[i], kNumOutputChannels, window.get(), kShiftAmount[i],
kNumInputChannels,
kNumOutputChannels,
window.get(),
kShiftAmount[i],
&callback); &callback);
RunTest(&blocker, RunTest(&blocker, kChunkSize[i], kNumFrames, input_cb.channels(),
kChunkSize[i], input_chunk_cb.channels(), output_cb.channels(),
kNumFrames, output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
input_cb.channels(),
input_chunk_cb.channels(),
output_cb.channels(),
output_chunk_cb.channels(),
kNumInputChannels,
kNumOutputChannels);
ValidateInitialDelay(output_cb.channels(), ValidateInitialDelay(output_cb.channels(), kNumOutputChannels, kNumFrames,
kNumOutputChannels,
kNumFrames,
kInitialDelay[i]); kInitialDelay[i]);
} }
} }

View File

@ -68,9 +68,7 @@ void IFChannelBuffer::RefreshI() const {
ibuf_.set_num_channels(fbuf_.num_channels()); ibuf_.set_num_channels(fbuf_.num_channels());
const float* const* float_channels = fbuf_.channels(); const float* const* float_channels = fbuf_.channels();
for (size_t i = 0; i < fbuf_.num_channels(); ++i) { for (size_t i = 0; i < fbuf_.num_channels(); ++i) {
FloatS16ToS16(float_channels[i], FloatS16ToS16(float_channels[i], ibuf_.num_frames(), int_channels[i]);
ibuf_.num_frames(),
int_channels[i]);
} }
ivalid_ = true; ivalid_ = true;
} }

View File

@ -40,9 +40,7 @@ namespace webrtc {
template <typename T> template <typename T>
class ChannelBuffer { class ChannelBuffer {
public: public:
ChannelBuffer(size_t num_frames, ChannelBuffer(size_t num_frames, size_t num_channels, size_t num_bands = 1)
size_t num_channels,
size_t num_bands = 1)
: data_(new T[num_frames * num_channels]()), : data_(new T[num_frames * num_channels]()),
channels_(new T*[num_channels * num_bands]), channels_(new T*[num_channels * num_bands]),
bands_(new T*[num_channels * num_bands]), bands_(new T*[num_channels * num_bands]),
@ -119,7 +117,7 @@ class ChannelBuffer {
size_t num_frames_per_band() const { return num_frames_per_band_; } size_t num_frames_per_band() const { return num_frames_per_band_; }
size_t num_channels() const { return num_channels_; } size_t num_channels() const { return num_channels_; }
size_t num_bands() const { return num_bands_; } size_t num_bands() const { return num_bands_; }
size_t size() const {return num_frames_ * num_allocated_channels_; } size_t size() const { return num_frames_ * num_allocated_channels_; }
void set_num_channels(size_t num_channels) { void set_num_channels(size_t num_channels) {
RTC_DCHECK_LE(num_channels, num_allocated_channels_); RTC_DCHECK_LE(num_channels, num_allocated_channels_);
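ChannelBuffer stores audio planar: channels() exposes one contiguous array of num_frames() samples per channel. A small usage sketch, assuming the usual common_audio include path and accessors (only the constructor and a few accessors are visible in this hunk):

#include <cstddef>

#include "common_audio/channel_buffer.h"  // Assumed include path.

// Writes a unit impulse into every channel of a planar float buffer.
void WriteImpulse(webrtc::ChannelBuffer<float>* buf) {
  for (size_t ch = 0; ch < buf->num_channels(); ++ch) {
    float* channel = buf->channels()[ch];  // Contiguous array for channel |ch|.
    for (size_t i = 0; i < buf->num_frames(); ++i)
      channel[i] = (i == 0) ? 1.f : 0.f;
  }
}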

View File

@ -16,7 +16,7 @@ extern "C" {
#endif #endif
// Refer to fft4g.c for documentation. // Refer to fft4g.c for documentation.
void WebRtc_rdft(size_t n, int isgn, float *a, size_t *ip, float *w); void WebRtc_rdft(size_t n, int isgn, float* a, size_t* ip, float* w);
#if defined(__cplusplus) #if defined(__cplusplus)
} }

View File

@ -20,8 +20,7 @@
namespace webrtc { namespace webrtc {
FIRFilterC::~FIRFilterC() { FIRFilterC::~FIRFilterC() {}
}
FIRFilterC::FIRFilterC(const float* coefficients, size_t coefficients_length) FIRFilterC::FIRFilterC(const float* coefficients, size_t coefficients_length)
: coefficients_length_(coefficients_length), : coefficients_length_(coefficients_length),
@ -52,11 +51,10 @@ void FIRFilterC::Filter(const float* in, size_t length, float* out) {
// Update current state. // Update current state.
if (length >= state_length_) { if (length >= state_length_) {
memcpy( memcpy(state_.get(), &in[length - state_length_],
state_.get(), &in[length - state_length_], state_length_ * sizeof(*in)); state_length_ * sizeof(*in));
} else { } else {
memmove(state_.get(), memmove(state_.get(), &state_[length],
&state_[length],
(state_length_ - length) * sizeof(state_[0])); (state_length_ - length) * sizeof(state_[0]));
memcpy(&state_[state_length_ - length], in, length * sizeof(*in)); memcpy(&state_[state_length_ - length], in, length * sizeof(*in));
} }
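The memcpy/memmove branches above keep the last coefficients_length - 1 input samples as filter state between calls. A self-contained sketch of a direct-form FIR with the same state bookkeeping (illustrative class, not WebRTC's FIRFilterC; assumes at least one coefficient):

#include <algorithm>
#include <cstddef>
#include <utility>
#include <vector>

class SimpleFir {
 public:
  explicit SimpleFir(std::vector<float> coefficients)
      : coefficients_(std::move(coefficients)),
        state_(coefficients_.size() - 1, 0.f) {}

  void Filter(const float* in, size_t length, float* out) {
    // Prepend the saved state so taps reaching before this chunk are defined.
    std::vector<float> padded(state_);
    padded.insert(padded.end(), in, in + length);
    for (size_t n = 0; n < length; ++n) {
      float acc = 0.f;
      for (size_t k = 0; k < coefficients_.size(); ++k)
        acc += coefficients_[k] * padded[n + state_.size() - k];
      out[n] = acc;
    }
    // Keep the last |state_.size()| input samples for the next call, mirroring
    // the memcpy/memmove logic in the hunk above.
    if (length >= state_.size()) {
      std::copy(in + length - state_.size(), in + length, state_.begin());
    } else {
      std::move(state_.begin() + length, state_.end(), state_.begin());
      std::copy(in, in + length, state_.end() - length);
    }
  }

 private:
  std::vector<float> coefficients_;
  std::vector<float> state_;
};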

View File

@ -20,8 +20,7 @@ namespace webrtc {
class FIRFilterC : public FIRFilter { class FIRFilterC : public FIRFilter {
public: public:
FIRFilterC(const float* coefficients, FIRFilterC(const float* coefficients, size_t coefficients_length);
size_t coefficients_length);
~FIRFilterC() override; ~FIRFilterC() override;
void Filter(const float* in, size_t length, float* out) override; void Filter(const float* in, size_t length, float* out) override;

View File

@ -1,4 +1,4 @@
/* /*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
* *
* Use of this source code is governed by a BSD-style license * Use of this source code is governed by a BSD-style license

Some files were not shown because too many files have changed in this diff.