Reformat the WebRTC code base

This change runs clang-format with Chromium's style guide across the whole code base.

The goals are several:
 * providing consistency and readability (that's what code guidelines are for)
 * preventing noise from presubmit checks and git cl format
 * building on the previous point: making it easier to fix format issues automatically
 * you name it

Please consider using git-hyper-blame to ignore this commit.

Bug: webrtc:9340
Change-Id: I694567c4cdf8cee2860958cfe82bfaf25848bb87
Reviewed-on: https://webrtc-review.googlesource.com/81185
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23660}
Author: Yves Gerey
Date: 2018-06-19 15:03:05 +02:00
Parent: b602123a5a
Commit: 665174fdbb
1569 changed files with 30495 additions and 30309 deletions
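
The hunks below are mechanical: clang-format under Chromium's style rewrites layout only. As the hunks show, that means an 80-column limit, one parameter per line once a declaration overflows, sorted includes, and short bodies joined or split depending on context. As a reading aid, here is a minimal before/after sketch of the most common transformation, modeled on the InsertDtmf declaration changed further down; the stand-in DtmfSender class and the before/after namespaces are illustrative only, not WebRTC code.

// Illustrative sketch: how Chromium-style clang-format rewraps a declaration
// that no longer fits in 80 columns (one parameter per line, aligned with
// the first parameter). Names are stand-ins modeled on the hunks below.
#include <string>

namespace before {
// Pre-commit layout: parameters packed onto as few lines as possible.
class DtmfSender {
 public:
  virtual ~DtmfSender() = default;
  virtual bool InsertDtmf(const std::string& tones, int duration,
                          int inter_tone_gap) = 0;
};
}  // namespace before

namespace after {
// Post-commit layout: once the declaration overflows the column limit,
// each parameter gets its own line.
class DtmfSender {
 public:
  virtual ~DtmfSender() = default;
  virtual bool InsertDtmf(const std::string& tones,
                          int duration,
                          int inter_tone_gap) = 0;
};
}  // namespace after

The same rules explain most of the remaining churn: declarations such as UpdateFrame, OnReceivedUplinkBandwidth, and FindConstraint pick up one-parameter-per-line wrapping; in headers, short accessors and enums that fit within 80 columns are collapsed onto a single line, while short out-of-line definitions in .cc files are expanded.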


@ -11,8 +11,8 @@
#ifndef API_ARRAY_VIEW_H_ #ifndef API_ARRAY_VIEW_H_
#define API_ARRAY_VIEW_H_ #define API_ARRAY_VIEW_H_
#include <array>
#include <algorithm> #include <algorithm>
#include <array>
#include <type_traits> #include <type_traits>
#include "rtc_base/checks.h" #include "rtc_base/checks.h"


@ -66,7 +66,8 @@ void AudioFrame::UpdateFrame(uint32_t timestamp,
} }
void AudioFrame::CopyFrom(const AudioFrame& src) { void AudioFrame::CopyFrom(const AudioFrame& src) {
if (this == &src) return; if (this == &src)
return;
timestamp_ = src.timestamp_; timestamp_ = src.timestamp_;
elapsed_time_ms_ = src.elapsed_time_ms_; elapsed_time_ms_ = src.elapsed_time_ms_;
@ -116,7 +117,9 @@ void AudioFrame::Mute() {
muted_ = true; muted_ = true;
} }
bool AudioFrame::muted() const { return muted_; } bool AudioFrame::muted() const {
return muted_;
}
// static // static
const int16_t* AudioFrame::empty_data() { const int16_t* AudioFrame::empty_data() {


@ -43,11 +43,7 @@ class AudioFrame {
kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t), kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t),
}; };
enum VADActivity { enum VADActivity { kVadActive = 0, kVadPassive = 1, kVadUnknown = 2 };
kVadActive = 0,
kVadPassive = 1,
kVadUnknown = 2
};
enum SpeechType { enum SpeechType {
kNormalSpeech = 0, kNormalSpeech = 0,
kPLC = 1, kPLC = 1,
@ -66,9 +62,12 @@ class AudioFrame {
// ResetWithoutMuting() to skip this wasteful zeroing. // ResetWithoutMuting() to skip this wasteful zeroing.
void ResetWithoutMuting(); void ResetWithoutMuting();
void UpdateFrame(uint32_t timestamp, const int16_t* data, void UpdateFrame(uint32_t timestamp,
size_t samples_per_channel, int sample_rate_hz, const int16_t* data,
SpeechType speech_type, VADActivity vad_activity, size_t samples_per_channel,
int sample_rate_hz,
SpeechType speech_type,
VADActivity vad_activity,
size_t num_channels = 1); size_t num_channels = 1);
void CopyFrom(const AudioFrame& src); void CopyFrom(const AudioFrame& src);


@ -87,9 +87,8 @@ TEST(AudioFrameTest, CopyFrom) {
AudioFrame frame2; AudioFrame frame2;
int16_t samples[kNumChannels * kSamplesPerChannel] = {17}; int16_t samples[kNumChannels * kSamplesPerChannel] = {17};
frame2.UpdateFrame(kTimestamp, samples, kSamplesPerChannel, frame2.UpdateFrame(kTimestamp, samples, kSamplesPerChannel, kSampleRateHz,
kSampleRateHz, AudioFrame::kPLC, AudioFrame::kVadActive, AudioFrame::kPLC, AudioFrame::kVadActive, kNumChannels);
kNumChannels);
frame1.CopyFrom(frame2); frame1.CopyFrom(frame2);
EXPECT_EQ(frame2.timestamp_, frame1.timestamp_); EXPECT_EQ(frame2.timestamp_, frame1.timestamp_);


@ -220,8 +220,7 @@ class AudioEncoder {
// Provides target audio bitrate and corresponding probing interval of // Provides target audio bitrate and corresponding probing interval of
// the bandwidth estimator to this encoder to allow it to adapt. // the bandwidth estimator to this encoder to allow it to adapt.
virtual void OnReceivedUplinkBandwidth( virtual void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps,
int target_audio_bitrate_bps,
rtc::Optional<int64_t> bwe_period_ms); rtc::Optional<int64_t> bwe_period_ms);
// Provides RTT to this encoder to allow it to adapt. // Provides RTT to this encoder to allow it to adapt.


@ -62,9 +62,7 @@ class Candidate {
} }
const rtc::SocketAddress& address() const { return address_; } const rtc::SocketAddress& address() const { return address_; }
void set_address(const rtc::SocketAddress & address) { void set_address(const rtc::SocketAddress& address) { address_ = address; }
address_ = address;
}
uint32_t priority() const { return priority_; } uint32_t priority() const { return priority_; }
void set_priority(const uint32_t priority) { priority_ = priority; } void set_priority(const uint32_t priority) { priority_ = priority; }
@ -127,24 +125,17 @@ class Candidate {
uint16_t network_id() const { return network_id_; } uint16_t network_id() const { return network_id_; }
void set_network_id(uint16_t network_id) { network_id_ = network_id; } void set_network_id(uint16_t network_id) { network_id_ = network_id; }
const std::string& foundation() const { const std::string& foundation() const { return foundation_; }
return foundation_;
}
void set_foundation(const std::string& foundation) { void set_foundation(const std::string& foundation) {
foundation_ = foundation; foundation_ = foundation;
} }
const rtc::SocketAddress & related_address() const { const rtc::SocketAddress& related_address() const { return related_address_; }
return related_address_; void set_related_address(const rtc::SocketAddress& related_address) {
}
void set_related_address(
const rtc::SocketAddress & related_address) {
related_address_ = related_address; related_address_ = related_address;
} }
const std::string& tcptype() const { return tcptype_; } const std::string& tcptype() const { return tcptype_; }
void set_tcptype(const std::string& tcptype) { void set_tcptype(const std::string& tcptype) { tcptype_ = tcptype; }
tcptype_ = tcptype;
}
// The name of the transport channel of this candidate. // The name of the transport channel of this candidate.
// TODO(phoglund): remove. // TODO(phoglund): remove.
@ -164,13 +155,9 @@ class Candidate {
// given one when looking for a matching candidate to remove. // given one when looking for a matching candidate to remove.
bool MatchesForRemoval(const Candidate& c) const; bool MatchesForRemoval(const Candidate& c) const;
std::string ToString() const { std::string ToString() const { return ToStringInternal(false); }
return ToStringInternal(false);
}
std::string ToSensitiveString() const { std::string ToSensitiveString() const { return ToStringInternal(true); }
return ToStringInternal(true);
}
uint32_t GetPriority(uint32_t type_preference, uint32_t GetPriority(uint32_t type_preference,
int network_adapter_preference, int network_adapter_preference,


@ -61,14 +61,10 @@ struct DataChannelInit {
// as binary or text. // as binary or text.
struct DataBuffer { struct DataBuffer {
DataBuffer(const rtc::CopyOnWriteBuffer& data, bool binary) DataBuffer(const rtc::CopyOnWriteBuffer& data, bool binary)
: data(data), : data(data), binary(binary) {}
binary(binary) {
}
// For convenience for unit tests. // For convenience for unit tests.
explicit DataBuffer(const std::string& text) explicit DataBuffer(const std::string& text)
: data(text.data(), text.length()), : data(text.data(), text.length()), binary(false) {}
binary(false) {
}
size_t size() const { return data.size(); } size_t size() const { return data.size(); }
rtc::CopyOnWriteBuffer data; rtc::CopyOnWriteBuffer data;


@ -67,7 +67,8 @@ class DtmfSenderInterface : public rtc::RefCountInterface {
// If InsertDtmf is called on the same object while an existing task for this // If InsertDtmf is called on the same object while an existing task for this
// object to generate DTMF is still running, the previous task is canceled. // object to generate DTMF is still running, the previous task is canceled.
// Returns true on success and false on failure. // Returns true on success and false on failure.
virtual bool InsertDtmf(const std::string& tones, int duration, virtual bool InsertDtmf(const std::string& tones,
int duration,
int inter_tone_gap) = 0; int inter_tone_gap) = 0;
// Returns the track given as argument to the constructor. Only exists for // Returns the track given as argument to the constructor. Only exists for


@ -28,8 +28,7 @@ class FakeMetricsObserver : public MetricsObserverInterface {
void IncrementEnumCounter(PeerConnectionEnumCounterType, void IncrementEnumCounter(PeerConnectionEnumCounterType,
int counter, int counter,
int counter_max) override; int counter_max) override;
void AddHistogramSample(PeerConnectionMetricsName type, void AddHistogramSample(PeerConnectionMetricsName type, int value) override;
int value) override;
// Accessors to be used by the tests. // Accessors to be used by the tests.
int GetEnumCounter(PeerConnectionEnumCounterType type, int counter) const; int GetEnumCounter(PeerConnectionEnumCounterType type, int counter) const;


@ -205,9 +205,7 @@ class CreateSessionDescriptionObserver : public rtc::RefCountInterface {
// is deprecated; in order to let clients remove the old version, it has a // is deprecated; in order to let clients remove the old version, it has a
// default implementation. If both versions are unimplemented, the // default implementation. If both versions are unimplemented, the
// result will be a runtime error (stack overflow). This is intentional. // result will be a runtime error (stack overflow). This is intentional.
virtual void OnFailure(RTCError error) { virtual void OnFailure(RTCError error) { OnFailure(error.message()); }
OnFailure(error.message());
}
virtual void OnFailure(const std::string& error) { virtual void OnFailure(const std::string& error) {
OnFailure(RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error))); OnFailure(RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error)));
} }


@ -28,7 +28,8 @@ namespace webrtc {
class JsepIceCandidate : public IceCandidateInterface { class JsepIceCandidate : public IceCandidateInterface {
public: public:
JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index); JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index);
JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index, JsepIceCandidate(const std::string& sdp_mid,
int sdp_mline_index,
const cricket::Candidate& candidate); const cricket::Candidate& candidate);
~JsepIceCandidate(); ~JsepIceCandidate();
// |err| may be null. // |err| may be null.
@ -39,9 +40,7 @@ class JsepIceCandidate : public IceCandidateInterface {
virtual std::string sdp_mid() const { return sdp_mid_; } virtual std::string sdp_mid() const { return sdp_mid_; }
virtual int sdp_mline_index() const { return sdp_mline_index_; } virtual int sdp_mline_index() const { return sdp_mline_index_; }
virtual const cricket::Candidate& candidate() const { virtual const cricket::Candidate& candidate() const { return candidate_; }
return candidate_;
}
virtual std::string server_url() const { return candidate_.url(); } virtual std::string server_url() const { return candidate_.url(); }
@ -64,9 +63,7 @@ class JsepCandidateCollection : public IceCandidateCollection {
JsepCandidateCollection(JsepCandidateCollection&& o) JsepCandidateCollection(JsepCandidateCollection&& o)
: candidates_(std::move(o.candidates_)) {} : candidates_(std::move(o.candidates_)) {}
~JsepCandidateCollection(); ~JsepCandidateCollection();
virtual size_t count() const { virtual size_t count() const { return candidates_.size(); }
return candidates_.size();
}
virtual bool HasCandidate(const IceCandidateInterface* candidate) const; virtual bool HasCandidate(const IceCandidateInterface* candidate) const;
// Adds and takes ownership of the JsepIceCandidate. // Adds and takes ownership of the JsepIceCandidate.
// TODO(deadbeef): Make this use an std::unique_ptr<>, so ownership logic is // TODO(deadbeef): Make this use an std::unique_ptr<>, so ownership logic is


@ -50,12 +50,8 @@ class JsepSessionDescription : public SessionDescriptionInterface {
virtual const cricket::SessionDescription* description() const { virtual const cricket::SessionDescription* description() const {
return description_.get(); return description_.get();
} }
virtual std::string session_id() const { virtual std::string session_id() const { return session_id_; }
return session_id_; virtual std::string session_version() const { return session_version_; }
}
virtual std::string session_version() const {
return session_version_;
}
virtual SdpType GetType() const { return type_; } virtual SdpType GetType() const { return type_; }
virtual std::string type() const { return SdpTypeToString(type_); } virtual std::string type() const { return SdpTypeToString(type_); }
// Allows changing the type. Used for testing. // Allows changing the type. Used for testing.


@ -89,8 +89,7 @@ const char MediaConstraintsInterface::kMaxFrameRate[] = "maxFrameRate";
const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate"; const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
// Audio constraints. // Audio constraints.
const char MediaConstraintsInterface::kEchoCancellation[] = const char MediaConstraintsInterface::kEchoCancellation[] = "echoCancellation";
"echoCancellation";
const char MediaConstraintsInterface::kGoogEchoCancellation[] = const char MediaConstraintsInterface::kGoogEchoCancellation[] =
"googEchoCancellation"; "googEchoCancellation";
const char MediaConstraintsInterface::kExtendedFilterEchoCancellation[] = const char MediaConstraintsInterface::kExtendedFilterEchoCancellation[] =
@ -107,8 +106,7 @@ const char MediaConstraintsInterface::kExperimentalNoiseSuppression[] =
"googNoiseSuppression2"; "googNoiseSuppression2";
const char MediaConstraintsInterface::kIntelligibilityEnhancer[] = const char MediaConstraintsInterface::kIntelligibilityEnhancer[] =
"intelligibilityEnhancer"; "intelligibilityEnhancer";
const char MediaConstraintsInterface::kHighpassFilter[] = const char MediaConstraintsInterface::kHighpassFilter[] = "googHighpassFilter";
"googHighpassFilter";
const char MediaConstraintsInterface::kTypingNoiseDetection[] = const char MediaConstraintsInterface::kTypingNoiseDetection[] =
"googTypingNoiseDetection"; "googTypingNoiseDetection";
const char MediaConstraintsInterface::kAudioMirroring[] = "googAudioMirroring"; const char MediaConstraintsInterface::kAudioMirroring[] = "googAudioMirroring";
@ -125,11 +123,9 @@ const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
"OfferToReceiveVideo"; "OfferToReceiveVideo";
const char MediaConstraintsInterface::kVoiceActivityDetection[] = const char MediaConstraintsInterface::kVoiceActivityDetection[] =
"VoiceActivityDetection"; "VoiceActivityDetection";
const char MediaConstraintsInterface::kIceRestart[] = const char MediaConstraintsInterface::kIceRestart[] = "IceRestart";
"IceRestart";
// Google specific constraint for BUNDLE enable/disable. // Google specific constraint for BUNDLE enable/disable.
const char MediaConstraintsInterface::kUseRtpMux[] = const char MediaConstraintsInterface::kUseRtpMux[] = "googUseRtpMUX";
"googUseRtpMUX";
// Below constraints should be used during PeerConnection construction. // Below constraints should be used during PeerConnection construction.
const char MediaConstraintsInterface::kEnableDtlsSrtp[] = const char MediaConstraintsInterface::kEnableDtlsSrtp[] =
@ -150,11 +146,11 @@ const char MediaConstraintsInterface::kCpuOveruseDetection[] =
"googCpuOveruseDetection"; "googCpuOveruseDetection";
const char MediaConstraintsInterface::kPayloadPadding[] = "googPayloadPadding"; const char MediaConstraintsInterface::kPayloadPadding[] = "googPayloadPadding";
// Set |value| to the value associated with the first appearance of |key|, or // Set |value| to the value associated with the first appearance of |key|, or
// return false if |key| is not found. // return false if |key| is not found.
bool MediaConstraintsInterface::Constraints::FindFirst( bool MediaConstraintsInterface::Constraints::FindFirst(
const std::string& key, std::string* value) const { const std::string& key,
std::string* value) const {
for (Constraints::const_iterator iter = begin(); iter != end(); ++iter) { for (Constraints::const_iterator iter = begin(); iter != end(); ++iter) {
if (iter->key == key) { if (iter->key == key) {
*value = iter->value; *value = iter->value;
@ -165,7 +161,8 @@ bool MediaConstraintsInterface::Constraints::FindFirst(
} }
bool FindConstraint(const MediaConstraintsInterface* constraints, bool FindConstraint(const MediaConstraintsInterface* constraints,
const std::string& key, bool* value, const std::string& key,
bool* value,
size_t* mandatory_constraints) { size_t* mandatory_constraints) {
return ::FindConstraint<bool>(constraints, key, value, mandatory_constraints); return ::FindConstraint<bool>(constraints, key, value, mandatory_constraints);
} }
@ -192,9 +189,9 @@ void CopyConstraintsIntoRtcConfiguration(
} }
FindConstraint(constraints, MediaConstraintsInterface::kEnableDscp, FindConstraint(constraints, MediaConstraintsInterface::kEnableDscp,
&configuration->media_config.enable_dscp, nullptr); &configuration->media_config.enable_dscp, nullptr);
FindConstraint( FindConstraint(constraints, MediaConstraintsInterface::kCpuOveruseDetection,
constraints, MediaConstraintsInterface::kCpuOveruseDetection, &configuration->media_config.video.enable_cpu_adaptation,
&configuration->media_config.video.enable_cpu_adaptation, nullptr); nullptr);
FindConstraint(constraints, MediaConstraintsInterface::kEnableRtpDataChannels, FindConstraint(constraints, MediaConstraintsInterface::kEnableRtpDataChannels,
&configuration->enable_rtp_data_channel, nullptr); &configuration->enable_rtp_data_channel, nullptr);
// Find Suspend Below Min Bitrate constraint. // Find Suspend Below Min Bitrate constraint.


@ -39,8 +39,7 @@ class MediaConstraintsInterface {
struct Constraint { struct Constraint {
Constraint() {} Constraint() {}
Constraint(const std::string& key, const std::string value) Constraint(const std::string& key, const std::string value)
: key(key), value(value) { : key(key), value(value) {}
}
std::string key; std::string key;
std::string value; std::string value;
}; };
@ -126,7 +125,8 @@ class MediaConstraintsInterface {
}; };
bool FindConstraint(const MediaConstraintsInterface* constraints, bool FindConstraint(const MediaConstraintsInterface* constraints,
const std::string& key, bool* value, const std::string& key,
bool* value,
size_t* mandatory_constraints); size_t* mandatory_constraints);
bool FindConstraint(const MediaConstraintsInterface* constraints, bool FindConstraint(const MediaConstraintsInterface* constraints,


@ -60,12 +60,7 @@ class NotifierInterface {
class MediaSourceInterface : public rtc::RefCountInterface, class MediaSourceInterface : public rtc::RefCountInterface,
public NotifierInterface { public NotifierInterface {
public: public:
enum SourceState { enum SourceState { kInitializing, kLive, kEnded, kMuted };
kInitializing,
kLive,
kEnded,
kMuted
};
virtual SourceState state() const = 0; virtual SourceState state() const = 0;
@ -116,8 +111,7 @@ class MediaStreamTrackInterface : public rtc::RefCountInterface,
// on the worker thread via a VideoTrack. A custom implementation of a source // on the worker thread via a VideoTrack. A custom implementation of a source
// can inherit AdaptedVideoTrackSource instead of directly implementing this // can inherit AdaptedVideoTrackSource instead of directly implementing this
// interface. // interface.
class VideoTrackSourceInterface class VideoTrackSourceInterface : public MediaSourceInterface,
: public MediaSourceInterface,
public rtc::VideoSourceInterface<VideoFrame> { public rtc::VideoSourceInterface<VideoFrame> {
public: public:
struct Stats { struct Stats {
@ -156,8 +150,7 @@ class VideoTrackSourceInterface
// PeerConnectionFactory::CreateVideoTrack can be used for creating a VideoTrack // PeerConnectionFactory::CreateVideoTrack can be used for creating a VideoTrack
// that ensures thread safety and that all methods are called on the right // that ensures thread safety and that all methods are called on the right
// thread. // thread.
class VideoTrackInterface class VideoTrackInterface : public MediaStreamTrackInterface,
: public MediaStreamTrackInterface,
public rtc::VideoSourceInterface<VideoFrame> { public rtc::VideoSourceInterface<VideoFrame> {
public: public:
// Video track content hint, used to override the source is_screencast // Video track content hint, used to override the source is_screencast
@ -297,10 +290,8 @@ class AudioTrackInterface : public MediaStreamTrackInterface {
~AudioTrackInterface() override = default; ~AudioTrackInterface() override = default;
}; };
typedef std::vector<rtc::scoped_refptr<AudioTrackInterface> > typedef std::vector<rtc::scoped_refptr<AudioTrackInterface> > AudioTrackVector;
AudioTrackVector; typedef std::vector<rtc::scoped_refptr<VideoTrackInterface> > VideoTrackVector;
typedef std::vector<rtc::scoped_refptr<VideoTrackInterface> >
VideoTrackVector;
// C++ version of https://www.w3.org/TR/mediacapture-streams/#mediastream. // C++ version of https://www.w3.org/TR/mediacapture-streams/#mediastream.
// //
@ -317,10 +308,10 @@ class MediaStreamInterface : public rtc::RefCountInterface,
virtual AudioTrackVector GetAudioTracks() = 0; virtual AudioTrackVector GetAudioTracks() = 0;
virtual VideoTrackVector GetVideoTracks() = 0; virtual VideoTrackVector GetVideoTracks() = 0;
virtual rtc::scoped_refptr<AudioTrackInterface> virtual rtc::scoped_refptr<AudioTrackInterface> FindAudioTrack(
FindAudioTrack(const std::string& track_id) = 0; const std::string& track_id) = 0;
virtual rtc::scoped_refptr<VideoTrackInterface> virtual rtc::scoped_refptr<VideoTrackInterface> FindVideoTrack(
FindVideoTrack(const std::string& track_id) = 0; const std::string& track_id) = 0;
virtual bool AddTrack(AudioTrackInterface* track) = 0; virtual bool AddTrack(AudioTrackInterface* track) = 0;
virtual bool AddTrack(VideoTrackInterface* track) = 0; virtual bool AddTrack(VideoTrackInterface* track) = 0;


@ -15,11 +15,7 @@
namespace cricket { namespace cricket {
enum MediaType { enum MediaType { MEDIA_TYPE_AUDIO, MEDIA_TYPE_VIDEO, MEDIA_TYPE_DATA };
MEDIA_TYPE_AUDIO,
MEDIA_TYPE_VIDEO,
MEDIA_TYPE_DATA
};
std::string MediaTypeToString(MediaType type); std::string MediaTypeToString(MediaType type);
// Aborts on invalid string. Only expected to be used on strings that are // Aborts on invalid string. Only expected to be used on strings that are


@ -23,8 +23,7 @@ namespace webrtc {
template <class T> template <class T>
class Notifier : public T { class Notifier : public T {
public: public:
Notifier() { Notifier() {}
}
virtual void RegisterObserver(ObserverInterface* observer) { virtual void RegisterObserver(ObserverInterface* observer) {
RTC_DCHECK(observer != nullptr); RTC_DCHECK(observer != nullptr);


@ -20,4 +20,4 @@ TEST_F(SessionDescriptionTest, CreateSessionDescription) {
EXPECT_EQ(-1, s.session_id()); EXPECT_EQ(-1, s.session_id());
EXPECT_EQ("0", s.session_version()); EXPECT_EQ("0", s.session_version());
} }
} } // namespace webrtc


@ -11,9 +11,9 @@
#ifndef API_ORTC_SRTPTRANSPORTINTERFACE_H_ #ifndef API_ORTC_SRTPTRANSPORTINTERFACE_H_
#define API_ORTC_SRTPTRANSPORTINTERFACE_H_ #define API_ORTC_SRTPTRANSPORTINTERFACE_H_
#include "api/cryptoparams.h"
#include "api/ortc/rtptransportinterface.h" #include "api/ortc/rtptransportinterface.h"
#include "api/rtcerror.h" #include "api/rtcerror.h"
#include "api/cryptoparams.h"
namespace webrtc { namespace webrtc {


@ -49,7 +49,8 @@ BEGIN_SIGNALING_PROXY_MAP(PeerConnectionFactory)
const PeerConnectionInterface::RTCConfiguration&, const PeerConnectionInterface::RTCConfiguration&,
PeerConnectionDependencies); PeerConnectionDependencies);
PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>, PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
CreateLocalMediaStream, const std::string&) CreateLocalMediaStream,
const std::string&)
PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>, PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>,
CreateAudioSource, CreateAudioSource,
const cricket::AudioOptions&) const cricket::AudioOptions&)
@ -65,7 +66,9 @@ BEGIN_SIGNALING_PROXY_MAP(PeerConnectionFactory)
const std::string&, const std::string&,
VideoTrackSourceInterface*) VideoTrackSourceInterface*)
PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>, PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
CreateAudioTrack, const std::string&, AudioSourceInterface*) CreateAudioTrack,
const std::string&,
AudioSourceInterface*)
PROXY_METHOD2(bool, StartAecDump, rtc::PlatformFile, int64_t) PROXY_METHOD2(bool, StartAecDump, rtc::PlatformFile, int64_t)
PROXY_METHOD0(void, StopAecDump) PROXY_METHOD0(void, StopAecDump)
END_PROXY_MAP() END_PROXY_MAP()


@ -116,13 +116,13 @@
namespace rtc { namespace rtc {
class SSLIdentity; class SSLIdentity;
class Thread; class Thread;
} } // namespace rtc
namespace cricket { namespace cricket {
class MediaEngineInterface; class MediaEngineInterface;
class WebRtcVideoDecoderFactory; class WebRtcVideoDecoderFactory;
class WebRtcVideoEncoderFactory; class WebRtcVideoEncoderFactory;
} } // namespace cricket
namespace webrtc { namespace webrtc {
class AudioDeviceModule; class AudioDeviceModule;
@ -139,10 +139,8 @@ class StreamCollectionInterface : public rtc::RefCountInterface {
virtual size_t count() = 0; virtual size_t count() = 0;
virtual MediaStreamInterface* at(size_t index) = 0; virtual MediaStreamInterface* at(size_t index) = 0;
virtual MediaStreamInterface* find(const std::string& label) = 0; virtual MediaStreamInterface* find(const std::string& label) = 0;
virtual MediaStreamTrackInterface* FindAudioTrack( virtual MediaStreamTrackInterface* FindAudioTrack(const std::string& id) = 0;
const std::string& id) = 0; virtual MediaStreamTrackInterface* FindVideoTrack(const std::string& id) = 0;
virtual MediaStreamTrackInterface* FindVideoTrack(
const std::string& id) = 0;
protected: protected:
// Dtor protected as objects shouldn't be deleted via this interface. // Dtor protected as objects shouldn't be deleted via this interface.
@ -262,10 +260,7 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
kCandidateNetworkPolicyLowCost kCandidateNetworkPolicyLowCost
}; };
enum ContinualGatheringPolicy { enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY };
GATHER_ONCE,
GATHER_CONTINUALLY
};
enum class RTCConfigurationType { enum class RTCConfigurationType {
// A configuration that is safer to use, despite not having the best // A configuration that is safer to use, despite not having the best
@ -634,14 +629,12 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
// Accessor methods to active local streams. // Accessor methods to active local streams.
// This method is not supported with kUnifiedPlan semantics. Please use // This method is not supported with kUnifiedPlan semantics. Please use
// GetSenders() instead. // GetSenders() instead.
virtual rtc::scoped_refptr<StreamCollectionInterface> virtual rtc::scoped_refptr<StreamCollectionInterface> local_streams() = 0;
local_streams() = 0;
// Accessor methods to remote streams. // Accessor methods to remote streams.
// This method is not supported with kUnifiedPlan semantics. Please use // This method is not supported with kUnifiedPlan semantics. Please use
// GetReceivers() instead. // GetReceivers() instead.
virtual rtc::scoped_refptr<StreamCollectionInterface> virtual rtc::scoped_refptr<StreamCollectionInterface> remote_streams() = 0;
remote_streams() = 0;
// Add a new MediaStream to be sent on this PeerConnection. // Add a new MediaStream to be sent on this PeerConnection.
// Note that a SessionDescription negotiation is needed before the // Note that a SessionDescription negotiation is needed before the
@ -1374,8 +1367,8 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
VideoTrackSourceInterface* source) = 0; VideoTrackSourceInterface* source) = 0;
// Creates an new AudioTrack. At the moment |source| can be null. // Creates an new AudioTrack. At the moment |source| can be null.
virtual rtc::scoped_refptr<AudioTrackInterface> virtual rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
CreateAudioTrack(const std::string& label, const std::string& label,
AudioSourceInterface* source) = 0; AudioSourceInterface* source) = 0;
// Starts AEC dump using existing file. Takes ownership of |file| and passes // Starts AEC dump using existing file. Takes ownership of |file| and passes


@ -65,7 +65,9 @@ template <typename R>
class ReturnType { class ReturnType {
public: public:
template <typename C, typename M> template <typename C, typename M>
void Invoke(C* c, M m) { r_ = (c->*m)(); } void Invoke(C* c, M m) {
r_ = (c->*m)();
}
template <typename C, typename M, typename T1> template <typename C, typename M, typename T1>
void Invoke(C* c, M m, T1 a1) { void Invoke(C* c, M m, T1 a1) {
r_ = (c->*m)(std::move(a1)); r_ = (c->*m)(std::move(a1));
@ -78,13 +80,22 @@ class ReturnType {
void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) {
r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3)); r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3));
} }
template<typename C, typename M, typename T1, typename T2, typename T3, template <typename C,
typename M,
typename T1,
typename T2,
typename T3,
typename T4> typename T4>
void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4) { void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4) {
r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4)); r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4));
} }
template<typename C, typename M, typename T1, typename T2, typename T3, template <typename C,
typename T4, typename T5> typename M,
typename T1,
typename T2,
typename T3,
typename T4,
typename T5>
void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) { void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) {
r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4), r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4),
std::move(a5)); std::move(a5));
@ -100,7 +111,9 @@ template <>
class ReturnType<void> { class ReturnType<void> {
public: public:
template <typename C, typename M> template <typename C, typename M>
void Invoke(C* c, M m) { (c->*m)(); } void Invoke(C* c, M m) {
(c->*m)();
}
template <typename C, typename M, typename T1> template <typename C, typename M, typename T1>
void Invoke(C* c, M m, T1 a1) { void Invoke(C* c, M m, T1 a1) {
(c->*m)(std::move(a1)); (c->*m)(std::move(a1));
@ -119,8 +132,7 @@ class ReturnType<void> {
namespace internal { namespace internal {
class SynchronousMethodCall class SynchronousMethodCall : public rtc::MessageData,
: public rtc::MessageData,
public rtc::MessageHandler { public rtc::MessageHandler {
public: public:
explicit SynchronousMethodCall(rtc::MessageHandler* proxy); explicit SynchronousMethodCall(rtc::MessageHandler* proxy);
@ -138,8 +150,7 @@ class SynchronousMethodCall
} // namespace internal } // namespace internal
template <typename C, typename R> template <typename C, typename R>
class MethodCall0 : public rtc::Message, class MethodCall0 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(); typedef R (C::*Method)();
MethodCall0(C* c, Method m) : c_(c), m_(m) {} MethodCall0(C* c, Method m) : c_(c), m_(m) {}
@ -158,8 +169,7 @@ class MethodCall0 : public rtc::Message,
}; };
template <typename C, typename R> template <typename C, typename R>
class ConstMethodCall0 : public rtc::Message, class ConstMethodCall0 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)() const; typedef R (C::*Method)() const;
ConstMethodCall0(C* c, Method m) : c_(c), m_(m) {} ConstMethodCall0(C* c, Method m) : c_(c), m_(m) {}
@ -178,8 +188,7 @@ class ConstMethodCall0 : public rtc::Message,
}; };
template <typename C, typename R, typename T1> template <typename C, typename R, typename T1>
class MethodCall1 : public rtc::Message, class MethodCall1 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1); typedef R (C::*Method)(T1 a1);
MethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {} MethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {}
@ -199,8 +208,7 @@ class MethodCall1 : public rtc::Message,
}; };
template <typename C, typename R, typename T1> template <typename C, typename R, typename T1>
class ConstMethodCall1 : public rtc::Message, class ConstMethodCall1 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1) const; typedef R (C::*Method)(T1 a1) const;
ConstMethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {} ConstMethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {}
@ -220,8 +228,7 @@ class ConstMethodCall1 : public rtc::Message,
}; };
template <typename C, typename R, typename T1, typename T2> template <typename C, typename R, typename T1, typename T2>
class MethodCall2 : public rtc::Message, class MethodCall2 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2); typedef R (C::*Method)(T1 a1, T2 a2);
MethodCall2(C* c, Method m, T1 a1, T2 a2) MethodCall2(C* c, Method m, T1 a1, T2 a2)
@ -245,8 +252,7 @@ class MethodCall2 : public rtc::Message,
}; };
template <typename C, typename R, typename T1, typename T2, typename T3> template <typename C, typename R, typename T1, typename T2, typename T3>
class MethodCall3 : public rtc::Message, class MethodCall3 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2, T3 a3); typedef R (C::*Method)(T1 a1, T2 a2, T3 a3);
MethodCall3(C* c, Method m, T1 a1, T2 a2, T3 a3) MethodCall3(C* c, Method m, T1 a1, T2 a2, T3 a3)
@ -274,10 +280,13 @@ class MethodCall3 : public rtc::Message,
T3 a3_; T3 a3_;
}; };
template <typename C, typename R, typename T1, typename T2, typename T3, template <typename C,
typename R,
typename T1,
typename T2,
typename T3,
typename T4> typename T4>
class MethodCall4 : public rtc::Message, class MethodCall4 : public rtc::Message, public rtc::MessageHandler {
public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4); typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4);
MethodCall4(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4) MethodCall4(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4)
@ -308,10 +317,14 @@ class MethodCall4 : public rtc::Message,
T4 a4_; T4 a4_;
}; };
template <typename C, typename R, typename T1, typename T2, typename T3, template <typename C,
typename T4, typename T5> typename R,
class MethodCall5 : public rtc::Message, typename T1,
public rtc::MessageHandler { typename T2,
typename T3,
typename T4,
typename T5>
class MethodCall5 : public rtc::Message, public rtc::MessageHandler {
public: public:
typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4, T5 a5); typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4, T5 a5);
MethodCall5(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) MethodCall5(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5)
@ -344,7 +357,6 @@ class MethodCall5 : public rtc::Message,
T5 a5_; T5 a5_;
}; };
// Helper macros to reduce code duplication. // Helper macros to reduce code duplication.
#define PROXY_MAP_BOILERPLATE(c) \ #define PROXY_MAP_BOILERPLATE(c) \
template <class INTERNAL_CLASS> \ template <class INTERNAL_CLASS> \
@ -359,8 +371,12 @@ class MethodCall5 : public rtc::Message,
const INTERNAL_CLASS* internal() const { return c_; } \ const INTERNAL_CLASS* internal() const { return c_; } \
INTERNAL_CLASS* internal() { return c_; } INTERNAL_CLASS* internal() { return c_; }
// clang-format off
// clang-format would put the semicolon alone,
// leading to a presubmit error (cpplint.py)
#define END_PROXY_MAP() \ #define END_PROXY_MAP() \
}; };
// clang-format on
#define SIGNALING_PROXY_MAP_BOILERPLATE(c) \ #define SIGNALING_PROXY_MAP_BOILERPLATE(c) \
protected: \ protected: \


@ -22,9 +22,9 @@
#include "api/video/video_rotation.h" #include "api/video/video_rotation.h"
#include "api/video/video_timing.h" #include "api/video/video_timing.h"
#include "common_types.h" // NOLINT(build/include)
#include "rtc_base/checks.h" #include "rtc_base/checks.h"
#include "rtc_base/deprecation.h" #include "rtc_base/deprecation.h"
#include "common_types.h" // NOLINT(build/include)
#include "typedefs.h" // NOLINT(build/include) #include "typedefs.h" // NOLINT(build/include)
namespace webrtc { namespace webrtc {


@ -90,8 +90,7 @@ class RTCStats {
// shall be reserved in the vector (so that subclasses can allocate a vector // shall be reserved in the vector (so that subclasses can allocate a vector
// with room for both parent and child members without it having to resize). // with room for both parent and child members without it having to resize).
virtual std::vector<const RTCStatsMemberInterface*> virtual std::vector<const RTCStatsMemberInterface*>
MembersOfThisObjectAndAncestors( MembersOfThisObjectAndAncestors(size_t additional_capacity) const;
size_t additional_capacity) const;
std::string const id_; std::string const id_;
int64_t timestamp_us_; int64_t timestamp_us_;
@ -147,8 +146,8 @@ class RTCStats {
\ \
protected: \ protected: \
std::vector<const webrtc::RTCStatsMemberInterface*> \ std::vector<const webrtc::RTCStatsMemberInterface*> \
MembersOfThisObjectAndAncestors( \ MembersOfThisObjectAndAncestors(size_t local_var_additional_capacity) \
size_t local_var_additional_capacity) const override; \ const override; \
\ \
public: public:
@ -159,20 +158,17 @@ class RTCStats {
return std::unique_ptr<webrtc::RTCStats>(new this_class(*this)); \ return std::unique_ptr<webrtc::RTCStats>(new this_class(*this)); \
} \ } \
\ \
const char* this_class::type() const { \ const char* this_class::type() const { return this_class::kType; } \
return this_class::kType; \
} \
\ \
std::vector<const webrtc::RTCStatsMemberInterface*> \ std::vector<const webrtc::RTCStatsMemberInterface*> \
this_class::MembersOfThisObjectAndAncestors( \ this_class::MembersOfThisObjectAndAncestors( \
size_t local_var_additional_capacity) const { \ size_t local_var_additional_capacity) const { \
const webrtc::RTCStatsMemberInterface* local_var_members[] = { \ const webrtc::RTCStatsMemberInterface* local_var_members[] = { \
__VA_ARGS__ \ __VA_ARGS__}; \
}; \
size_t local_var_members_count = \ size_t local_var_members_count = \
sizeof(local_var_members) / sizeof(local_var_members[0]); \ sizeof(local_var_members) / sizeof(local_var_members[0]); \
std::vector<const webrtc::RTCStatsMemberInterface*> local_var_members_vec =\ std::vector<const webrtc::RTCStatsMemberInterface*> \
parent_class::MembersOfThisObjectAndAncestors( \ local_var_members_vec = parent_class::MembersOfThisObjectAndAncestors( \
local_var_members_count + local_var_additional_capacity); \ local_var_members_count + local_var_additional_capacity); \
RTC_DCHECK_GE( \ RTC_DCHECK_GE( \
local_var_members_vec.capacity() - local_var_members_vec.size(), \ local_var_members_vec.capacity() - local_var_members_vec.size(), \
@ -253,14 +249,11 @@ class RTCStatsMember : public RTCStatsMemberInterface {
static const Type kType; static const Type kType;
explicit RTCStatsMember(const char* name) explicit RTCStatsMember(const char* name)
: RTCStatsMemberInterface(name, false), : RTCStatsMemberInterface(name, false), value_() {}
value_() {}
RTCStatsMember(const char* name, const T& value) RTCStatsMember(const char* name, const T& value)
: RTCStatsMemberInterface(name, true), : RTCStatsMemberInterface(name, true), value_(value) {}
value_(value) {}
RTCStatsMember(const char* name, T&& value) RTCStatsMember(const char* name, T&& value)
: RTCStatsMemberInterface(name, true), : RTCStatsMemberInterface(name, true), value_(std::move(value)) {}
value_(std::move(value)) {}
explicit RTCStatsMember(const RTCStatsMember<T>& other) explicit RTCStatsMember(const RTCStatsMember<T>& other)
: RTCStatsMemberInterface(other.name_, other.is_defined_), : RTCStatsMemberInterface(other.name_, other.is_defined_),
value_(other.value_) {} value_(other.value_) {}


@ -209,8 +209,9 @@ class RTCIceCandidateStats : public RTCStats {
RTCStatsMember<bool> deleted; // = false RTCStatsMember<bool> deleted; // = false
protected: protected:
RTCIceCandidateStats( RTCIceCandidateStats(const std::string& id,
const std::string& id, int64_t timestamp_us, bool is_remote); int64_t timestamp_us,
bool is_remote);
RTCIceCandidateStats(std::string&& id, int64_t timestamp_us, bool is_remote); RTCIceCandidateStats(std::string&& id, int64_t timestamp_us, bool is_remote);
}; };
@ -258,9 +259,11 @@ class RTCMediaStreamTrackStats final : public RTCStats {
public: public:
WEBRTC_RTCSTATS_DECL(); WEBRTC_RTCSTATS_DECL();
RTCMediaStreamTrackStats(const std::string& id, int64_t timestamp_us, RTCMediaStreamTrackStats(const std::string& id,
int64_t timestamp_us,
const char* kind); const char* kind);
RTCMediaStreamTrackStats(std::string&& id, int64_t timestamp_us, RTCMediaStreamTrackStats(std::string&& id,
int64_t timestamp_us,
const char* kind); const char* kind);
RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other); RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other);
~RTCMediaStreamTrackStats() override; ~RTCMediaStreamTrackStats() override;


@ -98,8 +98,7 @@ class TypedIntId : public StatsReport::IdBase {
} }
std::string ToString() const override { std::string ToString() const override {
return std::string(InternalTypeToString(type_)) + return std::string(InternalTypeToString(type_)) + kSeparator +
kSeparator +
rtc::ToString<int>(id_); rtc::ToString<int>(id_);
} }
@ -109,7 +108,8 @@ class TypedIntId : public StatsReport::IdBase {
class IdWithDirection : public TypedId { class IdWithDirection : public TypedId {
public: public:
IdWithDirection(StatsReport::StatsType type, const std::string& id, IdWithDirection(StatsReport::StatsType type,
const std::string& id,
StatsReport::Direction direction) StatsReport::Direction direction)
: TypedId(type, id), direction_(direction) {} : TypedId(type, id), direction_(direction) {}
@ -132,39 +132,34 @@ class IdWithDirection : public TypedId {
class CandidateId : public TypedId { class CandidateId : public TypedId {
public: public:
CandidateId(bool local, const std::string& id) CandidateId(bool local, const std::string& id)
: TypedId(local ? : TypedId(local ? StatsReport::kStatsReportTypeIceLocalCandidate
StatsReport::kStatsReportTypeIceLocalCandidate : : StatsReport::kStatsReportTypeIceRemoteCandidate,
StatsReport::kStatsReportTypeIceRemoteCandidate, id) {}
id) {
}
std::string ToString() const override { std::string ToString() const override { return "Cand-" + id_; }
return "Cand-" + id_;
}
}; };
class ComponentId : public StatsReport::IdBase { class ComponentId : public StatsReport::IdBase {
public: public:
ComponentId(const std::string& content_name, int component) ComponentId(const std::string& content_name, int component)
: ComponentId(StatsReport::kStatsReportTypeComponent, content_name, : ComponentId(StatsReport::kStatsReportTypeComponent,
content_name,
component) {} component) {}
bool Equals(const IdBase& other) const override { bool Equals(const IdBase& other) const override {
return IdBase::Equals(other) && return IdBase::Equals(other) &&
static_cast<const ComponentId&>(other).component_ == component_ && static_cast<const ComponentId&>(other).component_ == component_ &&
static_cast<const ComponentId&>(other).content_name_ == content_name_; static_cast<const ComponentId&>(other).content_name_ ==
content_name_;
} }
std::string ToString() const override { std::string ToString() const override { return ToString("Channel-"); }
return ToString("Channel-");
}
protected: protected:
ComponentId(StatsReport::StatsType type, const std::string& content_name, ComponentId(StatsReport::StatsType type,
const std::string& content_name,
int component) int component)
: IdBase(type), : IdBase(type), content_name_(content_name), component_(component) {}
content_name_(content_name),
component_(component) {}
std::string ToString(const char* prefix) const { std::string ToString(const char* prefix) const {
std::string ret(prefix); std::string ret(prefix);
@ -182,7 +177,8 @@ class ComponentId : public StatsReport::IdBase {
class CandidatePairId : public ComponentId { class CandidatePairId : public ComponentId {
public: public:
CandidatePairId(const std::string& content_name, int component, int index) CandidatePairId(const std::string& content_name, int component, int index)
: ComponentId(StatsReport::kStatsReportTypeCandidatePair, content_name, : ComponentId(StatsReport::kStatsReportTypeCandidatePair,
content_name,
component), component),
index_(index) {} index_(index) {}
@ -207,7 +203,9 @@ class CandidatePairId : public ComponentId {
StatsReport::IdBase::IdBase(StatsType type) : type_(type) {} StatsReport::IdBase::IdBase(StatsType type) : type_(type) {}
StatsReport::IdBase::~IdBase() {} StatsReport::IdBase::~IdBase() {}
StatsReport::StatsType StatsReport::IdBase::type() const { return type_; } StatsReport::StatsType StatsReport::IdBase::type() const {
return type_;
}
bool StatsReport::IdBase::Equals(const IdBase& other) const { bool StatsReport::IdBase::Equals(const IdBase& other) const {
return other.type_ == type_; return other.type_ == type_;
@ -316,8 +314,8 @@ bool StatsReport::Value::operator==(const char* value) const {
} }
bool StatsReport::Value::operator==(int64_t value) const { bool StatsReport::Value::operator==(int64_t value) const {
return type_ == kInt ? value_.int_ == static_cast<int>(value) : return type_ == kInt ? value_.int_ == static_cast<int>(value)
(type_ == kInt64 ? value_.int64_ == value : false); : (type_ == kInt64 ? value_.int64_ == value : false);
} }
bool StatsReport::Value::operator==(bool value) const { bool StatsReport::Value::operator==(bool value) const {
@ -699,7 +697,9 @@ StatsReport::Id StatsReport::NewTypedIntId(StatsType type, int id) {
// static // static
StatsReport::Id StatsReport::NewIdWithDirection( StatsReport::Id StatsReport::NewIdWithDirection(
StatsType type, const std::string& id, StatsReport::Direction direction) { StatsType type,
const std::string& id,
StatsReport::Direction direction) {
return Id(new RefCountedObject<IdWithDirection>(type, id, direction)); return Id(new RefCountedObject<IdWithDirection>(type, id, direction));
} }
@ -709,16 +709,17 @@ StatsReport::Id StatsReport::NewCandidateId(bool local, const std::string& id) {
} }
// static // static
StatsReport::Id StatsReport::NewComponentId( StatsReport::Id StatsReport::NewComponentId(const std::string& content_name,
const std::string& content_name, int component) { int component) {
return Id(new RefCountedObject<ComponentId>(content_name, component)); return Id(new RefCountedObject<ComponentId>(content_name, component));
} }
// static // static
StatsReport::Id StatsReport::NewCandidatePairId( StatsReport::Id StatsReport::NewCandidatePairId(const std::string& content_name,
const std::string& content_name, int component, int index) { int component,
return Id(new RefCountedObject<CandidatePairId>( int index) {
content_name, component, index)); return Id(
new RefCountedObject<CandidatePairId>(content_name, component, index));
} }
const char* StatsReport::TypeToString() const { const char* StatsReport::TypeToString() const {
@ -763,8 +764,7 @@ void StatsReport::AddBoolean(StatsReport::StatsValueName name, bool value) {
values_[name] = ValuePtr(new Value(name, value)); values_[name] = ValuePtr(new Value(name, value));
} }
void StatsReport::AddId(StatsReport::StatsValueName name, void StatsReport::AddId(StatsReport::StatsValueName name, const Id& value) {
const Id& value) {
const Value* found = FindValue(name); const Value* found = FindValue(name);
if (!found || !(*found == value)) if (!found || !(*found == value))
values_[name] = ValuePtr(new Value(name, value)); values_[name] = ValuePtr(new Value(name, value));
@ -775,8 +775,7 @@ const StatsReport::Value* StatsReport::FindValue(StatsValueName name) const {
return it == values_.end() ? nullptr : it->second.get(); return it == values_.end() ? nullptr : it->second.get();
} }
StatsCollection::StatsCollection() { StatsCollection::StatsCollection() {}
}
StatsCollection::~StatsCollection() { StatsCollection::~StatsCollection() {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
@ -816,7 +815,8 @@ StatsReport* StatsCollection::FindOrAddNew(const StatsReport::Id& id) {
StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) { StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(id.get()); RTC_DCHECK(id.get());
Container::iterator it = std::find_if(list_.begin(), list_.end(), Container::iterator it = std::find_if(
list_.begin(), list_.end(),
[&id](const StatsReport* r) -> bool { return r->id()->Equals(id); }); [&id](const StatsReport* r) -> bool { return r->id()->Equals(id); });
if (it != end()) { if (it != end()) {
StatsReport* report = new StatsReport((*it)->id()); StatsReport* report = new StatsReport((*it)->id());
@ -831,7 +831,8 @@ StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
// will be returned. // will be returned.
StatsReport* StatsCollection::Find(const StatsReport::Id& id) { StatsReport* StatsCollection::Find(const StatsReport::Id& id) {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
Container::iterator it = std::find_if(list_.begin(), list_.end(), Container::iterator it = std::find_if(
list_.begin(), list_.end(),
[&id](const StatsReport* r) -> bool { return r->id()->Equals(id); }); [&id](const StatsReport* r) -> bool { return r->id()->Equals(id); });
return it == list_.end() ? nullptr : *it; return it == list_.end() ? nullptr : *it;
} }


@ -369,13 +369,14 @@ class StatsReport {
static Id NewBandwidthEstimationId(); static Id NewBandwidthEstimationId();
static Id NewTypedId(StatsType type, const std::string& id); static Id NewTypedId(StatsType type, const std::string& id);
static Id NewTypedIntId(StatsType type, int id); static Id NewTypedIntId(StatsType type, int id);
static Id NewIdWithDirection( static Id NewIdWithDirection(StatsType type,
StatsType type, const std::string& id, Direction direction); const std::string& id,
Direction direction);
static Id NewCandidateId(bool local, const std::string& id); static Id NewCandidateId(bool local, const std::string& id);
static Id NewComponentId( static Id NewComponentId(const std::string& content_name, int component);
const std::string& content_name, int component); static Id NewCandidatePairId(const std::string& content_name,
static Id NewCandidatePairId( int component,
const std::string& content_name, int component, int index); int index);
const Id& id() const { return id_; } const Id& id() const { return id_; }
StatsType type() const { return id_->type(); } StatsType type() const { return id_->type(); }


@ -22,13 +22,12 @@ namespace test {
using Config = VideoCodecTestFixture::Config; using Config = VideoCodecTestFixture::Config;
std::unique_ptr<VideoCodecTestFixture> std::unique_ptr<VideoCodecTestFixture> CreateVideoCodecTestFixture(
CreateVideoCodecTestFixture(const Config& config) { const Config& config) {
return rtc::MakeUnique<VideoCodecTestFixtureImpl>(config); return rtc::MakeUnique<VideoCodecTestFixtureImpl>(config);
} }
std::unique_ptr<VideoCodecTestFixture> std::unique_ptr<VideoCodecTestFixture> CreateVideoCodecTestFixture(
CreateVideoCodecTestFixture(
const Config& config, const Config& config,
std::unique_ptr<VideoDecoderFactory> decoder_factory, std::unique_ptr<VideoDecoderFactory> decoder_factory,
std::unique_ptr<VideoEncoderFactory> encoder_factory) { std::unique_ptr<VideoEncoderFactory> encoder_factory) {


@ -24,13 +24,9 @@ class FakeConstraints : public webrtc::MediaConstraintsInterface {
FakeConstraints() {} FakeConstraints() {}
virtual ~FakeConstraints() {} virtual ~FakeConstraints() {}
virtual const Constraints& GetMandatory() const { virtual const Constraints& GetMandatory() const { return mandatory_; }
return mandatory_;
}
virtual const Constraints& GetOptional() const { virtual const Constraints& GetOptional() const { return optional_; }
return optional_;
}
template <class T> template <class T>
void AddMandatory(const std::string& key, const T& value) { void AddMandatory(const std::string& key, const T& value) {


@ -18,7 +18,6 @@ class PortInterface;
class StunMessage; class StunMessage;
} // namespace cricket } // namespace cricket
namespace webrtc { namespace webrtc {
class TurnCustomizer { class TurnCustomizer {


@ -13,9 +13,13 @@
namespace webrtc { namespace webrtc {
namespace video_coding { namespace video_coding {
bool EncodedFrame::delayed_by_retransmission() const { return 0; } bool EncodedFrame::delayed_by_retransmission() const {
return 0;
}
uint32_t EncodedFrame::Timestamp() const { return timestamp; } uint32_t EncodedFrame::Timestamp() const {
return timestamp;
}
} // namespace video_coding } // namespace video_coding
} // namespace webrtc } // namespace webrtc


@ -34,8 +34,7 @@ int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
} // namespace } // namespace
I420Buffer::I420Buffer(int width, int height) I420Buffer::I420Buffer(int width, int height)
: I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) { : I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {}
}
I420Buffer::I420Buffer(int width, I420Buffer::I420Buffer(int width,
int height, int height,
@ -47,8 +46,8 @@ I420Buffer::I420Buffer(int width,
stride_y_(stride_y), stride_y_(stride_y),
stride_u_(stride_u), stride_u_(stride_u),
stride_v_(stride_v), stride_v_(stride_v),
data_(static_cast<uint8_t*>(AlignedMalloc( data_(static_cast<uint8_t*>(
I420DataSize(height, stride_y, stride_u, stride_v), AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v),
kBufferAlignment))) { kBufferAlignment))) {
RTC_DCHECK_GT(width, 0); RTC_DCHECK_GT(width, 0);
RTC_DCHECK_GT(height, 0); RTC_DCHECK_GT(height, 0);
@ -57,8 +56,7 @@ I420Buffer::I420Buffer(int width,
RTC_DCHECK_GE(stride_v, (width + 1) / 2); RTC_DCHECK_GE(stride_v, (width + 1) / 2);
} }
I420Buffer::~I420Buffer() { I420Buffer::~I420Buffer() {}
}
// static // static
rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) { rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) {
@ -71,34 +69,34 @@ rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width,
int stride_y, int stride_y,
int stride_u, int stride_u,
int stride_v) { int stride_v) {
return new rtc::RefCountedObject<I420Buffer>( return new rtc::RefCountedObject<I420Buffer>(width, height, stride_y,
width, height, stride_y, stride_u, stride_v); stride_u, stride_v);
} }
// static // static
rtc::scoped_refptr<I420Buffer> I420Buffer::Copy( rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(
const I420BufferInterface& source) { const I420BufferInterface& source) {
return Copy(source.width(), source.height(), return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(),
source.DataU(), source.StrideU(), source.StrideV());
source.DataV(), source.StrideV());
} }
// static // static
rtc::scoped_refptr<I420Buffer> I420Buffer::Copy( rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(int width,
int width, int height, int height,
const uint8_t* data_y, int stride_y, const uint8_t* data_y,
const uint8_t* data_u, int stride_u, int stride_y,
const uint8_t* data_v, int stride_v) { const uint8_t* data_u,
int stride_u,
const uint8_t* data_v,
int stride_v) {
// Note: May use different strides than the input data. // Note: May use different strides than the input data.
rtc::scoped_refptr<I420Buffer> buffer = Create(width, height); rtc::scoped_refptr<I420Buffer> buffer = Create(width, height);
-  RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y,
-                                   data_u, stride_u,
-                                   data_v, stride_v,
-                                   buffer->MutableDataY(), buffer->StrideY(),
-                                   buffer->MutableDataU(), buffer->StrideU(),
-                                   buffer->MutableDataV(), buffer->StrideV(),
-                                   width, height));
+  RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y, data_u, stride_u, data_v,
+                                   stride_v, buffer->MutableDataY(),
+                                   buffer->StrideY(), buffer->MutableDataU(),
+                                   buffer->StrideU(), buffer->MutableDataV(),
+                                   buffer->StrideV(), width, height));
return buffer; return buffer;
} }
@ -120,14 +118,13 @@ rtc::scoped_refptr<I420Buffer> I420Buffer::Rotate(
rtc::scoped_refptr<webrtc::I420Buffer> buffer = rtc::scoped_refptr<webrtc::I420Buffer> buffer =
I420Buffer::Create(rotated_width, rotated_height); I420Buffer::Create(rotated_width, rotated_height);
-  RTC_CHECK_EQ(0, libyuv::I420Rotate(
-      src.DataY(), src.StrideY(),
-      src.DataU(), src.StrideU(),
-      src.DataV(), src.StrideV(),
-      buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(),
-      buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(),
-      src.width(), src.height(),
-      static_cast<libyuv::RotationMode>(rotation)));
+  RTC_CHECK_EQ(0,
+               libyuv::I420Rotate(
+                   src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
+                   src.DataV(), src.StrideV(), buffer->MutableDataY(),
+                   buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+                   buffer->MutableDataV(), buffer->StrideV(), src.width(),
+                   src.height(), static_cast<libyuv::RotationMode>(rotation)));
return buffer; return buffer;
} }
@ -179,9 +176,9 @@ uint8_t* I420Buffer::MutableDataV() {
void I420Buffer::SetBlack(I420Buffer* buffer) { void I420Buffer::SetBlack(I420Buffer* buffer) {
RTC_CHECK(libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(), RTC_CHECK(libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(),
buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataV(), buffer->StrideV(), 0, 0,
0, 0, buffer->width(), buffer->height(), buffer->width(), buffer->height(), 0, 128,
0, 128, 128) == 0); 128) == 0);
} }
void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src, void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src,
@ -202,20 +199,16 @@ void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src,
offset_x = uv_offset_x * 2; offset_x = uv_offset_x * 2;
offset_y = uv_offset_y * 2; offset_y = uv_offset_y * 2;
-  const uint8_t* y_plane =
-      src.DataY() + src.StrideY() * offset_y + offset_x;
+  const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
   const uint8_t* u_plane =
       src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
   const uint8_t* v_plane =
       src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
-  int res = libyuv::I420Scale(y_plane, src.StrideY(),
-                              u_plane, src.StrideU(),
-                              v_plane, src.StrideV(),
-                              crop_width, crop_height,
-                              MutableDataY(), StrideY(),
-                              MutableDataU(), StrideU(),
-                              MutableDataV(), StrideV(),
-                              width(), height(), libyuv::kFilterBox);
+  int res =
+      libyuv::I420Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
+                        src.StrideV(), crop_width, crop_height, MutableDataY(),
+                        StrideY(), MutableDataU(), StrideU(), MutableDataV(),
+                        StrideV(), width(), height(), libyuv::kFilterBox);
RTC_DCHECK_EQ(res, 0); RTC_DCHECK_EQ(res, 0);
} }
@ -226,10 +219,8 @@ void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src) {
const int crop_height = const int crop_height =
std::min(src.height(), height() * src.width() / width()); std::min(src.height(), height() * src.width() / width());
-  CropAndScaleFrom(
-      src,
-      (src.width() - crop_width) / 2, (src.height() - crop_height) / 2,
-      crop_width, crop_height);
+  CropAndScaleFrom(src, (src.width() - crop_width) / 2,
+                   (src.height() - crop_height) / 2, crop_width, crop_height);
} }
void I420Buffer::ScaleFrom(const I420BufferInterface& src) { void I420Buffer::ScaleFrom(const I420BufferInterface& src) {


@ -36,11 +36,14 @@ class I420Buffer : public I420BufferInterface {
return Copy(*buffer.GetI420()); return Copy(*buffer.GetI420());
} }
-  static rtc::scoped_refptr<I420Buffer> Copy(
-      int width, int height,
-      const uint8_t* data_y, int stride_y,
-      const uint8_t* data_u, int stride_u,
-      const uint8_t* data_v, int stride_v);
+  static rtc::scoped_refptr<I420Buffer> Copy(int width,
+                                             int height,
+                                             const uint8_t* data_y,
+                                             int stride_y,
+                                             const uint8_t* data_u,
+                                             int stride_u,
+                                             const uint8_t* data_v,
+                                             int stride_v);
// Returns a rotated copy of |src|. // Returns a rotated copy of |src|.
static rtc::scoped_refptr<I420Buffer> Rotate(const I420BufferInterface& src, static rtc::scoped_refptr<I420Buffer> Rotate(const I420BufferInterface& src,


@ -68,19 +68,16 @@ bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) {
return true; return true;
} }
uint8_t GetExperimentId( uint8_t GetExperimentId(const VideoContentType& content_type) {
const VideoContentType& content_type) {
return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >> return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >>
kExperimentShift; kExperimentShift;
} }
uint8_t GetSimulcastId( uint8_t GetSimulcastId(const VideoContentType& content_type) {
const VideoContentType& content_type) {
return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >> return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >>
kSimulcastShift; kSimulcastShift;
} }
bool IsScreenshare( bool IsScreenshare(const VideoContentType& content_type) {
const VideoContentType& content_type) {
return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0; return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
} }


@ -13,8 +13,8 @@
#include <stdint.h> #include <stdint.h>
#include "api/video/video_rotation.h"
#include "api/video/video_frame_buffer.h" #include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"
namespace webrtc { namespace webrtc {


@ -46,21 +46,18 @@ bool VideoCodecVP9::operator==(const VideoCodecVP9& other) const {
bool VideoCodecH264::operator==(const VideoCodecH264& other) const { bool VideoCodecH264::operator==(const VideoCodecH264& other) const {
return (frameDroppingOn == other.frameDroppingOn && return (frameDroppingOn == other.frameDroppingOn &&
keyFrameInterval == other.keyFrameInterval && keyFrameInterval == other.keyFrameInterval &&
spsLen == other.spsLen && spsLen == other.spsLen && ppsLen == other.ppsLen &&
ppsLen == other.ppsLen &&
profile == other.profile && profile == other.profile &&
(spsLen == 0 || memcmp(spsData, other.spsData, spsLen) == 0) && (spsLen == 0 || memcmp(spsData, other.spsData, spsLen) == 0) &&
(ppsLen == 0 || memcmp(ppsData, other.ppsData, ppsLen) == 0)); (ppsLen == 0 || memcmp(ppsData, other.ppsData, ppsLen) == 0));
} }
bool SpatialLayer::operator==(const SpatialLayer& other) const { bool SpatialLayer::operator==(const SpatialLayer& other) const {
return (width == other.width && return (width == other.width && height == other.height &&
height == other.height &&
numberOfTemporalLayers == other.numberOfTemporalLayers && numberOfTemporalLayers == other.numberOfTemporalLayers &&
maxBitrate == other.maxBitrate && maxBitrate == other.maxBitrate &&
targetBitrate == other.targetBitrate && targetBitrate == other.targetBitrate &&
minBitrate == other.minBitrate && minBitrate == other.minBitrate && qpMax == other.qpMax &&
qpMax == other.qpMax &&
active == other.active); active == other.active);
} }


@ -11,8 +11,8 @@
#ifndef API_VIDEOSOURCEPROXY_H_ #ifndef API_VIDEOSOURCEPROXY_H_
#define API_VIDEOSOURCEPROXY_H_ #define API_VIDEOSOURCEPROXY_H_
#include "api/proxy.h"
#include "api/mediastreaminterface.h" #include "api/mediastreaminterface.h"
#include "api/proxy.h"
namespace webrtc { namespace webrtc {


@ -47,8 +47,10 @@ double AudioLevel::TotalDuration() const {
void AudioLevel::ComputeLevel(const AudioFrame& audioFrame, double duration) { void AudioLevel::ComputeLevel(const AudioFrame& audioFrame, double duration) {
// Check speech level (works for 2 channels as well) // Check speech level (works for 2 channels as well)
-  int16_t abs_value = audioFrame.muted() ? 0 :
-      WebRtcSpl_MaxAbsValueW16(
-          audioFrame.data(),
-          audioFrame.samples_per_channel_ * audioFrame.num_channels_);
+  int16_t abs_value =
+      audioFrame.muted()
+          ? 0
+          : WebRtcSpl_MaxAbsValueW16(
+                audioFrame.data(),
+                audioFrame.samples_per_channel_ * audioFrame.num_channels_);


@ -102,8 +102,7 @@ AudioReceiveStream::AudioReceiveStream(
const rtc::scoped_refptr<webrtc::AudioState>& audio_state, const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
webrtc::RtcEventLog* event_log, webrtc::RtcEventLog* event_log,
std::unique_ptr<voe::ChannelProxy> channel_proxy) std::unique_ptr<voe::ChannelProxy> channel_proxy)
: audio_state_(audio_state), : audio_state_(audio_state), channel_proxy_(std::move(channel_proxy)) {
channel_proxy_(std::move(channel_proxy)) {
RTC_LOG(LS_INFO) << "AudioReceiveStream: " << config.rtp.remote_ssrc; RTC_LOG(LS_INFO) << "AudioReceiveStream: " << config.rtp.remote_ssrc;
RTC_DCHECK(receiver_controller); RTC_DCHECK(receiver_controller);
RTC_DCHECK(packet_router); RTC_DCHECK(packet_router);
@ -120,9 +119,8 @@ AudioReceiveStream::AudioReceiveStream(
channel_proxy_->RegisterReceiverCongestionControlObjects(packet_router); channel_proxy_->RegisterReceiverCongestionControlObjects(packet_router);
// Register with transport. // Register with transport.
rtp_stream_receiver_ = rtp_stream_receiver_ = receiver_controller->CreateReceiver(
receiver_controller->CreateReceiver(config.rtp.remote_ssrc, config.rtp.remote_ssrc, channel_proxy_.get());
channel_proxy_.get());
ConfigureStream(this, config, true); ConfigureStream(this, config, true);
} }
@ -273,9 +271,7 @@ absl::optional<Syncable::Info> AudioReceiveStream::GetInfo() const {
return absl::nullopt; return absl::nullopt;
} }
if (rtp_rtcp->RemoteNTP(&info.capture_time_ntp_secs, if (rtp_rtcp->RemoteNTP(&info.capture_time_ntp_secs,
&info.capture_time_ntp_frac, &info.capture_time_ntp_frac, nullptr, nullptr,
nullptr,
nullptr,
&info.capture_time_source_clock) != 0) { &info.capture_time_source_clock) != 0) {
return absl::nullopt; return absl::nullopt;
} }
@ -329,8 +325,8 @@ const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const {
return config_; return config_;
} }
const AudioSendStream* const AudioSendStream* AudioReceiveStream::GetAssociatedSendStreamForTesting()
AudioReceiveStream::GetAssociatedSendStreamForTesting() const { const {
RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK_RUN_ON(&worker_thread_checker_);
return associated_send_stream_; return associated_send_stream_;
} }


@ -59,18 +59,16 @@ const unsigned int kSpeechOutputLevel = 99;
const double kTotalOutputEnergy = 0.25; const double kTotalOutputEnergy = 0.25;
const double kTotalOutputDuration = 0.5; const double kTotalOutputDuration = 0.5;
const CallStatistics kCallStats = { const CallStatistics kCallStats = {345, 678, 901, 234, -12,
345, 678, 901, 234, -12, 3456, 7890, 567, 890, 123}; 3456, 7890, 567, 890, 123};
const CodecInst kCodecInst = { const CodecInst kCodecInst = {123, "codec_name_recv", 96000, -187, 0, -103};
123, "codec_name_recv", 96000, -187, 0, -103};
const NetworkStatistics kNetworkStats = { const NetworkStatistics kNetworkStats = {
123, 456, false, 789012, 3456, 123, 456, 0, {}, 789, 12, 123, 456, false, 789012, 3456, 123, 456, 0, {}, 789, 12,
345, 678, 901, 0, -1, -1, -1, -1, -1, 0}; 345, 678, 901, 0, -1, -1, -1, -1, -1, 0};
const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest(); const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest();
struct ConfigHelper { struct ConfigHelper {
ConfigHelper() ConfigHelper() : ConfigHelper(new rtc::RefCountedObject<MockAudioMixer>()) {}
: ConfigHelper(new rtc::RefCountedObject<MockAudioMixer>()) {}
explicit ConfigHelper(rtc::scoped_refptr<MockAudioMixer> audio_mixer) explicit ConfigHelper(rtc::scoped_refptr<MockAudioMixer> audio_mixer)
: audio_mixer_(audio_mixer) { : audio_mixer_(audio_mixer) {
@ -94,15 +92,13 @@ struct ConfigHelper {
.Times(1); .Times(1);
EXPECT_CALL(*channel_proxy_, RegisterTransport(nullptr)).Times(2); EXPECT_CALL(*channel_proxy_, RegisterTransport(nullptr)).Times(2);
testing::Expectation expect_set = testing::Expectation expect_set =
EXPECT_CALL(*channel_proxy_, SetRtcEventLog(&event_log_)) EXPECT_CALL(*channel_proxy_, SetRtcEventLog(&event_log_)).Times(1);
.Times(1);
EXPECT_CALL(*channel_proxy_, SetRtcEventLog(testing::IsNull())) EXPECT_CALL(*channel_proxy_, SetRtcEventLog(testing::IsNull()))
.Times(1) .Times(1)
.After(expect_set); .After(expect_set);
EXPECT_CALL(*channel_proxy_, DisassociateSendChannel()).Times(1); EXPECT_CALL(*channel_proxy_, DisassociateSendChannel()).Times(1);
EXPECT_CALL(*channel_proxy_, SetReceiveCodecs(_)) EXPECT_CALL(*channel_proxy_, SetReceiveCodecs(_))
.WillRepeatedly( .WillRepeatedly(Invoke([](const std::map<int, SdpAudioFormat>& codecs) {
Invoke([](const std::map<int, SdpAudioFormat>& codecs) {
EXPECT_THAT(codecs, testing::IsEmpty()); EXPECT_THAT(codecs, testing::IsEmpty());
})); }));
@ -120,11 +116,8 @@ struct ConfigHelper {
std::unique_ptr<internal::AudioReceiveStream> CreateAudioReceiveStream() { std::unique_ptr<internal::AudioReceiveStream> CreateAudioReceiveStream() {
return std::unique_ptr<internal::AudioReceiveStream>( return std::unique_ptr<internal::AudioReceiveStream>(
new internal::AudioReceiveStream( new internal::AudioReceiveStream(
-            &rtp_stream_receiver_controller_,
-            &packet_router_,
-            stream_config_,
-            audio_state_,
-            &event_log_,
+            &rtp_stream_receiver_controller_, &packet_router_, stream_config_,
+            audio_state_, &event_log_,
std::unique_ptr<voe::ChannelProxy>(channel_proxy_))); std::unique_ptr<voe::ChannelProxy>(channel_proxy_)));
} }
@ -372,8 +365,8 @@ TEST(AudioReceiveStreamTest, ReconfigureWithUpdatedConfig) {
new_config.rtp.extensions.clear(); new_config.rtp.extensions.clear();
new_config.rtp.extensions.push_back( new_config.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelId + 1)); RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelId + 1));
new_config.rtp.extensions.push_back(RtpExtension( new_config.rtp.extensions.push_back(
RtpExtension::kTransportSequenceNumberUri, RtpExtension(RtpExtension::kTransportSequenceNumberUri,
kTransportSequenceNumberId + 1)); kTransportSequenceNumberId + 1));
new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1)); new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1));


@ -218,8 +218,7 @@ void AudioSendStream::ConfigureStream(
new_config.rtp.nack.rtp_history_ms / 20); new_config.rtp.nack.rtp_history_ms / 20);
} }
if (first_time || if (first_time || new_config.send_transport != old_config.send_transport) {
new_config.send_transport != old_config.send_transport) {
if (old_config.send_transport) { if (old_config.send_transport) {
channel_proxy->RegisterTransport(nullptr); channel_proxy->RegisterTransport(nullptr);
} }
@ -326,7 +325,8 @@ void AudioSendStream::SendAudioData(std::unique_ptr<AudioFrame> audio_frame) {
} }
bool AudioSendStream::SendTelephoneEvent(int payload_type, bool AudioSendStream::SendTelephoneEvent(int payload_type,
int payload_frequency, int event, int payload_frequency,
int event,
int duration_ms) { int duration_ms) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
return channel_proxy_->SetSendTelephoneEventPayloadType(payload_type, return channel_proxy_->SetSendTelephoneEventPayloadType(payload_type,
@ -415,8 +415,7 @@ uint32_t AudioSendStream::OnBitrateUpdated(uint32_t bitrate_bps,
if (bitrate_bps == 0) { if (bitrate_bps == 0) {
bitrate_bps = config_.min_bitrate_bps; bitrate_bps = config_.min_bitrate_bps;
} }
RTC_DCHECK_GE(bitrate_bps, RTC_DCHECK_GE(bitrate_bps, static_cast<uint32_t>(config_.min_bitrate_bps));
static_cast<uint32_t>(config_.min_bitrate_bps));
// The bitrate allocator might allocate an higher than max configured bitrate // The bitrate allocator might allocate an higher than max configured bitrate
// if there is room, to allow for, as example, extra FEC. Ignore that for now. // if there is room, to allow for, as example, extra FEC. Ignore that for now.
const uint32_t max_bitrate_bps = config_.max_bitrate_bps; const uint32_t max_bitrate_bps = config_.max_bitrate_bps;


@ -70,7 +70,9 @@ class AudioSendStream final : public webrtc::AudioSendStream,
void Start() override; void Start() override;
void Stop() override; void Stop() override;
void SendAudioData(std::unique_ptr<AudioFrame> audio_frame) override; void SendAudioData(std::unique_ptr<AudioFrame> audio_frame) override;
bool SendTelephoneEvent(int payload_type, int payload_frequency, int event, bool SendTelephoneEvent(int payload_type,
int payload_frequency,
int event,
int duration_ms) override; int duration_ms) override;
void SetMuted(bool muted) override; void SetMuted(bool muted) override;
webrtc::AudioSendStream::Stats GetStats() const override; webrtc::AudioSendStream::Stats GetStats() const override;


@ -20,15 +20,9 @@ class AudioSendTest : public SendTest {
public: public:
AudioSendTest() : SendTest(CallTest::kDefaultTimeoutMs) {} AudioSendTest() : SendTest(CallTest::kDefaultTimeoutMs) {}
-  size_t GetNumVideoStreams() const override {
-    return 0;
-  }
-  size_t GetNumAudioStreams() const override {
-    return 1;
-  }
-  size_t GetNumFlexfecStreams() const override {
-    return 0;
-  }
+  size_t GetNumVideoStreams() const override { return 0; }
+  size_t GetNumAudioStreams() const override { return 1; }
+  size_t GetNumFlexfecStreams() const override { return 0; }
}; };
} // namespace } // namespace


@ -56,8 +56,8 @@ const double kEchoReturnLoss = -65;
const double kEchoReturnLossEnhancement = 101; const double kEchoReturnLossEnhancement = 101;
const double kResidualEchoLikelihood = -1.0f; const double kResidualEchoLikelihood = -1.0f;
const double kResidualEchoLikelihoodMax = 23.0f; const double kResidualEchoLikelihoodMax = 23.0f;
const CallStatistics kCallStats = { const CallStatistics kCallStats = {1345, 1678, 1901, 1234, 112,
1345, 1678, 1901, 1234, 112, 13456, 17890, 1567, -1890, -1123}; 13456, 17890, 1567, -1890, -1123};
const ReportBlock kReportBlock = {456, 780, 123, 567, 890, 132, 143, 13354}; const ReportBlock kReportBlock = {456, 780, 123, 567, 890, 132, 143, 13354};
const int kTelephoneEventPayloadType = 123; const int kTelephoneEventPayloadType = 123;
const int kTelephoneEventPayloadFrequency = 65432; const int kTelephoneEventPayloadFrequency = 65432;
@ -181,9 +181,8 @@ struct ConfigHelper {
TimeInterval* active_lifetime() { return &active_lifetime_; } TimeInterval* active_lifetime() { return &active_lifetime_; }
static void AddBweToConfig(AudioSendStream::Config* config) { static void AddBweToConfig(AudioSendStream::Config* config) {
config->rtp.extensions.push_back( config->rtp.extensions.push_back(RtpExtension(
RtpExtension(RtpExtension::kTransportSequenceNumberUri, RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId));
kTransportSequenceNumberId));
config->send_codec_spec->transport_cc_enabled = true; config->send_codec_spec->transport_cc_enabled = true;
} }
@ -254,11 +253,12 @@ struct ConfigHelper {
void SetupMockForSendTelephoneEvent() { void SetupMockForSendTelephoneEvent() {
EXPECT_TRUE(channel_proxy_); EXPECT_TRUE(channel_proxy_);
EXPECT_CALL(*channel_proxy_, EXPECT_CALL(*channel_proxy_, SetSendTelephoneEventPayloadType(
SetSendTelephoneEventPayloadType(kTelephoneEventPayloadType, kTelephoneEventPayloadType,
kTelephoneEventPayloadFrequency)) kTelephoneEventPayloadFrequency))
.WillOnce(Return(true)); .WillOnce(Return(true));
EXPECT_CALL(*channel_proxy_, EXPECT_CALL(
*channel_proxy_,
SendTelephoneEventOutband(kTelephoneEventCode, kTelephoneEventDuration)) SendTelephoneEventOutband(kTelephoneEventCode, kTelephoneEventDuration))
.WillOnce(Return(true)); .WillOnce(Return(true));
} }
@ -355,9 +355,9 @@ TEST(AudioSendStreamTest, SendTelephoneEvent) {
ConfigHelper helper(false, true); ConfigHelper helper(false, true);
auto send_stream = helper.CreateAudioSendStream(); auto send_stream = helper.CreateAudioSendStream();
helper.SetupMockForSendTelephoneEvent(); helper.SetupMockForSendTelephoneEvent();
EXPECT_TRUE(send_stream->SendTelephoneEvent(kTelephoneEventPayloadType, EXPECT_TRUE(send_stream->SendTelephoneEvent(
kTelephoneEventPayloadFrequency, kTelephoneEventCode, kTelephoneEventPayloadType, kTelephoneEventPayloadFrequency,
kTelephoneEventDuration)); kTelephoneEventCode, kTelephoneEventDuration));
} }
TEST(AudioSendStreamTest, SetMuted) { TEST(AudioSendStreamTest, SetMuted) {


@ -27,8 +27,7 @@ namespace internal {
AudioState::AudioState(const AudioState::Config& config) AudioState::AudioState(const AudioState::Config& config)
: config_(config), : config_(config),
audio_transport_(config_.audio_mixer, audio_transport_(config_.audio_mixer, config_.audio_processing.get()) {
config_.audio_processing.get()) {
process_thread_checker_.DetachFromThread(); process_thread_checker_.DetachFromThread();
RTC_DCHECK(config_.audio_mixer); RTC_DCHECK(config_.audio_mixer);
RTC_DCHECK(config_.audio_device_module); RTC_DCHECK(config_.audio_device_module);
@ -79,7 +78,8 @@ void AudioState::RemoveReceivingStream(webrtc::AudioReceiveStream* stream) {
} }
void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, void AudioState::AddSendingStream(webrtc::AudioSendStream* stream,
int sample_rate_hz, size_t num_channels) { int sample_rate_hz,
size_t num_channels) {
RTC_DCHECK(thread_checker_.CalledOnValidThread()); RTC_DCHECK(thread_checker_.CalledOnValidThread());
auto& properties = sending_streams_[stream]; auto& properties = sending_streams_[stream];
properties.sample_rate_hz = sample_rate_hz; properties.sample_rate_hz = sample_rate_hz;
@ -121,8 +121,7 @@ void AudioState::SetPlayout(bool enabled) {
} }
} else { } else {
config_.audio_device_module->StopPlayout(); config_.audio_device_module->StopPlayout();
null_audio_poller_ = null_audio_poller_ = rtc::MakeUnique<NullAudioPoller>(&audio_transport_);
rtc::MakeUnique<NullAudioPoller>(&audio_transport_);
} }
} }
} }


@ -39,9 +39,7 @@ class AudioState final : public webrtc::AudioState {
RTC_DCHECK(config_.audio_processing); RTC_DCHECK(config_.audio_processing);
return config_.audio_processing.get(); return config_.audio_processing.get();
} }
AudioTransport* audio_transport() override { AudioTransport* audio_transport() override { return &audio_transport_; }
return &audio_transport_;
}
void SetPlayout(bool enabled) override; void SetPlayout(bool enabled) override;
void SetRecording(bool enabled) override; void SetRecording(bool enabled) override;
@ -60,7 +58,8 @@ class AudioState final : public webrtc::AudioState {
void RemoveReceivingStream(webrtc::AudioReceiveStream* stream); void RemoveReceivingStream(webrtc::AudioReceiveStream* stream);
void AddSendingStream(webrtc::AudioSendStream* stream, void AddSendingStream(webrtc::AudioSendStream* stream,
int sample_rate_hz, size_t num_channels); int sample_rate_hz,
size_t num_channels);
void RemoveSendingStream(webrtc::AudioSendStream* stream); void RemoveSendingStream(webrtc::AudioSendStream* stream);
private: private:


@ -71,8 +71,7 @@ std::vector<int16_t> Create10msTestData(int sample_rate_hz,
const float inc = (2 * 3.14159265f * 1000) / sample_rate_hz; const float inc = (2 * 3.14159265f * 1000) / sample_rate_hz;
float w = 0.f; float w = 0.f;
for (int i = 0; i < samples_per_channel; ++i) { for (int i = 0; i < samples_per_channel; ++i) {
audio_data[i * num_channels] = audio_data[i * num_channels] = static_cast<int16_t>(32767.f * std::sin(w));
static_cast<int16_t>(32767.f * std::sin(w));
w += inc; w += inc;
} }
return audio_data; return audio_data;
@ -111,7 +110,9 @@ TEST(AudioStateTest, RecordedAudioArrivesAtSingleStream) {
MockAudioSendStream stream; MockAudioSendStream stream;
audio_state->AddSendingStream(&stream, 8000, 2); audio_state->AddSendingStream(&stream, 8000, 2);
EXPECT_CALL(stream, SendAudioDataForMock(testing::AllOf( EXPECT_CALL(
stream,
SendAudioDataForMock(testing::AllOf(
testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(8000)), testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(8000)),
testing::Field(&AudioFrame::num_channels_, testing::Eq(2u))))) testing::Field(&AudioFrame::num_channels_, testing::Eq(2u)))))
.WillOnce( .WillOnce(
@ -132,8 +133,8 @@ TEST(AudioStateTest, RecordedAudioArrivesAtSingleStream) {
auto audio_data = Create10msTestData(kSampleRate, kNumChannels); auto audio_data = Create10msTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
EXPECT_EQ(667u, new_mic_level); EXPECT_EQ(667u, new_mic_level);
audio_state->RemoveSendingStream(&stream); audio_state->RemoveSendingStream(&stream);
@ -149,7 +150,9 @@ TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) {
audio_state->AddSendingStream(&stream_1, 8001, 2); audio_state->AddSendingStream(&stream_1, 8001, 2);
audio_state->AddSendingStream(&stream_2, 32000, 1); audio_state->AddSendingStream(&stream_2, 32000, 1);
EXPECT_CALL(stream_1, SendAudioDataForMock(testing::AllOf( EXPECT_CALL(
stream_1,
SendAudioDataForMock(testing::AllOf(
testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)), testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)),
testing::Field(&AudioFrame::num_channels_, testing::Eq(1u))))) testing::Field(&AudioFrame::num_channels_, testing::Eq(1u)))))
.WillOnce( .WillOnce(
@ -158,7 +161,9 @@ TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) {
auto levels = ComputeChannelLevels(audio_frame); auto levels = ComputeChannelLevels(audio_frame);
EXPECT_LT(0u, levels[0]); EXPECT_LT(0u, levels[0]);
})); }));
EXPECT_CALL(stream_2, SendAudioDataForMock(testing::AllOf( EXPECT_CALL(
stream_2,
SendAudioDataForMock(testing::AllOf(
testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)), testing::Field(&AudioFrame::sample_rate_hz_, testing::Eq(16000)),
testing::Field(&AudioFrame::num_channels_, testing::Eq(1u))))) testing::Field(&AudioFrame::num_channels_, testing::Eq(1u)))))
.WillOnce( .WillOnce(
@ -178,8 +183,8 @@ TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) {
auto audio_data = Create10msTestData(kSampleRate, kNumChannels); auto audio_data = Create10msTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 5, 0, 0, true, new_mic_level); kSampleRate, 5, 0, 0, true, new_mic_level);
EXPECT_EQ(667u, new_mic_level); EXPECT_EQ(667u, new_mic_level);
audio_state->RemoveSendingStream(&stream_1); audio_state->RemoveSendingStream(&stream_1);
@ -210,8 +215,8 @@ TEST(AudioStateTest, EnableChannelSwap) {
auto audio_data = Create10msTestData(kSampleRate, kNumChannels); auto audio_data = Create10msTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
EXPECT_EQ(667u, new_mic_level); EXPECT_EQ(667u, new_mic_level);
audio_state->RemoveSendingStream(&stream); audio_state->RemoveSendingStream(&stream);
@ -230,8 +235,8 @@ TEST(AudioStateTest, InputLevelStats) {
auto audio_data = Create10msSilentTestData(kSampleRate, kNumChannels); auto audio_data = Create10msSilentTestData(kSampleRate, kNumChannels);
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
auto stats = audio_state->GetAudioInputStats(); auto stats = audio_state->GetAudioInputStats();
EXPECT_EQ(0, stats.audio_level); EXPECT_EQ(0, stats.audio_level);
EXPECT_THAT(stats.total_energy, testing::DoubleEq(0.0)); EXPECT_THAT(stats.total_energy, testing::DoubleEq(0.0));
@ -244,8 +249,8 @@ TEST(AudioStateTest, InputLevelStats) {
uint32_t new_mic_level = 667; uint32_t new_mic_level = 667;
for (int i = 0; i < 10; ++i) { for (int i = 0; i < 10; ++i) {
audio_state->audio_transport()->RecordedDataIsAvailable( audio_state->audio_transport()->RecordedDataIsAvailable(
&audio_data[0], kSampleRate / 100, kNumChannels * 2, &audio_data[0], kSampleRate / 100, kNumChannels * 2, kNumChannels,
kNumChannels, kSampleRate, 0, 0, 0, false, new_mic_level); kSampleRate, 0, 0, 0, false, new_mic_level);
} }
auto stats = audio_state->GetAudioInputStats(); auto stats = audio_state->GetAudioInputStats();
EXPECT_EQ(32767, stats.audio_level); EXPECT_EQ(32767, stats.audio_level);


@ -83,8 +83,7 @@ int Resample(const AudioFrame& frame,
AudioTransportImpl::AudioTransportImpl(AudioMixer* mixer, AudioTransportImpl::AudioTransportImpl(AudioMixer* mixer,
AudioProcessing* audio_processing) AudioProcessing* audio_processing)
: audio_processing_(audio_processing), : audio_processing_(audio_processing), mixer_(mixer) {
mixer_(mixer) {
RTC_DCHECK(mixer); RTC_DCHECK(mixer);
RTC_DCHECK(audio_processing); RTC_DCHECK(audio_processing);
} }
@ -125,9 +124,8 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable(
} }
std::unique_ptr<AudioFrame> audio_frame(new AudioFrame()); std::unique_ptr<AudioFrame> audio_frame(new AudioFrame());
InitializeCaptureFrame(sample_rate, send_sample_rate_hz, InitializeCaptureFrame(sample_rate, send_sample_rate_hz, number_of_channels,
number_of_channels, send_num_channels, send_num_channels, audio_frame.get());
audio_frame.get());
voe::RemixAndResample(static_cast<const int16_t*>(audio_data), voe::RemixAndResample(static_cast<const int16_t*>(audio_data),
number_of_frames, number_of_channels, sample_rate, number_of_frames, number_of_channels, sample_rate,
&capture_resampler_, audio_frame.get()); &capture_resampler_, audio_frame.get());
@ -237,7 +235,8 @@ void AudioTransportImpl::PullRenderData(int bits_per_sample,
} }
void AudioTransportImpl::UpdateSendingStreams( void AudioTransportImpl::UpdateSendingStreams(
std::vector<AudioSendStream*> streams, int send_sample_rate_hz, std::vector<AudioSendStream*> streams,
int send_sample_rate_hz,
size_t send_num_channels) { size_t send_num_channels) {
rtc::CritScope lock(&capture_lock_); rtc::CritScope lock(&capture_lock_);
sending_streams_ = std::move(streams); sending_streams_ = std::move(streams);


@ -30,8 +30,7 @@ class AudioSendStream;
class AudioTransportImpl : public AudioTransport { class AudioTransportImpl : public AudioTransport {
public: public:
AudioTransportImpl(AudioMixer* mixer, AudioTransportImpl(AudioMixer* mixer, AudioProcessing* audio_processing);
AudioProcessing* audio_processing);
~AudioTransportImpl() override; ~AudioTransportImpl() override;
int32_t RecordedDataIsAvailable(const void* audioSamples, int32_t RecordedDataIsAvailable(const void* audioSamples,
@ -63,12 +62,11 @@ class AudioTransportImpl : public AudioTransport {
int64_t* ntp_time_ms) override; int64_t* ntp_time_ms) override;
void UpdateSendingStreams(std::vector<AudioSendStream*> streams, void UpdateSendingStreams(std::vector<AudioSendStream*> streams,
int send_sample_rate_hz, size_t send_num_channels); int send_sample_rate_hz,
size_t send_num_channels);
void SetStereoChannelSwapping(bool enable); void SetStereoChannelSwapping(bool enable);
bool typing_noise_detected() const; bool typing_noise_detected() const;
const voe::AudioLevel& audio_level() const { const voe::AudioLevel& audio_level() const { return audio_level_; }
return audio_level_;
}
private: private:
// Shared. // Shared.


@ -20,8 +20,8 @@
#include "api/array_view.h" #include "api/array_view.h"
#include "audio/utility/audio_frame_operations.h" #include "audio/utility/audio_frame_operations.h"
#include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_transport_controller_send_interface.h"
#include "logging/rtc_event_log/rtc_event_log.h"
#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h"
#include "logging/rtc_event_log/rtc_event_log.h"
#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h"
#include "modules/audio_coding/codecs/audio_format_conversion.h" #include "modules/audio_coding/codecs/audio_format_conversion.h"
#include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/audio_device.h"
@ -943,8 +943,7 @@ int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
uint32_t ntp_secs = 0; uint32_t ntp_secs = 0;
uint32_t ntp_frac = 0; uint32_t ntp_frac = 0;
uint32_t rtp_timestamp = 0; uint32_t rtp_timestamp = 0;
if (0 != if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
_rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
&rtp_timestamp)) { &rtp_timestamp)) {
// Waiting for RTCP. // Waiting for RTCP.
return 0; return 0;


@ -22,8 +22,8 @@ namespace webrtc {
namespace voe { namespace voe {
ChannelProxy::ChannelProxy() {} ChannelProxy::ChannelProxy() {}
ChannelProxy::ChannelProxy(std::unique_ptr<Channel> channel) : ChannelProxy::ChannelProxy(std::unique_ptr<Channel> channel)
channel_(std::move(channel)) { : channel_(std::move(channel)) {
RTC_DCHECK(channel_); RTC_DCHECK(channel_);
module_process_thread_checker_.DetachFromThread(); module_process_thread_checker_.DetachFromThread();
} }


@ -108,8 +108,7 @@ class ChannelProxy : public RtpPacketSinkInterface {
virtual void SetTransportOverhead(int transport_overhead_per_packet); virtual void SetTransportOverhead(int transport_overhead_per_packet);
virtual void AssociateSendChannel(const ChannelProxy& send_channel_proxy); virtual void AssociateSendChannel(const ChannelProxy& send_channel_proxy);
virtual void DisassociateSendChannel(); virtual void DisassociateSendChannel();
virtual void GetRtpRtcp(RtpRtcp** rtp_rtcp, virtual void GetRtpRtcp(RtpRtcp** rtp_rtcp, RtpReceiver** rtp_receiver) const;
RtpReceiver** rtp_receiver) const;
virtual uint32_t GetPlayoutTimestamp() const; virtual uint32_t GetPlayoutTimestamp() const;
virtual void SetMinimumPlayoutDelay(int delay_ms); virtual void SetMinimumPlayoutDelay(int delay_ms);
virtual bool GetRecCodec(CodecInst* codec_inst) const; virtual bool GetRecCodec(CodecInst* codec_inst) const;


@ -31,8 +31,7 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
return SetEncoderForMock(payload_type, &encoder); return SetEncoderForMock(payload_type, &encoder);
} }
MOCK_METHOD2(SetEncoderForMock, MOCK_METHOD2(SetEncoderForMock,
bool(int payload_type, bool(int payload_type, std::unique_ptr<AudioEncoder>* encoder));
std::unique_ptr<AudioEncoder>* encoder));
MOCK_METHOD1( MOCK_METHOD1(
ModifyEncoder, ModifyEncoder,
void(rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier)); void(rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier));
@ -59,8 +58,8 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
MOCK_CONST_METHOD0(GetTotalOutputEnergy, double()); MOCK_CONST_METHOD0(GetTotalOutputEnergy, double());
MOCK_CONST_METHOD0(GetTotalOutputDuration, double()); MOCK_CONST_METHOD0(GetTotalOutputDuration, double());
MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t()); MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t());
MOCK_METHOD2(SetSendTelephoneEventPayloadType, bool(int payload_type, MOCK_METHOD2(SetSendTelephoneEventPayloadType,
int payload_frequency)); bool(int payload_type, int payload_frequency));
MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms)); MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms));
MOCK_METHOD2(SetBitrate, void(int bitrate_bps, int64_t probing_interval_ms)); MOCK_METHOD2(SetBitrate, void(int bitrate_bps, int64_t probing_interval_ms));
MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink)); MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink));
@ -84,8 +83,8 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
MOCK_METHOD1(AssociateSendChannel, MOCK_METHOD1(AssociateSendChannel,
void(const ChannelProxy& send_channel_proxy)); void(const ChannelProxy& send_channel_proxy));
MOCK_METHOD0(DisassociateSendChannel, void()); MOCK_METHOD0(DisassociateSendChannel, void());
MOCK_CONST_METHOD2(GetRtpRtcp, void(RtpRtcp** rtp_rtcp, MOCK_CONST_METHOD2(GetRtpRtcp,
RtpReceiver** rtp_receiver)); void(RtpRtcp** rtp_rtcp, RtpReceiver** rtp_receiver));
MOCK_CONST_METHOD0(GetPlayoutTimestamp, uint32_t()); MOCK_CONST_METHOD0(GetPlayoutTimestamp, uint32_t());
MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms)); MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms));
MOCK_CONST_METHOD1(GetRecCodec, bool(CodecInst* codec_inst)); MOCK_CONST_METHOD1(GetRecCodec, bool(CodecInst* codec_inst));


@ -68,8 +68,8 @@ void RemixAndResample(const int16_t* src_data,
// how much to zero here; or 2) make resampler accept a hint that the input is // how much to zero here; or 2) make resampler accept a hint that the input is
// zeroed. // zeroed.
const size_t src_length = samples_per_channel * audio_ptr_num_channels; const size_t src_length = samples_per_channel * audio_ptr_num_channels;
int out_length = resampler->Resample(audio_ptr, src_length, int out_length =
dst_frame->mutable_data(), resampler->Resample(audio_ptr, src_length, dst_frame->mutable_data(),
AudioFrame::kMaxDataSizeSamples); AudioFrame::kMaxDataSizeSamples);
if (out_length == -1) { if (out_length == -1) {
FATAL() << "Resample failed: audio_ptr = " << audio_ptr FATAL() << "Resample failed: audio_ptr = " << audio_ptr


@ -113,7 +113,8 @@ void VerifyParams(const AudioFrame& ref_frame, const AudioFrame& test_frame) {
// Computes the best SNR based on the error between |ref_frame| and // Computes the best SNR based on the error between |ref_frame| and
// |test_frame|. It allows for up to a |max_delay| in samples between the // |test_frame|. It allows for up to a |max_delay| in samples between the
// signals to compensate for the resampling delay. // signals to compensate for the resampling delay.
float ComputeSNR(const AudioFrame& ref_frame, const AudioFrame& test_frame, float ComputeSNR(const AudioFrame& ref_frame,
const AudioFrame& test_frame,
size_t max_delay) { size_t max_delay) {
VerifyParams(ref_frame, test_frame); VerifyParams(ref_frame, test_frame);
float best_snr = 0; float best_snr = 0;
@ -123,8 +124,9 @@ float ComputeSNR(const AudioFrame& ref_frame, const AudioFrame& test_frame,
float variance = 0; float variance = 0;
const int16_t* ref_frame_data = ref_frame.data(); const int16_t* ref_frame_data = ref_frame.data();
const int16_t* test_frame_data = test_frame.data(); const int16_t* test_frame_data = test_frame.data();
for (size_t i = 0; i < ref_frame.samples_per_channel_ * for (size_t i = 0;
ref_frame.num_channels_ - delay; i++) { i < ref_frame.samples_per_channel_ * ref_frame.num_channels_ - delay;
i++) {
int error = ref_frame_data[i] - test_frame_data[i + delay]; int error = ref_frame_data[i] - test_frame_data[i + delay];
mse += error * error; mse += error * error;
variance += ref_frame_data[i] * ref_frame_data[i]; variance += ref_frame_data[i] * ref_frame_data[i];
@ -161,8 +163,8 @@ void UtilityTest::RunResampleTest(int src_channels,
const int16_t kSrcCh2 = 15; const int16_t kSrcCh2 = 15;
const int16_t kSrcCh3 = 22; const int16_t kSrcCh3 = 22;
const int16_t kSrcCh4 = 8; const int16_t kSrcCh4 = 8;
const float resampling_factor = (1.0 * src_sample_rate_hz) / const float resampling_factor =
dst_sample_rate_hz; (1.0 * src_sample_rate_hz) / dst_sample_rate_hz;
const float dst_ch1 = resampling_factor * kSrcCh1; const float dst_ch1 = resampling_factor * kSrcCh1;
const float dst_ch2 = resampling_factor * kSrcCh2; const float dst_ch2 = resampling_factor * kSrcCh2;
const float dst_ch3 = resampling_factor * kSrcCh3; const float dst_ch3 = resampling_factor * kSrcCh3;
@ -258,8 +260,7 @@ TEST_F(UtilityTest, RemixAndResampleSucceeds) {
for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) { for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) { for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
for (int src_channel = 0; src_channel < kSrcChannelsSize; for (int src_channel = 0; src_channel < kSrcChannelsSize; src_channel++) {
src_channel++) {
for (int dst_channel = 0; dst_channel < kDstChannelsSize; for (int dst_channel = 0; dst_channel < kDstChannelsSize;
dst_channel++) { dst_channel++) {
RunResampleTest(kSrcChannels[src_channel], kSampleRates[src_rate], RunResampleTest(kSrcChannels[src_channel], kSampleRates[src_rate],


@ -13,10 +13,12 @@
#include "system_wrappers/include/sleep.h" #include "system_wrappers/include/sleep.h"
#include "test/testsupport/fileutils.h" #include "test/testsupport/fileutils.h"
DEFINE_int(sample_rate_hz, 16000, DEFINE_int(sample_rate_hz,
16000,
"Sample rate (Hz) of the produced audio files."); "Sample rate (Hz) of the produced audio files.");
DEFINE_bool(quick, false, DEFINE_bool(quick,
false,
"Don't do the full audio recording. " "Don't do the full audio recording. "
"Used to quickly check that the test runs without crashing."); "Used to quickly check that the test runs without crashing.");
@ -69,22 +71,21 @@ class AudioQualityTest : public AudioEndToEndTest {
// Output information about the input and output audio files so that further // Output information about the input and output audio files so that further
// processing can be done by an external process. // processing can be done by an external process.
printf("TEST %s %s %s\n", test_info->name(), printf("TEST %s %s %s\n", test_info->name(), AudioInputFile().c_str(),
AudioInputFile().c_str(), AudioOutputFile().c_str()); AudioOutputFile().c_str());
} }
}; };
class Mobile2GNetworkTest : public AudioQualityTest { class Mobile2GNetworkTest : public AudioQualityTest {
-  void ModifyAudioConfigs(AudioSendStream::Config* send_config,
-      std::vector<AudioReceiveStream::Config>* receive_configs) override {
+  void ModifyAudioConfigs(
+      AudioSendStream::Config* send_config,
+      std::vector<AudioReceiveStream::Config>* receive_configs) override {
     send_config->send_codec_spec = AudioSendStream::Config::SendCodecSpec(
         test::CallTest::kAudioSendPayloadType,
         {"OPUS",
          48000,
          2,
-         {{"maxaveragebitrate", "6000"},
-          {"ptime", "60"},
-          {"stereo", "1"}}});
+         {{"maxaveragebitrate", "6000"}, {"ptime", "60"}, {"stereo", "1"}}});
} }
FakeNetworkPipe::Config GetNetworkPipeConfig() const override { FakeNetworkPipe::Config GetNetworkPipeConfig() const override {


@ -412,11 +412,10 @@ TEST_P(TransportFeedbackPacketLossTrackerTest, InsertionCompletesTwoPairs) {
TEST_P(TransportFeedbackPacketLossTrackerTest, SanityGapsInSequenceNumbers) { TEST_P(TransportFeedbackPacketLossTrackerTest, SanityGapsInSequenceNumbers) {
TransportFeedbackPacketLossTracker tracker(50 * kDefaultSendIntervalMs, 5, 1); TransportFeedbackPacketLossTracker tracker(50 * kDefaultSendIntervalMs, 5, 1);
-  SendPackets(&tracker,
-              {static_cast<uint16_t>(base_),
-               static_cast<uint16_t>(base_ + 2),
-               static_cast<uint16_t>(base_ + 4),
-               static_cast<uint16_t>(base_ + 6),
-               static_cast<uint16_t>(base_ + 8)},
-              kDefaultSendIntervalMs);
+  SendPackets(
+      &tracker,
+      {static_cast<uint16_t>(base_), static_cast<uint16_t>(base_ + 2),
+       static_cast<uint16_t>(base_ + 4), static_cast<uint16_t>(base_ + 6),
+       static_cast<uint16_t>(base_ + 8)},
+      kDefaultSendIntervalMs);
@ -425,19 +424,16 @@ TEST_P(TransportFeedbackPacketLossTrackerTest, SanityGapsInSequenceNumbers) {
// Expected window contents: [] -> [11011]. // Expected window contents: [] -> [11011].
AddTransportFeedbackAndValidate( AddTransportFeedbackAndValidate(
// Note: Left packets belong to this stream, right ones ignored. // Note: Left packets belong to this stream, right ones ignored.
-      &tracker, base_, {true, false,
-                        true, false,
-                        false, false,
-                        true, false,
-                        true, true});
+      &tracker, base_,
+      {true, false, true, false, false, false, true, false, true, true});
ValidatePacketLossStatistics(tracker, 1.0f / 5.0f, 1.0f / 4.0f); ValidatePacketLossStatistics(tracker, 1.0f / 5.0f, 1.0f / 4.0f);
// Create gap by sending [base + 10] but not acking it. // Create gap by sending [base + 10] but not acking it.
// Note: Acks for [base + 11] and [base + 13] ignored (other stream). // Note: Acks for [base + 11] and [base + 13] ignored (other stream).
// Expected window contents: [11011] -> [11011-GAP-01]. // Expected window contents: [11011] -> [11011-GAP-01].
-  SendPackets(&tracker,
-              {static_cast<uint16_t>(base_ + 10),
-               static_cast<uint16_t>(base_ + 12),
-               static_cast<uint16_t>(base_ + 14)},
-              kDefaultSendIntervalMs);
+  SendPackets(
+      &tracker,
+      {static_cast<uint16_t>(base_ + 10), static_cast<uint16_t>(base_ + 12),
+       static_cast<uint16_t>(base_ + 14)},
+      kDefaultSendIntervalMs);
AddTransportFeedbackAndValidate(&tracker, base_ + 11, AddTransportFeedbackAndValidate(&tracker, base_ + 11,


@ -159,7 +159,8 @@ void AudioFrameOperations::QuadToMono(const int16_t* src_audio,
for (size_t i = 0; i < samples_per_channel; i++) { for (size_t i = 0; i < samples_per_channel; i++) {
dst_audio[i] = dst_audio[i] =
(static_cast<int32_t>(src_audio[4 * i]) + src_audio[4 * i + 1] + (static_cast<int32_t>(src_audio[4 * i]) + src_audio[4 * i + 1] +
src_audio[4 * i + 2] + src_audio[4 * i + 3]) >> 2; src_audio[4 * i + 2] + src_audio[4 * i + 3]) >>
2;
} }
} }


@ -50,16 +50,15 @@ void SetFrameData(int16_t left, int16_t right, AudioFrame* frame) {
void SetFrameData(int16_t data, AudioFrame* frame) { void SetFrameData(int16_t data, AudioFrame* frame) {
int16_t* frame_data = frame->mutable_data(); int16_t* frame_data = frame->mutable_data();
for (size_t i = 0; for (size_t i = 0; i < frame->samples_per_channel_ * frame->num_channels_;
i < frame->samples_per_channel_ * frame->num_channels_; i++) { i++) {
frame_data[i] = data; frame_data[i] = data;
} }
} }
void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) { void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
EXPECT_EQ(frame1.num_channels_, frame2.num_channels_); EXPECT_EQ(frame1.num_channels_, frame2.num_channels_);
EXPECT_EQ(frame1.samples_per_channel_, EXPECT_EQ(frame1.samples_per_channel_, frame2.samples_per_channel_);
frame2.samples_per_channel_);
const int16_t* frame1_data = frame1.data(); const int16_t* frame1_data = frame1.data();
const int16_t* frame2_data = frame2.data(); const int16_t* frame2_data = frame2.data();
for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_; for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_;
@ -69,8 +68,11 @@ void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
EXPECT_EQ(frame1.muted(), frame2.muted()); EXPECT_EQ(frame1.muted(), frame2.muted());
} }
-void InitFrame(AudioFrame* frame, size_t channels, size_t samples_per_channel,
-               int16_t left_data, int16_t right_data) {
+void InitFrame(AudioFrame* frame,
+               size_t channels,
+               size_t samples_per_channel,
+               int16_t left_data,
+               int16_t right_data) {
RTC_DCHECK(frame); RTC_DCHECK(frame);
RTC_DCHECK_GE(2, channels); RTC_DCHECK_GE(2, channels);
RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples, RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples,
@ -90,7 +92,9 @@ int16_t GetChannelData(const AudioFrame& frame, size_t channel, size_t index) {
return frame.data()[index * frame.num_channels_ + channel]; return frame.data()[index * frame.num_channels_ + channel];
} }
-void VerifyFrameDataBounds(const AudioFrame& frame, size_t channel, int16_t max,
-                           int16_t min) {
+void VerifyFrameDataBounds(const AudioFrame& frame,
+                           size_t channel,
+                           int16_t max,
+                           int16_t min) {
for (size_t i = 0; i < frame.samples_per_channel_; ++i) { for (size_t i = 0; i < frame.samples_per_channel_; ++i) {
int16_t s = GetChannelData(frame, channel, i); int16_t s = GetChannelData(frame, channel, i);


@ -150,8 +150,10 @@ class AudioSendStream {
std::unique_ptr<webrtc::AudioFrame> audio_frame) = 0; std::unique_ptr<webrtc::AudioFrame> audio_frame) = 0;
// TODO(solenberg): Make payload_type a config property instead. // TODO(solenberg): Make payload_type a config property instead.
virtual bool SendTelephoneEvent(int payload_type, int payload_frequency, virtual bool SendTelephoneEvent(int payload_type,
int event, int duration_ms) = 0; int payload_frequency,
int event,
int duration_ms) = 0;
virtual void SetMuted(bool muted) = 0; virtual void SetMuted(bool muted) = 0;


@ -42,7 +42,8 @@ class AudioState : public rtc::RefCountInterface {
struct Stats { struct Stats {
// Audio peak level (max(abs())), linearly on the interval [0,32767]. // Audio peak level (max(abs())), linearly on the interval [0,32767].
int32_t audio_level = -1; int32_t audio_level = -1;
// See: https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy // See:
// https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy
double total_energy = 0.0f; double total_energy = 0.0f;
double total_duration = 0.0f; double total_duration = 0.0f;
}; };


@ -110,8 +110,7 @@ void BitrateAllocator::OnNetworkChanged(uint32_t target_bitrate_bps,
for (auto& config : bitrate_observer_configs_) { for (auto& config : bitrate_observer_configs_) {
uint32_t allocated_bitrate = allocation[config.observer]; uint32_t allocated_bitrate = allocation[config.observer];
uint32_t protection_bitrate = config.observer->OnBitrateUpdated( uint32_t protection_bitrate = config.observer->OnBitrateUpdated(
allocated_bitrate, last_fraction_loss_, last_rtt_, allocated_bitrate, last_fraction_loss_, last_rtt_, last_bwe_period_ms_);
last_bwe_period_ms_);
if (allocated_bitrate == 0 && config.allocated_bitrate_bps > 0) { if (allocated_bitrate == 0 && config.allocated_bitrate_bps > 0) {
if (target_bitrate_bps > 0) if (target_bitrate_bps > 0)


@ -205,8 +205,7 @@ class BitrateAllocator : public BitrateAllocatorInterface {
int max_multiplier, int max_multiplier,
ObserverAllocation* allocation) ObserverAllocation* allocation)
RTC_RUN_ON(&sequenced_checker_); RTC_RUN_ON(&sequenced_checker_);
bool EnoughBitrateForAllObservers(uint32_t bitrate, bool EnoughBitrateForAllObservers(uint32_t bitrate, uint32_t sum_min_bitrates)
uint32_t sum_min_bitrates)
RTC_RUN_ON(&sequenced_checker_); RTC_RUN_ON(&sequenced_checker_);
// From the available |bitrate|, each observer will be allocated a // From the available |bitrate|, each observer will be allocated a


@ -78,7 +78,7 @@ class TestBitrateObserver : public BitrateAllocatorObserver {
namespace { namespace {
constexpr int64_t kDefaultProbingIntervalMs = 3000; constexpr int64_t kDefaultProbingIntervalMs = 3000;
const double kDefaultBitratePriority = 1.0; const double kDefaultBitratePriority = 1.0;
} } // namespace
class BitrateAllocatorTest : public ::testing::Test { class BitrateAllocatorTest : public ::testing::Test {
protected: protected:


@ -239,7 +239,8 @@ class Call final : public webrtc::Call,
bool has_packet_feedback) override; bool has_packet_feedback) override;
private: private:
DeliveryStatus DeliverRtcp(MediaType media_type, const uint8_t* packet, DeliveryStatus DeliverRtcp(MediaType media_type,
const uint8_t* packet,
size_t length); size_t length);
DeliveryStatus DeliverRtp(MediaType media_type, DeliveryStatus DeliverRtp(MediaType media_type,
rtc::CopyOnWriteBuffer packet, rtc::CopyOnWriteBuffer packet,


@ -30,12 +30,7 @@
namespace webrtc { namespace webrtc {
-enum class MediaType {
-  ANY,
-  AUDIO,
-  VIDEO,
-  DATA
-};
+enum class MediaType { ANY, AUDIO, VIDEO, DATA };
class PacketReceiver { class PacketReceiver {
public: public:


@ -49,12 +49,8 @@ namespace webrtc {
class CallPerfTest : public test::CallTest { class CallPerfTest : public test::CallTest {
protected: protected:
-  enum class FecMode {
-    kOn, kOff
-  };
-  enum class CreateOrder {
-    kAudioFirst, kVideoFirst
-  };
+  enum class FecMode { kOn, kOff };
+  enum class CreateOrder { kAudioFirst, kVideoFirst };
void TestAudioVideoSync(FecMode fec, void TestAudioVideoSync(FecMode fec,
CreateOrder create_first, CreateOrder create_first,
float video_ntp_speed, float video_ntp_speed,
@ -545,8 +541,7 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
void ModifyVideoConfigs( void ModifyVideoConfigs(
VideoSendStream::Config* send_config, VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs, std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override { VideoEncoderConfig* encoder_config) override {}
}
void PerformTest() override { void PerformTest() override {
EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback."; EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback.";
@ -643,7 +638,9 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
RunBaseTest(&test); RunBaseTest(&test);
} }
TEST_F(CallPerfTest, PadsToMinTransmitBitrate) { TestMinTransmitBitrate(true); } TEST_F(CallPerfTest, PadsToMinTransmitBitrate) {
TestMinTransmitBitrate(true);
}
TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) { TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) {
TestMinTransmitBitrate(false); TestMinTransmitBitrate(false);
@ -938,11 +935,9 @@ void CallPerfTest::TestMinAudioVideoBitrate(
// TODO(bugs.webrtc.org/8878) // TODO(bugs.webrtc.org/8878)
#if defined(WEBRTC_MAC) #if defined(WEBRTC_MAC)
-#define MAYBE_MinVideoAndAudioBitrate \
-  DISABLED_MinVideoAndAudioBitrate
+#define MAYBE_MinVideoAndAudioBitrate DISABLED_MinVideoAndAudioBitrate
 #else
-#define MAYBE_MinVideoAndAudioBitrate \
-  MinVideoAndAudioBitrate
+#define MAYBE_MinVideoAndAudioBitrate MinVideoAndAudioBitrate
#endif #endif
TEST_F(CallPerfTest, MAYBE_MinVideoAndAudioBitrate) { TEST_F(CallPerfTest, MAYBE_MinVideoAndAudioBitrate) {
TestMinAudioVideoBitrate(false, 110, 40, -10, 10000, 70000, 200000); TestMinAudioVideoBitrate(false, 110, 40, -10, 10000, 70000, 200000);


@ -15,8 +15,8 @@
#include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/test/mock_audio_mixer.h" #include "api/test/mock_audio_mixer.h"
#include "audio/audio_send_stream.h"
#include "audio/audio_receive_stream.h" #include "audio/audio_receive_stream.h"
#include "audio/audio_send_stream.h"
#include "call/audio_state.h" #include "call/audio_state.h"
#include "call/call.h" #include "call/call.h"
#include "logging/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/rtc_event_log.h"
@ -250,7 +250,6 @@ TEST(CallTest, MultipleFlexfecReceiveStreamsProtectingSingleVideoStream) {
} }
} }
TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) { TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) {
constexpr uint32_t kSSRC = 12345; constexpr uint32_t kSSRC = 12345;
CallHelper call; CallHelper call;
@ -277,5 +276,4 @@ TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) {
EXPECT_EQ(rtp_state1.media_has_been_sent, rtp_state2.media_has_been_sent); EXPECT_EQ(rtp_state1.media_has_been_sent, rtp_state2.media_has_been_sent);
} }
} // namespace webrtc } // namespace webrtc


@ -16,8 +16,8 @@
#include <string> #include <string>
#include <vector> #include <vector>
#include "api/rtp_headers.h"
#include "api/call/transport.h" #include "api/call/transport.h"
#include "api/rtp_headers.h"
#include "api/rtpparameters.h" #include "api/rtpparameters.h"
#include "call/rtp_packet_sink_interface.h" #include "call/rtp_packet_sink_interface.h"
#include "common_types.h" // NOLINT(build/include) #include "common_types.h" // NOLINT(build/include)


@ -75,8 +75,7 @@ RampUpTester::RampUpTester(size_t num_video_streams,
EXPECT_LE(num_audio_streams_, 1u); EXPECT_LE(num_audio_streams_, 1u);
} }
RampUpTester::~RampUpTester() { RampUpTester::~RampUpTester() {}
}
Call::Config RampUpTester::GetSenderCallConfig() { Call::Config RampUpTester::GetSenderCallConfig() {
Call::Config call_config(&event_log_); Call::Config call_config(&event_log_);


@@ -346,8 +346,8 @@ TEST_F(RtpDemuxerTest, OnRtpPacketCalledOnCorrectSinkByRsid) {
   }
   for (size_t i = 0; i < arraysize(rsids); i++) {
-    auto packet = CreatePacketWithSsrcRsid(rtc::checked_cast<uint32_t>(i),
-                                           rsids[i]);
+    auto packet =
+        CreatePacketWithSsrcRsid(rtc::checked_cast<uint32_t>(i), rsids[i]);
     EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1);
     EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
   }
@@ -361,8 +361,8 @@ TEST_F(RtpDemuxerTest, OnRtpPacketCalledOnCorrectSinkByMid) {
   }
   for (size_t i = 0; i < arraysize(mids); i++) {
-    auto packet = CreatePacketWithSsrcMid(rtc::checked_cast<uint32_t>(i),
-                                          mids[i]);
+    auto packet =
+        CreatePacketWithSsrcMid(rtc::checked_cast<uint32_t>(i), mids[i]);
     EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1);
     EXPECT_TRUE(demuxer_.OnRtpPacket(*packet));
   }


@@ -44,8 +44,7 @@ RtpStreamReceiverController::RtpStreamReceiverController() {
 RtpStreamReceiverController::~RtpStreamReceiverController() = default;
 std::unique_ptr<RtpStreamReceiverInterface>
-RtpStreamReceiverController::CreateReceiver(
-    uint32_t ssrc,
+RtpStreamReceiverController::CreateReceiver(uint32_t ssrc,
     RtpPacketSinkInterface* sink) {
   return rtc::MakeUnique<Receiver>(this, ssrc, sink);
 }


@@ -83,8 +83,7 @@ class RtpTransportControllerSend final
   void OnSentPacket(const rtc::SentPacket& sent_packet) override;
   void SetSdpBitrateParameters(const BitrateConstraints& constraints) override;
-  void SetClientBitratePreferences(
-      const BitrateSettings& preferences) override;
+  void SetClientBitratePreferences(const BitrateSettings& preferences) override;
  private:
   const Clock* const clock_;


@@ -63,8 +63,7 @@ void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) {
   media_packet.set_recovered(true);
   // Skip the RTX header.
-  rtc::ArrayView<const uint8_t> rtx_payload =
-      payload.subview(kRtxHeaderSize);
+  rtc::ArrayView<const uint8_t> rtx_payload = payload.subview(kRtxHeaderSize);
   uint8_t* media_payload = media_packet.AllocatePayload(rtx_payload.size());
   RTC_DCHECK(media_payload != nullptr);


@@ -73,8 +73,8 @@ TEST(RtxReceiveStreamTest, RestoresPacketPayload) {
   RtpPacketReceived rtx_packet;
   EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView<const uint8_t>(kRtxPacket)));
-  EXPECT_CALL(media_sink, OnRtpPacket(_)).WillOnce(testing::Invoke(
-      [](const RtpPacketReceived& packet) {
+  EXPECT_CALL(media_sink, OnRtpPacket(_))
+      .WillOnce(testing::Invoke([](const RtpPacketReceived& packet) {
         EXPECT_EQ(packet.SequenceNumber(), kMediaSeqno);
         EXPECT_EQ(packet.Ssrc(), kMediaSSRC);
         EXPECT_EQ(packet.PayloadType(), kMediaPayloadType);
@@ -124,15 +124,15 @@ TEST(RtxReceiveStreamTest, CopiesRtpHeaderExtensions) {
   RtpHeaderExtensionMap extension_map;
   extension_map.RegisterByType(3, kRtpExtensionVideoRotation);
   RtpPacketReceived rtx_packet(&extension_map);
-  EXPECT_TRUE(rtx_packet.Parse(
-      rtc::ArrayView<const uint8_t>(kRtxPacketWithCVO)));
+  EXPECT_TRUE(
+      rtx_packet.Parse(rtc::ArrayView<const uint8_t>(kRtxPacketWithCVO)));
   VideoRotation rotation = kVideoRotation_0;
   EXPECT_TRUE(rtx_packet.GetExtension<VideoOrientation>(&rotation));
   EXPECT_EQ(kVideoRotation_90, rotation);
-  EXPECT_CALL(media_sink, OnRtpPacket(_)).WillOnce(testing::Invoke(
-      [](const RtpPacketReceived& packet) {
+  EXPECT_CALL(media_sink, OnRtpPacket(_))
+      .WillOnce(testing::Invoke([](const RtpPacketReceived& packet) {
         EXPECT_EQ(packet.SequenceNumber(), kMediaSeqno);
         EXPECT_EQ(packet.Ssrc(), kMediaSSRC);
         EXPECT_EQ(packet.PayloadType(), kMediaPayloadType);


@@ -26,14 +26,14 @@ class MockAudioSendStream : public AudioSendStream {
   MOCK_METHOD0(Start, void());
   MOCK_METHOD0(Stop, void());
   // GMock doesn't like move-only types, such as std::unique_ptr.
-  virtual void SendAudioData(
-      std::unique_ptr<webrtc::AudioFrame> audio_frame) {
+  virtual void SendAudioData(std::unique_ptr<webrtc::AudioFrame> audio_frame) {
     SendAudioDataForMock(audio_frame.get());
   }
-  MOCK_METHOD1(SendAudioDataForMock,
-               void(webrtc::AudioFrame* audio_frame));
+  MOCK_METHOD1(SendAudioDataForMock, void(webrtc::AudioFrame* audio_frame));
   MOCK_METHOD4(SendTelephoneEvent,
-               bool(int payload_type, int payload_frequency, int event,
+               bool(int payload_type,
+                    int payload_frequency,
+                    int event,
                     int duration_ms));
   MOCK_METHOD1(SetMuted, void(bool muted));
   MOCK_CONST_METHOD0(GetStats, Stats());


@@ -50,8 +50,7 @@ class MockRtpTransportControllerSend
   MOCK_METHOD1(EnablePeriodicAlrProbing, void(bool));
   MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket&));
   MOCK_METHOD1(SetSdpBitrateParameters, void(const BitrateConstraints&));
-  MOCK_METHOD1(SetClientBitratePreferences,
-               void(const BitrateSettings&));
+  MOCK_METHOD1(SetClientBitratePreferences, void(const BitrateSettings&));
 };
 }  // namespace webrtc
 #endif  // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_


@@ -16,12 +16,12 @@
 #include <string>
 #include <vector>
-#include "api/rtp_headers.h"
 #include "api/call/transport.h"
+#include "api/rtp_headers.h"
 #include "api/rtpparameters.h"
 #include "api/video/video_content_type.h"
-#include "api/video/video_timing.h"
 #include "api/video/video_sink_interface.h"
+#include "api/video/video_timing.h"
 #include "call/rtp_config.h"
 #include "common_types.h"  // NOLINT(build/include)
 #include "common_video/include/frame_callback.h"


@@ -26,12 +26,16 @@ namespace webrtc {
 class CopyConverter : public AudioConverter {
  public:
-  CopyConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
+  CopyConverter(size_t src_channels,
+                size_t src_frames,
+                size_t dst_channels,
                 size_t dst_frames)
       : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
   ~CopyConverter() override{};
-  void Convert(const float* const* src, size_t src_size, float* const* dst,
+  void Convert(const float* const* src,
+               size_t src_size,
+               float* const* dst,
                size_t dst_capacity) override {
     CheckSizes(src_size, dst_capacity);
     if (src != dst) {
@@ -43,12 +47,16 @@ class CopyConverter : public AudioConverter {
 class UpmixConverter : public AudioConverter {
  public:
-  UpmixConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
+  UpmixConverter(size_t src_channels,
+                 size_t src_frames,
+                 size_t dst_channels,
                  size_t dst_frames)
       : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
   ~UpmixConverter() override{};
-  void Convert(const float* const* src, size_t src_size, float* const* dst,
+  void Convert(const float* const* src,
+               size_t src_size,
+               float* const* dst,
                size_t dst_capacity) override {
     CheckSizes(src_size, dst_capacity);
     for (size_t i = 0; i < dst_frames(); ++i) {
@@ -61,13 +69,16 @@ class UpmixConverter : public AudioConverter {
 class DownmixConverter : public AudioConverter {
  public:
-  DownmixConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
+  DownmixConverter(size_t src_channels,
+                   size_t src_frames,
+                   size_t dst_channels,
                    size_t dst_frames)
-      : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {
-  }
+      : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
   ~DownmixConverter() override{};
-  void Convert(const float* const* src, size_t src_size, float* const* dst,
+  void Convert(const float* const* src,
+               size_t src_size,
+               float* const* dst,
                size_t dst_capacity) override {
     CheckSizes(src_size, dst_capacity);
     float* dst_mono = dst[0];
@@ -82,7 +93,9 @@ class DownmixConverter : public AudioConverter {
 class ResampleConverter : public AudioConverter {
  public:
-  ResampleConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
+  ResampleConverter(size_t src_channels,
+                    size_t src_frames,
+                    size_t dst_channels,
                     size_t dst_frames)
       : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {
     resamplers_.reserve(src_channels);
@@ -92,7 +105,9 @@ class ResampleConverter : public AudioConverter {
   }
   ~ResampleConverter() override{};
-  void Convert(const float* const* src, size_t src_size, float* const* dst,
+  void Convert(const float* const* src,
+               size_t src_size,
+               float* const* dst,
                size_t dst_capacity) override {
     CheckSizes(src_size, dst_capacity);
     for (size_t i = 0; i < resamplers_.size(); ++i)
@@ -119,17 +134,17 @@ class CompositionConverter : public AudioConverter {
   }
   ~CompositionConverter() override{};
-  void Convert(const float* const* src, size_t src_size, float* const* dst,
+  void Convert(const float* const* src,
+               size_t src_size,
+               float* const* dst,
                size_t dst_capacity) override {
     converters_.front()->Convert(src, src_size, buffers_.front()->channels(),
                                  buffers_.front()->size());
     for (size_t i = 2; i < converters_.size(); ++i) {
       auto& src_buffer = buffers_[i - 2];
       auto& dst_buffer = buffers_[i - 1];
-      converters_[i]->Convert(src_buffer->channels(),
-                              src_buffer->size(),
-                              dst_buffer->channels(),
-                              dst_buffer->size());
+      converters_[i]->Convert(src_buffer->channels(), src_buffer->size(),
+                              dst_buffer->channels(), dst_buffer->size());
     }
     converters_.back()->Convert(buffers_.back()->channels(),
                                 buffers_.back()->size(), dst, dst_capacity);
@@ -175,8 +190,8 @@ std::unique_ptr<AudioConverter> AudioConverter::Create(size_t src_channels,
       sp.reset(new ResampleConverter(src_channels, src_frames, dst_channels,
                                      dst_frames));
   } else {
-    sp.reset(new CopyConverter(src_channels, src_frames, dst_channels,
-                               dst_frames));
+    sp.reset(
+        new CopyConverter(src_channels, src_frames, dst_channels, dst_frames));
   }
   return sp;
@@ -184,13 +199,12 @@ std::unique_ptr<AudioConverter> AudioConverter::Create(size_t src_channels,
 // For CompositionConverter.
 AudioConverter::AudioConverter()
-    : src_channels_(0),
-      src_frames_(0),
-      dst_channels_(0),
-      dst_frames_(0) {}
+    : src_channels_(0), src_frames_(0), dst_channels_(0), dst_frames_(0) {}
-AudioConverter::AudioConverter(size_t src_channels, size_t src_frames,
-                               size_t dst_channels, size_t dst_frames)
+AudioConverter::AudioConverter(size_t src_channels,
+                               size_t src_frames,
+                               size_t dst_channels,
+                               size_t dst_frames)
     : src_channels_(src_channels),
       src_frames_(src_frames),
      dst_channels_(dst_channels),


@@ -37,8 +37,10 @@ class AudioConverter {
   // capacity of |dst_capacity|. Both point to a series of buffers containing
   // the samples for each channel. The sizes must correspond to the format
   // passed to Create().
-  virtual void Convert(const float* const* src, size_t src_size,
-                       float* const* dst, size_t dst_capacity) = 0;
+  virtual void Convert(const float* const* src,
+                       size_t src_size,
+                       float* const* dst,
+                       size_t dst_capacity) = 0;
   size_t src_channels() const { return src_channels_; }
   size_t src_frames() const { return src_frames_; }
@@ -47,7 +49,9 @@ class AudioConverter {
  protected:
   AudioConverter();
-  AudioConverter(size_t src_channels, size_t src_frames, size_t dst_channels,
+  AudioConverter(size_t src_channels,
+                 size_t src_frames,
+                 size_t dst_channels,
                  size_t dst_frames);
   // Helper to RTC_CHECK that inputs are correctly sized.


@@ -8,8 +8,8 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
-#include <cmath>
 #include <algorithm>
+#include <cmath>
 #include <memory>
 #include <vector>
@@ -52,8 +52,7 @@ float ComputeSNR(const ChannelBuffer<float>& ref,
   // Search within one sample of the expected delay.
   for (size_t delay = std::max(expected_delay, static_cast<size_t>(1)) - 1;
-       delay <= std::min(expected_delay + 1, ref.num_frames());
-       ++delay) {
+       delay <= std::min(expected_delay + 1, ref.num_frames()); ++delay) {
     float mse = 0;
     float variance = 0;
     float mean = 0;
@@ -92,8 +91,8 @@ void RunAudioConverterTest(size_t src_channels,
                            int dst_sample_rate_hz) {
   const float kSrcLeft = 0.0002f;
   const float kSrcRight = 0.0001f;
-  const float resampling_factor = (1.f * src_sample_rate_hz) /
-      dst_sample_rate_hz;
+  const float resampling_factor =
+      (1.f * src_sample_rate_hz) / dst_sample_rate_hz;
   const float dst_left = resampling_factor * kSrcLeft;
   const float dst_right = resampling_factor * kSrcRight;
   const float dst_mono = (dst_left + dst_right) / 2;
@@ -124,13 +123,15 @@ void RunAudioConverterTest(size_t src_channels,
   ScopedBuffer ref_buffer = CreateBuffer(ref_data, dst_frames);
   // The sinc resampler has a known delay, which we compute here.
-  const size_t delay_frames = src_sample_rate_hz == dst_sample_rate_hz ? 0 :
-      static_cast<size_t>(
+  const size_t delay_frames =
+      src_sample_rate_hz == dst_sample_rate_hz
+          ? 0
+          : static_cast<size_t>(
           PushSincResampler::AlgorithmicDelaySeconds(src_sample_rate_hz) *
           dst_sample_rate_hz);
   // SNR reported on the same line later.
-  printf("(%" PRIuS ", %d Hz) -> (%" PRIuS ", %d Hz) ",
-         src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
+  printf("(%" PRIuS ", %d Hz) -> (%" PRIuS ", %d Hz) ", src_channels,
+         src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
   std::unique_ptr<AudioConverter> converter = AudioConverter::Create(
       src_channels, src_frames, dst_channels, dst_frames);


@@ -28,7 +28,8 @@ AudioRingBuffer::~AudioRingBuffer() {
     WebRtc_FreeBuffer(buf);
 }
-void AudioRingBuffer::Write(const float* const* data, size_t channels,
+void AudioRingBuffer::Write(const float* const* data,
+                            size_t channels,
                             size_t frames) {
   RTC_DCHECK_EQ(buffers_.size(), channels);
   for (size_t i = 0; i < channels; ++i) {


@@ -17,8 +17,8 @@
 namespace webrtc {
-class AudioRingBufferTest :
-    public ::testing::TestWithParam< ::testing::tuple<int, int, int, int> > {
+class AudioRingBufferTest
+    : public ::testing::TestWithParam< ::testing::tuple<int, int, int, int> > {
 };
 void ReadAndWriteTest(const ChannelBuffer<float>& input,
@@ -72,10 +72,8 @@ TEST_P(AudioRingBufferTest, ReadDataMatchesWrittenData) {
       input.channels()[i][j] = (i + 1) * (j + 1);
   ChannelBuffer<float> output(kFrames, static_cast<int>(num_channels));
-  ReadAndWriteTest(input,
-                   ::testing::get<0>(GetParam()),
-                   ::testing::get<1>(GetParam()),
-                   ::testing::get<2>(GetParam()),
+  ReadAndWriteTest(input, ::testing::get<0>(GetParam()),
+                   ::testing::get<1>(GetParam()), ::testing::get<2>(GetParam()),
                    &output);
   // Verify the read data matches the input.
@@ -85,7 +83,8 @@ TEST_P(AudioRingBufferTest, ReadDataMatchesWrittenData) {
 }
 INSTANTIATE_TEST_CASE_P(
-    AudioRingBufferTest, AudioRingBufferTest,
+    AudioRingBufferTest,
+    AudioRingBufferTest,
     ::testing::Combine(::testing::Values(10, 20, 42),  // num_write_chunk_frames
                        ::testing::Values(1, 10, 17),   // num_read_chunk_frames
                        ::testing::Values(100, 256),    // buffer_frames


@@ -41,8 +41,7 @@ void CopyFrames(const float* const* src,
                 float* const* dst,
                 size_t dst_start_index) {
   for (size_t i = 0; i < num_channels; ++i) {
-    memcpy(&dst[i][dst_start_index],
-           &src[i][src_start_index],
+    memcpy(&dst[i][dst_start_index], &src[i][src_start_index],
            num_frames * sizeof(dst[i][dst_start_index]));
   }
 }
@@ -55,8 +54,7 @@ void MoveFrames(const float* const* src,
                 float* const* dst,
                 size_t dst_start_index) {
   for (size_t i = 0; i < num_channels; ++i) {
-    memmove(&dst[i][dst_start_index],
-            &src[i][src_start_index],
+    memmove(&dst[i][dst_start_index], &src[i][src_start_index],
             num_frames * sizeof(dst[i][dst_start_index]));
   }
 }
@@ -184,51 +182,30 @@ void Blocker::ProcessChunk(const float* const* input,
                    block_size_);
     input_buffer_.MoveReadPositionBackward(block_size_ - shift_amount_);
-    ApplyWindow(window_.get(),
-                block_size_,
-                num_input_channels_,
+    ApplyWindow(window_.get(), block_size_, num_input_channels_,
                 input_block_.channels());
-    callback_->ProcessBlock(input_block_.channels(),
-                            block_size_,
-                            num_input_channels_,
-                            num_output_channels_,
+    callback_->ProcessBlock(input_block_.channels(), block_size_,
+                            num_input_channels_, num_output_channels_,
                             output_block_.channels());
-    ApplyWindow(window_.get(),
-                block_size_,
-                num_output_channels_,
+    ApplyWindow(window_.get(), block_size_, num_output_channels_,
                 output_block_.channels());
-    AddFrames(output_buffer_.channels(),
-              first_frame_in_block,
-              output_block_.channels(),
-              0,
-              block_size_,
-              num_output_channels_,
-              output_buffer_.channels(),
-              first_frame_in_block);
+    AddFrames(output_buffer_.channels(), first_frame_in_block,
+              output_block_.channels(), 0, block_size_, num_output_channels_,
+              output_buffer_.channels(), first_frame_in_block);
     first_frame_in_block += shift_amount_;
   }
   // Copy output buffer to output
-  CopyFrames(output_buffer_.channels(),
-             0,
-             chunk_size_,
-             num_output_channels_,
-             output,
-             0);
+  CopyFrames(output_buffer_.channels(), 0, chunk_size_, num_output_channels_,
+             output, 0);
   // Copy output buffer [chunk_size_, chunk_size_ + initial_delay]
   // to output buffer [0, initial_delay], zero the rest.
-  MoveFrames(output_buffer_.channels(),
-             chunk_size,
-             initial_delay_,
-             num_output_channels_,
-             output_buffer_.channels(),
-             0);
-  ZeroOut(output_buffer_.channels(),
-          initial_delay_,
-          chunk_size_,
+  MoveFrames(output_buffer_.channels(), chunk_size, initial_delay_,
+             num_output_channels_, output_buffer_.channels(), 0);
+  ZeroOut(output_buffer_.channels(), initial_delay_, chunk_size_,
          num_output_channels_);
   // Calculate new starting frames.


@@ -71,11 +71,8 @@ class BlockerTest : public ::testing::Test {
     size_t end = chunk_size - 1;
     while (end < num_frames) {
       CopyTo(input_chunk, 0, start, num_input_channels, chunk_size, input);
-      blocker->ProcessChunk(input_chunk,
-                            chunk_size,
-                            num_input_channels,
-                            num_output_channels,
-                            output_chunk);
+      blocker->ProcessChunk(input_chunk, chunk_size, num_input_channels,
+                            num_output_channels, output_chunk);
       CopyTo(output, start, 0, num_output_channels, chunk_size, output_chunk);
       start += chunk_size;
@@ -116,8 +113,7 @@ class BlockerTest : public ::testing::Test {
               size_t num_frames,
               const float* const* src) {
     for (size_t i = 0; i < num_channels; ++i) {
-      memcpy(&dst[i][start_index_dst],
-             &src[i][start_index_src],
+      memcpy(&dst[i][start_index_dst], &src[i][start_index_src],
              num_frames * sizeof(float));
     }
   }
@@ -152,27 +148,15 @@ TEST_F(BlockerTest, TestBlockerMutuallyPrimeChunkandBlockSize) {
   ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels);
   PlusThreeBlockerCallback callback;
-  Blocker blocker(kChunkSize,
-                  kBlockSize,
-                  kNumInputChannels,
-                  kNumOutputChannels,
-                  kWindow,
-                  kShiftAmount,
-                  &callback);
+  Blocker blocker(kChunkSize, kBlockSize, kNumInputChannels, kNumOutputChannels,
+                  kWindow, kShiftAmount, &callback);
-  RunTest(&blocker,
-          kChunkSize,
-          kNumFrames,
-          input_cb.channels(),
-          input_chunk_cb.channels(),
-          actual_output_cb.channels(),
-          output_chunk_cb.channels(),
-          kNumInputChannels,
-          kNumOutputChannels);
+  RunTest(&blocker, kChunkSize, kNumFrames, input_cb.channels(),
+          input_chunk_cb.channels(), actual_output_cb.channels(),
+          output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
   ValidateSignalEquality(expected_output_cb.channels(),
-                         actual_output_cb.channels(),
-                         kNumOutputChannels,
+                         actual_output_cb.channels(), kNumOutputChannels,
                          kNumFrames);
 }
@@ -205,27 +189,15 @@ TEST_F(BlockerTest, TestBlockerMutuallyPrimeShiftAndBlockSize) {
   ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels);
   PlusThreeBlockerCallback callback;
-  Blocker blocker(kChunkSize,
-                  kBlockSize,
-                  kNumInputChannels,
-                  kNumOutputChannels,
-                  kWindow,
-                  kShiftAmount,
-                  &callback);
+  Blocker blocker(kChunkSize, kBlockSize, kNumInputChannels, kNumOutputChannels,
+                  kWindow, kShiftAmount, &callback);
-  RunTest(&blocker,
-          kChunkSize,
-          kNumFrames,
-          input_cb.channels(),
-          input_chunk_cb.channels(),
-          actual_output_cb.channels(),
-          output_chunk_cb.channels(),
-          kNumInputChannels,
-          kNumOutputChannels);
+  RunTest(&blocker, kChunkSize, kNumFrames, input_cb.channels(),
+          input_chunk_cb.channels(), actual_output_cb.channels(),
+          output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
   ValidateSignalEquality(expected_output_cb.channels(),
-                         actual_output_cb.channels(),
-                         kNumOutputChannels,
+                         actual_output_cb.channels(), kNumOutputChannels,
                          kNumFrames);
 }
@@ -258,27 +230,15 @@ TEST_F(BlockerTest, TestBlockerNoOverlap) {
   ChannelBuffer<float> output_chunk_cb(kChunkSize, kNumOutputChannels);
   PlusThreeBlockerCallback callback;
-  Blocker blocker(kChunkSize,
-                  kBlockSize,
-                  kNumInputChannels,
-                  kNumOutputChannels,
-                  kWindow,
-                  kShiftAmount,
-                  &callback);
+  Blocker blocker(kChunkSize, kBlockSize, kNumInputChannels, kNumOutputChannels,
+                  kWindow, kShiftAmount, &callback);
-  RunTest(&blocker,
-          kChunkSize,
-          kNumFrames,
-          input_cb.channels(),
-          input_chunk_cb.channels(),
-          actual_output_cb.channels(),
-          output_chunk_cb.channels(),
-          kNumInputChannels,
-          kNumOutputChannels);
+  RunTest(&blocker, kChunkSize, kNumFrames, input_cb.channels(),
+          input_chunk_cb.channels(), actual_output_cb.channels(),
+          output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
   ValidateSignalEquality(expected_output_cb.channels(),
-                         actual_output_cb.channels(),
-                         kNumOutputChannels,
+                         actual_output_cb.channels(), kNumOutputChannels,
                          kNumFrames);
 }
@@ -286,14 +246,14 @@ TEST_F(BlockerTest, InitialDelaysAreMinimum) {
   const size_t kNumInputChannels = 3;
   const size_t kNumOutputChannels = 2;
   const size_t kNumFrames = 1280;
-  const size_t kChunkSize[] =
-      {80, 80, 80, 80, 80, 80, 160, 160, 160, 160, 160, 160};
+  const size_t kChunkSize[] = {80, 80, 80, 80, 80, 80,
+                               160, 160, 160, 160, 160, 160};
-  const size_t kBlockSize[] =
-      {64, 64, 64, 128, 128, 128, 128, 128, 128, 256, 256, 256};
+  const size_t kBlockSize[] = {64, 64, 64, 128, 128, 128,
+                               128, 128, 128, 256, 256, 256};
-  const size_t kShiftAmount[] =
-      {16, 32, 64, 32, 64, 128, 32, 64, 128, 64, 128, 256};
+  const size_t kShiftAmount[] = {16, 32, 64, 32, 64, 128,
+                                 32, 64, 128, 64, 128, 256};
-  const size_t kInitialDelay[] =
-      {48, 48, 48, 112, 112, 112, 96, 96, 96, 224, 224, 224};
+  const size_t kInitialDelay[] = {48, 48, 48, 112, 112, 112,
+                                  96, 96, 96, 224, 224, 224};
   float input[kNumInputChannels][kNumFrames];
   for (size_t i = 0; i < kNumInputChannels; ++i) {
@@ -317,27 +277,15 @@ TEST_F(BlockerTest, InitialDelaysAreMinimum) {
     ChannelBuffer<float> input_chunk_cb(kChunkSize[i], kNumInputChannels);
     ChannelBuffer<float> output_chunk_cb(kChunkSize[i], kNumOutputChannels);
-    Blocker blocker(kChunkSize[i],
-                    kBlockSize[i],
-                    kNumInputChannels,
-                    kNumOutputChannels,
-                    window.get(),
-                    kShiftAmount[i],
+    Blocker blocker(kChunkSize[i], kBlockSize[i], kNumInputChannels,
+                    kNumOutputChannels, window.get(), kShiftAmount[i],
                     &callback);
-    RunTest(&blocker,
-            kChunkSize[i],
-            kNumFrames,
-            input_cb.channels(),
-            input_chunk_cb.channels(),
-            output_cb.channels(),
-            output_chunk_cb.channels(),
-            kNumInputChannels,
-            kNumOutputChannels);
+    RunTest(&blocker, kChunkSize[i], kNumFrames, input_cb.channels(),
+            input_chunk_cb.channels(), output_cb.channels(),
+            output_chunk_cb.channels(), kNumInputChannels, kNumOutputChannels);
-    ValidateInitialDelay(output_cb.channels(),
-                         kNumOutputChannels,
-                         kNumFrames,
+    ValidateInitialDelay(output_cb.channels(), kNumOutputChannels, kNumFrames,
                          kInitialDelay[i]);
   }
 }


@@ -68,9 +68,7 @@ void IFChannelBuffer::RefreshI() const {
     ibuf_.set_num_channels(fbuf_.num_channels());
     const float* const* float_channels = fbuf_.channels();
     for (size_t i = 0; i < fbuf_.num_channels(); ++i) {
-      FloatS16ToS16(float_channels[i],
-                    ibuf_.num_frames(),
-                    int_channels[i]);
+      FloatS16ToS16(float_channels[i], ibuf_.num_frames(), int_channels[i]);
     }
     ivalid_ = true;
   }


@@ -40,9 +40,7 @@ namespace webrtc {
 template <typename T>
 class ChannelBuffer {
  public:
-  ChannelBuffer(size_t num_frames,
-                size_t num_channels,
-                size_t num_bands = 1)
+  ChannelBuffer(size_t num_frames, size_t num_channels, size_t num_bands = 1)
       : data_(new T[num_frames * num_channels]()),
         channels_(new T*[num_channels * num_bands]),
         bands_(new T*[num_channels * num_bands]),


@@ -20,8 +20,7 @@
 namespace webrtc {
-FIRFilterC::~FIRFilterC() {
-}
+FIRFilterC::~FIRFilterC() {}
 FIRFilterC::FIRFilterC(const float* coefficients, size_t coefficients_length)
     : coefficients_length_(coefficients_length),
@@ -52,11 +51,10 @@ void FIRFilterC::Filter(const float* in, size_t length, float* out) {
   // Update current state.
   if (length >= state_length_) {
-    memcpy(
-        state_.get(), &in[length - state_length_], state_length_ * sizeof(*in));
+    memcpy(state_.get(), &in[length - state_length_],
+           state_length_ * sizeof(*in));
   } else {
-    memmove(state_.get(),
-            &state_[length],
+    memmove(state_.get(), &state_[length],
             (state_length_ - length) * sizeof(state_[0]));
     memcpy(&state_[state_length_ - length], in, length * sizeof(*in));
   }


@@ -20,8 +20,7 @@ namespace webrtc {
 class FIRFilterC : public FIRFilter {
  public:
-  FIRFilterC(const float* coefficients,
-             size_t coefficients_length);
+  FIRFilterC(const float* coefficients, size_t coefficients_length);
   ~FIRFilterC() override;
   void Filter(const float* in, size_t length, float* out) override;


@@ -18,8 +18,7 @@
 namespace webrtc {
-FIRFilterNEON::~FIRFilterNEON() {
-}
+FIRFilterNEON::~FIRFilterNEON() {}
 FIRFilterNEON::FIRFilterNEON(const float* coefficients,
                              size_t coefficients_length,
@@ -40,8 +39,7 @@ FIRFilterNEON::FIRFilterNEON(const float* coefficients,
   for (size_t i = 0; i < coefficients_length; ++i) {
     coefficients_[i + padding] = coefficients[coefficients_length - i - 1];
   }
-  memset(state_.get(),
-         0.f,
+  memset(state_.get(), 0.f,
          (max_input_length + state_length_) * sizeof(state_[0]));
 }


@@ -19,8 +19,7 @@
 namespace webrtc {
-FIRFilterSSE2::~FIRFilterSSE2() {
-}
+FIRFilterSSE2::~FIRFilterSSE2() {}
 FIRFilterSSE2::FIRFilterSSE2(const float* coefficients,
                              size_t coefficients_length,
@@ -41,8 +40,7 @@ FIRFilterSSE2::FIRFilterSSE2(const float* coefficients,
   for (size_t i = 0; i < coefficients_length; ++i) {
     coefficients_[i + padding] = coefficients[coefficients_length - i - 1];
   }
-  memset(state_.get(),
-         0,
+  memset(state_.get(), 0,
          (max_input_length + state_length_) * sizeof(state_[0]));
 }


@@ -21,20 +21,18 @@ namespace webrtc {
 namespace {
 static const float kCoefficients[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f};
-static const size_t kCoefficientsLength = sizeof(kCoefficients) /
-    sizeof(kCoefficients[0]);
+static const size_t kCoefficientsLength =
+    sizeof(kCoefficients) / sizeof(kCoefficients[0]);
-static const float kInput[] = {1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f,
-                               8.f, 9.f, 10.f};
+static const float kInput[] = {1.f, 2.f, 3.f, 4.f, 5.f,
+                               6.f, 7.f, 8.f, 9.f, 10.f};
-static const size_t kInputLength = sizeof(kInput) /
-    sizeof(kInput[0]);
+static const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]);
 void VerifyOutput(const float* expected_output,
                   const float* output,
                   size_t length) {
-  EXPECT_EQ(0, memcmp(expected_output,
-                      output,
-                      length * sizeof(expected_output[0])));
+  EXPECT_EQ(
+      0, memcmp(expected_output, output, length * sizeof(expected_output[0])));
 }
 }  // namespace
@@ -97,8 +95,8 @@ TEST(FIRFilterTest, FilterInLengthLesserOrEqualToCoefficientsLength) {
   EXPECT_FLOAT_EQ(0.2f, output[0]);
   EXPECT_FLOAT_EQ(0.7f, output[1]);
-  filter.reset(CreateFirFilter(
-      kCoefficients, kCoefficientsLength, kCoefficientsLength));
+  filter.reset(
+      CreateFirFilter(kCoefficients, kCoefficientsLength, kCoefficientsLength));
   filter->Filter(kInput, kCoefficientsLength, output);
   EXPECT_FLOAT_EQ(0.2f, output[0]);
@@ -149,19 +147,17 @@ TEST(FIRFilterTest, VerifySampleBasedVsBlockBasedFiltering) {
     filter->Filter(&kInput[i], 1, &output_sample_based[i]);
   }
-  EXPECT_EQ(0, memcmp(output_sample_based,
-                      output_block_based,
-                      kInputLength));
+  EXPECT_EQ(0, memcmp(output_sample_based, output_block_based, kInputLength));
 }
 TEST(FIRFilterTest, SimplestHighPassFilter) {
   const float kCoefficients[] = {1.f, -1.f};
-  const size_t kCoefficientsLength = sizeof(kCoefficients) /
-      sizeof(kCoefficients[0]);
+  const size_t kCoefficientsLength =
+      sizeof(kCoefficients) / sizeof(kCoefficients[0]);
   float kConstantInput[] = {1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f};
-  const size_t kConstantInputLength = sizeof(kConstantInput) /
-      sizeof(kConstantInput[0]);
+  const size_t kConstantInputLength =
+      sizeof(kConstantInput) / sizeof(kConstantInput[0]);
   float output[kConstantInputLength];
   std::unique_ptr<FIRFilter> filter(CreateFirFilter(
@@ -175,12 +171,12 @@ TEST(FIRFilterTest, SimplestHighPassFilter) {
 TEST(FIRFilterTest, SimplestLowPassFilter) {
   const float kCoefficients[] = {1.f, 1.f};
-  const size_t kCoefficientsLength = sizeof(kCoefficients) /
-      sizeof(kCoefficients[0]);
+  const size_t kCoefficientsLength =
+      sizeof(kCoefficients) / sizeof(kCoefficients[0]);
   float kHighFrequencyInput[] = {-1.f, 1.f, -1.f, 1.f, -1.f, 1.f, -1.f, 1.f};
-  const size_t kHighFrequencyInputLength = sizeof(kHighFrequencyInput) /
-      sizeof(kHighFrequencyInput[0]);
+  const size_t kHighFrequencyInputLength =
+      sizeof(kHighFrequencyInput) / sizeof(kHighFrequencyInput[0]);
   float output[kHighFrequencyInputLength];
   std::unique_ptr<FIRFilter> filter(CreateFirFilter(
@@ -195,13 +191,13 @@ TEST(FIRFilterTest, SameOutputWhenSwapedCoefficientsAndInput) {
 TEST(FIRFilterTest, SameOutputWhenSwapedCoefficientsAndInput) {
   float output[kCoefficientsLength];
   float output_swaped[kCoefficientsLength];
-  std::unique_ptr<FIRFilter> filter(CreateFirFilter(
-      kCoefficients, kCoefficientsLength, kCoefficientsLength));
+  std::unique_ptr<FIRFilter> filter(
+      CreateFirFilter(kCoefficients, kCoefficientsLength, kCoefficientsLength));
   // Use kCoefficientsLength for in_length to get same-length outputs.
   filter->Filter(kInput, kCoefficientsLength, output);
-  filter.reset(CreateFirFilter(
-      kInput, kCoefficientsLength, kCoefficientsLength));
+  filter.reset(
+      CreateFirFilter(kInput, kCoefficientsLength, kCoefficientsLength));
   filter->Filter(kCoefficients, kCoefficientsLength, output_swaped);
   for (size_t i = 0; i < kCoefficientsLength; ++i) {


@@ -29,20 +29,17 @@ void LappedTransform::BlockThunk::ProcessBlock(const float* const* input,
   RTC_CHECK_EQ(parent_->block_length_, num_frames);
   for (size_t i = 0; i < num_input_channels; ++i) {
-    memcpy(parent_->real_buf_.Row(i), input[i],
-           num_frames * sizeof(*input[0]));
+    memcpy(parent_->real_buf_.Row(i), input[i], num_frames * sizeof(*input[0]));
     parent_->fft_->Forward(parent_->real_buf_.Row(i),
                            parent_->cplx_pre_.Row(i));
   }
-  size_t block_length = RealFourier::ComplexLength(
-      RealFourier::FftOrder(num_frames));
+  size_t block_length =
+      RealFourier::ComplexLength(RealFourier::FftOrder(num_frames));
   RTC_CHECK_EQ(parent_->cplx_length_, block_length);
-  parent_->block_processor_->ProcessAudioBlock(parent_->cplx_pre_.Array(),
-                                               num_input_channels,
-                                               parent_->cplx_length_,
-                                               num_output_channels,
-                                               parent_->cplx_post_.Array());
+  parent_->block_processor_->ProcessAudioBlock(
+      parent_->cplx_pre_.Array(), num_input_channels, parent_->cplx_length_,
+      num_output_channels, parent_->cplx_post_.Array());
   for (size_t i = 0; i < num_output_channels; ++i) {
     parent_->fft_->Inverse(parent_->cplx_post_.Row(i),


@@ -35,7 +35,8 @@ class LappedTransform {
     virtual ~Callback() {}
     virtual void ProcessAudioBlock(const std::complex<float>* const* in_block,
-                                   size_t num_in_channels, size_t frames,
+                                   size_t num_in_channels,
+                                   size_t frames,
                                    size_t num_out_channels,
                                    std::complex<float>* const* out_block) = 0;
   };
@@ -128,4 +129,3 @@ class LappedTransform {
 }  // namespace webrtc
 #endif  // COMMON_AUDIO_LAPPED_TRANSFORM_H_


@@ -36,9 +36,7 @@ class NoopCallback : public webrtc::LappedTransform::Callback {
     ++block_num_;
   }
-  size_t block_num() {
-    return block_num_;
-  }
+  size_t block_num() { return block_num_; }
  private:
   size_t block_num_;
@@ -69,9 +67,7 @@ class FftCheckerCallback : public webrtc::LappedTransform::Callback {
     }
   }
-  size_t block_num() {
-    return block_num_;
-  }
+  size_t block_num() { return block_num_; }
  private:
   size_t block_num_;
@@ -150,8 +146,7 @@ TEST(LappedTransformTest, IdentityProcessor) {
   trans.ProcessChunk(&in_chunk, &out_chunk);
   for (size_t i = 0; i < kChunkLength; ++i) {
-    ASSERT_NEAR(out_chunk[i],
-                (i < kBlockLength - kShiftAmount) ? 0.0f : 2.0f,
+    ASSERT_NEAR(out_chunk[i], (i < kBlockLength - kShiftAmount) ? 0.0f : 2.0f,
                 1e-5f);
   }
@@ -167,8 +162,8 @@ TEST(LappedTransformTest, Callbacks) {
   float window[kBlockLength];
   std::fill(window, &window[kBlockLength], 1.0f);
-  LappedTransform trans(1, 1, kChunkLength, window, kBlockLength,
-                        kBlockLength, &call);
+  LappedTransform trans(1, 1, kChunkLength, window, kBlockLength, kBlockLength,
+                        &call);
   float in_buffer[kChunkLength];
   float* in_chunk = in_buffer;
   float out_buffer[kChunkLength];


@@ -72,4 +72,3 @@ class RealFourier {
 }  // namespace webrtc
 #endif  // COMMON_AUDIO_REAL_FOURIER_H_


@@ -10,8 +10,8 @@
 #include "common_audio/real_fourier_ooura.h"
-#include <cmath>
 #include <algorithm>
+#include <cmath>
 #include "common_audio/fft4g.h"
 #include "rtc_base/checks.h"
@@ -28,8 +28,8 @@ void Conjugate(complex<float>* array, size_t complex_length) {
 }
 size_t ComputeWorkIpSize(size_t fft_length) {
-  return static_cast<size_t>(2 + std::ceil(std::sqrt(
-      static_cast<float>(fft_length))));
+  return static_cast<size_t>(
+      2 + std::ceil(std::sqrt(static_cast<float>(fft_length))));
 }
 }  // namespace
@@ -73,8 +73,8 @@ void RealFourierOoura::Inverse(const complex<float>* src, float* dest) const {
     // Restore Ooura's conjugate definition.
     Conjugate(dest_complex, dest_complex_length);
     // Restore real[n/2] to imag[0].
-    dest_complex[0] = complex<float>(dest_complex[0].real(),
-                                     src[complex_length_ - 1].real());
+    dest_complex[0] =
+        complex<float>(dest_complex[0].real(), src[complex_length_ - 1].real());
   }
   WebRtc_rdft(length_, -1, dest, work_ip_.get(), work_w_.get());


@@ -60,8 +60,7 @@ class RealFourierTest : public ::testing::Test {
         real_buffer_(RealFourier::AllocRealBuffer(4)),
         cplx_buffer_(RealFourier::AllocCplxBuffer(3)) {}
-  ~RealFourierTest() {
-  }
+  ~RealFourierTest() {}
   T rf_;
   const RealFourier::fft_real_scoper real_buffer_;


@@ -29,7 +29,8 @@ class PushResampler {
   // Must be called whenever the parameters change. Free to be called at any
   // time as it is a no-op if parameters have not changed since the last call.
-  int InitializeIfNeeded(int src_sample_rate_hz, int dst_sample_rate_hz,
+  int InitializeIfNeeded(int src_sample_rate_hz,
+                         int dst_sample_rate_hz,
                          size_t num_channels);
   // Returns the total number of samples provided in destination (e.g. 32 kHz,


@@ -8,7 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 /*
  * A wrapper for resampling a numerous amount of sampling combinations.
  */
@@ -36,8 +35,11 @@ class Resampler {
   int ResetIfNeeded(int inFreq, int outFreq, size_t num_channels);
   // Resample samplesIn to samplesOut.
-  int Push(const int16_t* samplesIn, size_t lengthIn, int16_t* samplesOut,
-           size_t maxLen, size_t& outLen);  // NOLINT: to avoid changing APIs
+  int Push(const int16_t* samplesIn,
+           size_t lengthIn,
+           int16_t* samplesOut,
+           size_t maxLen,
+           size_t& outLen);  // NOLINT: to avoid changing APIs
  private:
   enum ResamplerMode {
