Run the ClangTidy analyser on the AudioProcessing submodule of WebRTC.

This CL contains automatically applied fixes suggested by the
ClangTidy analyzer (http://clang.llvm.org/extra/clang-tidy/). The
following kinds of fixes are present:

* renaming variables when the names in the method signature don't
  match the names in the method definition
  (ClangTidy:readability-inconsistent-declaration-parameter-name)

* ClangTidy:readability-container-size-empty,
  ClangTidy:misc-unused-using-decls,
  ClangTidy:performance-unnecessary-value-param,
  ClangTidy:readability-redundant-control-flow

This is a 'pilot' CL to check if automatic code analyzers can
feasibly be integrated into the WebRTC infrastructure.

The renamings have been manually inspected for consistency with
surrounding code. In echo_cancellation.cc, I changed several names in
the function implementation to match the function declaration. The
tool suggested changing everything to match the function definitions
instead.

Bug: None
Change-Id: Id3b7ba18c51f15b025f26090c7bdcc642e48d8fd
Reviewed-on: https://chromium-review.googlesource.com/635766
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Alex Loiko <aleloi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19630}
This commit is contained in:
Alex Loiko 2017-08-31 10:25:48 +02:00 committed by Commit Bot
parent 5b0690699d
commit 890988c9cb
19 changed files with 130 additions and 133 deletions

View File

@ -836,8 +836,6 @@ static void UpdateDelayMetrics(AecCore* self) {
// Reset histogram.
memset(self->delay_histogram, 0, sizeof(self->delay_histogram));
self->num_delay_values = 0;
return;
}
static void ScaledInverseFft(const OouraFft& ooura_fft,

View File

@ -37,7 +37,7 @@ typedef struct {
static int EstimateSkew(const int* rawSkew,
int size,
int absLimit,
int deviceSampleRateHz,
float* skewEst);
void* WebRtcAec_CreateResampler() {

View File

@ -105,15 +105,15 @@ int Aec::instance_count = 0;
// (controlled by knownDelay)
static void EstBufDelayNormal(Aec* aecInst);
static void EstBufDelayExtended(Aec* aecInst);
static int ProcessNormal(Aec* self,
const float* const* near,
static int ProcessNormal(Aec* aecInst,
const float* const* nearend,
size_t num_bands,
float* const* out,
size_t num_samples,
int16_t reported_delay_ms,
int32_t skew);
static void ProcessExtended(Aec* self,
const float* const* near,
static void ProcessExtended(Aec* aecInst,
const float* const* nearend,
size_t num_bands,
float* const* out,
size_t num_samples,
@ -531,12 +531,12 @@ AecCore* WebRtcAec_aec_core(void* handle) {
return reinterpret_cast<Aec*>(handle)->aec;
}
static int ProcessNormal(Aec* aecpc,
static int ProcessNormal(Aec* aecInst,
const float* const* nearend,
size_t num_bands,
float* const* out,
size_t nrOfSamples,
int16_t msInSndCardBuf,
size_t num_samples,
int16_t reported_delay_ms,
int32_t skew) {
int retVal = 0;
size_t i;
@ -545,47 +545,48 @@ static int ProcessNormal(Aec* aecpc,
const float minSkewEst = -0.5f;
const float maxSkewEst = 1.0f;
msInSndCardBuf =
msInSndCardBuf > kMaxTrustedDelayMs ? kMaxTrustedDelayMs : msInSndCardBuf;
reported_delay_ms =
reported_delay_ms > kMaxTrustedDelayMs ? kMaxTrustedDelayMs :
reported_delay_ms;
// TODO(andrew): we need to investigate if this +10 is really wanted.
msInSndCardBuf += 10;
aecpc->msInSndCardBuf = msInSndCardBuf;
reported_delay_ms += 10;
aecInst->msInSndCardBuf = reported_delay_ms;
if (aecpc->skewMode == kAecTrue) {
if (aecpc->skewFrCtr < 25) {
aecpc->skewFrCtr++;
if (aecInst->skewMode == kAecTrue) {
if (aecInst->skewFrCtr < 25) {
aecInst->skewFrCtr++;
} else {
retVal = WebRtcAec_GetSkew(aecpc->resampler, skew, &aecpc->skew);
retVal = WebRtcAec_GetSkew(aecInst->resampler, skew, &aecInst->skew);
if (retVal == -1) {
aecpc->skew = 0;
aecInst->skew = 0;
retVal = AEC_BAD_PARAMETER_WARNING;
}
aecpc->skew /= aecpc->sampFactor * nrOfSamples;
aecInst->skew /= aecInst->sampFactor * num_samples;
if (aecpc->skew < 1.0e-3 && aecpc->skew > -1.0e-3) {
aecpc->resample = kAecFalse;
if (aecInst->skew < 1.0e-3 && aecInst->skew > -1.0e-3) {
aecInst->resample = kAecFalse;
} else {
aecpc->resample = kAecTrue;
aecInst->resample = kAecTrue;
}
if (aecpc->skew < minSkewEst) {
aecpc->skew = minSkewEst;
} else if (aecpc->skew > maxSkewEst) {
aecpc->skew = maxSkewEst;
if (aecInst->skew < minSkewEst) {
aecInst->skew = minSkewEst;
} else if (aecInst->skew > maxSkewEst) {
aecInst->skew = maxSkewEst;
}
aecpc->data_dumper->DumpRaw("aec_skew", 1, &aecpc->skew);
aecInst->data_dumper->DumpRaw("aec_skew", 1, &aecInst->skew);
}
}
nBlocks10ms = nrOfSamples / (FRAME_LEN * aecpc->rate_factor);
nBlocks10ms = num_samples / (FRAME_LEN * aecInst->rate_factor);
if (aecpc->startup_phase) {
if (aecInst->startup_phase) {
for (i = 0; i < num_bands; ++i) {
// Only needed if they don't already point to the same place.
if (nearend[i] != out[i]) {
memcpy(out[i], nearend[i], sizeof(nearend[i][0]) * nrOfSamples);
memcpy(out[i], nearend[i], sizeof(nearend[i][0]) * num_samples);
}
}
@ -593,82 +594,83 @@ static int ProcessNormal(Aec* aecpc,
// AEC is disabled until the system delay is OK
// Mechanism to ensure that the system delay is reasonably stable.
if (aecpc->checkBuffSize) {
aecpc->checkBufSizeCtr++;
if (aecInst->checkBuffSize) {
aecInst->checkBufSizeCtr++;
// Before we fill up the far-end buffer we require the system delay
// to be stable (+/-8 ms) compared to the first value. This
// comparison is made during the following 6 consecutive 10 ms
// blocks. If it seems to be stable then we start to fill up the
// far-end buffer.
if (aecpc->counter == 0) {
aecpc->firstVal = aecpc->msInSndCardBuf;
aecpc->sum = 0;
if (aecInst->counter == 0) {
aecInst->firstVal = aecInst->msInSndCardBuf;
aecInst->sum = 0;
}
if (abs(aecpc->firstVal - aecpc->msInSndCardBuf) <
WEBRTC_SPL_MAX(0.2 * aecpc->msInSndCardBuf, sampMsNb)) {
aecpc->sum += aecpc->msInSndCardBuf;
aecpc->counter++;
if (abs(aecInst->firstVal - aecInst->msInSndCardBuf) <
WEBRTC_SPL_MAX(0.2 * aecInst->msInSndCardBuf, sampMsNb)) {
aecInst->sum += aecInst->msInSndCardBuf;
aecInst->counter++;
} else {
aecpc->counter = 0;
aecInst->counter = 0;
}
if (aecpc->counter * nBlocks10ms >= 6) {
if (aecInst->counter * nBlocks10ms >= 6) {
// The far-end buffer size is determined in partitions of
// PART_LEN samples. Use 75% of the average value of the system
// delay as buffer size to start with.
aecpc->bufSizeStart =
WEBRTC_SPL_MIN((3 * aecpc->sum * aecpc->rate_factor * 8) /
(4 * aecpc->counter * PART_LEN),
aecInst->bufSizeStart =
WEBRTC_SPL_MIN((3 * aecInst->sum * aecInst->rate_factor * 8) /
(4 * aecInst->counter * PART_LEN),
kMaxBufSizeStart);
// Buffer size has now been determined.
aecpc->checkBuffSize = 0;
aecInst->checkBuffSize = 0;
}
if (aecpc->checkBufSizeCtr * nBlocks10ms > 50) {
if (aecInst->checkBufSizeCtr * nBlocks10ms > 50) {
// For really bad systems, don't disable the echo canceller for
// more than 0.5 sec.
aecpc->bufSizeStart = WEBRTC_SPL_MIN(
(aecpc->msInSndCardBuf * aecpc->rate_factor * 3) / 40,
aecInst->bufSizeStart = WEBRTC_SPL_MIN(
(aecInst->msInSndCardBuf * aecInst->rate_factor * 3) / 40,
kMaxBufSizeStart);
aecpc->checkBuffSize = 0;
aecInst->checkBuffSize = 0;
}
}
// If |checkBuffSize| changed in the if-statement above.
if (!aecpc->checkBuffSize) {
if (!aecInst->checkBuffSize) {
// The system delay is now reasonably stable (or has been unstable
// for too long). When the far-end buffer is filled with
// approximately the same amount of data as reported by the system
// we end the startup phase.
int overhead_elements =
WebRtcAec_system_delay(aecpc->aec) / PART_LEN - aecpc->bufSizeStart;
WebRtcAec_system_delay(aecInst->aec) / PART_LEN -
aecInst->bufSizeStart;
if (overhead_elements == 0) {
// Enable the AEC
aecpc->startup_phase = 0;
aecInst->startup_phase = 0;
} else if (overhead_elements > 0) {
// TODO(bjornv): Do we need a check on how much we actually
// moved the read pointer? It should always be possible to move
// the pointer |overhead_elements| since we have only added data
// to the buffer and no delay compensation nor AEC processing
// has been done.
WebRtcAec_AdjustFarendBufferSizeAndSystemDelay(aecpc->aec,
WebRtcAec_AdjustFarendBufferSizeAndSystemDelay(aecInst->aec,
overhead_elements);
// Enable the AEC
aecpc->startup_phase = 0;
aecInst->startup_phase = 0;
}
}
} else {
// AEC is enabled.
EstBufDelayNormal(aecpc);
EstBufDelayNormal(aecInst);
// Call the AEC.
// TODO(bjornv): Re-structure such that we don't have to pass
// |aecpc->knownDelay| as input. Change name to something like
// |aecInst->knownDelay| as input. Change name to something like
// |system_buffer_diff|.
WebRtcAec_ProcessFrames(aecpc->aec, nearend, num_bands, nrOfSamples,
aecpc->knownDelay, out);
WebRtcAec_ProcessFrames(aecInst->aec, nearend, num_bands, num_samples,
aecInst->knownDelay, out);
}
return retVal;
@ -749,9 +751,9 @@ static void ProcessExtended(Aec* self,
}
}
static void EstBufDelayNormal(Aec* aecpc) {
int nSampSndCard = aecpc->msInSndCardBuf * sampMsNb * aecpc->rate_factor;
int current_delay = nSampSndCard - WebRtcAec_system_delay(aecpc->aec);
static void EstBufDelayNormal(Aec* aecInst) {
int nSampSndCard = aecInst->msInSndCardBuf * sampMsNb * aecInst->rate_factor;
int current_delay = nSampSndCard - WebRtcAec_system_delay(aecInst->aec);
int delay_difference = 0;
// Before we proceed with the delay estimate filtering we:
@ -761,54 +763,55 @@ static void EstBufDelayNormal(Aec* aecpc) {
// be negative.
// 1) Compensating for the frame(s) that will be read/processed.
current_delay += FRAME_LEN * aecpc->rate_factor;
current_delay += FRAME_LEN * aecInst->rate_factor;
// 2) Account for resampling frame delay.
if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) {
if (aecInst->skewMode == kAecTrue && aecInst->resample == kAecTrue) {
current_delay -= kResamplingDelay;
}
// 3) Compensate for non-causality, if needed, by flushing one block.
if (current_delay < PART_LEN) {
current_delay +=
WebRtcAec_AdjustFarendBufferSizeAndSystemDelay(aecpc->aec, 1) *
WebRtcAec_AdjustFarendBufferSizeAndSystemDelay(aecInst->aec, 1) *
PART_LEN;
}
// We use -1 to signal an initialized state in the "extended" implementation;
// compensate for that.
aecpc->filtDelay = aecpc->filtDelay < 0 ? 0 : aecpc->filtDelay;
aecpc->filtDelay =
aecInst->filtDelay = aecInst->filtDelay < 0 ? 0 : aecInst->filtDelay;
aecInst->filtDelay =
WEBRTC_SPL_MAX(0, static_cast<int16_t>(0.8 *
aecpc->filtDelay +
aecInst->filtDelay +
0.2 * current_delay));
delay_difference = aecpc->filtDelay - aecpc->knownDelay;
delay_difference = aecInst->filtDelay - aecInst->knownDelay;
if (delay_difference > 224) {
if (aecpc->lastDelayDiff < 96) {
aecpc->timeForDelayChange = 0;
if (aecInst->lastDelayDiff < 96) {
aecInst->timeForDelayChange = 0;
} else {
aecpc->timeForDelayChange++;
aecInst->timeForDelayChange++;
}
} else if (delay_difference < 96 && aecpc->knownDelay > 0) {
if (aecpc->lastDelayDiff > 224) {
aecpc->timeForDelayChange = 0;
} else if (delay_difference < 96 && aecInst->knownDelay > 0) {
if (aecInst->lastDelayDiff > 224) {
aecInst->timeForDelayChange = 0;
} else {
aecpc->timeForDelayChange++;
aecInst->timeForDelayChange++;
}
} else {
aecpc->timeForDelayChange = 0;
aecInst->timeForDelayChange = 0;
}
aecpc->lastDelayDiff = delay_difference;
aecInst->lastDelayDiff = delay_difference;
if (aecpc->timeForDelayChange > 25) {
aecpc->knownDelay = WEBRTC_SPL_MAX((int)aecpc->filtDelay - 160, 0);
if (aecInst->timeForDelayChange > 25) {
aecInst->knownDelay = WEBRTC_SPL_MAX((int)aecInst->filtDelay - 160, 0);
}
}
static void EstBufDelayExtended(Aec* self) {
int reported_delay = self->msInSndCardBuf * sampMsNb * self->rate_factor;
int current_delay = reported_delay - WebRtcAec_system_delay(self->aec);
static void EstBufDelayExtended(Aec* aecInst) {
int reported_delay = aecInst->msInSndCardBuf * sampMsNb *
aecInst->rate_factor;
int current_delay = reported_delay - WebRtcAec_system_delay(aecInst->aec);
int delay_difference = 0;
// Before we proceed with the delay estimate filtering we:
@ -818,46 +821,48 @@ static void EstBufDelayExtended(Aec* self) {
// be negative.
// 1) Compensating for the frame(s) that will be read/processed.
current_delay += FRAME_LEN * self->rate_factor;
current_delay += FRAME_LEN * aecInst->rate_factor;
// 2) Account for resampling frame delay.
if (self->skewMode == kAecTrue && self->resample == kAecTrue) {
if (aecInst->skewMode == kAecTrue && aecInst->resample == kAecTrue) {
current_delay -= kResamplingDelay;
}
// 3) Compensate for non-causality, if needed, by flushing two blocks.
if (current_delay < PART_LEN) {
current_delay +=
WebRtcAec_AdjustFarendBufferSizeAndSystemDelay(self->aec, 2) * PART_LEN;
WebRtcAec_AdjustFarendBufferSizeAndSystemDelay(aecInst->aec, 2) *
PART_LEN;
}
if (self->filtDelay == -1) {
self->filtDelay = WEBRTC_SPL_MAX(0, 0.5 * current_delay);
if (aecInst->filtDelay == -1) {
aecInst->filtDelay = WEBRTC_SPL_MAX(0, 0.5 * current_delay);
} else {
self->filtDelay = WEBRTC_SPL_MAX(
0, static_cast<int16_t>(0.95 * self->filtDelay + 0.05 * current_delay));
aecInst->filtDelay = WEBRTC_SPL_MAX(
0, static_cast<int16_t>(0.95 * aecInst->filtDelay + 0.05 *
current_delay));
}
delay_difference = self->filtDelay - self->knownDelay;
delay_difference = aecInst->filtDelay - aecInst->knownDelay;
if (delay_difference > 384) {
if (self->lastDelayDiff < 128) {
self->timeForDelayChange = 0;
if (aecInst->lastDelayDiff < 128) {
aecInst->timeForDelayChange = 0;
} else {
self->timeForDelayChange++;
aecInst->timeForDelayChange++;
}
} else if (delay_difference < 128 && self->knownDelay > 0) {
if (self->lastDelayDiff > 384) {
self->timeForDelayChange = 0;
} else if (delay_difference < 128 && aecInst->knownDelay > 0) {
if (aecInst->lastDelayDiff > 384) {
aecInst->timeForDelayChange = 0;
} else {
self->timeForDelayChange++;
aecInst->timeForDelayChange++;
}
} else {
self->timeForDelayChange = 0;
aecInst->timeForDelayChange = 0;
}
self->lastDelayDiff = delay_difference;
aecInst->lastDelayDiff = delay_difference;
if (self->timeForDelayChange > 25) {
self->knownDelay = WEBRTC_SPL_MAX((int)self->filtDelay - 256, 0);
if (aecInst->timeForDelayChange > 25) {
aecInst->knownDelay = WEBRTC_SPL_MAX((int)aecInst->filtDelay - 256, 0);
}
}
} // namespace webrtc

View File

@ -146,7 +146,7 @@ class EchoCanceller3::RenderWriter {
int frame_length,
int num_bands);
~RenderWriter();
void Insert(AudioBuffer* render);
void Insert(AudioBuffer* input);
private:
ApmDataDumper* data_dumper_;

View File

@ -28,7 +28,6 @@
namespace webrtc {
namespace {
using testing::Return;
using testing::StrictMock;
using testing::_;

View File

@ -57,12 +57,11 @@ class EchoRemoverImpl final : public EchoRemover {
// Removes the echo from a block of samples from the capture signal. The
// supplied render signal is assumed to be pre-aligned with the capture
// signal.
void ProcessCapture(
const rtc::Optional<size_t>& external_echo_path_delay_estimate,
const EchoPathVariability& echo_path_variability,
bool capture_signal_saturation,
const RenderBuffer& render_buffer,
std::vector<std::vector<float>>* capture) override;
void ProcessCapture(const rtc::Optional<size_t>& echo_path_delay_samples,
const EchoPathVariability& echo_path_variability,
bool capture_signal_saturation,
const RenderBuffer& render_buffer,
std::vector<std::vector<float>>* capture) override;
// Updates the status on whether echo leakage is detected in the output of the
// echo remover.

View File

@ -75,10 +75,10 @@ typedef struct
// Estimates delay to set the position of the farend buffer read pointer
// (controlled by knownDelay)
static int WebRtcAecm_EstBufDelay(AecMobile* aecmInst, short msInSndCardBuf);
static int WebRtcAecm_EstBufDelay(AecMobile* aecm, short msInSndCardBuf);
// Stuffs the farend buffer if the estimated delay is too large
static int WebRtcAecm_DelayComp(AecMobile* aecmInst);
static int WebRtcAecm_DelayComp(AecMobile* aecm);
void* WebRtcAecm_Create() {
AecMobile* aecm = static_cast<AecMobile*>(malloc(sizeof(AecMobile)));

View File

@ -20,11 +20,8 @@
using ::testing::_;
using ::testing::DoAll;
using ::testing::Eq;
using ::testing::Mock;
using ::testing::Return;
using ::testing::SetArgPointee;
using ::testing::SetArgReferee;
namespace webrtc {
namespace {

View File

@ -17,7 +17,6 @@
#include "webrtc/test/gtest.h"
using ::testing::Invoke;
using ::testing::Return;
namespace webrtc {
namespace {

View File

@ -258,7 +258,7 @@ class TimedThreadApiProcessor {
bool Process();
// Method for printing out the simulation statistics.
void print_processor_statistics(std::string processor_name) const {
void print_processor_statistics(const std::string& processor_name) const {
const std::string modifier = "_api_call_duration";
// Lambda function for creating a test printout string.

View File

@ -237,7 +237,7 @@ void WriteStatsMessage(const AudioProcessing::Statistic& output,
}
#endif
void OpenFileAndWriteMessage(const std::string filename,
void OpenFileAndWriteMessage(const std::string& filename,
const MessageLite& msg) {
FILE* file = fopen(filename.c_str(), "wb");
ASSERT_TRUE(file != NULL);
@ -253,7 +253,7 @@ void OpenFileAndWriteMessage(const std::string filename,
fclose(file);
}
std::string ResourceFilePath(std::string name, int sample_rate_hz) {
std::string ResourceFilePath(const std::string& name, int sample_rate_hz) {
std::ostringstream ss;
// Resource files are all stereo.
ss << name << sample_rate_hz / 1000 << "_stereo";
@ -265,7 +265,7 @@ std::string ResourceFilePath(std::string name, int sample_rate_hz) {
// have competing filenames.
std::map<std::string, std::string> temp_filenames;
std::string OutputFilePath(std::string name,
std::string OutputFilePath(const std::string& name,
int input_rate,
int output_rate,
int reverse_input_rate,
@ -307,7 +307,7 @@ void ClearTempFiles() {
remove(kv.second.c_str());
}
void OpenFileAndReadMessage(std::string filename, MessageLite* msg) {
void OpenFileAndReadMessage(const std::string& filename, MessageLite* msg) {
FILE* file = fopen(filename.c_str(), "rb");
ASSERT_TRUE(file != NULL);
ReadMessageFromFile(file, msg);
@ -2438,7 +2438,7 @@ class AudioProcessingTest
size_t num_output_channels,
size_t num_reverse_input_channels,
size_t num_reverse_output_channels,
std::string output_file_prefix) {
const std::string& output_file_prefix) {
Config config;
config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
std::unique_ptr<AudioProcessing> ap(AudioProcessing::Create(config));

View File

@ -67,7 +67,7 @@ void RunStandaloneSubmodule(int sample_rate_hz, size_t num_channels) {
false);
}
void RunTogetherWithApm(std::string test_description,
void RunTogetherWithApm(const std::string& test_description,
int render_input_sample_rate_hz,
int render_output_sample_rate_hz,
int capture_input_sample_rate_hz,

View File

@ -22,7 +22,7 @@
namespace {
float Power(rtc::ArrayView<const float> input) {
if (input.size() == 0) {
if (input.empty()) {
return 0.f;
}
return std::inner_product(input.begin(), input.end(), input.begin(), 0.f) /

View File

@ -89,7 +89,7 @@ void RunStandaloneSubmodule() {
"us", false);
}
void RunTogetherWithApm(std::string test_description,
void RunTogetherWithApm(const std::string& test_description,
bool use_mobile_aec,
bool include_default_apm_processing) {
test::SimulatorBuffers buffers(

View File

@ -482,7 +482,7 @@ void AecDumpBasedSimulator::HandleMessage(
}
if (settings_.use_verbose_logging && msg.has_experiments_description() &&
msg.experiments_description().size() > 0) {
!msg.experiments_description().empty()) {
std::cout << " experiments not included by default in the simulation: "
<< msg.experiments_description() << std::endl;
}

View File

@ -82,7 +82,7 @@ AudioProcessingSimulator::AudioProcessingSimulator(
const SimulationSettings& settings)
: settings_(settings), worker_queue_("file_writer_task_queue") {
if (settings_.ed_graph_output_filename &&
settings_.ed_graph_output_filename->size() > 0) {
!settings_.ed_graph_output_filename->empty()) {
residual_echo_likelihood_graph_writer_.open(
*settings_.ed_graph_output_filename);
RTC_CHECK(residual_echo_likelihood_graph_writer_.is_open());

View File

@ -175,7 +175,7 @@ DEFINE_bool(store_intermediate_output,
DEFINE_string(custom_call_order_file, "", "Custom process API call order file");
DEFINE_bool(help, false, "Print this message");
void SetSettingIfSpecified(const std::string value,
void SetSettingIfSpecified(const std::string& value,
rtc::Optional<std::string>* parameter) {
if (value.compare("") != 0) {
*parameter = rtc::Optional<std::string>(value);
@ -279,7 +279,7 @@ SimulationSettings CreateSettings() {
return settings;
}
void ReportConditionalErrorAndExit(bool condition, std::string message) {
void ReportConditionalErrorAndExit(bool condition, const std::string& message) {
if (condition) {
std::cerr << message << std::endl;
exit(1);

View File

@ -40,10 +40,10 @@ void MaybeResetBuffer(std::unique_ptr<ChannelBuffer<float>>* buffer,
class DebugDumpGenerator {
public:
DebugDumpGenerator(const std::string& input_file_name,
int input_file_rate_hz,
int input_rate_hz,
int input_channels,
const std::string& reverse_file_name,
int reverse_file_rate_hz,
int reverse_rate_hz,
int reverse_channels,
const Config& config,
const std::string& dump_file_name);
@ -244,7 +244,7 @@ class DebugDumpTest : public ::testing::Test {
// VerifyDebugDump replays a debug dump using APM and verifies that the result
// is bit-exact-identical to the output channel in the dump. This is only
// guaranteed if the debug dump is started on the first frame.
void VerifyDebugDump(const std::string& dump_file_name);
void VerifyDebugDump(const std::string& in_filename);
private:
DebugDumpReplayer debug_dump_replayer_;

View File

@ -24,7 +24,7 @@ template <typename T>
void DelayBuffer<T>::Delay(rtc::ArrayView<const T> x,
rtc::ArrayView<T> x_delayed) {
RTC_DCHECK_EQ(x.size(), x_delayed.size());
if (buffer_.size() == 0) {
if (buffer_.empty()) {
std::copy(x.begin(), x.end(), x_delayed.begin());
} else {
for (size_t k = 0; k < x.size(); ++k) {