Enable cpplint for more webrtc subfolders and fix all uncovered cpplint errors.

This CL enables cpplint for webrtc/common_video, webrtc/sound and webrtc/tools.

BUG=webrtc:5273

TESTED=Fixed issues reported by:
find webrtc/common_video webrtc/sound webrtc/tools -type f \( -name '*.cc' -o -name '*.h' \) | xargs cpplint.py
followed by 'git cl presubmit'.

Review URL: https://codereview.webrtc.org/1511603004

Cr-Commit-Position: refs/heads/master@{#10967}
Author: jbauch, 2015-12-10 03:11:42 -08:00 (committed by Commit bot)
Commit: 0f2e939a92 (parent: 162abd3562)
15 changed files with 66 additions and 64 deletions


@@ -18,8 +18,11 @@ import sys
 CPPLINT_DIRS = [
   'webrtc/audio',
   'webrtc/call',
+  'webrtc/common_video',
   'webrtc/examples',
   'webrtc/modules/video_processing',
+  'webrtc/sound',
+  'webrtc/tools',
   'webrtc/video',
 ]


@@ -8,8 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "webrtc/video_frame.h"
-
 #include <math.h>
 #include <string.h>
@@ -17,6 +15,7 @@
 #include "webrtc/base/bind.h"
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/test/fake_texture_frame.h"
+#include "webrtc/video_frame.h"
 
 namespace webrtc {
@@ -174,7 +173,7 @@ TEST(TestVideoFrame, ShallowCopy) {
 
 TEST(TestVideoFrame, Reset) {
   VideoFrame frame;
-  ASSERT_TRUE(frame.CreateEmptyFrame(5, 5, 5, 5, 5) == 0);
+  ASSERT_EQ(frame.CreateEmptyFrame(5, 5, 5, 5, 5), 0);
   frame.set_ntp_time_ms(1);
   frame.set_timestamp(2);
   frame.set_render_time_ms(3);
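The assertion change above is what cpplint's readability/check warning asks for: the dedicated comparison macros report both operands on failure instead of just "false". A minimal sketch of the preferred pattern, using a hypothetical helper rather than the real VideoFrame API:

#include <gtest/gtest.h>

// Hypothetical helper, for illustration only.
int CreateEmpty() { return 0; }

TEST(ExampleTest, PrefersEqAssertions) {
  // Flagged by cpplint (readability/check): on failure this only prints
  // "false", hiding the actual return value.
  // ASSERT_TRUE(CreateEmpty() == 0);

  // Preferred: prints expected and actual values when the check fails.
  ASSERT_EQ(CreateEmpty(), 0);
}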


@@ -152,6 +152,7 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame,
 double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame);
 // Compute SSIM for an I420 frame (all planes).
 double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame);
-}
+}  // namespace webrtc
+
 #endif  // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
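The closing-brace change above satisfies cpplint's requirement that a namespace be terminated with a comment naming it. A short sketch of the convention; the declaration is a stand-in, not from this CL:

namespace webrtc {

// Hypothetical declaration, for illustration only.
double ExamplePsnr(const void* ref_frame, const void* test_frame);

// cpplint wants the namespace named in the closing comment, with two
// spaces between the brace and "//".
}  // namespace webrtc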


@@ -346,4 +346,4 @@ TEST_F(TestLibYuv, StrideAlignment) {
   EXPECT_EQ(64, stride_uv);
 }
 
-}  // namespace
+}  // namespace webrtc


@@ -11,6 +11,7 @@
 #include "webrtc/sound/alsasoundsystem.h"
 
 #include <algorithm>
+#include <string>
 
 #include "webrtc/base/arraysize.h"
 #include "webrtc/base/common.h"
@@ -259,12 +260,12 @@ class AlsaInputStream :
   }
 
   bool GetVolume(int *volume) override {
-    // TODO: Implement this.
+    // TODO(henrika): Implement this.
    return false;
   }
 
   bool SetVolume(int volume) override {
-    // TODO: Implement this.
+    // TODO(henrika): Implement this.
    return false;
   }
@@ -390,12 +391,12 @@ class AlsaOutputStream : public SoundOutputStreamInterface,
   }
 
   bool GetVolume(int *volume) override {
-    // TODO: Implement this.
+    // TODO(henrika): Implement this.
    return false;
   }
 
   bool SetVolume(int volume) override {
-    // TODO: Implement this.
+    // TODO(henrika): Implement this.
    return false;
   }
@@ -569,7 +570,6 @@ bool AlsaSoundSystem::EnumerateDevices(
         strcmp(name, ignore_null) != 0 &&
         strcmp(name, ignore_pulse) != 0 &&
         !rtc::starts_with(name, ignore_prefix)) {
-      // Yes, we do.
       char *desc = symbol_table_.snd_device_name_get_hint()(*list, "DESC");
       if (!desc) {
@@ -622,7 +622,6 @@ StreamInterface *AlsaSoundSystem::OpenDevice(
                        int wait_timeout_ms,
                        int flags,
                        int freq)) {
-
   if (!IsInitialized()) {
     return NULL;
   }
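Two of the recurring fixes in the ALSA file above are adding an explicit <string> include where std::string is used (cpplint's include-what-you-use check) and giving TODO comments an owner (readability/todo). A minimal sketch of both conventions; the function name and owner are placeholders, not from this CL:

#include <string>  // Must be included directly wherever std::string is used.

// TODO(some_owner): cpplint requires an owner in parentheses after TODO;
// "some_owner" here is a placeholder, not a real contributor.
std::string DescribeDevice() {
  return "placeholder device description";
}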


@@ -13,6 +13,7 @@
 #ifdef HAVE_LIBPULSE
 
 #include <algorithm>
+#include <string>
 
 #include "webrtc/base/arraysize.h"
 #include "webrtc/base/common.h"
@@ -216,17 +217,6 @@ class PulseAudioStream {
 class PulseAudioInputStream :
     public SoundInputStreamInterface,
     private rtc::Worker {
-  struct GetVolumeCallbackData {
-    PulseAudioInputStream *instance;
-    pa_cvolume *channel_volumes;
-  };
-
-  struct GetSourceChannelCountCallbackData {
-    PulseAudioInputStream *instance;
-    uint8_t *channels;
-  };
-
  public:
   PulseAudioInputStream(PulseAudioSoundSystem *pulse,
                         pa_stream *stream,
@@ -386,6 +376,16 @@ class PulseAudioInputStream :
   }
 
  private:
+  struct GetVolumeCallbackData {
+    PulseAudioInputStream* instance;
+    pa_cvolume* channel_volumes;
+  };
+
+  struct GetSourceChannelCountCallbackData {
+    PulseAudioInputStream* instance;
+    uint8_t* channels;
+  };
+
   void Lock() {
     stream_.Lock();
   }
@@ -580,12 +580,6 @@ class PulseAudioInputStream :
 class PulseAudioOutputStream :
     public SoundOutputStreamInterface,
     private rtc::Worker {
-  struct GetVolumeCallbackData {
-    PulseAudioOutputStream *instance;
-    pa_cvolume *channel_volumes;
-  };
-
  public:
   PulseAudioOutputStream(PulseAudioSoundSystem *pulse,
                          pa_stream *stream,
@@ -733,7 +727,7 @@ class PulseAudioOutputStream :
   }
 
 #if 0
-  // TODO: Versions 0.9.16 and later of Pulse have a new API for
+  // TODO(henrika): Versions 0.9.16 and later of Pulse have a new API for
   // zero-copy writes, but Hardy is not new enough to have that so we can't
   // rely on it. Perhaps auto-detect if it's present or not and use it if we
   // can?
@@ -777,6 +771,11 @@ class PulseAudioOutputStream :
 #endif
 
  private:
+  struct GetVolumeCallbackData {
+    PulseAudioOutputStream* instance;
+    pa_cvolume* channel_volumes;
+  };
+
   void Lock() {
     stream_.Lock();
   }
@@ -1165,7 +1164,7 @@ bool PulseAudioSoundSystem::ConnectToPulse(pa_context *context) {
 pa_context *PulseAudioSoundSystem::CreateNewConnection() {
   // Create connection context.
   std::string app_name;
-  // TODO: Pulse etiquette says this name should be localized. Do
+  // TODO(henrika): Pulse etiquette says this name should be localized. Do
   // we care?
   rtc::Filesystem::GetApplicationName(&app_name);
   pa_context *context = symbol_table_.pa_context_new()(


@@ -65,4 +65,4 @@ class SoundInputStreamInterface {
 
 }  // namespace rtc
 
-#endif  // WEBRTC_SOUND_SOUNDOUTPUTSTREAMINTERFACE_H_
+#endif  // WEBRTC_SOUND_SOUNDINPUTSTREAMINTERFACE_H_


@@ -65,7 +65,7 @@ static void DitherSilence(AudioFrame* frame) {
     sum_squared += frame->data_[n] * frame->data_[n];
 
   if (sum_squared <= sum_squared_silence) {
    for (size_t n = 0; n < frame->samples_per_channel_; n++)
-      frame->data_[n] = (rand() & 0xF) - 8;
+      frame->data_[n] = (rand() & 0xF) - 8;  // NOLINT: ignore non-threadsafe.
   }
 }
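The NOLINT comment added above suppresses cpplint for that single line: rand() is flagged as a non-thread-safe function (runtime/threadsafe_fn), but the dither code needs neither thread safety nor strong randomness. A sketch of the same suppression using the category form that cpplint also accepts; the helper is hypothetical, not from this CL:

#include <cstdlib>

// Hypothetical helper, for illustration only: produces a small dither
// value in the range [-8, 7].
int DitherSample() {
  // rand() trips cpplint's runtime/threadsafe_fn check; NOLINT with a
  // category limits the suppression to exactly that warning.
  return (std::rand() & 0xF) - 8;  // NOLINT(runtime/threadsafe_fn)
}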


@@ -15,6 +15,7 @@
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/tools/frame_editing/frame_editing_lib.h"
 #include "webrtc/typedefs.h"
 
 using std::string;


@@ -8,8 +8,8 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
-#define WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
+#ifndef WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
+#define WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
 
 #include <string>
@@ -36,4 +36,4 @@ int EditFrames(const std::string& in_path, int width, int height,
                int last_frame_to_process, const std::string& out_path);
 
 }  // namespace webrtc
 
-#endif  // WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
+#endif  // WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
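The guard rename above reflects cpplint's build/header_guard check, which derives the expected macro from the file's path relative to the repository root, so frame_editing_lib.h must use ..._FRAME_EDITING_LIB_H_ rather than ..._FRAME_EDITING_H_. As an illustration, a header at the hypothetical path webrtc/tools/example/example_util.h would be laid out like this:

#ifndef WEBRTC_TOOLS_EXAMPLE_EXAMPLE_UTIL_H_
#define WEBRTC_TOOLS_EXAMPLE_EXAMPLE_UTIL_H_

#include <string>

namespace webrtc {

// Hypothetical declaration, for illustration only.
std::string ExampleUtilVersion();

}  // namespace webrtc

#endif  // WEBRTC_TOOLS_EXAMPLE_EXAMPLE_UTIL_H_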