Enable cpplint for more webrtc subfolders and fix all uncovered cpplint errors.

This CL enables cpplint for webrtc/common_video, webrtc/sound and webrtc/tools.

BUG=webrtc:5273
TESTED=Fixed issues reported by:
  find webrtc/common_video webrtc/sound webrtc/tools -type f -name *.cc -o -name *.h | xargs cpplint.py
followed by 'git cl presubmit'.

Review URL: https://codereview.webrtc.org/1511603004

Cr-Commit-Position: refs/heads/master@{#10967}
Parent: 162abd3562
Commit: 0f2e939a92
@@ -18,8 +18,11 @@ import sys
 CPPLINT_DIRS = [
 'webrtc/audio',
 'webrtc/call',
+'webrtc/common_video',
 'webrtc/examples',
 'webrtc/modules/video_processing',
+'webrtc/sound',
+'webrtc/tools',
 'webrtc/video',
 ]
 
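The list above only names the directories; the presubmit machinery is what selects the affected files and runs cpplint over them (the commit message shows the equivalent manual find | xargs cpplint.py invocation). Below is a minimal, hypothetical Python sketch of that filtering step, not taken from the actual PRESUBMIT.py; the changed_files argument and the cpplint.py location are assumptions.

    import subprocess
    import sys

    # Hypothetical stand-in for the directory list enabled above.
    CPPLINT_DIRS = [
        'webrtc/audio',
        'webrtc/call',
        'webrtc/common_video',
        'webrtc/examples',
        'webrtc/modules/video_processing',
        'webrtc/sound',
        'webrtc/tools',
        'webrtc/video',
    ]

    def lint_changed_files(changed_files, cpplint_path='cpplint.py'):
        """Run cpplint only on changed .cc/.h files under the enabled dirs."""
        candidates = [
            f for f in changed_files
            if f.endswith(('.cc', '.h')) and
            any(f.startswith(d + '/') for d in CPPLINT_DIRS)
        ]
        if not candidates:
            return 0
        # Same spirit as the find | xargs command, restricted to the change.
        return subprocess.call([sys.executable, cpplint_path] + candidates)

For example, lint_changed_files(['webrtc/sound/alsasoundsystem.cc']) would lint just that file, while paths outside CPPLINT_DIRS are skipped.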
@@ -8,8 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "webrtc/video_frame.h"
-
 #include <math.h>
 #include <string.h>
 
@@ -17,6 +15,7 @@
 #include "webrtc/base/bind.h"
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/test/fake_texture_frame.h"
+#include "webrtc/video_frame.h"
 
 namespace webrtc {
 
@@ -174,7 +173,7 @@ TEST(TestVideoFrame, ShallowCopy) {
 
 TEST(TestVideoFrame, Reset) {
 VideoFrame frame;
-ASSERT_TRUE(frame.CreateEmptyFrame(5, 5, 5, 5, 5) == 0);
+ASSERT_EQ(frame.CreateEmptyFrame(5, 5, 5, 5, 5), 0);
 frame.set_ntp_time_ms(1);
 frame.set_timestamp(2);
 frame.set_render_time_ms(3);
@@ -152,6 +152,7 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame,
 double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame);
 // Compute SSIM for an I420 frame (all planes).
 double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame);
-}
+
+}  // namespace webrtc
 
 #endif  // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
@@ -97,7 +97,7 @@ TestLibYuv::TestLibYuv()
 width_(352),
 height_(288),
 size_y_(width_ * height_),
-size_uv_(((width_ + 1 ) / 2) * ((height_ + 1) / 2)),
+size_uv_(((width_ + 1) / 2) * ((height_ + 1) / 2)),
 frame_length_(CalcBufferSize(kI420, 352, 288)) {
 orig_buffer_.reset(new uint8_t[frame_length_]);
 }
@@ -142,9 +142,9 @@ TEST_F(TestLibYuv, ConvertTest) {
 double psnr = 0.0;
 
 VideoFrame res_i420_frame;
-EXPECT_EQ(0,res_i420_frame.CreateEmptyFrame(width_, height_, width_,
-(width_ + 1) / 2,
-(width_ + 1) / 2));
+EXPECT_EQ(0, res_i420_frame.CreateEmptyFrame(width_, height_, width_,
+(width_ + 1) / 2,
+(width_ + 1) / 2));
 printf("\nConvert #%d I420 <-> I420 \n", j);
 rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
 EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
@@ -281,8 +281,8 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
 int stride_y = 0;
 int stride_uv = 0;
 Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-EXPECT_EQ(0,res_i420_frame.CreateEmptyFrame(width_, height_,
-stride_y, stride_uv, stride_uv));
+EXPECT_EQ(0, res_i420_frame.CreateEmptyFrame(width_, height_,
+stride_y, stride_uv, stride_uv));
 rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
 EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
 out_i420_buffer.get()));
@@ -303,27 +303,27 @@ TEST_F(TestLibYuv, RotateTest) {
 VideoFrame rotated_res_i420_frame;
 int rotated_width = height_;
 int rotated_height = width_;
-int stride_y ;
+int stride_y;
 int stride_uv;
 Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
-EXPECT_EQ(0,rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
-rotated_height,
-stride_y,
-stride_uv,
-stride_uv));
+EXPECT_EQ(0, rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
+rotated_height,
+stride_y,
+stride_uv,
+stride_uv));
 EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
 0, kVideoRotation_90, &rotated_res_i420_frame));
 EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
 0, kVideoRotation_270, &rotated_res_i420_frame));
-EXPECT_EQ(0,rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
-width_, (width_ + 1) / 2,
-(width_ + 1) / 2));
+EXPECT_EQ(0, rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
+width_, (width_ + 1) / 2,
+(width_ + 1) / 2));
 EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
 0, kVideoRotation_180, &rotated_res_i420_frame));
 }
 
 TEST_F(TestLibYuv, alignment) {
-int value = 0x3FF; // 1023
+int value = 0x3FF;  // 1023
 EXPECT_EQ(0x400, AlignInt(value, 128)); // Low 7 bits are zero.
 EXPECT_EQ(0x400, AlignInt(value, 64)); // Low 6 bits are zero.
 EXPECT_EQ(0x400, AlignInt(value, 32)); // Low 5 bits are zero.
@@ -346,4 +346,4 @@ TEST_F(TestLibYuv, StrideAlignment) {
 EXPECT_EQ(64, stride_uv);
 }
 
-} // namespace
+} // namespace webrtc
@@ -114,7 +114,7 @@ TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
 EXPECT_EQ(half_height_, test_frame2.height());
 }
 
-//TODO (mikhal): Converge the test into one function that accepts the method.
+// TODO(mikhal): Converge the test into one function that accepts the method.
 TEST_F(TestScaler, DISABLED_ON_ANDROID(PointScaleTest)) {
 double avg_psnr;
 FILE* source_file2;
@@ -322,7 +322,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
 return avg_psnr;
 }
 
-// TODO (mikhal): Move part to a separate scale test.
+// TODO(mikhal): Move part to a separate scale test.
 void TestScaler::ScaleSequence(ScaleMethod method,
 FILE* source_file, std::string out_name,
 int src_width, int src_height,
@@ -58,7 +58,7 @@ VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) {
 
 int AlignInt(int value, int alignment) {
 assert(!((alignment - 1) & alignment));
-return ((value + alignment - 1) & ~ (alignment - 1));
+return ((value + alignment - 1) & ~(alignment - 1));
 }
 
 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
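The AlignInt() change above is only a spacing fix, but the expression itself is the standard round-up-to-a-power-of-two trick: adding alignment - 1 and then masking off the low bits rounds value up to the next multiple of alignment. A quick standalone check, written here in Python purely for illustration and using the same values as the alignment unit test quoted earlier:

    def align_int(value, alignment):
        # Mirrors the C++ AlignInt(): alignment must be a power of two.
        assert (alignment & (alignment - 1)) == 0
        return (value + alignment - 1) & ~(alignment - 1)

    # Matches the expectations in TEST_F(TestLibYuv, alignment): 0x3FF (1023)
    # rounds up to 0x400 (1024) for alignments 128, 64 and 32.
    assert align_int(0x3FF, 128) == 0x400
    assert align_int(0x3FF, 64) == 0x400
    assert align_int(0x3FF, 32) == 0x400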
@@ -119,8 +119,8 @@ int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
 }
 plane_buffer += frame.stride(plane_type);
 }
 }
-return 0;
-}
+return 0;
+}
 
 int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) {
@@ -176,7 +176,7 @@ int ConvertRGB24ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
 }
 
 libyuv::RotationMode ConvertRotationMode(VideoRotation rotation) {
-switch(rotation) {
+switch (rotation) {
 case kVideoRotation_0:
 return libyuv::kRotate0;
 case kVideoRotation_90:
@@ -191,7 +191,7 @@ libyuv::RotationMode ConvertRotationMode(VideoRotation rotation) {
 }
 
 int ConvertVideoType(VideoType video_type) {
-switch(video_type) {
+switch (video_type) {
 case kUnknown:
 return libyuv::FOURCC_ANY;
 case kI420:
@@ -243,7 +243,7 @@ int ConvertToI420(VideoType src_video_type,
 // Stride values should correspond to the destination values.
 if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
 dst_width = dst_frame->height();
-dst_height =dst_frame->width();
+dst_height = dst_frame->width();
 }
 return libyuv::ConvertToI420(src_frame, sample_size,
 dst_frame->buffer(kYPlane),
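Aside from the spacing fix, the ConvertToI420() hunk above documents the one non-obvious rule in this file: for 90- and 270-degree rotations the destination width and height are swapped relative to the destination frame's own dimensions. A tiny, hypothetical helper (not part of WebRTC, shown in Python only to make that rule explicit):

    def rotated_dst_dimensions(dst_width, dst_height, rotation_degrees):
        """Width/height to pass to the converter for a given rotation.

        Mirrors the swap in ConvertToI420() above: 90/270-degree rotations
        exchange the destination width and height; 0/180 keep them.
        """
        if rotation_degrees in (90, 270):
            return dst_height, dst_width
        return dst_width, dst_height

    assert rotated_dst_dimensions(640, 480, 90) == (480, 640)
    assert rotated_dst_dimensions(640, 480, 180) == (640, 480)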
@@ -20,7 +20,7 @@ namespace webrtc {
 
 const uint32_t KEventMaxWaitTimeMs = 200;
 const uint32_t kMinRenderDelayMs = 10;
-const uint32_t kMaxRenderDelayMs= 500;
+const uint32_t kMaxRenderDelayMs = 500;
 
 VideoRenderFrames::VideoRenderFrames()
 : render_delay_ms_(10) {
@@ -11,6 +11,7 @@
 #include "webrtc/sound/alsasoundsystem.h"
 
 #include <algorithm>
 #include <string>
 
 #include "webrtc/base/arraysize.h"
 #include "webrtc/base/common.h"
@@ -259,12 +260,12 @@ class AlsaInputStream :
 }
 
 bool GetVolume(int *volume) override {
-// TODO: Implement this.
+// TODO(henrika): Implement this.
 return false;
 }
 
 bool SetVolume(int volume) override {
-// TODO: Implement this.
+// TODO(henrika): Implement this.
 return false;
 }
 
@@ -390,12 +391,12 @@ class AlsaOutputStream : public SoundOutputStreamInterface,
 }
 
 bool GetVolume(int *volume) override {
-// TODO: Implement this.
+// TODO(henrika): Implement this.
 return false;
 }
 
 bool SetVolume(int volume) override {
-// TODO: Implement this.
+// TODO(henrika): Implement this.
 return false;
 }
 
@@ -569,7 +570,6 @@ bool AlsaSoundSystem::EnumerateDevices(
 strcmp(name, ignore_null) != 0 &&
 strcmp(name, ignore_pulse) != 0 &&
 !rtc::starts_with(name, ignore_prefix)) {
-
 // Yes, we do.
 char *desc = symbol_table_.snd_device_name_get_hint()(*list, "DESC");
 if (!desc) {
@@ -622,7 +622,6 @@ StreamInterface *AlsaSoundSystem::OpenDevice(
 int wait_timeout_ms,
 int flags,
 int freq)) {
-
 if (!IsInitialized()) {
 return NULL;
 }
@@ -13,6 +13,7 @@
 #ifdef HAVE_LIBPULSE
 
 #include <algorithm>
 #include <string>
 
 #include "webrtc/base/arraysize.h"
 #include "webrtc/base/common.h"
@@ -216,17 +217,6 @@ class PulseAudioStream {
 class PulseAudioInputStream :
 public SoundInputStreamInterface,
 private rtc::Worker {
-
-struct GetVolumeCallbackData {
-PulseAudioInputStream *instance;
-pa_cvolume *channel_volumes;
-};
-
-struct GetSourceChannelCountCallbackData {
-PulseAudioInputStream *instance;
-uint8_t *channels;
-};
-
 public:
 PulseAudioInputStream(PulseAudioSoundSystem *pulse,
 pa_stream *stream,
@@ -386,6 +376,16 @@ class PulseAudioInputStream :
 }
 
 private:
+struct GetVolumeCallbackData {
+PulseAudioInputStream* instance;
+pa_cvolume* channel_volumes;
+};
+
+struct GetSourceChannelCountCallbackData {
+PulseAudioInputStream* instance;
+uint8_t* channels;
+};
+
 void Lock() {
 stream_.Lock();
 }
@@ -580,12 +580,6 @@ class PulseAudioInputStream :
 class PulseAudioOutputStream :
 public SoundOutputStreamInterface,
 private rtc::Worker {
-
-struct GetVolumeCallbackData {
-PulseAudioOutputStream *instance;
-pa_cvolume *channel_volumes;
-};
-
 public:
 PulseAudioOutputStream(PulseAudioSoundSystem *pulse,
 pa_stream *stream,
@@ -733,7 +727,7 @@ class PulseAudioOutputStream :
 }
 
 #if 0
-// TODO: Versions 0.9.16 and later of Pulse have a new API for
+// TODO(henrika): Versions 0.9.16 and later of Pulse have a new API for
 // zero-copy writes, but Hardy is not new enough to have that so we can't
 // rely on it. Perhaps auto-detect if it's present or not and use it if we
 // can?
@@ -777,6 +771,11 @@ class PulseAudioOutputStream :
 #endif
 
 private:
+struct GetVolumeCallbackData {
+PulseAudioOutputStream* instance;
+pa_cvolume* channel_volumes;
+};
+
 void Lock() {
 stream_.Lock();
 }
@@ -1165,7 +1164,7 @@ bool PulseAudioSoundSystem::ConnectToPulse(pa_context *context) {
 pa_context *PulseAudioSoundSystem::CreateNewConnection() {
 // Create connection context.
 std::string app_name;
-// TODO: Pulse etiquette says this name should be localized. Do
+// TODO(henrika): Pulse etiquette says this name should be localized. Do
 // we care?
 rtc::Filesystem::GetApplicationName(&app_name);
 pa_context *context = symbol_table_.pa_context_new()(
@@ -65,4 +65,4 @@ class SoundInputStreamInterface {
 
 } // namespace rtc
 
-#endif // WEBRTC_SOUND_SOUNDOUTPUTSTREAMINTERFACE_H_
+#endif // WEBRTC_SOUND_SOUNDINPUTSTREAMINTERFACE_H_
@@ -65,7 +65,7 @@ static void DitherSilence(AudioFrame* frame) {
 sum_squared += frame->data_[n] * frame->data_[n];
 if (sum_squared <= sum_squared_silence) {
 for (size_t n = 0; n < frame->samples_per_channel_; n++)
-frame->data_[n] = (rand() & 0xF) - 8;
+frame->data_[n] = (rand() & 0xF) - 8; // NOLINT: ignore non-threadsafe.
 }
 }
 
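The NOLINT added above silences cpplint's warning that rand() is not thread-safe; the dither expression itself just maps the low four random bits onto small values centred on zero. A one-off check of that arithmetic, in plain Python and outside the WebRTC tree:

    # (r & 0xF) keeps the low four bits (0..15); subtracting 8 shifts the
    # dither range to [-8, 7], i.e. low-level noise in place of pure silence.
    values = {(r & 0xF) - 8 for r in range(256)}
    assert min(values) == -8 and max(values) == 7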
@@ -227,7 +227,7 @@ void RunAnalysis(const char* reference_file_name, const char* test_file_name,
 ResultsContainer* results) {
 // Check if the reference_file_name ends with "y4m".
 bool y4m_mode = false;
-if (std::string(reference_file_name).find("y4m") != std::string::npos){
+if (std::string(reference_file_name).find("y4m") != std::string::npos) {
 y4m_mode = true;
 }
 
@@ -15,6 +15,7 @@
 
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/tools/frame_editing/frame_editing_lib.h"
 #include "webrtc/typedefs.h"
 
 using std::string;
@@ -8,8 +8,8 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
-#define WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
+#ifndef WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
+#define WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
 
 #include <string>
 
@@ -36,4 +36,4 @@ int EditFrames(const std::string& in_path, int width, int height,
 int last_frame_to_process, const std::string& out_path);
 } // namespace webrtc
 
-#endif // WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_H_
+#endif // WEBRTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
@@ -25,7 +25,7 @@ void CompareFiles(const char* reference_file_name, const char* test_file_name,
 const char* results_file_name, int width, int height) {
 // Check if the reference_file_name ends with "y4m".
 bool y4m_mode = false;
-if (std::string(reference_file_name).find("y4m") != std::string::npos){
+if (std::string(reference_file_name).find("y4m") != std::string::npos) {
 y4m_mode = true;
 }
 
@@ -38,8 +38,8 @@ void CompareFiles(const char* reference_file_name, const char* test_file_name,
 uint8_t* ref_frame = new uint8_t[size];
 
 bool read_result = true;
-for(int frame_counter = 0; frame_counter < MAX_NUM_FRAMES_PER_FILE;
-++frame_counter){
+for (int frame_counter = 0; frame_counter < MAX_NUM_FRAMES_PER_FILE;
+++frame_counter) {
 read_result &= (y4m_mode) ? webrtc::test::ExtractFrameFromY4mFile(
 reference_file_name, width, height, frame_counter, ref_frame):
 webrtc::test::ExtractFrameFromYuvFile(reference_file_name, width,