Replace scoped_ptr with unique_ptr in webrtc/modules/video_*/

BUG=webrtc:5520

Review URL: https://codereview.webrtc.org/1738863002

Cr-Commit-Position: refs/heads/master@{#11836}
This commit is contained in: refs/heads/master
Authored by kwiberg on 2016-03-02 01:01:11 -08:00; committed by Commit bot
parent 4eb1ddd817
commit e065fcf9a9
18 changed files with 72 additions and 49 deletions

View File

@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
#include <memory>
#include "webrtc/base/platform_thread.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
@@ -39,8 +41,8 @@ private:
bool AllocateVideoBuffers();
bool DeAllocateVideoBuffers();
// TODO(pbos): Stop using scoped_ptr and resetting the thread.
rtc::scoped_ptr<rtc::PlatformThread> _captureThread;
// TODO(pbos): Stop using unique_ptr and resetting the thread.
std::unique_ptr<rtc::PlatformThread> _captureThread;
CriticalSectionWrapper* _captureCritSect;
int32_t _deviceId;

View File

@@ -11,10 +11,10 @@
#include <stdio.h>
#include <map>
#include <memory>
#include <sstream>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/utility/include/process_thread.h"
@@ -25,7 +25,6 @@
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/video_frame.h"
using rtc::scoped_ptr;
using webrtc::CriticalSectionWrapper;
using webrtc::CriticalSectionScoped;
using webrtc::SleepMs;
@@ -183,7 +182,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
}
private:
scoped_ptr<CriticalSectionWrapper> capture_cs_;
std::unique_ptr<CriticalSectionWrapper> capture_cs_;
VideoCaptureCapability capability_;
int capture_delay_;
int64_t last_render_time_ms_;
@@ -223,7 +222,7 @@ class TestVideoCaptureFeedBack : public VideoCaptureFeedBack {
}
private:
scoped_ptr<CriticalSectionWrapper> capture_cs_;
std::unique_ptr<CriticalSectionWrapper> capture_cs_;
unsigned int frame_rate_;
VideoCaptureAlarm alarm_;
};
@@ -270,7 +269,7 @@ class VideoCaptureTest : public testing::Test {
EXPECT_EQ(capability.height, resulting_capability.height);
}
scoped_ptr<VideoCaptureModule::DeviceInfo> device_info_;
std::unique_ptr<VideoCaptureModule::DeviceInfo> device_info_;
unsigned int number_of_devices_;
};
@@ -440,7 +439,8 @@ class VideoCaptureExternalTest : public testing::Test {
public:
void SetUp() {
capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
process_module_ = webrtc::ProcessThread::Create("ProcessThread");
process_module_ =
rtc::ScopedToUnique(webrtc::ProcessThread::Create("ProcessThread"));
process_module_->Start();
process_module_->RegisterModule(capture_module_);
@@ -472,7 +472,7 @@ class VideoCaptureExternalTest : public testing::Test {
webrtc::VideoCaptureExternal* capture_input_interface_;
rtc::scoped_refptr<VideoCaptureModule> capture_module_;
rtc::scoped_ptr<webrtc::ProcessThread> process_module_;
std::unique_ptr<webrtc::ProcessThread> process_module_;
webrtc::VideoFrame test_frame_;
TestVideoCaptureCallback capture_callback_;
TestVideoCaptureFeedBack capture_feedback_;
@@ -483,7 +483,7 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
@@ -505,7 +505,7 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
@@ -521,7 +521,7 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));
@@ -539,7 +539,7 @@ TEST_F(VideoCaptureExternalTest, Rotation) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
scoped_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
length, capture_callback_.capability(), 0));

View File

@@ -11,7 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_FRAME_PREPROCESSOR_H_
#define WEBRTC_MODULES_VIDEO_PROCESSING_FRAME_PREPROCESSOR_H_
#include "webrtc/base/scoped_ptr.h"
#include <memory>
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_processing/content_analysis.h"
#include "webrtc/modules/video_processing/spatial_resampler.h"
@@ -74,7 +75,7 @@ class VPMFramePreprocessor {
VPMSpatialResampler* spatial_resampler_;
VPMContentAnalysis* ca_;
VPMVideoDecimator* vd_;
rtc::scoped_ptr<VideoDenoiser> denoiser_;
std::unique_ptr<VideoDenoiser> denoiser_;
bool enable_ca_;
uint32_t frame_cnt_;
};

View File

@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
@@ -23,7 +25,7 @@ TEST_F(VideoProcessingTest, MAYBE_BrightnessDetection) {
uint32_t frameNum = 0;
int32_t brightnessWarning = 0;
uint32_t warningCount = 0;
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_) {
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,

View File

@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_processing/content_analysis.h"
@@ -28,7 +30,7 @@ TEST_F(VideoProcessingTest, ContentAnalysis) {
ca__c.Initialize(width_, height_);
ca__sse.Initialize(width_, height_);
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_) {
// Using ConvertToI420 to add stride to the image.

View File

@@ -11,6 +11,8 @@
#include <stdio.h>
#include <stdlib.h>
#include <memory>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
@@ -46,7 +48,7 @@ TEST_F(VideoProcessingTest, Deflickering) {
<< "Could not open output file: " << output_file << "\n";
printf("\nRun time [us / frame]:\n");
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
TickTime t0;
TickTime t1;

View File

@@ -10,6 +10,8 @@
#include <string.h>
#include <memory>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
@@ -19,8 +21,8 @@
namespace webrtc {
TEST_F(VideoProcessingTest, CopyMem) {
rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
std::unique_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
std::unique_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
uint8_t src[16 * 16], dst[16 * 16];
for (int i = 0; i < 16; ++i) {
for (int j = 0; j < 16; ++j) {
@@ -46,8 +48,8 @@ TEST_F(VideoProcessingTest, CopyMem) {
}
TEST_F(VideoProcessingTest, Variance) {
rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
std::unique_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
std::unique_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
uint8_t src[16 * 16], dst[16 * 16];
uint32_t sum = 0, sse = 0, var;
for (int i = 0; i < 16; ++i) {
@@ -69,8 +71,8 @@ TEST_F(VideoProcessingTest, Variance) {
}
TEST_F(VideoProcessingTest, MbDenoise) {
rtc::scoped_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
rtc::scoped_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
std::unique_ptr<DenoiserFilter> df_c(DenoiserFilter::Create(false));
std::unique_ptr<DenoiserFilter> df_sse_neon(DenoiserFilter::Create(true));
uint8_t running_src[16 * 16], src[16 * 16], dst[16 * 16], dst_ref[16 * 16];
// Test case: |diff| <= |3 + shift_inc1|
@@ -138,7 +140,7 @@ TEST_F(VideoProcessingTest, Denoiser) {
VideoFrame denoised_frame_c;
VideoFrame denoised_frame_sse_neon;
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_) {
// Using ConvertToI420 to add stride to the image.

View File

@@ -12,6 +12,7 @@
#include <gflags/gflags.h>
#include <memory>
#include <string>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@@ -116,7 +117,7 @@ TEST_F(VideoProcessingTest, HandleBadStats) {
#endif
VideoProcessing::FrameStats stats;
vp_->ClearFrameStats(&stats);
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_,
fread(video_buffer.get(), 1, frame_length_, source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
@@ -135,7 +136,7 @@ TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) {
VideoFrame video_frame2;
VideoProcessing::FrameStats stats;
// Only testing non-static functions here.
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_,
fread(video_buffer.get(), 1, frame_length_, source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
@@ -171,7 +172,7 @@ TEST_F(VideoProcessingTest, FrameStats) {
#endif
VideoProcessing::FrameStats stats;
vp_->ClearFrameStats(&stats);
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_,
fread(video_buffer.get(), 1, frame_length_, source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
@@ -235,7 +236,7 @@ TEST_F(VideoProcessingTest, Resampler) {
vp_->EnableTemporalDecimation(false);
// Reading test frame
rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_,
fread(video_buffer.get(), 1, frame_length_, source_file_));
// Using ConvertToI420 to add stride to the image.

View File

@@ -21,9 +21,9 @@ const int kMotionMagnitudeThreshold = 8 * 3;
const int kSumDiffThreshold = 16 * 16 * 2;
const int kSumDiffThresholdHigh = 600;
rtc::scoped_ptr<DenoiserFilter> DenoiserFilter::Create(
std::unique_ptr<DenoiserFilter> DenoiserFilter::Create(
bool runtime_cpu_detection) {
rtc::scoped_ptr<DenoiserFilter> filter;
std::unique_ptr<DenoiserFilter> filter;
if (runtime_cpu_detection) {
// If we know the minimum architecture at compile time, avoid CPU detection.

View File

@@ -11,7 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_
#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_
#include "webrtc/base/scoped_ptr.h"
#include <memory>
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_processing/include/video_processing_defines.h"
@@ -31,7 +32,7 @@ struct DenoiseMetrics {
class DenoiserFilter {
public:
static rtc::scoped_ptr<DenoiserFilter> Create(bool runtime_cpu_detection);
static std::unique_ptr<DenoiserFilter> Create(bool runtime_cpu_detection);
virtual ~DenoiserFilter() {}

View File

@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_
#define WEBRTC_MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_
#include <memory>
#include "webrtc/modules/video_processing/util/denoiser_filter.h"
#include "webrtc/modules/video_processing/util/skin_detection.h"
@@ -29,8 +31,8 @@ class VideoDenoiser {
uint8_t* y_dst);
int width_;
int height_;
rtc::scoped_ptr<DenoiseMetrics[]> metrics_;
rtc::scoped_ptr<DenoiserFilter> filter_;
std::unique_ptr<DenoiseMetrics[]> metrics_;
std::unique_ptr<DenoiserFilter> filter_;
};
} // namespace webrtc

View File

@@ -14,6 +14,7 @@
#include <jni.h>
#include <map>
#include <memory>
#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/video_render/i_video_render.h"
@@ -144,8 +145,8 @@ class VideoRenderAndroid: IVideoRender {
EventWrapper& _javaRenderEvent;
int64_t _lastJavaRenderEvent;
JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
// TODO(pbos): Remove scoped_ptr and use the member directly.
rtc::scoped_ptr<rtc::PlatformThread> _javaRenderThread;
// TODO(pbos): Remove unique_ptr and use the member directly.
std::unique_ptr<rtc::PlatformThread> _javaRenderThread;
};
} // namespace webrtc

View File

@@ -13,9 +13,9 @@
#include <list>
#include <map>
#include <memory>
#include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
@@ -62,10 +62,10 @@ class VideoRenderIosGles20 {
int SwapAndDisplayBuffers();
private:
rtc::scoped_ptr<CriticalSectionWrapper> gles_crit_sec_;
std::unique_ptr<CriticalSectionWrapper> gles_crit_sec_;
EventTimerWrapper* screen_update_event_;
// TODO(pbos): Remove scoped_ptr and use member directly.
rtc::scoped_ptr<rtc::PlatformThread> screen_update_thread_;
// TODO(pbos): Remove unique_ptr and use member directly.
std::unique_ptr<rtc::PlatformThread> screen_update_thread_;
VideoRenderIosView* view_;
Rect window_rect_;

View File

@@ -13,6 +13,7 @@
#include <list>
#include <map>
#include <memory>
#include "webrtc/modules/video_render/i_video_render.h"
@@ -98,7 +99,7 @@ class VideoRenderIosImpl : IVideoRender {
bool full_screen_;
CriticalSectionWrapper* crit_sec_;
rtc::scoped_ptr<VideoRenderIosGles20> ptr_ios_render_;
std::unique_ptr<VideoRenderIosGles20> ptr_ios_render_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_

View File

@@ -12,6 +12,8 @@
#error "This file requires ARC support."
#endif
#include <memory>
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
#include "webrtc/system_wrappers/include/trace.h"
@@ -19,7 +21,7 @@ using namespace webrtc;
@implementation VideoRenderIosView {
EAGLContext* _context;
rtc::scoped_ptr<webrtc::OpenGles20> _gles_renderer20;
std::unique_ptr<webrtc::OpenGles20> _gles_renderer20;
int _frameBufferWidth;
int _frameBufferHeight;
unsigned int _defaultFrameBuffer;

View File

@@ -29,6 +29,7 @@
#include <OpenGL/glu.h>
#include <list>
#include <map>
#include <memory>
class VideoRenderAGL;
@@ -142,8 +143,8 @@ class VideoRenderAGL {
bool _fullScreen;
int _id;
webrtc::CriticalSectionWrapper& _renderCritSec;
// TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
rtc::scoped_ptr<rtc::PlatformThread> _screenUpdateThread;
// TODO(pbos): Remove unique_ptr and use PlatformThread directly.
std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
webrtc::EventWrapper* _screenUpdateEvent;
bool _isHIViewRef;
AGLContext _aglContext;

View File

@@ -21,6 +21,7 @@
#include <QuickTime/QuickTime.h>
#include <list>
#include <map>
#include <memory>
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/video_render/video_render_defines.h"
@@ -169,8 +170,8 @@ private: // variables
bool _fullScreen;
int _id;
CriticalSectionWrapper& _nsglContextCritSec;
// TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
rtc::scoped_ptr<rtc::PlatformThread> _screenUpdateThread;
// TODO(pbos): Remove unique_ptr and use PlatformThread directly.
std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
EventTimerWrapper* _screenUpdateEvent;
NSOpenGLContext* _nsglContext;
NSOpenGLContext* _nsglFullScreenContext;

View File

@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
#include <memory>
#include "webrtc/modules/video_render/windows/i_video_render_win.h"
#include <d3d9.h>
@@ -203,8 +205,8 @@ private:
CriticalSectionWrapper& _refD3DCritsect;
Trace* _trace;
// TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
rtc::scoped_ptr<rtc::PlatformThread> _screenUpdateThread;
// TODO(pbos): Remove unique_ptr and use PlatformThread directly.
std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
EventTimerWrapper* _screenUpdateEvent;
HWND _hWnd;