diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc index 30ca166992..df943ffdb9 100644 --- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc +++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc @@ -658,11 +658,11 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( I420VideoFrame texture_image( &native_handle_, width, height, output_timestamp_, 0); texture_image.set_ntp_time_ms(output_ntp_time_ms_); - callback_status = callback_->Decoded(texture_image); + callback_status = callback_->Decoded(&texture_image); } else { decoded_image_.set_timestamp(output_timestamp_); decoded_image_.set_ntp_time_ms(output_ntp_time_ms_); - callback_status = callback_->Decoded(decoded_image_); + callback_status = callback_->Decoded(&decoded_image_); } if (callback_status > 0) { ALOGE("callback error"); diff --git a/talk/media/webrtc/fakewebrtcvideocapturemodule.h b/talk/media/webrtc/fakewebrtcvideocapturemodule.h index 93406cd55b..674ab25837 100644 --- a/talk/media/webrtc/fakewebrtcvideocapturemodule.h +++ b/talk/media/webrtc/fakewebrtcvideocapturemodule.h @@ -115,7 +115,7 @@ class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule { return false; } if (callback_) { - callback_->OnIncomingCapturedFrame(id_, sample); + callback_->OnIncomingCapturedFrame(id_, &sample); } return true; } diff --git a/talk/media/webrtc/webrtcpassthroughrender.cc b/talk/media/webrtc/webrtcpassthroughrender.cc index 0c6029d73e..815577ae7b 100644 --- a/talk/media/webrtc/webrtcpassthroughrender.cc +++ b/talk/media/webrtc/webrtcpassthroughrender.cc @@ -44,7 +44,7 @@ class PassthroughStream: public webrtc::VideoRenderCallback { virtual ~PassthroughStream() { } virtual int32_t RenderFrame(const uint32_t stream_id, - webrtc::I420VideoFrame& videoFrame) { + webrtc::I420VideoFrame* videoFrame) { rtc::CritScope cs(&stream_critical_); // Send frame for rendering directly if (running_ && renderer_) { diff --git 
a/talk/media/webrtc/webrtcpassthroughrender_unittest.cc b/talk/media/webrtc/webrtcpassthroughrender_unittest.cc index 72aa549465..bbf6ae27ae 100644 --- a/talk/media/webrtc/webrtcpassthroughrender_unittest.cc +++ b/talk/media/webrtc/webrtcpassthroughrender_unittest.cc @@ -44,7 +44,7 @@ class WebRtcPassthroughRenderTest : public testing::Test { } virtual int32_t RenderFrame(const uint32_t stream_id, - webrtc::I420VideoFrame& videoFrame) { + webrtc::I420VideoFrame* videoFrame) { ++frame_num_; LOG(INFO) << "RenderFrame stream_id: " << stream_id << " frame_num: " << frame_num_; @@ -143,21 +143,21 @@ TEST_F(WebRtcPassthroughRenderTest, Renderer) { int test_frame_num = 10; // RenderFrame without starting the render for (int i = 0; i < test_frame_num; ++i) { - stream1->RenderFrame(stream_id1, frame); + stream1->RenderFrame(stream_id1, &frame); } EXPECT_EQ(0, renderer1.frame_num()); // Start the render and test again. EXPECT_FALSE(StartRender(stream_id3)); EXPECT_TRUE(StartRender(stream_id1)); for (int i = 0; i < test_frame_num; ++i) { - stream1->RenderFrame(stream_id1, frame); + stream1->RenderFrame(stream_id1, &frame); } EXPECT_EQ(test_frame_num, renderer1.frame_num()); // Stop the render and test again. EXPECT_FALSE(StopRender(stream_id3)); EXPECT_TRUE(StopRender(stream_id1)); for (int i = 0; i < test_frame_num; ++i) { - stream1->RenderFrame(stream_id1, frame); + stream1->RenderFrame(stream_id1, &frame); } // The frame number should not have changed. 
EXPECT_EQ(test_frame_num, renderer1.frame_num()); @@ -166,7 +166,7 @@ TEST_F(WebRtcPassthroughRenderTest, Renderer) { EXPECT_TRUE(StartRender(stream_id2)); test_frame_num = 30; for (int i = 0; i < test_frame_num; ++i) { - stream2->RenderFrame(stream_id2, frame); + stream2->RenderFrame(stream_id2, &frame); } EXPECT_EQ(test_frame_num, renderer2.frame_num()); } diff --git a/talk/media/webrtc/webrtcvideocapturer.cc b/talk/media/webrtc/webrtcvideocapturer.cc index aaa6f1e480..9e6f2d1b26 100644 --- a/talk/media/webrtc/webrtcvideocapturer.cc +++ b/talk/media/webrtc/webrtcvideocapturer.cc @@ -354,7 +354,7 @@ bool WebRtcVideoCapturer::GetPreferredFourccs( } void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id, - webrtc::I420VideoFrame& sample) { + webrtc::I420VideoFrame* sample) { // This would be a normal CritScope, except that it's possible that: // (1) whatever system component producing this frame has taken a lock, and // (2) Stop() probably calls back into that system component, which may take @@ -371,12 +371,12 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id, // Log the size and pixel aspect ratio of the first captured frame. if (1 == captured_frames_) { LOG(LS_INFO) << "Captured frame size " - << sample.width() << "x" << sample.height() + << sample->width() << "x" << sample->height() << ". Expected format " << GetCaptureFormat()->ToString(); } if (start_thread_->IsCurrent()) { - SignalFrameCapturedOnStartThread(&sample); + SignalFrameCapturedOnStartThread(sample); } else { // This currently happens on with at least VideoCaptureModuleV4L2 and // possibly other implementations of WebRTC's VideoCaptureModule. @@ -385,7 +385,7 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id, // thread hop. 
start_thread_->Invoke( rtc::Bind(&WebRtcVideoCapturer::SignalFrameCapturedOnStartThread, - this, &sample)); + this, sample)); } } diff --git a/talk/media/webrtc/webrtcvideocapturer.h b/talk/media/webrtc/webrtcvideocapturer.h index c0f7807e6f..bf459acb05 100644 --- a/talk/media/webrtc/webrtcvideocapturer.h +++ b/talk/media/webrtc/webrtcvideocapturer.h @@ -81,7 +81,7 @@ class WebRtcVideoCapturer : public VideoCapturer, private: // Callback when a frame is captured by camera. virtual void OnIncomingCapturedFrame(const int32_t id, - webrtc::I420VideoFrame& frame); + webrtc::I420VideoFrame* frame); virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay); diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc index 8495924059..7c9771326b 100644 --- a/webrtc/common_video/i420_video_frame_unittest.cc +++ b/webrtc/common_video/i420_video_frame_unittest.cc @@ -312,7 +312,7 @@ TEST(TestI420VideoFrame, TextureInitialValues) { TEST(TestI420VideoFrame, RefCount) { NativeHandleImpl handle; EXPECT_EQ(0, handle.ref_count()); - I420VideoFrame *frame = new I420VideoFrame(&handle, 640, 480, 100, 200); + I420VideoFrame* frame = new I420VideoFrame(&handle, 640, 480, 100, 200); EXPECT_EQ(1, handle.ref_count()); delete frame; EXPECT_EQ(0, handle.ref_count()); diff --git a/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/webrtc/common_video/libyuv/include/webrtc_libyuv.h index d8e931d1df..6cbba22f0b 100644 --- a/webrtc/common_video/libyuv/include/webrtc_libyuv.h +++ b/webrtc/common_video/libyuv/include/webrtc_libyuv.h @@ -148,11 +148,11 @@ int ConvertNV12ToRGB565(const uint8_t* src_frame, // Compute PSNR for an I420 frame (all planes). // Returns the PSNR in decibel, to a maximum of kInfinitePSNR. 
-double I420PSNR(const I420VideoFrame* ref_frame, - const I420VideoFrame* test_frame); +double I420PSNR(const I420VideoFrame& ref_frame, + const I420VideoFrame& test_frame); // Compute SSIM for an I420 frame (all planes). -double I420SSIM(const I420VideoFrame* ref_frame, - const I420VideoFrame* test_frame); +double I420SSIM(const I420VideoFrame& ref_frame, + const I420VideoFrame& test_frame); } #endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_ diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc index 8491aa8373..b2022f518d 100644 --- a/webrtc/common_video/libyuv/libyuv_unittest.cc +++ b/webrtc/common_video/libyuv/libyuv_unittest.cc @@ -38,18 +38,16 @@ int PrintBuffer(const uint8_t* buffer, int width, int height, int stride) { } -int PrintFrame(const I420VideoFrame* frame, const char* str) { - if (frame == NULL) - return -1; - printf("%s %dx%d \n", str, frame->width(), frame->height()); +int PrintFrame(const I420VideoFrame& frame, const char* str) { + printf("%s %dx%d \n", str, frame.width(), frame.height()); int ret = 0; for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) { PlaneType plane_type = static_cast<PlaneType>(plane_num); - int width = (plane_num ? (frame->width() + 1) / 2 : frame->width()); - int height = (plane_num ? (frame->height() + 1) / 2 : frame->height()); - ret += PrintBuffer(frame->buffer(plane_type), width, height, - frame->stride(plane_type)); + int width = (plane_num ? (frame.width() + 1) / 2 : frame.width()); + int height = (plane_num ?
(frame.height() + 1) / 2 : frame.height()); + ret += PrintBuffer(frame.buffer(plane_type), width, height, + frame.stride(plane_type)); } return ret; } @@ -156,7 +154,7 @@ TEST_F(TestLibYuv, ConvertTest) { if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { return; } - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); EXPECT_EQ(48.0, psnr); j++; @@ -176,7 +174,7 @@ TEST_F(TestLibYuv, ConvertTest) { if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { return; } - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); // Optimization Speed- quality trade-off => 45 dB only (platform dependant). EXPECT_GT(ceil(psnr), 44); @@ -187,7 +185,7 @@ TEST_F(TestLibYuv, ConvertTest) { EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get())); EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_, height_, 0, kVideoRotation_0, &res_i420_frame)); - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); EXPECT_EQ(48.0, psnr); if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { return; @@ -212,7 +210,7 @@ TEST_F(TestLibYuv, ConvertTest) { ConvertToI420(kI420, res_i420_buffer.get(), 0, 0, width_, height_, 0, kVideoRotation_0, &res_i420_frame); - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); EXPECT_EQ(48.0, psnr); j++; @@ -227,7 +225,7 @@ TEST_F(TestLibYuv, ConvertTest) { return; } - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); EXPECT_EQ(48.0, psnr); printf("\nConvert #%d I420 <-> RGB565\n", j); rtc::scoped_ptr<uint8_t[]> out_rgb565_buffer( @@ -243,7 +241,7 @@ TEST_F(TestLibYuv, ConvertTest) { } j++; - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565, // Another example is I420ToRGB24, the psnr
is 44 // TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB. @@ -262,7 +260,7 @@ TEST_F(TestLibYuv, ConvertTest) { return; } - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888, EXPECT_GT(ceil(psnr), 42); @@ -293,7 +291,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) { if (PrintI420VideoFrame(res_i420_frame, output_file) < 0) { return; } - psnr = I420PSNR(&orig_frame_, &res_i420_frame); + psnr = I420PSNR(orig_frame_, res_i420_frame); EXPECT_EQ(48.0, psnr); } diff --git a/webrtc/common_video/libyuv/scaler_unittest.cc b/webrtc/common_video/libyuv/scaler_unittest.cc index d77ce59e56..d8f718501f 100644 --- a/webrtc/common_video/libyuv/scaler_unittest.cc +++ b/webrtc/common_video/libyuv/scaler_unittest.cc @@ -312,7 +312,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file, required_size, kVideoRotation_0, &in_frame)); EXPECT_EQ(0, ConvertToI420(kI420, output_buffer, 0, 0, width, height, required_size, kVideoRotation_0, &out_frame)); - double psnr = I420PSNR(&in_frame, &out_frame); + double psnr = I420PSNR(in_frame, out_frame); avg_psnr += psnr; } avg_psnr = avg_psnr / frame_count; diff --git a/webrtc/common_video/libyuv/webrtc_libyuv.cc b/webrtc/common_video/libyuv/webrtc_libyuv.cc index 65b4d0c5cd..e62e2ea570 100644 --- a/webrtc/common_video/libyuv/webrtc_libyuv.cc +++ b/webrtc/common_video/libyuv/webrtc_libyuv.cc @@ -291,57 +291,47 @@ int ConvertFromYV12(const I420VideoFrame& src_frame, } // Compute PSNR for an I420 frame (all planes) -double I420PSNR(const I420VideoFrame* ref_frame, - const I420VideoFrame* test_frame) { - if (!ref_frame || !test_frame) +double I420PSNR(const I420VideoFrame& ref_frame, + const I420VideoFrame& test_frame) { + if (ref_frame.width() != test_frame.width() || + ref_frame.height() != test_frame.height()) return -1; - else if ((ref_frame->width() != test_frame->width()) || - (ref_frame->height() != 
test_frame->height())) - return -1; - else if (ref_frame->width() < 0 || ref_frame->height() < 0) - return -1; - - double psnr = libyuv::I420Psnr(ref_frame->buffer(kYPlane), - ref_frame->stride(kYPlane), - ref_frame->buffer(kUPlane), - ref_frame->stride(kUPlane), - ref_frame->buffer(kVPlane), - ref_frame->stride(kVPlane), - test_frame->buffer(kYPlane), - test_frame->stride(kYPlane), - test_frame->buffer(kUPlane), - test_frame->stride(kUPlane), - test_frame->buffer(kVPlane), - test_frame->stride(kVPlane), - test_frame->width(), test_frame->height()); + double psnr = libyuv::I420Psnr(ref_frame.buffer(kYPlane), + ref_frame.stride(kYPlane), + ref_frame.buffer(kUPlane), + ref_frame.stride(kUPlane), + ref_frame.buffer(kVPlane), + ref_frame.stride(kVPlane), + test_frame.buffer(kYPlane), + test_frame.stride(kYPlane), + test_frame.buffer(kUPlane), + test_frame.stride(kUPlane), + test_frame.buffer(kVPlane), + test_frame.stride(kVPlane), + test_frame.width(), test_frame.height()); // LibYuv sets the max psnr value to 128, we restrict it here. // In case of 0 mse in one frame, 128 can skew the results significantly. return (psnr > kPerfectPSNR) ? 
kPerfectPSNR : psnr; } // Compute SSIM for an I420 frame (all planes) -double I420SSIM(const I420VideoFrame* ref_frame, - const I420VideoFrame* test_frame) { - if (!ref_frame || !test_frame) +double I420SSIM(const I420VideoFrame& ref_frame, + const I420VideoFrame& test_frame) { + if (ref_frame.width() != test_frame.width() || + ref_frame.height() != test_frame.height()) return -1; - else if ((ref_frame->width() != test_frame->width()) || - (ref_frame->height() != test_frame->height())) - return -1; - else if (ref_frame->width() < 0 || ref_frame->height() < 0) - return -1; - - return libyuv::I420Ssim(ref_frame->buffer(kYPlane), - ref_frame->stride(kYPlane), - ref_frame->buffer(kUPlane), - ref_frame->stride(kUPlane), - ref_frame->buffer(kVPlane), - ref_frame->stride(kVPlane), - test_frame->buffer(kYPlane), - test_frame->stride(kYPlane), - test_frame->buffer(kUPlane), - test_frame->stride(kUPlane), - test_frame->buffer(kVPlane), - test_frame->stride(kVPlane), - test_frame->width(), test_frame->height()); + return libyuv::I420Ssim(ref_frame.buffer(kYPlane), + ref_frame.stride(kYPlane), + ref_frame.buffer(kUPlane), + ref_frame.stride(kUPlane), + ref_frame.buffer(kVPlane), + ref_frame.stride(kVPlane), + test_frame.buffer(kYPlane), + test_frame.stride(kYPlane), + test_frame.buffer(kUPlane), + test_frame.stride(kUPlane), + test_frame.buffer(kVPlane), + test_frame.stride(kVPlane), + test_frame.width(), test_frame.height()); } } // namespace webrtc diff --git a/webrtc/modules/utility/interface/file_player.h b/webrtc/modules/utility/interface/file_player.h index d812deb09e..bbcb8f67f3 100644 --- a/webrtc/modules/utility/interface/file_player.h +++ b/webrtc/modules/utility/interface/file_player.h @@ -93,12 +93,12 @@ public: virtual int32_t video_codec_info(VideoCodec& /*videoCodec*/) const {return -1;} - virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/) + virtual int32_t GetVideoFromFile(I420VideoFrame* /*videoFrame*/) { return -1;} // Same as 
GetVideoFromFile(). videoFrame will have the resolution specified // by the width outWidth and height outHeight in pixels. - virtual int32_t GetVideoFromFile(I420VideoFrame& /*videoFrame*/, + virtual int32_t GetVideoFromFile(I420VideoFrame* /*videoFrame*/, const uint32_t /*outWidth*/, const uint32_t /*outHeight*/) {return -1;} diff --git a/webrtc/modules/video_capture/include/video_capture_defines.h b/webrtc/modules/video_capture/include/video_capture_defines.h index 93a03f331d..e01980a248 100644 --- a/webrtc/modules/video_capture/include/video_capture_defines.h +++ b/webrtc/modules/video_capture/include/video_capture_defines.h @@ -98,7 +98,7 @@ class VideoCaptureDataCallback { public: virtual void OnIncomingCapturedFrame(const int32_t id, - I420VideoFrame& videoFrame) = 0; + I420VideoFrame* videoFrame) = 0; virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay) = 0; protected: diff --git a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc index 04a93a86bf..8abbb2abe9 100644 --- a/webrtc/modules/video_capture/test/video_capture_unittest.cc +++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc @@ -105,10 +105,10 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback { } virtual void OnIncomingCapturedFrame(const int32_t id, - webrtc::I420VideoFrame& videoFrame) { + webrtc::I420VideoFrame* videoFrame) { CriticalSectionScoped cs(capture_cs_.get()); - int height = videoFrame.height(); - int width = videoFrame.width(); + int height = videoFrame->height(); + int width = videoFrame->width(); #if ANDROID // Android camera frames may be rotated depending on test device // orientation. @@ -126,21 +126,21 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback { #endif // RenderTimstamp should be the time now. 
EXPECT_TRUE( - videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 && - videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp()); + videoFrame->render_time_ms() >= TickTime::MillisecondTimestamp()-30 && + videoFrame->render_time_ms() <= TickTime::MillisecondTimestamp()); - if ((videoFrame.render_time_ms() > + if ((videoFrame->render_time_ms() > last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS && last_render_time_ms_ > 0) || - (videoFrame.render_time_ms() < + (videoFrame->render_time_ms() < last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS && last_render_time_ms_ > 0)) { timing_warnings_++; } incoming_frames_++; - last_render_time_ms_ = videoFrame.render_time_ms(); - last_frame_.CopyFrame(videoFrame); + last_render_time_ms_ = videoFrame->render_time_ms(); + last_frame_.CopyFrame(*videoFrame); } virtual void OnCaptureDelayChanged(const int32_t id, diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc index a6a9cd1a43..6eddd82f1c 100644 --- a/webrtc/modules/video_capture/video_capture_impl.cc +++ b/webrtc/modules/video_capture/video_capture_impl.cc @@ -215,7 +215,7 @@ int32_t VideoCaptureImpl::CaptureDelay() return _setCaptureDelay; } -int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame, +int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame* captureFrame, int64_t capture_time) { UpdateFrameCount(); // frame count used for local frame rate callback. 
@@ -227,16 +227,16 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame, // Set the capture time if (capture_time != 0) { - captureFrame.set_render_time_ms(capture_time - delta_ntp_internal_ms_); + captureFrame->set_render_time_ms(capture_time - delta_ntp_internal_ms_); } else { - captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp()); + captureFrame->set_render_time_ms(TickTime::MillisecondTimestamp()); } - if (captureFrame.render_time_ms() == last_capture_time_) { + if (captureFrame->render_time_ms() == last_capture_time_) { // We don't allow the same capture time for two frames, drop this one. return -1; } - last_capture_time_ = captureFrame.render_time_ms(); + last_capture_time_ = captureFrame->render_time_ms(); if (_dataCallBack) { if (callOnCaptureDelayChanged) { @@ -322,7 +322,7 @@ int32_t VideoCaptureImpl::IncomingFrame( _captureFrame.set_rotation(kVideoRotation_0); } - DeliverCapturedFrame(_captureFrame, captureTime); + DeliverCapturedFrame(&_captureFrame, captureTime); } else // Encoded format { @@ -338,7 +338,7 @@ int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame, CriticalSectionScoped cs(&_apiCs); CriticalSectionScoped cs2(&_callBackCs); - DeliverCapturedFrame(*video_frame, captureTime); + DeliverCapturedFrame(video_frame, captureTime); return 0; } diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h index 1a2c8bfac3..cf3ae4d337 100644 --- a/webrtc/modules/video_capture/video_capture_impl.h +++ b/webrtc/modules/video_capture/video_capture_impl.h @@ -107,7 +107,7 @@ public: protected: VideoCaptureImpl(const int32_t id); virtual ~VideoCaptureImpl(); - int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame, + int32_t DeliverCapturedFrame(I420VideoFrame* captureFrame, int64_t capture_time); int32_t _id; // Module ID diff --git a/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc 
b/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc index be2d17d874..f2d99d90d9 100644 --- a/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc +++ b/webrtc/modules/video_coding/codecs/i420/main/source/i420.cc @@ -215,7 +215,7 @@ int I420Decoder::Decode(const EncodedImage& inputImage, bool /*missingFrames*/, } _decodedImage.set_timestamp(inputImage._timeStamp); - _decodeCompleteCallback->Decoded(_decodedImage); + _decodeCompleteCallback->Decoded(&_decodedImage); return WEBRTC_VIDEO_CODEC_OK; } diff --git a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h index ad72071840..2cca0b67bb 100644 --- a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h +++ b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h @@ -49,7 +49,7 @@ class MockVideoEncoder : public VideoEncoder { class MockDecodedImageCallback : public DecodedImageCallback { public: MOCK_METHOD1(Decoded, - int32_t(I420VideoFrame& decodedImage)); + int32_t(I420VideoFrame* decodedImage)); MOCK_METHOD1(ReceivedDecodedReferenceFrame, int32_t(const uint64_t pictureId)); MOCK_METHOD1(ReceivedDecodedFrame, diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc index d6d8946dac..07caf62bc9 100644 --- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc +++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc @@ -413,8 +413,8 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded( } int32_t VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded( - I420VideoFrame& image) { - video_processor_->FrameDecoded(image); // forward to parent class + I420VideoFrame* image) { + video_processor_->FrameDecoded(*image); // forward to parent class return 0; } diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.h 
b/webrtc/modules/video_coding/codecs/test/videoprocessor.h index 63d736394e..66c7a283fa 100644 --- a/webrtc/modules/video_coding/codecs/test/videoprocessor.h +++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.h @@ -241,7 +241,7 @@ class VideoProcessorImpl : public VideoProcessor { explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp) : video_processor_(vp) { } - int32_t Decoded(webrtc::I420VideoFrame& image) override; + int32_t Decoded(webrtc::I420VideoFrame* image) override; private: VideoProcessorImpl* video_processor_; diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h index 4925b93238..e93e8bc271 100644 --- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h +++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h @@ -124,16 +124,16 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback { Vp8TestDecodedImageCallback() : decoded_frames_(0) { } - virtual int32_t Decoded(I420VideoFrame& decoded_image) { - last_decoded_frame_.CopyFrame(decoded_image); - for (int i = 0; i < decoded_image.width(); ++i) { - EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1); + virtual int32_t Decoded(I420VideoFrame* decoded_image) { + last_decoded_frame_.CopyFrame(*decoded_image); + for (int i = 0; i < decoded_image->width(); ++i) { + EXPECT_NEAR(kColorY, decoded_image->buffer(kYPlane)[i], 1); } // TODO(mikhal): Verify the difference between U,V and the original. 
- for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) { - EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4); - EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4); + for (int i = 0; i < ((decoded_image->width() + 1) / 2); ++i) { + EXPECT_NEAR(kColorU, decoded_image->buffer(kUPlane)[i], 4); + EXPECT_NEAR(kColorV, decoded_image->buffer(kVPlane)[i], 4); } decoded_frames_++; return 0; diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc index 43fc9c8e8a..2b621436c7 100644 --- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc +++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc @@ -78,7 +78,7 @@ class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback { public: explicit Vp8UnitTestDecodeCompleteCallback(I420VideoFrame* frame) : decoded_frame_(frame), decode_complete(false) {} - int Decoded(webrtc::I420VideoFrame& frame); + int Decoded(webrtc::I420VideoFrame* frame); bool DecodeComplete(); private: @@ -94,8 +94,8 @@ bool Vp8UnitTestDecodeCompleteCallback::DecodeComplete() { return false; } -int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image) { - decoded_frame_->CopyFrame(image); +int Vp8UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame* image) { + decoded_frame_->CopyFrame(*image); decode_complete = true; return 0; } @@ -227,7 +227,7 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) { decoder_->Decode(encoded_frame_, false, NULL)); EXPECT_GT(WaitForDecodedFrame(), 0u); // Compute PSNR on all planes (faster than SSIM). 
- EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36); + EXPECT_GT(I420PSNR(input_frame_, decoded_frame_), 36); EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp()); EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms()); } @@ -249,7 +249,7 @@ TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(DecodeWithACompleteKeyFrame)) { encoded_frame_._frameType = kKeyFrame; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame_, false, NULL)); - EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36); + EXPECT_GT(I420PSNR(input_frame_, decoded_frame_), 36); } TEST_F(TestVp8Impl, TestReset) { diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc index 75282e9790..2e5989d566 100644 --- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc +++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc @@ -1346,7 +1346,7 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, img->stride[VPX_PLANE_V]); decoded_image_.set_timestamp(timestamp); decoded_image_.set_ntp_time_ms(ntp_time_ms); - int ret = decode_complete_callback_->Decoded(decoded_image_); + int ret = decode_complete_callback_->Decoded(&decoded_image_); if (ret != 0) return ret; diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc index 224d57cd29..f4ce0a1c0c 100644 --- a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc +++ b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc @@ -68,15 +68,15 @@ class Vp8SequenceCoderDecodeCallback : public webrtc::DecodedImageCallback { public: explicit Vp8SequenceCoderDecodeCallback(FILE* decoded_file) : decoded_file_(decoded_file) {} - int Decoded(webrtc::I420VideoFrame& frame); + int Decoded(webrtc::I420VideoFrame* frame); bool DecodeComplete(); private: FILE* decoded_file_; }; -int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::I420VideoFrame& image) { - EXPECT_EQ(0, webrtc::PrintI420VideoFrame(image, 
decoded_file_)); +int Vp8SequenceCoderDecodeCallback::Decoded(webrtc::I420VideoFrame* image) { + EXPECT_EQ(0, webrtc::PrintI420VideoFrame(*image, decoded_file_)); return 0; } diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc index fb167d75a6..48b730538f 100644 --- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc +++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc @@ -484,7 +484,7 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) { img->stride[VPX_PLANE_U], img->stride[VPX_PLANE_V]); decoded_image_.set_timestamp(timestamp); - int ret = decode_complete_callback_->Decoded(decoded_image_); + int ret = decode_complete_callback_->Decoded(&decoded_image_); if (ret != 0) return ret; return WEBRTC_VIDEO_CODEC_OK; diff --git a/webrtc/modules/video_coding/main/interface/video_coding_defines.h b/webrtc/modules/video_coding/main/interface/video_coding_defines.h index 50478cadc5..75362a575b 100644 --- a/webrtc/modules/video_coding/main/interface/video_coding_defines.h +++ b/webrtc/modules/video_coding/main/interface/video_coding_defines.h @@ -81,7 +81,7 @@ class VCMPacketizationCallback { // Callback class used for passing decoded frames which are ready to be rendered. 
class VCMReceiveCallback { public: - virtual int32_t FrameToRender(I420VideoFrame& videoFrame) = 0; + virtual int32_t FrameToRender(I420VideoFrame* videoFrame) = 0; virtual int32_t ReceivedDecodedReferenceFrame( const uint64_t pictureId) { return -1; diff --git a/webrtc/modules/video_coding/main/source/generic_decoder.cc b/webrtc/modules/video_coding/main/source/generic_decoder.cc index 4f8a7ca7d1..7be60cc155 100644 --- a/webrtc/modules/video_coding/main/source/generic_decoder.cc +++ b/webrtc/modules/video_coding/main/source/generic_decoder.cc @@ -46,7 +46,7 @@ VCMReceiveCallback* VCMDecodedFrameCallback::UserReceiveCallback() { return _receiveCallback; } -int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage) +int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame* decodedImage) { // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). @@ -55,7 +55,7 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage) { CriticalSectionScoped cs(_critSect); frameInfo = static_cast<VCMFrameInformation*>( - _timestampMap.Pop(decodedImage.timestamp())); + _timestampMap.Pop(decodedImage->timestamp())); callback = _receiveCallback; } @@ -66,14 +66,14 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage) } _timing.StopDecodeTimer( - decodedImage.timestamp(), + decodedImage->timestamp(), frameInfo->decodeStartTimeMs, _clock->TimeInMilliseconds(), frameInfo->renderTimeMs); if (callback != NULL) { - decodedImage.set_render_time_ms(frameInfo->renderTimeMs); + decodedImage->set_render_time_ms(frameInfo->renderTimeMs); callback->FrameToRender(decodedImage); } return WEBRTC_VIDEO_CODEC_OK; } diff --git a/webrtc/modules/video_coding/main/source/generic_decoder.h b/webrtc/modules/video_coding/main/source/generic_decoder.h index 846d4d3e11..76c5d5d172 100644 --- a/webrtc/modules/video_coding/main/source/generic_decoder.h +++ b/webrtc/modules/video_coding/main/source/generic_decoder.h @@ -39,7 +39,7 @@ public:
void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback); VCMReceiveCallback* UserReceiveCallback(); - virtual int32_t Decoded(I420VideoFrame& decodedImage); + virtual int32_t Decoded(I420VideoFrame* decodedImage); virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId); virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId); diff --git a/webrtc/modules/video_coding/main/test/normal_test.cc b/webrtc/modules/video_coding/main/test/normal_test.cc index 0a803d3b93..9940e9495c 100644 --- a/webrtc/modules/video_coding/main/test/normal_test.cc +++ b/webrtc/modules/video_coding/main/test/normal_test.cc @@ -147,13 +147,13 @@ VCMNTDecodeCompleteCallback::~VCMNTDecodeCompleteCallback() fclose(_decodedFile); } int32_t -VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame& videoFrame) +VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame* videoFrame) { - if (videoFrame.width() != _currentWidth || - videoFrame.height() != _currentHeight) + if (videoFrame->width() != _currentWidth || + videoFrame->height() != _currentHeight) { - _currentWidth = videoFrame.width(); - _currentHeight = videoFrame.height(); + _currentWidth = videoFrame->width(); + _currentHeight = videoFrame->height(); if (_decodedFile != NULL) { fclose(_decodedFile); @@ -161,11 +161,11 @@ VCMNTDecodeCompleteCallback::FrameToRender(webrtc::I420VideoFrame& videoFrame) } _decodedFile = fopen(_outname.c_str(), "wb"); } - if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) { + if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) { return -1; } - _decodedBytes += webrtc::CalcBufferSize(webrtc::kI420, videoFrame.width(), - videoFrame.height()); + _decodedBytes += webrtc::CalcBufferSize(webrtc::kI420, videoFrame->width(), + videoFrame->height()); return VCM_OK; } diff --git a/webrtc/modules/video_coding/main/test/normal_test.h b/webrtc/modules/video_coding/main/test/normal_test.h index 0015171e80..e316fa9874 100644 --- 
a/webrtc/modules/video_coding/main/test/normal_test.h +++ b/webrtc/modules/video_coding/main/test/normal_test.h @@ -68,7 +68,7 @@ public: void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback); // will write decoded frame into file - int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame) override; + int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame) override; size_t DecodedBytes(); private: diff --git a/webrtc/modules/video_coding/main/test/quality_modes_test.cc b/webrtc/modules/video_coding/main/test/quality_modes_test.cc index 2993e53aea..d584a470dc 100644 --- a/webrtc/modules/video_coding/main/test/quality_modes_test.cc +++ b/webrtc/modules/video_coding/main/test/quality_modes_test.cc @@ -237,7 +237,7 @@ QualityModesTest::Perform(const CmdArgs& args) _vcm->EnableFrameDropper(false); I420VideoFrame sourceFrame; - I420VideoFrame *decimatedFrame = NULL; + I420VideoFrame* decimatedFrame = NULL; uint8_t* tmpBuffer = new uint8_t[_lengthSourceFrame]; double startTime = clock()/(double)CLOCKS_PER_SEC; _vcm->SetChannelParameters(static_cast(1000 * _bitRate), 0, 0); @@ -483,18 +483,18 @@ VCMQMDecodeCompleteCallback::~VCMQMDecodeCompleteCallback() } int32_t -VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame) +VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame) { ++frames_cnt_since_drop_; // When receiving the first coded frame the last_frame variable is not set if (last_frame_.IsZeroSize()) { - last_frame_.CopyFrame(videoFrame); + last_frame_.CopyFrame(*videoFrame); } // Check if there were frames skipped. 
int num_frames_skipped = static_cast( 0.5f + - (videoFrame.timestamp() - (last_frame_.timestamp() + (9e4 / frame_rate_))) / + (videoFrame->timestamp() - (last_frame_.timestamp() + (9e4 / frame_rate_))) / (9e4 / frame_rate_)); // If so...put the last frames into the encoded stream to make up for the @@ -510,9 +510,9 @@ VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame) DataLog::InsertCell( feature_table_name_,"num frames since drop",frames_cnt_since_drop_); - if (_origWidth == videoFrame.width() && _origHeight == videoFrame.height()) + if (_origWidth == videoFrame->width() && _origHeight == videoFrame->height()) { - if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) { + if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) { return -1; } _frameCnt++; @@ -531,9 +531,9 @@ VCMQMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame) return -1; } - _decodedBytes += CalcBufferSize(kI420, videoFrame.width(), - videoFrame.height()); - videoFrame.SwapFrame(&last_frame_); + _decodedBytes += CalcBufferSize(kI420, videoFrame->width(), + videoFrame->height()); + videoFrame->SwapFrame(&last_frame_); return VCM_OK; } diff --git a/webrtc/modules/video_coding/main/test/quality_modes_test.h b/webrtc/modules/video_coding/main/test/quality_modes_test.h index 26c8229347..64f5df319e 100644 --- a/webrtc/modules/video_coding/main/test/quality_modes_test.h +++ b/webrtc/modules/video_coding/main/test/quality_modes_test.h @@ -61,7 +61,7 @@ public: virtual ~VCMQMDecodeCompleteCallback(); void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback); // will write decoded frame into file - int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame); + int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame); size_t DecodedBytes(); void SetOriginalFrameDimensions(int32_t width, int32_t height); int32_t buildInterpolator(); diff --git a/webrtc/modules/video_coding/main/test/test_callbacks.cc b/webrtc/modules/video_coding/main/test/test_callbacks.cc 
index 8a17a69b30..c47081121b 100644 --- a/webrtc/modules/video_coding/main/test/test_callbacks.cc +++ b/webrtc/modules/video_coding/main/test/test_callbacks.cc @@ -185,13 +185,13 @@ VCMRTPEncodeCompleteCallback::EncodeComplete() // Decoded Frame Callback Implementation int32_t -VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame& videoFrame) +VCMDecodeCompleteCallback::FrameToRender(I420VideoFrame* videoFrame) { - if (PrintI420VideoFrame(videoFrame, _decodedFile) < 0) { + if (PrintI420VideoFrame(*videoFrame, _decodedFile) < 0) { return -1; } - _decodedBytes += CalcBufferSize(kI420, videoFrame.width(), - videoFrame.height()); + _decodedBytes += CalcBufferSize(kI420, videoFrame->width(), + videoFrame->height()); return VCM_OK; } diff --git a/webrtc/modules/video_coding/main/test/test_callbacks.h b/webrtc/modules/video_coding/main/test/test_callbacks.h index 3d3c543f04..874af7d067 100644 --- a/webrtc/modules/video_coding/main/test/test_callbacks.h +++ b/webrtc/modules/video_coding/main/test/test_callbacks.h @@ -136,7 +136,7 @@ public: _decodedFile(decodedFile), _decodedBytes(0) {} virtual ~VCMDecodeCompleteCallback() {} // Write decoded frame into file - int32_t FrameToRender(webrtc::I420VideoFrame& videoFrame) override; + int32_t FrameToRender(webrtc::I420VideoFrame* videoFrame) override; size_t DecodedBytes(); private: FILE* _decodedFile; diff --git a/webrtc/modules/video_coding/main/test/test_util.cc b/webrtc/modules/video_coding/main/test/test_util.cc index d2b8f8c7fa..2b5b38d069 100644 --- a/webrtc/modules/video_coding/main/test/test_util.cc +++ b/webrtc/modules/video_coding/main/test/test_util.cc @@ -113,7 +113,7 @@ FileOutputFrameReceiver::~FileOutputFrameReceiver() { } int32_t FileOutputFrameReceiver::FrameToRender( - webrtc::I420VideoFrame& video_frame) { + webrtc::I420VideoFrame* video_frame) { if (timing_file_ == NULL) { std::string basename; std::string extension; @@ -123,14 +123,14 @@ int32_t FileOutputFrameReceiver::FrameToRender( return -1; } } - if 
(out_file_ == NULL || video_frame.width() != width_ || - video_frame.height() != height_) { + if (out_file_ == NULL || video_frame->width() != width_ || + video_frame->height() != height_) { if (out_file_) { fclose(out_file_); } - printf("New size: %dx%d\n", video_frame.width(), video_frame.height()); - width_ = video_frame.width(); - height_ = video_frame.height(); + printf("New size: %dx%d\n", video_frame->width(), video_frame->height()); + width_ = video_frame->width(); + height_ = video_frame->height(); std::string filename_with_width_height = AppendWidthHeightCount( out_filename_, width_, height_, count_); ++count_; @@ -139,9 +139,9 @@ int32_t FileOutputFrameReceiver::FrameToRender( return -1; } } - fprintf(timing_file_, "%u, %u\n", video_frame.timestamp(), - webrtc::MaskWord64ToUWord32(video_frame.render_time_ms())); - if (PrintI420VideoFrame(video_frame, out_file_) < 0) { + fprintf(timing_file_, "%u, %u\n", video_frame->timestamp(), + webrtc::MaskWord64ToUWord32(video_frame->render_time_ms())); + if (PrintI420VideoFrame(*video_frame, out_file_) < 0) { return -1; } return 0; diff --git a/webrtc/modules/video_coding/main/test/test_util.h b/webrtc/modules/video_coding/main/test/test_util.h index b1c156d6db..9923d79151 100644 --- a/webrtc/modules/video_coding/main/test/test_util.h +++ b/webrtc/modules/video_coding/main/test/test_util.h @@ -87,7 +87,7 @@ class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback { virtual ~FileOutputFrameReceiver(); // VCMReceiveCallback - virtual int32_t FrameToRender(webrtc::I420VideoFrame& video_frame); + virtual int32_t FrameToRender(webrtc::I420VideoFrame* video_frame); private: std::string out_filename_; diff --git a/webrtc/modules/video_processing/main/source/video_processing_impl.cc b/webrtc/modules/video_processing/main/source/video_processing_impl.cc index 6e7808ede3..bf2d622283 100644 --- a/webrtc/modules/video_processing/main/source/video_processing_impl.cc +++ 
b/webrtc/modules/video_processing/main/source/video_processing_impl.cc @@ -169,7 +169,7 @@ uint32_t VideoProcessingModuleImpl::DecimatedHeight() const { int32_t VideoProcessingModuleImpl::PreprocessFrame( const I420VideoFrame& frame, - I420VideoFrame **processed_frame) { + I420VideoFrame** processed_frame) { CriticalSectionScoped mutex(&mutex_); return frame_pre_processor_.PreprocessFrame(frame, processed_frame); } diff --git a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc index 60a2e41c4c..1592e376b4 100644 --- a/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc +++ b/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc @@ -130,7 +130,7 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize) { EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->SetTargetResolution(0,0,0)); - I420VideoFrame *out_frame = NULL; + I420VideoFrame* out_frame = NULL; EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->PreprocessFrame(bad_frame, &out_frame)); } @@ -358,7 +358,7 @@ void TestSize(const I420VideoFrame& source_frame, WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame); // Compute PSNR against the cropped source frame and check expectation. - double psnr = I420PSNR(&cropped_source_frame, out_frame); + double psnr = I420PSNR(cropped_source_frame, *out_frame); EXPECT_GT(psnr, expected_psnr); printf("PSNR: %f. 
PSNR is between source of size %d %d, and a modified " "source which is scaled down/up to: %d %d, and back to source size \n", diff --git a/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc b/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc index c2afbbd639..1afb474fe6 100644 --- a/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc +++ b/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc @@ -383,10 +383,10 @@ int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder, int32_t AndroidNativeOpenGl2Channel::RenderFrame( const uint32_t /*streamId*/, - I420VideoFrame& videoFrame) { + I420VideoFrame* videoFrame) { // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); _renderCritSect.Enter(); - _bufferToRender.SwapFrame(&videoFrame); + _bufferToRender.SwapFrame(videoFrame); _renderCritSect.Leave(); _renderer.ReDraw(); return 0; diff --git a/webrtc/modules/video_render/android/video_render_android_native_opengl2.h b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h index f5e5b57e54..a9b39cdd19 100644 --- a/webrtc/modules/video_render/android/video_render_android_native_opengl2.h +++ b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h @@ -35,7 +35,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream { //Implement VideoRenderCallback virtual int32_t RenderFrame( const uint32_t streamId, - I420VideoFrame& videoFrame); + I420VideoFrame* videoFrame); //Implements AndroidStream virtual void DeliverFrame(JNIEnv* jniEnv); diff --git a/webrtc/modules/video_render/android/video_render_android_surface_view.cc b/webrtc/modules/video_render/android/video_render_android_surface_view.cc index 831308234b..0f36f40cfb 100644 --- a/webrtc/modules/video_render/android/video_render_android_surface_view.cc +++ b/webrtc/modules/video_render/android/video_render_android_surface_view.cc @@ -412,10 +412,10 @@ int32_t 
AndroidSurfaceViewChannel::Init( int32_t AndroidSurfaceViewChannel::RenderFrame( const uint32_t /*streamId*/, - I420VideoFrame& videoFrame) { + I420VideoFrame* videoFrame) { // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); _renderCritSect.Enter(); - _bufferToRender.SwapFrame(&videoFrame); + _bufferToRender.SwapFrame(videoFrame); _renderCritSect.Leave(); _renderer.ReDraw(); return 0; diff --git a/webrtc/modules/video_render/android/video_render_android_surface_view.h b/webrtc/modules/video_render/android/video_render_android_surface_view.h index ac58bca928..327e4260b3 100644 --- a/webrtc/modules/video_render/android/video_render_android_surface_view.h +++ b/webrtc/modules/video_render/android/video_render_android_surface_view.h @@ -33,7 +33,7 @@ class AndroidSurfaceViewChannel : public AndroidStream { //Implement VideoRenderCallback virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame); + I420VideoFrame* videoFrame); //Implements AndroidStream virtual void DeliverFrame(JNIEnv* jniEnv); diff --git a/webrtc/modules/video_render/external/video_render_external_impl.cc b/webrtc/modules/video_render/external/video_render_external_impl.cc index 04ae205b17..afd68463fe 100644 --- a/webrtc/modules/video_render/external/video_render_external_impl.cc +++ b/webrtc/modules/video_render/external/video_render_external_impl.cc @@ -188,9 +188,8 @@ int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap, } // VideoRenderCallback -int32_t VideoRenderExternalImpl::RenderFrame( - const uint32_t streamId, - I420VideoFrame& videoFrame) +int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId, + I420VideoFrame* videoFrame) { return 0; } diff --git a/webrtc/modules/video_render/external/video_render_external_impl.h b/webrtc/modules/video_render/external/video_render_external_impl.h index e83d842d11..69a2ca5952 100644 --- a/webrtc/modules/video_render/external/video_render_external_impl.h +++ 
b/webrtc/modules/video_render/external/video_render_external_impl.h @@ -115,7 +115,7 @@ public: // VideoRenderCallback virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame); + I420VideoFrame* videoFrame); private: CriticalSectionWrapper& _critSect; diff --git a/webrtc/modules/video_render/include/video_render_defines.h b/webrtc/modules/video_render/include/video_render_defines.h index e5da2bb64e..4203ab2df3 100644 --- a/webrtc/modules/video_render/include/video_render_defines.h +++ b/webrtc/modules/video_render/include/video_render_defines.h @@ -49,7 +49,7 @@ class VideoRenderCallback { public: virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame) = 0; + I420VideoFrame* videoFrame) = 0; protected: virtual ~VideoRenderCallback() diff --git a/webrtc/modules/video_render/incoming_video_stream.cc b/webrtc/modules/video_render/incoming_video_stream.cc index 1fbd886bfc..2426fbca55 100644 --- a/webrtc/modules/video_render/incoming_video_stream.cc +++ b/webrtc/modules/video_render/incoming_video_stream.cc @@ -85,11 +85,11 @@ VideoRenderCallback* IncomingVideoStream::ModuleCallback() { } int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id, - I420VideoFrame& video_frame) { + I420VideoFrame* video_frame) { CriticalSectionScoped csS(&stream_critsect_); WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, "%s for stream %d, render time: %u", __FUNCTION__, stream_id_, - video_frame.render_time_ms()); + video_frame->render_time_ms()); if (!running_) { WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, @@ -110,7 +110,7 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id, // Insert frame. 
CriticalSectionScoped csB(&buffer_critsect_); - if (render_buffers_.AddFrame(video_frame) == 1) + if (render_buffers_.AddFrame(*video_frame) == 1) deliver_buffer_event_.Set(); return 0; @@ -285,13 +285,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() { if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) { // We have not rendered anything and have a start image. temp_frame_.CopyFrame(start_image_); - render_callback_->RenderFrame(stream_id_, temp_frame_); + render_callback_->RenderFrame(stream_id_, &temp_frame_); } else if (!timeout_image_.IsZeroSize() && last_render_time_ms_ + timeout_time_ < TickTime::MillisecondTimestamp()) { // Render a timeout image. temp_frame_.CopyFrame(timeout_image_); - render_callback_->RenderFrame(stream_id_, temp_frame_); + render_callback_->RenderFrame(stream_id_, &temp_frame_); } } @@ -305,13 +305,13 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() { WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, "%s: executing external renderer callback to deliver frame", __FUNCTION__, frame_to_render.render_time_ms()); - external_callback_->RenderFrame(stream_id_, frame_to_render); + external_callback_->RenderFrame(stream_id_, &frame_to_render); } else { if (render_callback_) { WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_, "%s: Render frame, time: ", __FUNCTION__, frame_to_render.render_time_ms()); - render_callback_->RenderFrame(stream_id_, frame_to_render); + render_callback_->RenderFrame(stream_id_, &frame_to_render); } } diff --git a/webrtc/modules/video_render/incoming_video_stream.h b/webrtc/modules/video_render/incoming_video_stream.h index a84d5b731c..390bce94c2 100644 --- a/webrtc/modules/video_render/incoming_video_stream.h +++ b/webrtc/modules/video_render/incoming_video_stream.h @@ -31,7 +31,7 @@ class IncomingVideoStream : public VideoRenderCallback { // Get callback to deliver frames to the module. 
VideoRenderCallback* ModuleCallback(); virtual int32_t RenderFrame(const uint32_t stream_id, - I420VideoFrame& video_frame); + I420VideoFrame* video_frame); // Set callback to the platform dependent code. int32_t SetRenderCallback(VideoRenderCallback* render_callback); diff --git a/webrtc/modules/video_render/ios/video_render_ios_channel.h b/webrtc/modules/video_render/ios/video_render_ios_channel.h index 2d635a700f..a4851eeea6 100644 --- a/webrtc/modules/video_render/ios/video_render_ios_channel.h +++ b/webrtc/modules/video_render/ios/video_render_ios_channel.h @@ -25,7 +25,7 @@ class VideoRenderIosChannel : public VideoRenderCallback { // Implementation of VideoRenderCallback. int32_t RenderFrame(const uint32_t stream_id, - I420VideoFrame& video_frame) override; + I420VideoFrame* video_frame) override; int SetStreamSettings(const float z_order, const float left, diff --git a/webrtc/modules/video_render/ios/video_render_ios_channel.mm b/webrtc/modules/video_render/ios/video_render_ios_channel.mm index 02814b2223..3ec782c021 100644 --- a/webrtc/modules/video_render/ios/video_render_ios_channel.mm +++ b/webrtc/modules/video_render/ios/video_render_ios_channel.mm @@ -24,10 +24,10 @@ VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view) VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; } int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id, - I420VideoFrame& video_frame) { - video_frame.set_render_time_ms(0); + I420VideoFrame* video_frame) { + video_frame->set_render_time_ms(0); - current_frame_->CopyFrame(video_frame); + current_frame_->CopyFrame(*video_frame); buffer_is_updated_ = true; return 0; diff --git a/webrtc/modules/video_render/linux/video_x11_channel.cc b/webrtc/modules/video_render/linux/video_x11_channel.cc index d33dace8d7..77b36520f4 100644 --- a/webrtc/modules/video_render/linux/video_x11_channel.cc +++ b/webrtc/modules/video_render/linux/video_x11_channel.cc @@ -44,15 +44,14 @@ 
VideoX11Channel::~VideoX11Channel() } int32_t VideoX11Channel::RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame) { + I420VideoFrame* videoFrame) { CriticalSectionScoped cs(&_crit); - if (_width != videoFrame.width() || _height - != videoFrame.height()) { - if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) { + if (_width != videoFrame->width() || _height != videoFrame->height()) { + if (FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1) { return -1; } } - return DeliverFrame(videoFrame); + return DeliverFrame(*videoFrame); } int32_t VideoX11Channel::FrameSizeChange(int32_t width, diff --git a/webrtc/modules/video_render/linux/video_x11_channel.h b/webrtc/modules/video_render/linux/video_x11_channel.h index 35c004c6a1..c54c7aabf5 100644 --- a/webrtc/modules/video_render/linux/video_x11_channel.h +++ b/webrtc/modules/video_render/linux/video_x11_channel.h @@ -34,7 +34,7 @@ public: virtual ~VideoX11Channel(); virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame); + I420VideoFrame* videoFrame); int32_t FrameSizeChange(int32_t width, int32_t height, int32_t numberOfStreams); diff --git a/webrtc/modules/video_render/mac/video_render_agl.cc b/webrtc/modules/video_render/mac/video_render_agl.cc index 72b57fae0c..3534221483 100644 --- a/webrtc/modules/video_render/mac/video_render_agl.cc +++ b/webrtc/modules/video_render/mac/video_render_agl.cc @@ -81,7 +81,7 @@ VideoChannelAGL::~VideoChannelAGL() } int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame) { + I420VideoFrame* videoFrame) { _owner->LockAGLCntx(); if (_width != videoFrame.width() || _height != videoFrame.height()) { @@ -94,7 +94,7 @@ int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId, } _owner->UnlockAGLCntx(); - return DeliverFrame(videoFrame); + return DeliverFrame(*videoFrame); } int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/) diff --git 
a/webrtc/modules/video_render/mac/video_render_agl.h b/webrtc/modules/video_render/mac/video_render_agl.h index 9846386197..4f6dd558a4 100644 --- a/webrtc/modules/video_render/mac/video_render_agl.h +++ b/webrtc/modules/video_render/mac/video_render_agl.h @@ -52,7 +52,7 @@ class VideoChannelAGL : public VideoRenderCallback { int IsUpdated(bool& isUpdated); virtual int UpdateStretchSize(int stretchHeight, int stretchWidth); virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame); + I420VideoFrame* videoFrame); private: diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.h b/webrtc/modules/video_render/mac/video_render_nsopengl.h index 3fb438aa44..eda2b0320c 100644 --- a/webrtc/modules/video_render/mac/video_render_nsopengl.h +++ b/webrtc/modules/video_render/mac/video_render_nsopengl.h @@ -66,7 +66,7 @@ public: // ********** new module functions ************ // virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame); + I420VideoFrame* videoFrame); // ********** new module helper functions ***** // int ChangeContext(NSOpenGLContext *nsglContext); diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.mm b/webrtc/modules/video_render/mac/video_render_nsopengl.mm index ca9a79ca7a..f8015ab232 100644 --- a/webrtc/modules/video_render/mac/video_render_nsopengl.mm +++ b/webrtc/modules/video_render/mac/video_render_nsopengl.mm @@ -90,18 +90,17 @@ int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top, } int32_t VideoChannelNSOpenGL::RenderFrame( - const uint32_t /*streamId*/, I420VideoFrame& videoFrame) { + const uint32_t /*streamId*/, I420VideoFrame* videoFrame) { _owner->LockAGLCntx(); - if(_width != videoFrame.width() || - _height != videoFrame.height()) { - if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) { + if(_width != videoFrame->width() || _height != videoFrame->height()) { + if(FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == 
-1) { _owner->UnlockAGLCntx(); return -1; } } - int ret = DeliverFrame(videoFrame); + int ret = DeliverFrame(*videoFrame); _owner->UnlockAGLCntx(); return ret; diff --git a/webrtc/modules/video_render/test/testAPI/testAPI.cc b/webrtc/modules/video_render/test/testAPI/testAPI.cc index 3ec68ddb86..f560eebe7e 100644 --- a/webrtc/modules/video_render/test/testAPI/testAPI.cc +++ b/webrtc/modules/video_render/test/testAPI/testAPI.cc @@ -262,7 +262,7 @@ public: } ; virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame) + I420VideoFrame* videoFrame) { _cnt++; if (_cnt % 100 == 0) @@ -318,7 +318,7 @@ int TestSingleStream(VideoRender* renderModule) { // Render this frame with the specified delay videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() + renderDelayMs); - renderCallback0->RenderFrame(streamId0, videoFrame0); + renderCallback0->RenderFrame(streamId0, &videoFrame0); SleepMs(1000/TEST_FRAME_RATE); } @@ -392,7 +392,7 @@ int TestBitmapText(VideoRender* renderModule) { // Render this frame with the specified delay videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() + renderDelayMs); - renderCallback0->RenderFrame(streamId0, videoFrame0); + renderCallback0->RenderFrame(streamId0, &videoFrame0); SleepMs(1000/TEST_FRAME_RATE); } // Sleep and let all frames be rendered before closing @@ -477,22 +477,22 @@ int TestMultipleStreams(VideoRender* renderModule) { videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() + renderDelayMs); - renderCallback0->RenderFrame(streamId0, videoFrame0); + renderCallback0->RenderFrame(streamId0, &videoFrame0); GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR); videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() + renderDelayMs); - renderCallback1->RenderFrame(streamId1, videoFrame1); + renderCallback1->RenderFrame(streamId1, &videoFrame1); GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR); videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() + 
renderDelayMs); - renderCallback2->RenderFrame(streamId2, videoFrame2); + renderCallback2->RenderFrame(streamId2, &videoFrame2); GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR); videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() + renderDelayMs); - renderCallback3->RenderFrame(streamId3, videoFrame3); + renderCallback3->RenderFrame(streamId3, &videoFrame3); SleepMs(1000/TEST_FRAME_RATE); } @@ -550,7 +550,7 @@ int TestExternalRender(VideoRender* renderModule) { for (int i=0; iRenderFrame(streamId0, videoFrame0); + renderCallback0->RenderFrame(streamId0, &videoFrame0); SleepMs(33); } diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.cc b/webrtc/modules/video_render/windows/video_render_direct3d9.cc index 1e8267a8a5..7c38b52cc0 100644 --- a/webrtc/modules/video_render/windows/video_render_direct3d9.cc +++ b/webrtc/modules/video_render/windows/video_render_direct3d9.cc @@ -143,17 +143,17 @@ int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams) } int32_t D3D9Channel::RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame) + I420VideoFrame* videoFrame) { CriticalSectionScoped cs(_critSect); - if (_width != videoFrame.width() || _height != videoFrame.height()) + if (_width != videoFrame->width() || _height != videoFrame->height()) { - if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) + if (FrameSizeChange(videoFrame->width(), videoFrame->height(), 1) == -1) { return -1; } } - return DeliverFrame(videoFrame); + return DeliverFrame(*videoFrame); } // Called from video engine when a new frame should be rendered. diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.h b/webrtc/modules/video_render/windows/video_render_direct3d9.h index f10e315e22..3e89c36042 100644 --- a/webrtc/modules/video_render/windows/video_render_direct3d9.h +++ b/webrtc/modules/video_render/windows/video_render_direct3d9.h @@ -46,7 +46,7 @@ public: // A new frame is delivered. 
virtual int DeliverFrame(const I420VideoFrame& videoFrame); virtual int32_t RenderFrame(const uint32_t streamId, - I420VideoFrame& videoFrame); + I420VideoFrame* videoFrame); // Called to check if the video frame is updated. int IsUpdated(bool& isUpdated); diff --git a/webrtc/test/fake_decoder.cc b/webrtc/test/fake_decoder.cc index 63316e0dab..234f97caba 100644 --- a/webrtc/test/fake_decoder.cc +++ b/webrtc/test/fake_decoder.cc @@ -39,7 +39,7 @@ int32_t FakeDecoder::Decode(const EncodedImage& input, frame_.set_ntp_time_ms(input.ntp_time_ms_); frame_.set_render_time_ms(render_time_ms); - callback_->Decoded(frame_); + callback_->Decoded(&frame_); return WEBRTC_VIDEO_CODEC_OK; } diff --git a/webrtc/test/testsupport/metrics/video_metrics.cc b/webrtc/test/testsupport/metrics/video_metrics.cc index 0202a71ebe..c56078bb9a 100644 --- a/webrtc/test/testsupport/metrics/video_metrics.cc +++ b/webrtc/test/testsupport/metrics/video_metrics.cc @@ -34,8 +34,8 @@ enum VideoMetricsType { kPSNR, kSSIM, kBoth }; // Calculates metrics for a frame and adds statistics to the result for it. 
void CalculateFrame(VideoMetricsType video_metrics_type, - const I420VideoFrame* ref, - const I420VideoFrame* test, + const I420VideoFrame& ref, + const I420VideoFrame& test, int frame_number, QualityMetricsResult* result) { FrameResult frame_result = {0, 0}; @@ -129,17 +129,17 @@ int CalculateMetrics(VideoMetricsType video_metrics_type, kVideoRotation_0, &test_frame); switch (video_metrics_type) { case kPSNR: - CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number, + CalculateFrame(kPSNR, ref_frame, test_frame, frame_number, psnr_result); break; case kSSIM: - CalculateFrame(kSSIM, &ref_frame, &test_frame, frame_number, + CalculateFrame(kSSIM, ref_frame, test_frame, frame_number, ssim_result); break; case kBoth: - CalculateFrame(kPSNR, &ref_frame, &test_frame, frame_number, + CalculateFrame(kPSNR, ref_frame, test_frame, frame_number, psnr_result); - CalculateFrame(kSSIM, &ref_frame, &test_frame, frame_number, + CalculateFrame(kSSIM, ref_frame, test_frame, frame_number, ssim_result); break; } diff --git a/webrtc/test/vcm_capturer.cc b/webrtc/test/vcm_capturer.cc index a5820bfe11..8615660669 100644 --- a/webrtc/test/vcm_capturer.cc +++ b/webrtc/test/vcm_capturer.cc @@ -87,9 +87,9 @@ void VcmCapturer::Destroy() { VcmCapturer::~VcmCapturer() { Destroy(); } void VcmCapturer::OnIncomingCapturedFrame(const int32_t id, - I420VideoFrame& frame) { + I420VideoFrame* frame) { if (started_) - input_->SwapFrame(&frame); + input_->SwapFrame(frame); } void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) { diff --git a/webrtc/test/vcm_capturer.h b/webrtc/test/vcm_capturer.h index 1cb5b4e006..2a89d10b7c 100644 --- a/webrtc/test/vcm_capturer.h +++ b/webrtc/test/vcm_capturer.h @@ -28,7 +28,7 @@ class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback { void Stop() override; void OnIncomingCapturedFrame(const int32_t id, - I420VideoFrame& frame) override; // NOLINT + I420VideoFrame* frame) override; // NOLINT void 
OnCaptureDelayChanged(const int32_t id, const int32_t delay) override; private: diff --git a/webrtc/video/full_stack.cc b/webrtc/video/full_stack.cc index e728a00355..f9748e5ae8 100644 --- a/webrtc/video/full_stack.cc +++ b/webrtc/video/full_stack.cc @@ -201,7 +201,7 @@ class VideoAnalyzer : public PacketReceiver, CriticalSectionScoped lock(crit_.get()); while (frames_.front()->timestamp() < send_timestamp) { AddFrameComparison( - frames_.front(), &last_rendered_frame_, true, render_time_ms); + *frames_.front(), last_rendered_frame_, true, render_time_ms); frame_pool_.push_back(frames_.front()); frames_.pop_front(); } @@ -212,7 +212,7 @@ class VideoAnalyzer : public PacketReceiver, EXPECT_EQ(reference_frame->timestamp(), send_timestamp); assert(reference_frame->timestamp() == send_timestamp); - AddFrameComparison(reference_frame, &video_frame, false, render_time_ms); + AddFrameComparison(*reference_frame, video_frame, false, render_time_ms); frame_pool_.push_back(reference_frame); last_rendered_frame_.CopyFrame(video_frame); @@ -253,8 +253,8 @@ class VideoAnalyzer : public PacketReceiver, FrameComparison() : dropped(false), send_time_ms(0), recv_time_ms(0), render_time_ms(0) {} - FrameComparison(const I420VideoFrame* reference, - const I420VideoFrame* render, + FrameComparison(const I420VideoFrame& reference, + const I420VideoFrame& render, bool dropped, int64_t send_time_ms, int64_t recv_time_ms, @@ -263,8 +263,8 @@ class VideoAnalyzer : public PacketReceiver, send_time_ms(send_time_ms), recv_time_ms(recv_time_ms), render_time_ms(render_time_ms) { - this->reference.CopyFrame(*reference); - this->render.CopyFrame(*render); + this->reference.CopyFrame(reference); + this->render.CopyFrame(render); } FrameComparison(const FrameComparison& compare) @@ -295,15 +295,15 @@ class VideoAnalyzer : public PacketReceiver, int64_t render_time_ms; }; - void AddFrameComparison(const I420VideoFrame* reference, - const I420VideoFrame* render, + void AddFrameComparison(const 
I420VideoFrame& reference, + const I420VideoFrame& render, bool dropped, int64_t render_time_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_) { - int64_t send_time_ms = send_times_[reference->timestamp()]; - send_times_.erase(reference->timestamp()); - int64_t recv_time_ms = recv_times_[reference->timestamp()]; - recv_times_.erase(reference->timestamp()); + int64_t send_time_ms = send_times_[reference.timestamp()]; + send_times_.erase(reference.timestamp()); + int64_t recv_time_ms = recv_times_[reference.timestamp()]; + recv_times_.erase(reference.timestamp()); CriticalSectionScoped crit(comparison_lock_.get()); comparisons_.push_back(FrameComparison(reference, @@ -405,8 +405,8 @@ class VideoAnalyzer : public PacketReceiver, void PerformFrameComparison(const FrameComparison& comparison) { // Perform expensive psnr and ssim calculations while not holding lock. - double psnr = I420PSNR(&comparison.reference, &comparison.render); - double ssim = I420SSIM(&comparison.reference, &comparison.render); + double psnr = I420PSNR(comparison.reference, comparison.render); + double ssim = I420SSIM(comparison.reference, comparison.render); CriticalSectionScoped crit(comparison_lock_.get()); psnr_.AddSample(psnr); diff --git a/webrtc/video_decoder.h b/webrtc/video_decoder.h index 941c0ac197..b70c4e492e 100644 --- a/webrtc/video_decoder.h +++ b/webrtc/video_decoder.h @@ -28,7 +28,7 @@ class DecodedImageCallback { public: virtual ~DecodedImageCallback() {} - virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0; + virtual int32_t Decoded(I420VideoFrame* decodedImage) = 0; virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) { return -1; } diff --git a/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc b/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc index e9cefcf84d..546e3242c6 100644 --- a/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc +++ b/webrtc/video_engine/test/libvietest/testbed/tb_I420_codec.cc @@ -246,7 +246,7 @@ int32_t 
TbI420Decoder::Decode( _decodedImage.set_timestamp(inputImage._timeStamp); - _decodeCompleteCallback->Decoded(_decodedImage); + _decodeCompleteCallback->Decoded(&_decodedImage); return WEBRTC_VIDEO_CODEC_OK; } diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc index 079e20ac02..2fb13de67a 100644 --- a/webrtc/video_engine/vie_capturer.cc +++ b/webrtc/video_engine/vie_capturer.cc @@ -345,21 +345,21 @@ void ViECapturer::SwapFrame(I420VideoFrame* frame) { } void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id, - I420VideoFrame& video_frame) { + I420VideoFrame* video_frame) { CriticalSectionScoped cs(capture_cs_.get()); // Make sure we render this frame earlier since we know the render time set // is slightly off since it's being set when the frame has been received from // the camera, and not when the camera actually captured the frame. - video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay()); + video_frame->set_render_time_ms(video_frame->render_time_ms() - FrameDelay()); - overuse_detector_->FrameCaptured(video_frame.width(), - video_frame.height(), - video_frame.render_time_ms()); + overuse_detector_->FrameCaptured(video_frame->width(), + video_frame->height(), + video_frame->render_time_ms()); - TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(), - "render_time", video_frame.render_time_ms()); + TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame->render_time_ms(), + "render_time", video_frame->render_time_ms()); - captured_frame_ = video_frame; + captured_frame_ = *video_frame; capture_event_.Set(); } diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h index 681b7e09a5..4d43f4cc5f 100644 --- a/webrtc/video_engine/vie_capturer.h +++ b/webrtc/video_engine/vie_capturer.h @@ -123,7 +123,7 @@ class ViECapturer // Implements VideoCaptureDataCallback. 
virtual void OnIncomingCapturedFrame(const int32_t id, - I420VideoFrame& video_frame); + I420VideoFrame* video_frame); virtual void OnCaptureDelayChanged(const int32_t id, const int32_t delay); diff --git a/webrtc/video_engine/vie_capturer_unittest.cc b/webrtc/video_engine/vie_capturer_unittest.cc index b0a7172930..9ed3d0226b 100644 --- a/webrtc/video_engine/vie_capturer_unittest.cc +++ b/webrtc/video_engine/vie_capturer_unittest.cc @@ -91,7 +91,7 @@ class ViECapturerTest : public ::testing::Test { } void AddInputFrame(I420VideoFrame* frame) { - data_callback_->OnIncomingCapturedFrame(0, *frame); + data_callback_->OnIncomingCapturedFrame(0, frame); } void AddOutputFrame(const I420VideoFrame* frame) { diff --git a/webrtc/video_engine/vie_channel.cc b/webrtc/video_engine/vie_channel.cc index bbf5d535ad..a4f8afd951 100644 --- a/webrtc/video_engine/vie_channel.cc +++ b/webrtc/video_engine/vie_channel.cc @@ -1620,7 +1620,7 @@ CallStatsObserver* ViEChannel::GetStatsObserver() { // held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring // the same lock in the path of decode callback can deadlock. int32_t ViEChannel::FrameToRender( - I420VideoFrame& video_frame) { // NOLINT + I420VideoFrame* video_frame) { // NOLINT CriticalSectionScoped cs(callback_cs_.get()); if (decoder_reset_) { @@ -1628,30 +1628,30 @@ int32_t ViEChannel::FrameToRender( if (codec_observer_) { // The codec set by RegisterReceiveCodec might not be the size we're // actually decoding. - receive_codec_.width = static_cast(video_frame.width()); - receive_codec_.height = static_cast(video_frame.height()); + receive_codec_.width = static_cast(video_frame->width()); + receive_codec_.height = static_cast(video_frame->height()); codec_observer_->IncomingCodecChanged(channel_id_, receive_codec_); } decoder_reset_ = false; } // Post processing is not supported if the frame is backed by a texture. 
- if (video_frame.native_handle() == NULL) { + if (video_frame->native_handle() == NULL) { if (pre_render_callback_ != NULL) - pre_render_callback_->FrameCallback(&video_frame); + pre_render_callback_->FrameCallback(video_frame); if (effect_filter_) { size_t length = - CalcBufferSize(kI420, video_frame.width(), video_frame.height()); + CalcBufferSize(kI420, video_frame->width(), video_frame->height()); rtc::scoped_ptr video_buffer(new uint8_t[length]); - ExtractBuffer(video_frame, length, video_buffer.get()); + ExtractBuffer(*video_frame, length, video_buffer.get()); effect_filter_->Transform(length, video_buffer.get(), - video_frame.ntp_time_ms(), - video_frame.timestamp(), - video_frame.width(), - video_frame.height()); + video_frame->ntp_time_ms(), + video_frame->timestamp(), + video_frame->width(), + video_frame->height()); } if (color_enhancement_) { - VideoProcessingModule::ColorEnhancement(&video_frame); + VideoProcessingModule::ColorEnhancement(video_frame); } } @@ -1662,7 +1662,7 @@ int32_t ViEChannel::FrameToRender( no_of_csrcs = 1; } std::vector csrcs(arr_ofCSRC, arr_ofCSRC + no_of_csrcs); - DeliverFrame(&video_frame, csrcs); + DeliverFrame(video_frame, csrcs); return 0; } diff --git a/webrtc/video_engine/vie_channel.h b/webrtc/video_engine/vie_channel.h index 1bf95f81ea..de363870c6 100644 --- a/webrtc/video_engine/vie_channel.h +++ b/webrtc/video_engine/vie_channel.h @@ -313,7 +313,7 @@ class ViEChannel CallStatsObserver* GetStatsObserver(); // Implements VCMReceiveCallback. - virtual int32_t FrameToRender(I420VideoFrame& video_frame); // NOLINT + virtual int32_t FrameToRender(I420VideoFrame* video_frame); // NOLINT // Implements VCMReceiveCallback. 
virtual int32_t ReceivedDecodedReferenceFrame( diff --git a/webrtc/video_engine/vie_renderer.cc b/webrtc/video_engine/vie_renderer.cc index a2c90cb38c..ccb2d40024 100644 --- a/webrtc/video_engine/vie_renderer.cc +++ b/webrtc/video_engine/vie_renderer.cc @@ -124,7 +124,7 @@ int32_t ViERenderer::Init(const uint32_t z_order, void ViERenderer::DeliverFrame(int id, I420VideoFrame* video_frame, const std::vector& csrcs) { - render_callback_->RenderFrame(render_id_, *video_frame); + render_callback_->RenderFrame(render_id_, video_frame); } void ViERenderer::DelayChanged(int id, int frame_delay) {} @@ -156,15 +156,15 @@ int ViEExternalRendererImpl::SetViEExternalRenderer( } int32_t ViEExternalRendererImpl::RenderFrame(const uint32_t stream_id, - I420VideoFrame& video_frame) { + I420VideoFrame* video_frame) { if (external_renderer_format_ != kVideoI420) return ConvertAndRenderFrame(stream_id, video_frame); // Fast path for I420 without frame copy. NotifyFrameSizeChange(stream_id, video_frame); - if (video_frame.native_handle() == NULL || + if (video_frame->native_handle() == NULL || external_renderer_->IsTextureSupported()) { - external_renderer_->DeliverI420Frame(video_frame); + external_renderer_->DeliverI420Frame(*video_frame); } else { // TODO(wuchengli): readback the pixels and deliver the frame. 
} @@ -173,17 +173,17 @@ int32_t ViEExternalRendererImpl::RenderFrame(const uint32_t stream_id, int32_t ViEExternalRendererImpl::ConvertAndRenderFrame( uint32_t stream_id, - I420VideoFrame& video_frame) { - if (video_frame.native_handle() != NULL) { + I420VideoFrame* video_frame) { + if (video_frame->native_handle() != NULL) { NotifyFrameSizeChange(stream_id, video_frame); if (external_renderer_->IsTextureSupported()) { external_renderer_->DeliverFrame(NULL, 0, - video_frame.timestamp(), - video_frame.ntp_time_ms(), - video_frame.render_time_ms(), - video_frame.native_handle()); + video_frame->timestamp(), + video_frame->ntp_time_ms(), + video_frame->render_time_ms(), + video_frame->native_handle()); } else { // TODO(wuchengli): readback the pixels and deliver the frame. } @@ -193,8 +193,8 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame( // Convert to requested format. VideoType type = RawVideoTypeToCommonVideoVideoType(external_renderer_format_); - size_t buffer_size = CalcBufferSize(type, video_frame.width(), - video_frame.height()); + size_t buffer_size = CalcBufferSize(type, video_frame->width(), + video_frame->height()); if (buffer_size == 0) { // Unsupported video format. 
assert(false); @@ -212,7 +212,7 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame( case kVideoRGB565: case kVideoARGB4444: case kVideoARGB1555: - if (ConvertFromI420(video_frame, type, 0, out_frame) < 0) + if (ConvertFromI420(*video_frame, type, 0, out_frame) < 0) return -1; break; case kVideoIYUV: @@ -229,9 +229,9 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame( if (out_frame) { external_renderer_->DeliverFrame(out_frame, converted_frame_.size(), - video_frame.timestamp(), - video_frame.ntp_time_ms(), - video_frame.render_time_ms(), + video_frame->timestamp(), + video_frame->ntp_time_ms(), + video_frame->render_time_ms(), NULL); } return 0; @@ -239,11 +239,11 @@ int32_t ViEExternalRendererImpl::ConvertAndRenderFrame( void ViEExternalRendererImpl::NotifyFrameSizeChange( const uint32_t stream_id, - I420VideoFrame& video_frame) { - if (external_renderer_width_ != video_frame.width() || - external_renderer_height_ != video_frame.height()) { - external_renderer_width_ = video_frame.width(); - external_renderer_height_ = video_frame.height(); + I420VideoFrame* video_frame) { + if (external_renderer_width_ != video_frame->width() || + external_renderer_height_ != video_frame->height()) { + external_renderer_width_ = video_frame->width(); + external_renderer_height_ = video_frame->height(); external_renderer_->FrameSizeChange( external_renderer_width_, external_renderer_height_, stream_id); } diff --git a/webrtc/video_engine/vie_renderer.h b/webrtc/video_engine/vie_renderer.h index fcd80ab9a4..27ad3782f7 100644 --- a/webrtc/video_engine/vie_renderer.h +++ b/webrtc/video_engine/vie_renderer.h @@ -33,13 +33,13 @@ class ViEExternalRendererImpl : public VideoRenderCallback { // Implements VideoRenderCallback. 
virtual int32_t RenderFrame(const uint32_t stream_id, - I420VideoFrame& video_frame); + I420VideoFrame* video_frame); private: void NotifyFrameSizeChange(const uint32_t stream_id, - I420VideoFrame& video_frame); + I420VideoFrame* video_frame); int32_t ConvertAndRenderFrame(uint32_t stream_id, - I420VideoFrame& video_frame); + I420VideoFrame* video_frame); ExternalRenderer* external_renderer_; RawVideoType external_renderer_format_; int external_renderer_width_;