diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
index 72a592e831..109c29489a 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -122,6 +122,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     RendererCallbacks callbacks = new RendererCallbacks();
     track.addRenderer(new VideoRenderer(callbacks));
     assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    track.dispose();
+    source.dispose();
   }
 
   @Override
@@ -195,6 +197,32 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     RendererCallbacks callbacks = new RendererCallbacks();
     track.addRenderer(new VideoRenderer(callbacks));
     assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    track.dispose();
+    source.dispose();
+  }
+
+  @SmallTest
+  // This tests that the VideoSource that the VideoCapturer is connected to can
+  // be stopped and restarted. It tests both the Java and the C++ layer.
+  public void testStopRestartVideoSource() throws Exception {
+    PeerConnectionFactory factory = new PeerConnectionFactory();
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("");
+    VideoSource source =
+        factory.createVideoSource(capturer, new MediaConstraints());
+    VideoTrack track = factory.createVideoTrack("dummy", source);
+    RendererCallbacks callbacks = new RendererCallbacks();
+    track.addRenderer(new VideoRenderer(callbacks));
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    assertEquals(MediaSource.State.LIVE, source.state());
+
+    source.stop();
+    assertEquals(MediaSource.State.ENDED, source.state());
+
+    source.restart();
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    assertEquals(MediaSource.State.LIVE, source.state());
+    track.dispose();
+    source.dispose();
   }
 
   @SmallTest
@@ -218,5 +246,6 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
       assertEquals((format.width*format.height*3)/2, observer.frameSize());
       assertTrue(capturer.stopCapture());
     }
+    capturer.dispose();
   }
 }
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
index fa9d925509..3264421db7 100644
--- a/talk/app/webrtc/androidvideocapturer.cc
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -100,7 +100,8 @@ AndroidVideoCapturer::AndroidVideoCapturer(
     : running_(false),
       delegate_(delegate.Pass()),
       worker_thread_(NULL),
-      frame_factory_(NULL) {
+      frame_factory_(NULL),
+      current_state_(cricket::CS_STOPPED){
   std::string json_string = delegate_->GetSupportedFormats();
   LOG(LS_INFO) << json_string;
 
@@ -132,7 +133,7 @@ AndroidVideoCapturer::~AndroidVideoCapturer() {
 cricket::CaptureState AndroidVideoCapturer::Start(
     const cricket::VideoFormat& capture_format) {
   DCHECK(!running_);
-  DCHECK(worker_thread_ == nullptr);
+  DCHECK(worker_thread_ == nullptr || worker_thread_ == rtc::Thread::Current());
   // TODO(perkj): Better way to get a handle to the worker thread?
   worker_thread_ = rtc::Thread::Current();
 
@@ -146,7 +147,9 @@ cricket::CaptureState AndroidVideoCapturer::Start(
   delegate_->Start(
       capture_format.width, capture_format.height,
       cricket::VideoFormat::IntervalToFps(capture_format.interval), this);
-  return cricket::CS_STARTING;
+  SetCaptureFormat(&capture_format);
+  current_state_ = cricket::CS_STARTING;
+  return current_state_;
 }
 
 void AndroidVideoCapturer::Stop() {
@@ -157,7 +160,8 @@ void AndroidVideoCapturer::Stop() {
 
   SetCaptureFormat(NULL);
   delegate_->Stop();
-  SignalStateChange(this, cricket::CS_STOPPED);
+  current_state_ = cricket::CS_STOPPED;
+  SignalStateChange(this, current_state_);
 }
 
 bool AndroidVideoCapturer::IsRunning() {
@@ -180,7 +184,14 @@ void AndroidVideoCapturer::OnCapturerStarted_w(bool success) {
   DCHECK(worker_thread_->IsCurrent());
   cricket::CaptureState new_state =
       success ? cricket::CS_RUNNING : cricket::CS_FAILED;
-  SetCaptureState(new_state);
+  if (new_state == current_state_)
+    return;
+  current_state_ = new_state;
+
+  // TODO(perkj): SetCaptureState can not be used since it posts to |thread_|.
+  // But |thread_| is currently just the thread that happened to create the
+  // cricket::VideoCapturer.
+  SignalStateChange(this, new_state);
 }
 
 void AndroidVideoCapturer::OnIncomingFrame(signed char* videoFrame,
diff --git a/talk/app/webrtc/androidvideocapturer.h b/talk/app/webrtc/androidvideocapturer.h
index efbe0593e6..ef72cd6f79 100644
--- a/talk/app/webrtc/androidvideocapturer.h
+++ b/talk/app/webrtc/androidvideocapturer.h
@@ -102,6 +102,8 @@ class AndroidVideoCapturer : public cricket::VideoCapturer {
 
   class FrameFactory;
   FrameFactory* frame_factory_;  // Owned by cricket::VideoCapturer.
+
+  cricket::CaptureState current_state_;
 };
 
 }  // namespace webrtc
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index bddd4171aa..1851c391ae 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -2950,28 +2950,13 @@ JOW(void, VideoRenderer_nativeCopyPlane)(
   }
 }
 
-JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
-  cricket::VideoCapturer* capturer =
-      reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
-  scoped_ptr<cricket::VideoFormatPod> format(
-      new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
-  capturer->Stop();
-  return jlongFromPointer(format.release());
+JOW(void, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+  reinterpret_cast<VideoSourceInterface*>(j_p)->Stop();
 }
 
 JOW(void, VideoSource_restart)(
     JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
-  CHECK(j_p_source);
-  CHECK(j_p_format);
-  scoped_ptr<cricket::VideoFormatPod> format(
-      reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
-  reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
-      StartCapturing(cricket::VideoFormat(*format));
-}
-
-JOW(void, VideoSource_freeNativeVideoFormat)(
-    JNIEnv* jni, jclass, jlong j_p) {
-  delete reinterpret_cast<cricket::VideoFormatPod*>(j_p);
+  reinterpret_cast<VideoSourceInterface*>(j_p_source)->Restart();
 }
 
 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java
index f35b62944e..2650e7137c 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java
@@ -422,8 +422,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
       @Override public void run() {
         stopCaptureOnCameraThread(result);
       }
-    });
+    });
     boolean status = exchange(result, false);  // |false| is a dummy value here.
+    Log.d(TAG, "stopCapture wait");
     try {
       cameraThread.join();
     } catch (InterruptedException e) {
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoSource.java b/talk/app/webrtc/java/src/org/webrtc/VideoSource.java
index 9df236cebe..7151748aa9 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoSource.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoSource.java
@@ -36,7 +36,6 @@ package org.webrtc;
  * its output to the encoder) can be too high to bear.
  */
 public class VideoSource extends MediaSource {
-  private long nativeVideoFormatAtStop;
 
   public VideoSource(long nativeSource) {
     super(nativeSource);
@@ -44,30 +43,21 @@ public class VideoSource extends MediaSource {
 
   // Stop capture feeding this source.
   public void stop() {
-    nativeVideoFormatAtStop = stop(nativeSource);
+    stop(nativeSource);
   }
 
   // Restart capture feeding this source. stop() must have been called since
   // the last call to restart() (if any).  Note that this isn't "start()";
   // sources are started by default at birth.
   public void restart() {
-    restart(nativeSource, nativeVideoFormatAtStop);
-    nativeVideoFormatAtStop = 0;
+    restart(nativeSource);
   }
 
   @Override
   public void dispose() {
-    if (nativeVideoFormatAtStop != 0) {
-      freeNativeVideoFormat(nativeVideoFormatAtStop);
-      nativeVideoFormatAtStop = 0;
-    }
     super.dispose();
   }
 
-  // This stop() returns an owned C++ VideoFormat pointer for use in restart()
-  // and dispose().
-  private static native long stop(long nativeSource);
-  private static native void restart(
-      long nativeSource, long nativeVideoFormatAtStop);
-  private static native void freeNativeVideoFormat(long nativeVideoFormat);
+  private static native void stop(long nativeSource);
+  private static native void restart(long nativeSource);
 }
diff --git a/talk/app/webrtc/mediastreamhandler_unittest.cc b/talk/app/webrtc/mediastreamhandler_unittest.cc
index d41a933ed2..62466437f4 100644
--- a/talk/app/webrtc/mediastreamhandler_unittest.cc
+++ b/talk/app/webrtc/mediastreamhandler_unittest.cc
@@ -85,6 +85,8 @@ class FakeVideoSource : public Notifier<VideoSourceInterface> {
   virtual cricket::VideoCapturer* GetVideoCapturer() {
     return &fake_capturer_;
   }
+  virtual void Stop() {}
+  virtual void Restart() {}
   virtual void AddSink(cricket::VideoRenderer* output) {}
   virtual void RemoveSink(cricket::VideoRenderer* output) {}
   virtual SourceState state() const { return state_; }
diff --git a/talk/app/webrtc/videosource.cc b/talk/app/webrtc/videosource.cc
index ea670f5c2b..44f3e225f9 100644
--- a/talk/app/webrtc/videosource.cc
+++ b/talk/app/webrtc/videosource.cc
@@ -432,11 +432,27 @@ cricket::VideoRenderer* VideoSource::FrameInput() {
   return frame_input_.get();
 }
 
+void VideoSource::Stop() {
+  channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
+}
+
+void VideoSource::Restart() {
+  if (!channel_manager_->StartVideoCapture(video_capturer_.get(), format_)) {
+    SetState(kEnded);
+    return;
+  }
+  for(cricket::VideoRenderer* sink : sinks_) {
+    channel_manager_->AddVideoRenderer(video_capturer_.get(), sink);
+  }
+}
+
 void VideoSource::AddSink(cricket::VideoRenderer* output) {
+  sinks_.push_back(output);
   channel_manager_->AddVideoRenderer(video_capturer_.get(), output);
 }
 
 void VideoSource::RemoveSink(cricket::VideoRenderer* output) {
+  sinks_.remove(output);
   channel_manager_->RemoveVideoRenderer(video_capturer_.get(), output);
 }
diff --git a/talk/app/webrtc/videosource.h b/talk/app/webrtc/videosource.h
index d845ebfce6..8253cbac18 100644
--- a/talk/app/webrtc/videosource.h
+++ b/talk/app/webrtc/videosource.h
@@ -28,6 +28,8 @@
 #ifndef TALK_APP_WEBRTC_VIDEOSOURCE_H_
 #define TALK_APP_WEBRTC_VIDEOSOURCE_H_
 
+#include <list>
+
 #include "talk/app/webrtc/mediastreaminterface.h"
 #include "talk/app/webrtc/notifier.h"
 #include "talk/app/webrtc/videosourceinterface.h"
@@ -73,6 +75,10 @@ class VideoSource : public Notifier<VideoSourceInterface>,
   virtual cricket::VideoCapturer* GetVideoCapturer() {
     return video_capturer_.get();
   }
+
+  void Stop() override;
+  void Restart() override;
+
   // |output| will be served video frames as long as the underlying capturer
   // is running video frames.
   virtual void AddSink(cricket::VideoRenderer* output);
@@ -93,6 +99,8 @@ class VideoSource : public Notifier<VideoSourceInterface>,
   rtc::scoped_ptr<cricket::VideoCapturer> video_capturer_;
   rtc::scoped_ptr<cricket::VideoRenderer> frame_input_;
 
+  std::list<cricket::VideoRenderer*> sinks_;
+
   cricket::VideoFormat format_;
   cricket::VideoOptions options_;
   SourceState state_;
diff --git a/talk/app/webrtc/videosource_unittest.cc b/talk/app/webrtc/videosource_unittest.cc
index e83e50099c..ac072bd905 100644
--- a/talk/app/webrtc/videosource_unittest.cc
+++ b/talk/app/webrtc/videosource_unittest.cc
@@ -169,7 +169,7 @@ class VideoSourceTest : public testing::Test {
 // Test that a VideoSource transition to kLive state when the capture
 // device have started and kEnded if it is stopped.
 // It also test that an output can receive video frames.
-TEST_F(VideoSourceTest, StartStop) {
+TEST_F(VideoSourceTest, CapturerStartStop) {
   // Initialize without constraints.
   CreateVideoSource();
   EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
@@ -183,6 +183,30 @@
                  kMaxWaitMs);
 }
 
+// Test that a VideoSource can be stopped and restarted.
+TEST_F(VideoSourceTest, StopRestart) {
+  // Initialize without constraints.
+  CreateVideoSource();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  ASSERT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+
+  source_->Stop();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+
+  source_->Restart();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  ASSERT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(2, renderer_.num_rendered_frames());
+
+  source_->Stop();
+}
+
 // Test start stop with a remote VideoSource - the video source that has a
 // RemoteVideoCapturer and takes video frames from FrameInput.
 TEST_F(VideoSourceTest, StartStopRemote) {
diff --git a/talk/app/webrtc/videosourceinterface.h b/talk/app/webrtc/videosourceinterface.h
index 6f176aa3ef..a90e3d5a48 100644
--- a/talk/app/webrtc/videosourceinterface.h
+++ b/talk/app/webrtc/videosourceinterface.h
@@ -43,6 +43,11 @@ class VideoSourceInterface : public MediaSourceInterface {
   // This can be used for receiving frames and state notifications.
   // But it should not be used for starting or stopping capturing.
   virtual cricket::VideoCapturer* GetVideoCapturer() = 0;
+
+  // Stop the video capturer.
+  virtual void Stop() = 0;
+  virtual void Restart() = 0;
+
   // Adds |output| to the source to receive frames.
   virtual void AddSink(cricket::VideoRenderer* output) = 0;
   virtual void RemoveSink(cricket::VideoRenderer* output) = 0;
diff --git a/talk/app/webrtc/videosourceproxy.h b/talk/app/webrtc/videosourceproxy.h
index 3f6513b320..677fa9cf0f 100644
--- a/talk/app/webrtc/videosourceproxy.h
+++ b/talk/app/webrtc/videosourceproxy.h
@@ -39,6 +39,8 @@ namespace webrtc {
 BEGIN_PROXY_MAP(VideoSource)
   PROXY_CONSTMETHOD0(SourceState, state)
  PROXY_METHOD0(cricket::VideoCapturer*, GetVideoCapturer)
+  PROXY_METHOD0(void, Stop)
+  PROXY_METHOD0(void, Restart)
   PROXY_METHOD1(void, AddSink, cricket::VideoRenderer*)
   PROXY_METHOD1(void, RemoveSink, cricket::VideoRenderer*)
   PROXY_CONSTMETHOD0(const cricket::VideoOptions*, options)
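
Usage sketch (not part of the patch): the snippet below mirrors testStopRestartVideoSource
above and shows how the new Java VideoSource.stop()/restart() pair is intended to be driven.
The wrapper class, its run() method and the "video0" track id are illustrative only, and it
assumes the WebRTC native library has been loaded and the PeerConnectionFactory can be
created, as in the Android instrumentation tests in this patch.

import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.VideoCapturerAndroid;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

public class VideoSourceStopRestartExample {
  public static void run() {
    PeerConnectionFactory factory = new PeerConnectionFactory();
    // As in the test above, "" lets VideoCapturerAndroid pick a default camera.
    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("");
    VideoSource source =
        factory.createVideoSource(capturer, new MediaConstraints());
    VideoTrack track = factory.createVideoTrack("video0", source);

    // stop() halts capture but keeps the source and its sinks alive. With this
    // patch the C++ VideoSource remembers the capture format itself, so no
    // native VideoFormat handle is returned to Java any more.
    source.stop();      // source.state() becomes ENDED.

    // restart() resumes capture with the remembered format and re-adds the
    // sinks that were registered before the stop.
    source.restart();   // source.state() returns to LIVE once frames flow.

    // Tear down when finished; dispose() no longer frees a native VideoFormat.
    track.dispose();
    source.dispose();
  }
}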