Revert of Removed unused cricket::VideoCapturer methods (patchset #2 id:30001 of https://codereview.webrtc.org/1733673002/ )

Reason for revert:
Breaks remoting::protocol::WebrtcVideoCapturerAdapter::Pause.

See https://build.chromium.org/p/chromium.webrtc.fyi/builders/Win%20Builder/builds/3689/steps/compile/logs/stdio

Original issue's description:
> Removed unused cricket::VideoCapturer methods:
>
> void UpdateAspectRatio(int ratio_w, int ratio_h);
> void ClearAspectRatio();
> bool Pause(bool paused);
> bool Restart(const VideoFormat& capture_format);
> bool MuteToBlackThenPause(bool muted);
> bool IsMuted() const;
> void set_square_pixel_aspect_ratio(bool square_pixel_aspect_ratio);
> bool square_pixel_aspect_ratio();
>
> This cl also removes the use of messages and posting of state changes.
> Furthermore, a thread checker is added to make sure methods are called on only one thread. Construction can happen on a separate thread.
> It does not add restrictions on what thread frames are delivered on, though.
>
> There are more features in VideoCapturer::OnFrame related to screen share in ARGB that probably can be cleaned up in a follow-up cl.
>
> BUG=webrtc:5426
>
> Committed: https://crrev.com/e9c0cdff2dad2553b6ff6820c0c7429cb2854861
> Cr-Commit-Position: refs/heads/master@{#11773}

TBR=magjed@webrtc.org,pthatcher@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5426

Review URL: https://codereview.webrtc.org/1740963002

Cr-Commit-Position: refs/heads/master@{#11777}
This commit is contained in:
perkj 2016-02-26 02:54:38 -08:00 committed by Commit bot
parent 806706875d
commit 74622e0613
14 changed files with 449 additions and 43 deletions

View File

@ -152,7 +152,7 @@ void AndroidVideoCapturer::Stop() {
delegate_->Stop();
current_state_ = cricket::CS_STOPPED;
SetCaptureState(current_state_);
SignalStateChange(this, current_state_);
}
bool AndroidVideoCapturer::IsRunning() {
@ -173,7 +173,11 @@ void AndroidVideoCapturer::OnCapturerStarted(bool success) {
if (new_state == current_state_)
return;
current_state_ = new_state;
SetCaptureState(new_state);
// TODO(perkj): SetCaptureState can not be used since it posts to |thread_|.
// But |thread_ | is currently just the thread that happened to create the
// cricket::VideoCapturer.
SignalStateChange(this, new_state);
}
void AndroidVideoCapturer::OnIncomingFrame(

View File

@ -19,8 +19,7 @@
namespace webrtc {
class FakePeriodicVideoCapturer : public cricket::FakeVideoCapturer,
public rtc::MessageHandler {
class FakePeriodicVideoCapturer : public cricket::FakeVideoCapturer {
public:
FakePeriodicVideoCapturer() {
std::vector<cricket::VideoFormat> formats;
@ -56,6 +55,8 @@ class FakePeriodicVideoCapturer : public cricket::FakeVideoCapturer,
GetCaptureFormat()->interval / rtc::kNumNanosecsPerMillisec),
this, MSG_CREATEFRAME);
}
} else {
FakeVideoCapturer::OnMessage(msg);
}
}

View File

@ -55,6 +55,7 @@ GetReadyState(cricket::CaptureState state) {
case cricket::CS_RUNNING:
return MediaSourceInterface::kLive;
case cricket::CS_FAILED:
case cricket::CS_NO_DEVICE:
case cricket::CS_STOPPED:
return MediaSourceInterface::kEnded;
case cricket::CS_PAUSED:
@ -402,7 +403,7 @@ void VideoSource::RemoveSink(
// This signal is triggered for all video capturers. Not only the one we are
// interested in.
void VideoSource::OnStateChange(cricket::VideoCapturer* capturer,
cricket::CaptureState capture_state) {
cricket::CaptureState capture_state) {
if (capturer == video_capturer_.get()) {
SetState(GetReadyState(capture_state));
}

View File

@ -198,6 +198,62 @@ bool CaptureManager::StopVideoCapture(VideoCapturer* video_capturer,
return true;
}
// Switches |video_capturer| from |previous_format| to |desired_format|.
// The new format is started BEFORE the old one is stopped so the capturer's
// start count never drops to zero and the device stays open across the
// switch. Returns false (after rolling back the extra start request) if
// either the start or the stop step fails.
// Must be called on the thread that owns |thread_checker_|.
bool CaptureManager::RestartVideoCapture(
VideoCapturer* video_capturer,
const VideoFormat& previous_format,
const VideoFormat& desired_format,
CaptureManager::RestartOptions options) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (!IsCapturerRegistered(video_capturer)) {
LOG(LS_ERROR) << "RestartVideoCapture: video_capturer is not registered.";
return false;
}
// Start the new format first. This keeps the capturer running.
if (!StartVideoCapture(video_capturer, desired_format)) {
LOG(LS_ERROR) << "RestartVideoCapture: unable to start video capture with "
"desired_format=" << desired_format.ToString();
return false;
}
// Stop the old format.
if (!StopVideoCapture(video_capturer, previous_format)) {
LOG(LS_ERROR) << "RestartVideoCapture: unable to stop video capture with "
"previous_format=" << previous_format.ToString();
// Undo the start request we just performed.
StopVideoCapture(video_capturer, desired_format);
return false;
}
switch (options) {
case kForceRestart: {
VideoCapturerState* capture_state = GetCaptureState(video_capturer);
ASSERT(capture_state && capture_state->start_count() > 0);
// Try a restart using the new best resolution.
VideoFormat highest_asked_format =
capture_state->GetHighestFormat(video_capturer);
VideoFormat capture_format;
if (video_capturer->GetBestCaptureFormat(highest_asked_format,
&capture_format)) {
// Restart failure is logged but not propagated: the bookkeeping above
// already succeeded, so the function still returns true.
if (!video_capturer->Restart(capture_format)) {
LOG(LS_ERROR) << "RestartVideoCapture: Restart failed.";
}
} else {
LOG(LS_WARNING)
<< "RestartVideoCapture: Couldn't find a best capture format for "
<< highest_asked_format.ToString();
}
break;
}
case kRequestRestart:
// TODO(ryanpetrie): Support restart requests. Should this
// to-be-implemented logic be used for {Start,Stop}VideoCapture as well?
break;
default:
LOG(LS_ERROR) << "Unknown/unimplemented RestartOption";
break;
}
return true;
}
void CaptureManager::AddVideoSink(VideoCapturer* video_capturer,
rtc::VideoSinkInterface<VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());

View File

@ -53,6 +53,15 @@ class CaptureManager : public sigslot::has_slots<> {
virtual bool StopVideoCapture(VideoCapturer* video_capturer,
const VideoFormat& format);
// Possibly restarts the capturer. If |options| is set to kRequestRestart,
// the CaptureManager chooses whether this request can be handled with the
// current state or if a restart is actually needed. If |options| is set to
// kForceRestart, the capturer is restarted.
virtual bool RestartVideoCapture(VideoCapturer* video_capturer,
const VideoFormat& previous_format,
const VideoFormat& desired_format,
RestartOptions options);
virtual void AddVideoSink(VideoCapturer* video_capturer,
rtc::VideoSinkInterface<VideoFrame>* sink);
virtual void RemoveVideoSink(VideoCapturer* video_capturer,

View File

@ -160,3 +160,41 @@ TEST_F(CaptureManagerTest, MultipleStartStops) {
EXPECT_FALSE(capture_manager_.StopVideoCapture(&video_capturer_,
format_vga_));
}
// kForceRestart must actually switch the capture resolution: frames rendered
// after the restart are expected at the new (VGA) resolution.
TEST_F(CaptureManagerTest, TestForceRestart) {
EXPECT_TRUE(capture_manager_.StartVideoCapture(&video_capturer_,
format_qvga_));
capture_manager_.AddVideoSink(&video_capturer_, &video_renderer_);
EXPECT_EQ_WAIT(1, callback_count(), kMsCallbackWait);
EXPECT_TRUE(video_capturer_.CaptureFrame());
EXPECT_EQ(1, NumFramesRendered());
EXPECT_TRUE(WasRenderedResolution(format_qvga_));
// Now restart with vga.
EXPECT_TRUE(capture_manager_.RestartVideoCapture(
&video_capturer_, format_qvga_, format_vga_,
cricket::CaptureManager::kForceRestart));
EXPECT_TRUE(video_capturer_.CaptureFrame());
EXPECT_EQ(2, NumFramesRendered());
EXPECT_TRUE(WasRenderedResolution(format_vga_));
EXPECT_TRUE(capture_manager_.StopVideoCapture(&video_capturer_,
format_vga_));
}
// kRequestRestart is not implemented yet (see the TODO in
// CaptureManager::RestartVideoCapture), so although the bookkeeping switches
// to QVGA, frames are still rendered at the old (VGA) resolution.
TEST_F(CaptureManagerTest, TestRequestRestart) {
EXPECT_TRUE(capture_manager_.StartVideoCapture(&video_capturer_,
format_vga_));
capture_manager_.AddVideoSink(&video_capturer_, &video_renderer_);
EXPECT_EQ_WAIT(1, callback_count(), kMsCallbackWait);
EXPECT_TRUE(video_capturer_.CaptureFrame());
EXPECT_EQ(1, NumFramesRendered());
EXPECT_TRUE(WasRenderedResolution(format_vga_));
// Now request restart with qvga.
EXPECT_TRUE(capture_manager_.RestartVideoCapture(
&video_capturer_, format_vga_, format_qvga_,
cricket::CaptureManager::kRequestRestart));
EXPECT_TRUE(video_capturer_.CaptureFrame());
EXPECT_EQ(2, NumFramesRendered());
EXPECT_TRUE(WasRenderedResolution(format_vga_));
EXPECT_TRUE(capture_manager_.StopVideoCapture(&video_capturer_,
format_qvga_));
}

View File

@ -19,18 +19,34 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/systeminfo.h"
#include "webrtc/media/base/videoframefactory.h"
#if defined(HAVE_WEBRTC_VIDEO)
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/media/engine/webrtcvideoframefactory.h"
#endif // HAVE_WEBRTC_VIDEO
namespace cricket {
namespace {
// TODO(thorcarpenter): This is a BIG hack to flush the system with black
// frames. Frontends should coordinate to update the video state of a muted
// user. When all frontends do this, consider removing the black-frame business.
const int kNumBlackFramesOnMute = 30;
// MessageHandler constants.
enum {
MSG_DO_PAUSE = 0,
MSG_DO_UNPAUSE,
MSG_STATE_CHANGE
};
static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
#ifdef WEBRTC_LINUX
static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
#endif
static const int kDefaultScreencastFps = 5;
typedef rtc::TypedMessageData<CaptureState> StateChangeParams;
// Limit stats data collections to ~20 seconds of 30fps data before dropping
// old data in case stats aren't reset for long periods of time.
@ -65,16 +81,23 @@ bool CapturedFrame::GetDataSize(uint32_t* size) const {
// Implementation of class VideoCapturer
/////////////////////////////////////////////////////////////////////
VideoCapturer::VideoCapturer()
: adapt_frame_drops_data_(kMaxAccumulatorSize),
: thread_(rtc::Thread::Current()),
adapt_frame_drops_data_(kMaxAccumulatorSize),
frame_time_data_(kMaxAccumulatorSize),
apply_rotation_(true) {
Construct();
}
VideoCapturer::VideoCapturer(rtc::Thread* thread)
: thread_(thread),
adapt_frame_drops_data_(kMaxAccumulatorSize),
frame_time_data_(kMaxAccumulatorSize),
apply_rotation_(true) {
thread_checker_.DetachFromThread();
Construct();
}
void VideoCapturer::Construct() {
ratio_w_ = 0;
ratio_h_ = 0;
ClearAspectRatio();
enable_camera_list_ = false;
square_pixel_aspect_ratio_ = false;
capture_state_ = CS_STOPPED;
@ -85,15 +108,19 @@ void VideoCapturer::Construct() {
SignalVideoFrame.connect(this, &VideoCapturer::OnFrame);
scaled_width_ = 0;
scaled_height_ = 0;
muted_ = false;
black_frame_count_down_ = kNumBlackFramesOnMute;
enable_video_adapter_ = true;
adapt_frame_drops_ = 0;
previous_frame_time_ = 0.0;
#ifdef HAVE_WEBRTC_VIDEO
// There are lots of video capturers out there that don't call
// set_frame_factory. We can either go change all of them, or we
// can set this default.
// TODO(pthatcher): Remove this hack and require the frame factory
// to be passed in the constructor.
set_frame_factory(new WebRtcVideoFrameFactory());
#endif
}
const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
@ -101,7 +128,6 @@ const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
}
bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
previous_frame_time_ = frame_length_time_reporter_.TimerNow();
CaptureState result = Start(capture_format);
const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
@ -114,18 +140,104 @@ bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
return true;
}
// Sets the desired output aspect ratio (ratio_w:ratio_h) used when cropping
// captured frames. A zero component is invalid and is ignored with a warning.
void VideoCapturer::UpdateAspectRatio(int ratio_w, int ratio_h) {
if (ratio_w == 0 || ratio_h == 0) {
LOG(LS_WARNING) << "UpdateAspectRatio ignored invalid ratio: "
<< ratio_w << "x" << ratio_h;
return;
}
ratio_w_ = ratio_w;
ratio_h_ = ratio_h;
}
// Clears the aspect-ratio override; 0/0 means no aspect-ratio cropping
// is applied.
void VideoCapturer::ClearAspectRatio() {
ratio_w_ = 0;
ratio_h_ = 0;
}
// Override this to have more control of how your device is started/stopped.
// Pauses (pause == true) or unpauses (pause == false) the capturer.
// Pausing stops the device but preserves the capture format so capture can
// be resumed later; unpausing restarts the device with that saved format.
// Returns false if the requested transition is not valid from the current
// state (e.g. pausing a stopped camera, unpausing while muted).
bool VideoCapturer::Pause(bool pause) {
if (pause) {
// Pausing an already-paused capturer is a no-op success.
if (capture_state() == CS_PAUSED) {
return true;
}
bool is_running = capture_state() == CS_STARTING ||
capture_state() == CS_RUNNING;
if (!is_running) {
LOG(LS_ERROR) << "Cannot pause a stopped camera.";
return false;
}
LOG(LS_INFO) << "Pausing a camera.";
// Copy the format BEFORE Stop(), which clears it; it is restored below so
// an unpause knows what format to restart with.
rtc::scoped_ptr<VideoFormat> capture_format_when_paused(
capture_format_ ? new VideoFormat(*capture_format_) : NULL);
Stop();
SetCaptureState(CS_PAUSED);
// If you override this function be sure to restore the capture format
// after calling Stop().
SetCaptureFormat(capture_format_when_paused.get());
} else { // Unpause.
if (capture_state() != CS_PAUSED) {
LOG(LS_WARNING) << "Cannot unpause a camera that hasn't been paused.";
return false;
}
if (!capture_format_) {
LOG(LS_ERROR) << "Missing capture_format_, cannot unpause a camera.";
return false;
}
// MuteToBlackThenPause() owns the muted state; a muted camera must be
// unmuted, not unpaused.
if (muted_) {
LOG(LS_WARNING) << "Camera cannot be unpaused while muted.";
return false;
}
LOG(LS_INFO) << "Unpausing a camera.";
if (!Start(*capture_format_)) {
LOG(LS_ERROR) << "Camera failed to start when unpausing.";
return false;
}
}
return true;
}
// Restarts the capturer with |capture_format|. If the capturer is not
// running this degenerates to a plain start; if it is already running with
// an equal format nothing is done. Otherwise the capturer is stopped and
// started again with the new format.
bool VideoCapturer::Restart(const VideoFormat& capture_format) {
if (!IsRunning()) {
return StartCapturing(capture_format);
}
if (GetCaptureFormat() != NULL && *GetCaptureFormat() == capture_format) {
// The requested format is the same; nothing to do.
return true;
}
Stop();
return StartCapturing(capture_format);
}
// Mutes (muted == true) or unmutes (muted == false) the capturer.
// Muting does not stop the camera immediately: kNumBlackFramesOnMute black
// frames are emitted first (OnFrameCaptured posts MSG_DO_PAUSE once the
// countdown reaches zero), after which the camera is paused. Unmuting
// cancels any pending pause and unpauses the camera.
bool VideoCapturer::MuteToBlackThenPause(bool muted) {
// No state change requested; nothing to do.
if (muted == IsMuted()) {
return true;
}
LOG(LS_INFO) << (muted ? "Muting" : "Unmuting") << " this video capturer.";
muted_ = muted; // Do this before calling Pause().
if (muted) {
// Reset black frame count down.
black_frame_count_down_ = kNumBlackFramesOnMute;
// Following frames will be overwritten with black, then the camera will be
// paused.
return true;
}
// Start the camera. Drop any queued pause request first so it cannot
// re-pause the camera after we unpause.
thread_->Clear(this, MSG_DO_PAUSE);
return Pause(false);
}
// Replaces the list of formats the device claims to support and refreshes
// the filtered view of them.
void VideoCapturer::SetSupportedFormats(
const std::vector<VideoFormat>& formats) {
// This method is OK to call during initialization on a separate thread.
RTC_DCHECK(capture_state_ == CS_STOPPED ||
thread_checker_.CalledOnValidThread());
supported_formats_ = formats;
UpdateFilteredSupportedFormats();
}
bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
VideoFormat* best_format) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
// TODO(fbarchard): Directly support max_format.
UpdateFilteredSupportedFormats();
const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();
@ -164,7 +276,6 @@ bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
}
void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
max_format_.reset(new VideoFormat(max_format));
LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
UpdateFilteredSupportedFormats();
@ -208,20 +319,17 @@ void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
void VideoCapturer::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);
}
void VideoCapturer::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.AddOrUpdateSink(sink, wants);
OnSinkWantsChanged(broadcaster_.wants());
}
void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
apply_rotation_ = wants.rotation_applied;
if (frame_factory_) {
frame_factory_->SetApplyRotation(apply_rotation_);
@ -230,28 +338,37 @@ void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
void VideoCapturer::OnFrameCaptured(VideoCapturer*,
const CapturedFrame* captured_frame) {
if (muted_) {
if (black_frame_count_down_ == 0) {
thread_->Post(this, MSG_DO_PAUSE, NULL);
} else {
--black_frame_count_down_;
}
}
if (!broadcaster_.frame_wanted()) {
return;
}
// Use a temporary buffer to scale
rtc::scoped_ptr<uint8_t[]> scale_buffer;
if (IsScreencast()) {
int scaled_width, scaled_height;
int desired_screencast_fps =
capture_format_.get()
? VideoFormat::IntervalToFps(capture_format_->interval)
: kDefaultScreencastFps;
int desired_screencast_fps = capture_format_.get() ?
VideoFormat::IntervalToFps(capture_format_->interval) :
kDefaultScreencastFps;
ComputeScale(captured_frame->width, captured_frame->height,
desired_screencast_fps, &scaled_width, &scaled_height);
if (FOURCC_ARGB == captured_frame->fourcc &&
(scaled_width != captured_frame->width ||
scaled_height != captured_frame->height)) {
scaled_height != captured_frame->height)) {
if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
LOG(LS_INFO) << "Scaling Screencast from " << captured_frame->width
<< "x" << captured_frame->height << " to " << scaled_width
<< "x" << scaled_height;
LOG(LS_INFO) << "Scaling Screencast from "
<< captured_frame->width << "x"
<< captured_frame->height << " to "
<< scaled_width << "x" << scaled_height;
scaled_width_ = scaled_width;
scaled_height_ = scaled_height;
}
@ -280,6 +397,7 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
const int kArgbBpp = 4;
// TODO(fbarchard): Make a helper function to adjust pixels to square.
// TODO(fbarchard): Hook up experiment to scaling.
// TODO(fbarchard): Avoid scale and convert if muted.
// Temporary buffer is scoped here so it will persist until i420_frame.Init()
// makes a copy of the frame, converting to I420.
rtc::scoped_ptr<uint8_t[]> temp_buffer;
@ -407,6 +525,10 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
return;
}
if (muted_) {
// TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead.
adapted_frame->SetToBlack();
}
SignalVideoFrame(this, adapted_frame.get());
UpdateStats(captured_frame);
}
@ -416,13 +538,35 @@ void VideoCapturer::OnFrame(VideoCapturer* capturer, const VideoFrame* frame) {
}
void VideoCapturer::SetCaptureState(CaptureState state) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (state == capture_state_) {
// Don't trigger a state changed callback if the state hasn't changed.
return;
}
StateChangeParams* state_params = new StateChangeParams(state);
capture_state_ = state;
SignalStateChange(this, capture_state_);
thread_->Post(this, MSG_STATE_CHANGE, state_params);
}
// rtc::MessageHandler implementation. Handles messages posted to |thread_|:
// state-change notifications marshalled by SetCaptureState() and the
// deferred pause/unpause requests used by the mute logic.
void VideoCapturer::OnMessage(rtc::Message* message) {
switch (message->message_id) {
case MSG_STATE_CHANGE: {
// Take ownership of the posted payload so it is freed on every path.
rtc::scoped_ptr<StateChangeParams> p(
static_cast<StateChangeParams*>(message->pdata));
SignalStateChange(this, p->data());
break;
}
case MSG_DO_PAUSE: {
Pause(true);
break;
}
case MSG_DO_UNPAUSE: {
Pause(false);
break;
}
default: {
// No other message ids are ever posted to this handler.
ASSERT(false);
}
}
}
// Get the distance between the supported and desired formats.
@ -434,7 +578,6 @@ void VideoCapturer::SetCaptureState(CaptureState state) {
// otherwise, we use preference.
int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired,
const VideoFormat& supported) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
int64_t distance = kMaxDistance;
// Check fourcc.
@ -545,7 +688,6 @@ void VideoCapturer::UpdateFilteredSupportedFormats() {
}
bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (!enable_camera_list_) {
return false;
}

View File

@ -20,11 +20,12 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/base/messagehandler.h"
#include "webrtc/base/rollingaccumulator.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timing.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/media/base/videobroadcaster.h"
@ -36,6 +37,8 @@
namespace cricket {
// Current state of the capturer.
// TODO(hellner): CS_NO_DEVICE is an error code not a capture state. Separate
// error codes and states.
enum CaptureState {
CS_STOPPED, // The capturer has been stopped or hasn't started yet.
CS_STARTING, // The capturer is in the process of starting. Note, it may
@ -44,6 +47,7 @@ enum CaptureState {
// capturing.
CS_PAUSED, // The capturer has been paused.
CS_FAILED, // The capturer failed to start.
CS_NO_DEVICE, // The capturer has no device and consequently failed to start.
};
class VideoFrame;
@ -87,13 +91,13 @@ struct CapturedFrame {
// The captured frames may need to be adapted (for example, cropping).
// Video adaptation is built into and enabled by default. After a frame has
// been captured from the device, it is sent to the video adapter, then out to
// the sinks.
// the encoder.
//
// Programming model:
// Create an object of a subclass of VideoCapturer
// Initialize
// SignalStateChange.connect()
// AddOrUpdateSink()
// SignalFrameCaptured.connect()
// Find the capture format for Start() by either calling GetSupportedFormats()
// and selecting one of the supported or calling GetBestCaptureFormat().
// video_adapter()->OnOutputFormatRequest(desired_encoding_format)
@ -107,10 +111,13 @@ struct CapturedFrame {
// thread safe.
//
class VideoCapturer : public sigslot::has_slots<>,
public rtc::MessageHandler,
public rtc::VideoSourceInterface<cricket::VideoFrame> {
public:
// All signals are marshalled to |thread| or the creating thread if
// none is provided.
VideoCapturer();
explicit VideoCapturer(rtc::Thread* thread);
virtual ~VideoCapturer() {}
// Gets the id of the underlying device, which is available after the capturer
@ -156,6 +163,12 @@ class VideoCapturer : public sigslot::has_slots<>,
// CS_FAILED: if the capturer fails to start.
// CS_NO_DEVICE: if the capturer has no device and fails to start.
virtual CaptureState Start(const VideoFormat& capture_format) = 0;
// Sets the desired aspect ratio. If the capturer is capturing at another
// aspect ratio it will crop the width or the height so that the asked-for
// aspect ratio is achieved. Note that ratio_w and ratio_h do not need to be
// relatively prime.
void UpdateAspectRatio(int ratio_w, int ratio_h);
void ClearAspectRatio();
// Get the current capture format, which is set by the Start() call.
// Note that the width and height of the captured frames may differ from the
@ -165,10 +178,23 @@ class VideoCapturer : public sigslot::has_slots<>,
return capture_format_.get();
}
// Pause the video capturer.
virtual bool Pause(bool paused);
// Stop the video capturer.
virtual void Stop() = 0;
// Check if the video capturer is running.
virtual bool IsRunning() = 0;
// Restart the video capturer with the new |capture_format|.
// Default implementation stops and starts the capturer.
virtual bool Restart(const VideoFormat& capture_format);
// TODO(thorcarpenter): This behavior of keeping the camera open just to emit
// black frames is a total hack and should be fixed.
// When muting, produce black frames then pause the camera.
// When unmuting, start the camera. Camera starts unmuted.
virtual bool MuteToBlackThenPause(bool muted);
virtual bool IsMuted() const {
return muted_;
}
CaptureState capture_state() const {
return capture_state_;
}
@ -193,6 +219,14 @@ class VideoCapturer : public sigslot::has_slots<>,
return enable_camera_list_;
}
// Enable scaling to ensure square pixels.
void set_square_pixel_aspect_ratio(bool square_pixel_aspect_ratio) {
square_pixel_aspect_ratio_ = square_pixel_aspect_ratio;
}
bool square_pixel_aspect_ratio() {
return square_pixel_aspect_ratio_;
}
// Signal all capture state changes that are not a direct result of calling
// Start().
sigslot::signal2<VideoCapturer*, CaptureState> SignalStateChange;
@ -254,6 +288,9 @@ class VideoCapturer : public sigslot::has_slots<>,
void SetCaptureState(CaptureState state);
// Marshals SignalStateChange onto thread_.
void OnMessage(rtc::Message* message) override;
// subclasses override this virtual method to provide a vector of fourccs, in
// order of preference, that are expected by the media engine.
virtual bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) = 0;
@ -302,7 +339,7 @@ class VideoCapturer : public sigslot::has_slots<>,
const rtc::RollingAccumulator<T>& data,
VariableInfo<T>* stats);
rtc::ThreadChecker thread_checker_;
rtc::Thread* thread_;
std::string id_;
CaptureState capture_state_;
rtc::scoped_ptr<VideoFrameFactory> frame_factory_;
@ -317,6 +354,8 @@ class VideoCapturer : public sigslot::has_slots<>,
bool square_pixel_aspect_ratio_; // Enable scaling to square pixels.
int scaled_width_; // Current output size from ComputeScale.
int scaled_height_;
bool muted_;
int black_frame_count_down_;
rtc::VideoBroadcaster broadcaster_;
bool enable_video_adapter_;

View File

@ -26,6 +26,7 @@ namespace {
const int kMsCallbackWait = 500;
// For HD only the height matters.
const int kMinHdHeight = 720;
const uint32_t kTimeout = 5000U;
} // namespace
@ -74,6 +75,90 @@ TEST_F(VideoCapturerTest, CaptureState) {
EXPECT_EQ(2, num_state_changes());
}
// Restarting a running capturer with a new format keeps it running and does
// not generate extra state-change notifications (count stays <= 1).
TEST_F(VideoCapturerTest, TestRestart) {
EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
640,
480,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420)));
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
EXPECT_EQ(1, num_state_changes());
EXPECT_TRUE(capturer_.Restart(cricket::VideoFormat(
320,
240,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420)));
EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_GE(1, num_state_changes());
capturer_.Stop();
// Let the posted state-change message drain before checking IsRunning().
rtc::Thread::Current()->ProcessMessages(100);
EXPECT_FALSE(capturer_.IsRunning());
}
// Restart() on a capturer that was never started behaves like a plain start.
TEST_F(VideoCapturerTest, TestStartingWithRestart) {
EXPECT_FALSE(capturer_.IsRunning());
EXPECT_TRUE(capturer_.Restart(cricket::VideoFormat(
640,
480,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420)));
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
}
// Restart() with the format already in use is a no-op: still running, and
// no additional state change is signalled.
TEST_F(VideoCapturerTest, TestRestartWithSameFormat) {
cricket::VideoFormat format(640, 480,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420);
EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(format));
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
EXPECT_EQ(1, num_state_changes());
EXPECT_TRUE(capturer_.Restart(format));
EXPECT_EQ(cricket::CS_RUNNING, capture_state());
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ(1, num_state_changes());
}
// Muting emits black frames until the countdown (kNumBlackFramesOnMute = 30,
// plus the frame that triggers the pause) runs out, then pauses the camera;
// unmuting restarts it and normal frames resume.
TEST_F(VideoCapturerTest, CameraOffOnMute) {
EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
640,
480,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420)));
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ(0, renderer_.num_rendered_frames());
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(1, renderer_.num_rendered_frames());
EXPECT_FALSE(capturer_.IsMuted());
// Mute the camera and expect black output frame.
capturer_.MuteToBlackThenPause(true);
EXPECT_TRUE(capturer_.IsMuted());
for (int i = 0; i < 31; ++i) {
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_TRUE(renderer_.black_frame());
}
EXPECT_EQ(32, renderer_.num_rendered_frames());
// The pause is posted asynchronously, so wait for CS_PAUSED.
EXPECT_EQ_WAIT(cricket::CS_PAUSED,
capturer_.capture_state(), kTimeout);
// Verify that the camera is off.
EXPECT_FALSE(capturer_.CaptureFrame());
EXPECT_EQ(32, renderer_.num_rendered_frames());
// Unmute the camera and expect non-black output frame.
capturer_.MuteToBlackThenPause(false);
EXPECT_FALSE(capturer_.IsMuted());
EXPECT_EQ_WAIT(cricket::CS_RUNNING,
capturer_.capture_state(), kTimeout);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_FALSE(renderer_.black_frame());
EXPECT_EQ(33, renderer_.num_rendered_frames());
}
TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
capturer_.SetScreencast(true);
@ -111,6 +196,8 @@ TEST_F(VideoCapturerTest, TestRotationAppliedBySource) {
capturer_.ResetSupportedFormats(formats);
// capturer_ should compensate rotation as default.
capturer_.UpdateAspectRatio(400, 200);
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
@ -162,6 +249,8 @@ TEST_F(VideoCapturerTest, TestRotationAppliedBySink) {
wants.rotation_applied = false;
capturer_.AddOrUpdateSink(&renderer_, wants);
capturer_.UpdateAspectRatio(400, 200);
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
@ -211,6 +300,8 @@ TEST_F(VideoCapturerTest, TestRotationAppliedBySourceWhenDifferentWants) {
wants.rotation_applied = false;
capturer_.AddOrUpdateSink(&renderer_, wants);
capturer_.UpdateAspectRatio(400, 200);
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),

View File

@ -262,7 +262,7 @@ void WebRtcVideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
if (!module_) {
LOG(LS_ERROR) << "The capturer has not been initialized";
return CS_FAILED;
return CS_NO_DEVICE;
}
if (start_thread_) {
LOG(LS_ERROR) << "The capturer is already running";

View File

@ -127,7 +127,7 @@ TEST_F(WebRtcVideoCapturerTest, TestCaptureVcm) {
TEST_F(WebRtcVideoCapturerTest, TestCaptureWithoutInit) {
cricket::VideoFormat format;
EXPECT_EQ(cricket::CS_FAILED, capturer_->Start(format));
EXPECT_EQ(cricket::CS_NO_DEVICE, capturer_->Start(format));
EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL);
EXPECT_FALSE(capturer_->IsRunning());
}

View File

@ -1980,12 +1980,14 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
info.adapt_reason = CoordinatedVideoAdapter::ADAPTREASON_NONE;
if (capturer_ != NULL) {
VideoFormat last_captured_frame_format;
capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops,
&info.capturer_frame_time,
&last_captured_frame_format);
info.input_frame_width = last_captured_frame_format.width;
info.input_frame_height = last_captured_frame_format.height;
if (!capturer_->IsMuted()) {
VideoFormat last_captured_frame_format;
capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops,
&info.capturer_frame_time,
&last_captured_frame_format);
info.input_frame_width = last_captured_frame_format.width;
info.input_frame_height = last_captured_frame_format.height;
}
if (capturer_->video_adapter() != nullptr) {
info.adapt_changes += capturer_->video_adapter()->adaptation_changes();
info.adapt_reason = capturer_->video_adapter()->adapt_reason();

View File

@ -455,6 +455,16 @@ bool ChannelManager::StartVideoCapture(
capture_manager_.get(), capturer, video_format));
}
// Forwards a mute/unmute request to |video_capturer| on the worker thread.
// NOTE(review): the Invoke is typed <void>, so the bool result of
// VideoCapturer::MuteToBlackThenPause is discarded — the return value here
// only reflects whether the ChannelManager was initialized.
bool ChannelManager::MuteToBlackThenPause(
VideoCapturer* video_capturer, bool muted) {
if (!initialized_) {
return false;
}
worker_thread_->Invoke<void>(
Bind(&VideoCapturer::MuteToBlackThenPause, video_capturer, muted));
return true;
}
bool ChannelManager::StopVideoCapture(
VideoCapturer* capturer, const VideoFormat& video_format) {
return initialized_ && worker_thread_->Invoke<bool>(
@ -462,6 +472,16 @@ bool ChannelManager::StopVideoCapture(
capture_manager_.get(), capturer, video_format));
}
// Marshals a capture-restart request onto the worker thread, where
// CaptureManager::RestartVideoCapture performs the actual format switch.
// Returns false when the ChannelManager is not initialized; otherwise
// returns the CaptureManager's result.
bool ChannelManager::RestartVideoCapture(
VideoCapturer* video_capturer,
const VideoFormat& previous_format,
const VideoFormat& desired_format,
CaptureManager::RestartOptions options) {
return initialized_ && worker_thread_->Invoke<bool>(
Bind(&CaptureManager::RestartVideoCapture, capture_manager_.get(),
video_capturer, previous_format, desired_format, options));
}
void ChannelManager::AddVideoSink(
VideoCapturer* capturer, rtc::VideoSinkInterface<VideoFrame>* sink) {
if (initialized_)

View File

@ -129,6 +129,9 @@ class ChannelManager : public rtc::MessageHandler,
// formats as a pseudo-handle.
bool StartVideoCapture(VideoCapturer* video_capturer,
const VideoFormat& video_format);
// When muting, produce black frames then pause the camera.
// When unmuting, start the camera. Camera starts unmuted.
bool MuteToBlackThenPause(VideoCapturer* video_capturer, bool muted);
bool StopVideoCapture(VideoCapturer* video_capturer,
const VideoFormat& video_format);
bool RestartVideoCapture(VideoCapturer* video_capturer,