Move encoder thread to VideoSendStream.

Makes VideoCaptureInput easier to test and enables running more things
outside VideoCaptureInput on the encoder thread in the future
(initializing encoders and reconfiguring them, for instance).

BUG=webrtc:5410, webrtc:5494
R=stefan@webrtc.org

Review URL: https://codereview.webrtc.org/1763693002 .

Cr-Commit-Position: refs/heads/master@{#11860}
This commit is contained in:
Peter Boström 2016-03-03 16:29:02 +01:00
parent 313afba2eb
commit a4c76882b9
8 changed files with 71 additions and 97 deletions

View File

@ -24,16 +24,13 @@
namespace webrtc {
namespace internal {
VideoCaptureInput::VideoCaptureInput(VideoCaptureCallback* frame_callback,
VideoCaptureInput::VideoCaptureInput(rtc::Event* capture_event,
VideoRenderer* local_renderer,
SendStatisticsProxy* stats_proxy,
OveruseFrameDetector* overuse_detector)
: frame_callback_(frame_callback),
local_renderer_(local_renderer),
: local_renderer_(local_renderer),
stats_proxy_(stats_proxy),
encoder_thread_(EncoderThreadFunction, this, "EncoderThread"),
capture_event_(false, false),
stop_(0),
capture_event_(capture_event),
// TODO(danilchap): Pass the clock from outside to ensure it is the same
// clock the RTCP module uses when calculating the offset since the last
// captured frame, to estimate the RTP timestamp for the SenderReport.
@ -41,16 +38,9 @@ VideoCaptureInput::VideoCaptureInput(VideoCaptureCallback* frame_callback,
last_captured_timestamp_(0),
delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
clock_->TimeInMilliseconds()),
overuse_detector_(overuse_detector) {
encoder_thread_.Start();
encoder_thread_.SetPriority(rtc::kHighPriority);
}
overuse_detector_(overuse_detector) {}
VideoCaptureInput::~VideoCaptureInput() {
// Stop the thread.
rtc::AtomicOps::ReleaseStore(&stop_, 1);
capture_event_.Set();
encoder_thread_.Stop();
}
void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
@ -101,31 +91,16 @@ void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
capture_event_.Set();
capture_event_->Set();
}
// Static trampoline for rtc::PlatformThread: forwards to the instance's
// EncoderProcess() loop. NOTE(review): this is the pre-change (removed) side
// of the diff -- the encoder thread is moved to VideoSendStream by this CL.
bool VideoCaptureInput::EncoderThreadFunction(void* obj) {
return static_cast<VideoCaptureInput*>(obj)->EncoderProcess();
}
bool VideoCaptureInput::GetVideoFrame(VideoFrame* video_frame) {
rtc::CritScope lock(&crit_);
if (captured_frame_.IsZeroSize())
return false;
bool VideoCaptureInput::EncoderProcess() {
static const int kThreadWaitTimeMs = 100;
if (capture_event_.Wait(kThreadWaitTimeMs)) {
if (rtc::AtomicOps::AcquireLoad(&stop_))
return false;
VideoFrame deliver_frame;
{
rtc::CritScope lock(&crit_);
if (!captured_frame_.IsZeroSize()) {
deliver_frame = captured_frame_;
captured_frame_.Reset();
}
}
if (!deliver_frame.IsZeroSize()) {
frame_callback_->DeliverFrame(deliver_frame);
}
}
*video_frame = captured_frame_;
captured_frame_.Reset();
return true;
}

View File

@ -34,17 +34,10 @@ class OveruseFrameDetector;
class SendStatisticsProxy;
class VideoRenderer;
// Callback interface previously used to push captured frames to the encoder.
// Removed by this change: frames are now pulled via
// VideoCaptureInput::GetVideoFrame() from the VideoSendStream encoder thread.
class VideoCaptureCallback {
public:
virtual ~VideoCaptureCallback() {}
// NOTE(review): takes VideoFrame by value -- presumably cheap to copy
// (ref-counted frame buffers); confirm before reusing this pattern.
virtual void DeliverFrame(VideoFrame video_frame) = 0;
};
namespace internal {
class VideoCaptureInput : public webrtc::VideoCaptureInput {
public:
VideoCaptureInput(VideoCaptureCallback* frame_callback,
VideoCaptureInput(rtc::Event* capture_event,
VideoRenderer* local_renderer,
SendStatisticsProxy* send_stats_proxy,
OveruseFrameDetector* overuse_detector);
@ -52,21 +45,14 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
void IncomingCapturedFrame(const VideoFrame& video_frame) override;
private:
// Thread functions for delivering captured frames to receivers.
static bool EncoderThreadFunction(void* obj);
bool EncoderProcess();
bool GetVideoFrame(VideoFrame* frame);
private:
rtc::CriticalSection crit_;
VideoCaptureCallback* const frame_callback_;
VideoRenderer* const local_renderer_;
SendStatisticsProxy* const stats_proxy_;
rtc::PlatformThread encoder_thread_;
rtc::Event capture_event_;
volatile int stop_;
rtc::Event* const capture_event_;
VideoFrame captured_frame_ GUARDED_BY(crit_);
Clock* const clock_;

View File

@ -12,7 +12,6 @@
#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/event.h"
#include "webrtc/system_wrappers/include/ref_count.h"
@ -20,22 +19,11 @@
#include "webrtc/test/fake_texture_frame.h"
#include "webrtc/video/send_statistics_proxy.h"
using ::testing::_;
using ::testing::Invoke;
using ::testing::NiceMock;
using ::testing::Return;
using ::testing::WithArg;
// If an output frame does not arrive in 500ms, the test will fail.
#define FRAME_TIMEOUT_MS 500
namespace webrtc {
// GMock implementation of VideoCaptureCallback used by the capture-input
// tests; removed along with the callback interface in this change.
class MockVideoCaptureCallback : public VideoCaptureCallback {
public:
MOCK_METHOD1(DeliverFrame, void(VideoFrame video_frame));
};
bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2);
@ -49,40 +37,34 @@ class VideoCaptureInputTest : public ::testing::Test {
: stats_proxy_(Clock::GetRealTimeClock(),
webrtc::VideoSendStream::Config(nullptr),
webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo),
mock_frame_callback_(new NiceMock<MockVideoCaptureCallback>),
output_frame_event_(false, false) {}
capture_event_(false, false) {}
virtual void SetUp() {
EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_))
.WillRepeatedly(
WithArg<0>(Invoke(this, &VideoCaptureInputTest::AddOutputFrame)));
overuse_detector_.reset(
new OveruseFrameDetector(Clock::GetRealTimeClock(), CpuOveruseOptions(),
nullptr, nullptr, &stats_proxy_));
input_.reset(new internal::VideoCaptureInput(mock_frame_callback_.get(),
nullptr, &stats_proxy_,
overuse_detector_.get()));
input_.reset(new internal::VideoCaptureInput(
&capture_event_, nullptr, &stats_proxy_, overuse_detector_.get()));
}
// Test helper: feeds |frame| into the VideoCaptureInput under test.
void AddInputFrame(VideoFrame* frame) {
input_->IncomingCapturedFrame(*frame);
}
// Pre-change test helper, invoked from the mocked DeliverFrame(): records the
// delivered frame and signals output_frame_event_. Removed by this change in
// favor of waiting on capture_event_ and polling GetVideoFrame() directly.
void AddOutputFrame(const VideoFrame& frame) {
// Only non-texture frames expose a Y-plane buffer worth recording.
if (frame.native_handle() == NULL)
output_frame_ybuffers_.push_back(frame.buffer(kYPlane));
output_frames_.push_back(new VideoFrame(frame));
output_frame_event_.Set();
}
void WaitOutputFrame() {
EXPECT_TRUE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS));
VideoFrame frame;
EXPECT_TRUE(input_->GetVideoFrame(&frame));
if (!frame.native_handle()) {
output_frame_ybuffers_.push_back(
static_cast<const VideoFrame*>(&frame)->buffer(kYPlane));
}
output_frames_.push_back(new VideoFrame(frame));
}
SendStatisticsProxy stats_proxy_;
std::unique_ptr<MockVideoCaptureCallback> mock_frame_callback_;
rtc::Event capture_event_;
std::unique_ptr<OveruseFrameDetector> overuse_detector_;
@ -92,9 +74,6 @@ class VideoCaptureInputTest : public ::testing::Test {
// Input capture frames of VideoCaptureInput.
ScopedVector<VideoFrame> input_frames_;
// Indicate an output frame has arrived.
rtc::Event output_frame_event_;
// Output delivered frames of VideoCaptureInput.
ScopedVector<VideoFrame> output_frames_;
@ -164,12 +143,12 @@ TEST_F(VideoCaptureInputTest, DropsFramesWithSameOrOldNtpTimestamp) {
// Repeat frame with the same NTP timestamp should drop.
AddInputFrame(input_frames_[0]);
EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
EXPECT_FALSE(capture_event_.Wait(FRAME_TIMEOUT_MS));
// As should frames with a decreased NTP timestamp.
input_frames_[0]->set_ntp_time_ms(input_frames_[0]->ntp_time_ms() - 1);
AddInputFrame(input_frames_[0]);
EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
EXPECT_FALSE(capture_event_.Wait(FRAME_TIMEOUT_MS));
// But delivering with an increased NTP timestamp should succeed.
input_frames_[0]->set_ntp_time_ms(4711);

View File

@ -20,6 +20,7 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/call_stats.h"
#include "webrtc/video/receive_statistics_proxy.h"

View File

@ -171,6 +171,9 @@ VideoSendStream::VideoSendStream(
call_stats_(call_stats),
congestion_controller_(congestion_controller),
remb_(remb),
encoder_thread_(EncoderThreadFunction, this, "EncoderThread"),
encoder_wakeup_event_(false, false),
stop_encoder_thread_(0),
overuse_detector_(
Clock::GetRealTimeClock(),
GetCpuOveruseOptions(config.encoder_settings.full_overuse_time),
@ -203,7 +206,7 @@ VideoSendStream::VideoSendStream(
bitrate_allocator),
vcm_(vie_encoder_.vcm()),
rtp_rtcp_modules_(vie_channel_.rtp_rtcp()),
input_(&vie_encoder_,
input_(&encoder_wakeup_event_,
config_.local_renderer,
&stats_proxy_,
&overuse_detector_) {
@ -303,12 +306,20 @@ VideoSendStream::VideoSendStream(
vie_channel_.RegisterSendFrameCountObserver(&stats_proxy_);
module_process_thread_->RegisterModule(&overuse_detector_);
encoder_thread_.Start();
encoder_thread_.SetPriority(rtc::kHighPriority);
}
VideoSendStream::~VideoSendStream() {
LOG(LS_INFO) << "~VideoSendStream: " << config_.ToString();
Stop();
// Stop the encoder thread permanently.
rtc::AtomicOps::ReleaseStore(&stop_encoder_thread_, 1);
encoder_wakeup_event_.Set();
encoder_thread_.Stop();
module_process_thread_->DeRegisterModule(&overuse_detector_);
vie_channel_.RegisterSendFrameCountObserver(nullptr);
vie_channel_.RegisterSendBitrateObserver(nullptr);
@ -351,6 +362,24 @@ void VideoSendStream::Stop() {
vie_receiver_->StopReceive();
}
// Static entry point for rtc::PlatformThread. Runs the blocking
// EncoderProcess() loop once; returning false tells the thread wrapper not
// to invoke this function again.
bool VideoSendStream::EncoderThreadFunction(void* obj) {
static_cast<VideoSendStream*>(obj)->EncoderProcess();
// We're done, return false to abort.
return false;
}
// Encoder thread loop: blocks until encoder_wakeup_event_ is signaled (by
// VideoCaptureInput::IncomingCapturedFrame or by the destructor), then pulls
// the pending captured frame from input_ and hands it to the encoder.
// Exits when stop_encoder_thread_ is set (see ~VideoSendStream()).
void VideoSendStream::EncoderProcess() {
while (true) {
encoder_wakeup_event_.Wait(rtc::Event::kForever);
// stop_encoder_thread_ is stored (and the event set) by the destructor.
if (rtc::AtomicOps::AcquireLoad(&stop_encoder_thread_))
return;
VideoFrame frame;
// GetVideoFrame() returns false when no new frame is pending, e.g. on a
// wakeup triggered only for shutdown or a frame already consumed.
if (input_.GetVideoFrame(&frame))
vie_encoder_.EncodeVideoFrame(frame);
}
}
void VideoSendStream::ReconfigureVideoEncoder(
const VideoEncoderConfig& config) {
TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder");

View File

@ -75,6 +75,9 @@ class VideoSendStream : public webrtc::VideoSendStream,
int GetPaddingNeededBps() const;
private:
static bool EncoderThreadFunction(void* obj);
void EncoderProcess();
void ConfigureSsrcs();
SendStatisticsProxy stats_proxy_;
@ -87,6 +90,10 @@ class VideoSendStream : public webrtc::VideoSendStream,
CongestionController* const congestion_controller_;
VieRemb* const remb_;
rtc::PlatformThread encoder_thread_;
rtc::Event encoder_wakeup_event_;
volatile int stop_encoder_thread_;
OveruseFrameDetector overuse_detector_;
PayloadRouter payload_router_;
EncoderStateFeedback encoder_feedback_;

View File

@ -344,7 +344,7 @@ void ViEEncoder::TraceFrameDropEnd() {
encoder_paused_and_dropped_frame_ = false;
}
void ViEEncoder::DeliverFrame(VideoFrame video_frame) {
void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
if (!send_payload_router_->active()) {
// We've paused or we have no channels attached, don't waste resources on
// encoding.

View File

@ -24,7 +24,6 @@
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/typedefs.h"
#include "webrtc/video/video_capture_input.h"
namespace webrtc {
@ -41,11 +40,11 @@ class SendStatisticsProxy;
class ViEBitrateObserver;
class ViEEffectFilter;
class VideoCodingModule;
class VideoEncoder;
class ViEEncoder : public VideoEncoderRateObserver,
public VCMPacketizationCallback,
public VCMSendStatisticsCallback,
public VideoCaptureCallback {
public VCMSendStatisticsCallback {
public:
friend class ViEBitrateObserver;
@ -80,9 +79,7 @@ class ViEEncoder : public VideoEncoderRateObserver,
int32_t DeRegisterExternalEncoder(uint8_t pl_type);
void SetEncoder(const VideoCodec& video_codec, int min_transmit_bitrate_bps);
// Implementing VideoCaptureCallback.
void DeliverFrame(VideoFrame video_frame) override;
void EncodeVideoFrame(const VideoFrame& video_frame);
void SendKeyFrame();
uint32_t LastObservedBitrateBps() const;