Replace VideoCaptureInput with VideoSinkInterface.

Adds new methods VideoSendStream::SetSource(rtc::VideoSourceInterface*) and ViEEncoder::SetSource(rtc::VideoSourceInterface*).

This is the first step needed for ViEEncoder to request downscaling through rtc::VideoSinkWants, instead of separately reporting CPU overuse and internally downscaling based on QP values.

BUG=webrtc:5687
// Android CQ seems broken.
NOTRY=true

Review-Url: https://codereview.webrtc.org/2257413002
Cr-Commit-Position: refs/heads/master@{#14238}
perkj 2016-09-15 08:57:21 -07:00 committed by Commit bot
parent 91511f13e1
commit 95a226f55a
26 changed files with 388 additions and 227 deletions
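As context for the diffs below, the wiring change looks roughly like the following sketch. It is a minimal illustration, not code from the patch: the ExampleSource class and its DeliverFrame() helper are hypothetical, while rtc::VideoSourceInterface, rtc::VideoSinkInterface, rtc::VideoSinkWants, VideoSendStream::SetSource() and OnFrame() are the interfaces this commit actually introduces or uses.

// Old push model (removed by this patch):
//   send_stream->Input()->IncomingCapturedFrame(frame);
//
// New registration model: the send stream registers its encoder as a sink
// on a caller-provided source. A hypothetical minimal source:
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/video_frame.h"

class ExampleSource : public rtc::VideoSourceInterface<webrtc::VideoFrame> {
 public:
  void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
                       const rtc::VideoSinkWants& wants) override {
    // VideoSendStream::SetSource() leads to this call, with the encoder as
    // |sink|. Per the commit message, |wants| is eventually meant to carry
    // downscaling requests from the encoder.
    sink_ = sink;
  }
  void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
    sink_ = nullptr;
  }
  // Hypothetical helper a capturer would call to push a frame downstream.
  void DeliverFrame(const webrtc::VideoFrame& frame) {
    if (sink_)
      sink_->OnFrame(frame);  // Replaces IncomingCapturedFrame().
  }

 private:
  rtc::VideoSinkInterface<webrtc::VideoFrame>* sink_ = nullptr;
};

// Usage sketch:
//   ExampleSource source;
//   send_stream->SetSource(&source);  // Replaces send_stream->Input().
//   source.DeliverFrame(frame);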


@ -177,9 +177,10 @@ class BitrateEstimatorTest : public test::CallTest {
test_->video_encoder_config_.Copy());
RTC_DCHECK_EQ(1u, test_->video_encoder_config_.streams.size());
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
send_stream_->Input(), test_->video_encoder_config_.streams[0].width,
test_->video_encoder_config_.streams[0].width,
test_->video_encoder_config_.streams[0].height, 30,
Clock::GetRealTimeClock()));
send_stream_->SetSource(frame_generator_capturer_.get());
send_stream_->Start();
frame_generator_capturer_->Start();
@ -216,8 +217,8 @@ class BitrateEstimatorTest : public test::CallTest {
~Stream() {
EXPECT_FALSE(is_sending_receiving_);
frame_generator_capturer_.reset(nullptr);
test_->sender_call_->DestroyVideoSendStream(send_stream_);
frame_generator_capturer_.reset(nullptr);
send_stream_ = nullptr;
if (audio_receive_stream_) {
test_->receiver_call_->DestroyAudioReceiveStream(audio_receive_stream_);


@ -103,11 +103,17 @@ FakeVideoSendStream::FakeVideoSendStream(
: sending_(false),
config_(std::move(config)),
codec_settings_set_(false),
source_(nullptr),
num_swapped_frames_(0) {
RTC_DCHECK(config.encoder_settings.encoder != NULL);
ReconfigureVideoEncoder(std::move(encoder_config));
}
FakeVideoSendStream::~FakeVideoSendStream() {
if (source_)
source_->RemoveSink(this);
}
const webrtc::VideoSendStream::Config& FakeVideoSendStream::GetConfig() const {
return config_;
}
@ -162,8 +168,7 @@ int64_t FakeVideoSendStream::GetLastTimestamp() const {
return last_frame_.render_time_ms();
}
void FakeVideoSendStream::IncomingCapturedFrame(
const webrtc::VideoFrame& frame) {
void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) {
++num_swapped_frames_;
last_frame_.ShallowCopy(frame);
}
@ -204,10 +209,6 @@ void FakeVideoSendStream::ReconfigureVideoEncoder(
++num_encoder_reconfigurations_;
}
webrtc::VideoCaptureInput* FakeVideoSendStream::Input() {
return this;
}
void FakeVideoSendStream::Start() {
sending_ = true;
}
@ -216,6 +217,16 @@ void FakeVideoSendStream::Stop() {
sending_ = false;
}
void FakeVideoSendStream::SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
RTC_DCHECK(source != source_);
if (source_)
source_->RemoveSink(this);
source_ = source;
if (source)
source->AddOrUpdateSink(this, rtc::VideoSinkWants());
}
FakeVideoReceiveStream::FakeVideoReceiveStream(
webrtc::VideoReceiveStream::Config config)
: config_(std::move(config)), receiving_(false) {}


@ -99,11 +99,13 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream {
bool started_ = false;
};
class FakeVideoSendStream final : public webrtc::VideoSendStream,
public webrtc::VideoCaptureInput {
class FakeVideoSendStream final
: public webrtc::VideoSendStream,
public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
FakeVideoSendStream(webrtc::VideoSendStream::Config config,
webrtc::VideoEncoderConfig encoder_config);
~FakeVideoSendStream() override;
const webrtc::VideoSendStream::Config& GetConfig() const;
const webrtc::VideoEncoderConfig& GetEncoderConfig() const;
std::vector<webrtc::VideoStream> GetVideoStreams();
@ -122,14 +124,16 @@ class FakeVideoSendStream final : public webrtc::VideoSendStream,
}
private:
void IncomingCapturedFrame(const webrtc::VideoFrame& frame) override;
// rtc::VideoSinkInterface<VideoFrame> implementation.
void OnFrame(const webrtc::VideoFrame& frame) override;
// webrtc::VideoSendStream implementation.
void Start() override;
void Stop() override;
void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override;
webrtc::VideoSendStream::Stats GetStats() override;
void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override;
webrtc::VideoCaptureInput* Input() override;
bool sending_;
webrtc::VideoSendStream::Config config_;
@ -139,6 +143,7 @@ class FakeVideoSendStream final : public webrtc::VideoSendStream,
webrtc::VideoCodecVP8 vp8;
webrtc::VideoCodecVP9 vp9;
} vpx_settings_;
rtc::VideoSourceInterface<webrtc::VideoFrame>* source_;
int num_swapped_frames_;
webrtc::VideoFrame last_frame_;
webrtc::VideoSendStream::Stats stats_;


@ -1580,6 +1580,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
source_(nullptr),
external_encoder_factory_(external_encoder_factory),
stream_(nullptr),
encoder_sink_(nullptr),
parameters_(std::move(config), options, max_bitrate_bps, codec_settings),
rtp_parameters_(CreateRtpParametersWithOneEncoding()),
pending_encoder_reconfiguration_(false),
@ -1658,7 +1659,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
<< ", texture=" << last_frame_info_.is_texture;
}
if (stream_ == NULL) {
if (encoder_sink_ == NULL) {
// Frame input before send codecs are configured, dropping frame.
return;
}
@ -1681,7 +1682,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
if (cpu_restricted_counter_ > 0)
++cpu_restricted_frame_count_;
stream_->Input()->IncomingCapturedFrame(video_frame);
encoder_sink_->OnFrame(video_frame);
}
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
@ -1704,7 +1705,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
if (options_present) {
VideoOptions old_options = parameters_.options;
parameters_.options.SetAll(*options);
// Reconfigure encoder settings on the naext frame or stream
// Reconfigure encoder settings on the next frame or stream
// recreation if the options changed.
if (parameters_.options != old_options) {
pending_encoder_reconfiguration_ = true;
@ -1712,7 +1713,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
}
if (source_changing) {
if (source == nullptr && stream_ != nullptr) {
if (source == nullptr && encoder_sink_ != nullptr) {
LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
// Force this black frame not to be dropped due to timestamp order
// check. As IncomingCapturedFrame will drop the frame if this frame's
@ -1725,9 +1726,8 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
last_frame_info_.height));
black_buffer->SetToBlack();
stream_->Input()->IncomingCapturedFrame(webrtc::VideoFrame(
black_buffer, last_frame_info_.rotation,
last_frame_timestamp_us_));
encoder_sink_->OnFrame(webrtc::VideoFrame(
black_buffer, last_frame_info_.rotation, last_frame_timestamp_us_));
}
source_ = source;
}
@ -1743,7 +1743,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
void WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectSource() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (source_ == NULL) {
if (source_ == nullptr) {
return;
}
@ -2049,6 +2049,23 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSend(bool send) {
UpdateSendState();
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::AddOrUpdateSink(
VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
// TODO(perkj): Actually consider the encoder |wants| and remove
// WebRtcVideoSendStream::OnLoadUpdate(Load load).
rtc::CritScope cs(&lock_);
RTC_DCHECK(!encoder_sink_ || encoder_sink_ == sink);
encoder_sink_ = sink;
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::RemoveSink(
VideoSinkInterface<webrtc::VideoFrame>* sink) {
rtc::CritScope cs(&lock_);
RTC_DCHECK_EQ(encoder_sink_, sink);
encoder_sink_ = nullptr;
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
if (worker_thread_ != rtc::Thread::Current()) {
invoker_.AsyncInvoke<void>(
@ -2241,6 +2258,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
}
stream_ = call_->CreateVideoSendStream(std::move(config),
parameters_.encoder_config.Copy());
stream_->SetSource(this);
parameters_.encoder_config.encoder_specific_settings = NULL;
pending_encoder_reconfiguration_ = false;


@ -243,6 +243,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
// frames are then converted from cricket frames to webrtc frames.
class WebRtcVideoSendStream
: public rtc::VideoSinkInterface<cricket::VideoFrame>,
public rtc::VideoSourceInterface<webrtc::VideoFrame>,
public webrtc::LoadObserver {
public:
WebRtcVideoSendStream(
@ -262,6 +263,16 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
bool SetRtpParameters(const webrtc::RtpParameters& parameters);
webrtc::RtpParameters GetRtpParameters() const;
// Implements rtc::VideoSourceInterface<webrtc::VideoFrame>.
// WebRtcVideoSendStream acts as a source to the webrtc::VideoSendStream
// in |stream_|. The reason is that WebRtcVideoSendStream receives
// cricket::VideoFrames and forwards webrtc::VideoFrames to |source_|.
// TODO(perkj, nisse): Refactor WebRtcVideoSendStream to directly connect
// the camera input |source_|
void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
void OnFrame(const cricket::VideoFrame& frame) override;
bool SetVideoSend(bool mute,
const VideoOptions* options,
@ -389,6 +400,8 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
rtc::CriticalSection lock_;
webrtc::VideoSendStream* stream_ GUARDED_BY(lock_);
rtc::VideoSinkInterface<webrtc::VideoFrame>* encoder_sink_
GUARDED_BY(lock_);
// Contains settings that are the same for all streams in the MediaChannel,
// such as codecs, header extensions, and the global bitrate limit for the
// entire channel.


@ -298,7 +298,6 @@ rtc_source_set("test_common") {
"statistics.h",
"vcm_capturer.cc",
"vcm_capturer.h",
"video_capturer.cc",
"video_capturer.h",
"win/run_loop_win.cc",
]


@ -239,15 +239,15 @@ void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock,
float speed) {
VideoStream stream = video_encoder_config_.streams.back();
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
video_send_stream_->Input(), stream.width, stream.height,
stream.max_framerate * speed, clock));
stream.width, stream.height, stream.max_framerate * speed, clock));
video_send_stream_->SetSource(frame_generator_capturer_.get());
}
void CallTest::CreateFrameGeneratorCapturer() {
VideoStream stream = video_encoder_config_.streams.back();
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
video_send_stream_->Input(), stream.width, stream.height,
stream.max_framerate, clock_));
stream.width, stream.height, stream.max_framerate, clock_));
video_send_stream_->SetSource(frame_generator_capturer_.get());
}
void CallTest::CreateFakeAudioDevices() {


@ -239,6 +239,27 @@ class ScrollingImageFrameGenerator : public FrameGenerator {
} // namespace
FrameForwarder::FrameForwarder() : sink_(nullptr) {}
void FrameForwarder::IncomingCapturedFrame(const VideoFrame& video_frame) {
rtc::CritScope lock(&crit_);
if (sink_)
sink_->OnFrame(video_frame);
}
void FrameForwarder::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
rtc::CritScope lock(&crit_);
RTC_DCHECK(!sink_ || sink_ == sink);
sink_ = sink;
}
void FrameForwarder::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
rtc::CritScope lock(&crit_);
RTC_DCHECK_EQ(sink, sink_);
sink_ = nullptr;
}
FrameGenerator* FrameGenerator::CreateChromaGenerator(size_t width,
size_t height) {
return new ChromaGenerator(width, height);


@ -7,12 +7,14 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
#define WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
#ifndef WEBRTC_TEST_FRAME_GENERATOR_H_
#define WEBRTC_TEST_FRAME_GENERATOR_H_
#include <string>
#include <vector>
#include "webrtc/base/criticalsection.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_frame.h"
@ -20,6 +22,25 @@ namespace webrtc {
class Clock;
namespace test {
// FrameForwarder can be used as an implementation
// of rtc::VideoSourceInterface<VideoFrame> where the caller controls when
// a frame should be forwarded to its sink.
// Currently this implementation only supports one sink.
class FrameForwarder : public rtc::VideoSourceInterface<VideoFrame> {
public:
FrameForwarder();
// Forwards |video_frame| to the registered |sink_|.
void IncomingCapturedFrame(const VideoFrame& video_frame);
private:
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
rtc::CriticalSection crit_;
rtc::VideoSinkInterface<VideoFrame>* sink_ GUARDED_BY(crit_);
};
class FrameGenerator {
public:
FrameGenerator() {}
@ -61,4 +82,4 @@ class FrameGenerator {
} // namespace test
} // namespace webrtc
#endif // WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
#endif // WEBRTC_TEST_FRAME_GENERATOR_H_


@ -21,14 +21,12 @@
namespace webrtc {
namespace test {
FrameGeneratorCapturer* FrameGeneratorCapturer::Create(VideoCaptureInput* input,
size_t width,
FrameGeneratorCapturer* FrameGeneratorCapturer::Create(size_t width,
size_t height,
int target_fps,
Clock* clock) {
FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer(
clock, input, FrameGenerator::CreateChromaGenerator(width, height),
target_fps);
clock, FrameGenerator::CreateChromaGenerator(width, height), target_fps);
if (!capturer->Init()) {
delete capturer;
return NULL;
@ -38,16 +36,14 @@ FrameGeneratorCapturer* FrameGeneratorCapturer::Create(VideoCaptureInput* input,
}
FrameGeneratorCapturer* FrameGeneratorCapturer::CreateFromYuvFile(
VideoCaptureInput* input,
const std::string& file_name,
size_t width,
size_t height,
int target_fps,
Clock* clock) {
FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer(
clock, input,
FrameGenerator::CreateFromYuvFile(std::vector<std::string>(1, file_name),
width, height, 1),
clock, FrameGenerator::CreateFromYuvFile(
std::vector<std::string>(1, file_name), width, height, 1),
target_fps);
if (!capturer->Init()) {
delete capturer;
@ -58,20 +54,18 @@ FrameGeneratorCapturer* FrameGeneratorCapturer::CreateFromYuvFile(
}
FrameGeneratorCapturer::FrameGeneratorCapturer(Clock* clock,
VideoCaptureInput* input,
FrameGenerator* frame_generator,
int target_fps)
: VideoCapturer(input),
clock_(clock),
: clock_(clock),
sending_(false),
sink_(nullptr),
tick_(EventTimerWrapper::Create()),
thread_(FrameGeneratorCapturer::Run, this, "FrameGeneratorCapturer"),
frame_generator_(frame_generator),
target_fps_(target_fps),
first_frame_capture_time_(-1) {
assert(input != NULL);
assert(frame_generator != NULL);
assert(target_fps > 0);
RTC_DCHECK(frame_generator);
RTC_DCHECK_GT(target_fps, 0);
}
FrameGeneratorCapturer::~FrameGeneratorCapturer() {
@ -113,7 +107,8 @@ void FrameGeneratorCapturer::InsertFrame() {
if (first_frame_capture_time_ == -1) {
first_frame_capture_time_ = frame->ntp_time_ms();
}
input_->IncomingCapturedFrame(*frame);
if (sink_)
sink_->OnFrame(*frame);
}
}
tick_->Wait(WEBRTC_EVENT_INFINITE);
@ -129,6 +124,21 @@ void FrameGeneratorCapturer::Stop() {
sending_ = false;
}
void FrameGeneratorCapturer::AddOrUpdateSink(
rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
rtc::CritScope cs(&lock_);
RTC_CHECK(!sink_);
sink_ = sink;
}
void FrameGeneratorCapturer::RemoveSink(
rtc::VideoSinkInterface<VideoFrame>* sink) {
rtc::CritScope cs(&lock_);
RTC_CHECK(sink_ == sink);
sink_ = nullptr;
}
void FrameGeneratorCapturer::ForceFrame() {
tick_->Set();
}


@ -18,6 +18,7 @@
#include "webrtc/common_video/rotation.h"
#include "webrtc/test/video_capturer.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_frame.h"
namespace webrtc {
@ -30,14 +31,12 @@ class FrameGenerator;
class FrameGeneratorCapturer : public VideoCapturer {
public:
static FrameGeneratorCapturer* Create(VideoCaptureInput* input,
size_t width,
static FrameGeneratorCapturer* Create(size_t width,
size_t height,
int target_fps,
Clock* clock);
static FrameGeneratorCapturer* CreateFromYuvFile(VideoCaptureInput* input,
const std::string& file_name,
static FrameGeneratorCapturer* CreateFromYuvFile(const std::string& file_name,
size_t width,
size_t height,
int target_fps,
@ -46,13 +45,17 @@ class FrameGeneratorCapturer : public VideoCapturer {
void Start() override;
void Stop() override;
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
void ForceFrame();
void SetFakeRotation(VideoRotation rotation);
int64_t first_frame_capture_time() const { return first_frame_capture_time_; }
FrameGeneratorCapturer(Clock* clock,
VideoCaptureInput* input,
FrameGenerator* frame_generator,
int target_fps);
bool Init();
@ -63,6 +66,7 @@ class FrameGeneratorCapturer : public VideoCapturer {
Clock* const clock_;
bool sending_;
rtc::VideoSinkInterface<VideoFrame>* sink_ GUARDED_BY(&lock_);
std::unique_ptr<EventTimerWrapper> tick_;
rtc::CriticalSection lock_;


@ -187,7 +187,6 @@
'statistics.h',
'vcm_capturer.cc',
'vcm_capturer.h',
'video_capturer.cc',
'video_capturer.h',
'win/run_loop_win.cc',
],


@ -16,9 +16,7 @@
namespace webrtc {
namespace test {
VcmCapturer::VcmCapturer(webrtc::VideoCaptureInput* input)
: VideoCapturer(input), started_(false), vcm_(NULL) {
}
VcmCapturer::VcmCapturer() : started_(false), sink_(nullptr), vcm_(NULL) {}
bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
VideoCaptureModule::DeviceInfo* device_info =
@ -54,11 +52,10 @@ bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
return true;
}
VcmCapturer* VcmCapturer::Create(VideoCaptureInput* input,
size_t width,
VcmCapturer* VcmCapturer::Create(size_t width,
size_t height,
size_t target_fps) {
VcmCapturer* vcm_capturer = new VcmCapturer(input);
VcmCapturer* vcm_capturer = new VcmCapturer();
if (!vcm_capturer->Init(width, height, target_fps)) {
// TODO(pbos): Log a warning that this failed.
delete vcm_capturer;
@ -78,6 +75,19 @@ void VcmCapturer::Stop() {
started_ = false;
}
void VcmCapturer::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
rtc::CritScope lock(&crit_);
RTC_CHECK(!sink_);
sink_ = sink;
}
void VcmCapturer::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
rtc::CritScope lock(&crit_);
RTC_CHECK(sink_ == sink);
sink_ = nullptr;
}
void VcmCapturer::Destroy() {
if (!vcm_)
return;
@ -93,8 +103,8 @@ VcmCapturer::~VcmCapturer() { Destroy(); }
void VcmCapturer::OnIncomingCapturedFrame(const int32_t id,
const VideoFrame& frame) {
rtc::CritScope lock(&crit_);
if (started_)
input_->IncomingCapturedFrame(frame);
if (started_ && sink_)
sink_->OnFrame(frame);
}
void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) {


@ -22,29 +22,31 @@ namespace test {
class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback {
public:
static VcmCapturer* Create(VideoCaptureInput* input,
size_t width,
size_t height,
size_t target_fps);
static VcmCapturer* Create(size_t width, size_t height, size_t target_fps);
virtual ~VcmCapturer();
void Start() override;
void Stop() override;
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
void OnIncomingCapturedFrame(const int32_t id,
const VideoFrame& frame) override; // NOLINT
void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override;
private:
explicit VcmCapturer(VideoCaptureInput* input);
VcmCapturer();
bool Init(size_t width, size_t height, size_t target_fps);
void Destroy();
rtc::CriticalSection crit_;
bool started_ GUARDED_BY(crit_);
rtc::VideoSinkInterface<VideoFrame>* sink_ GUARDED_BY(crit_);
rtc::scoped_refptr<VideoCaptureModule> vcm_;
VideoCaptureCapability capability_;
};
} // test
} // webrtc


@ -1,54 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/test/video_capturer.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/vcm_capturer.h"
namespace webrtc {
namespace test {
class NullCapturer : public VideoCapturer {
public:
NullCapturer() : VideoCapturer(NULL) {}
virtual ~NullCapturer() {}
virtual void Start() {}
virtual void Stop() {}
};
VideoCapturer::VideoCapturer(VideoCaptureInput* input) : input_(input) {
}
VideoCapturer* VideoCapturer::Create(VideoCaptureInput* input,
size_t width,
size_t height,
int fps,
Clock* clock) {
VcmCapturer* vcm_capturer = VcmCapturer::Create(input, width, height, fps);
if (vcm_capturer != NULL) {
return vcm_capturer;
}
// TODO(pbos): Log a warning that this failed.
FrameGeneratorCapturer* frame_generator_capturer =
FrameGeneratorCapturer::Create(input, width, height, fps, clock);
if (frame_generator_capturer != NULL) {
return frame_generator_capturer;
}
// TODO(pbos): Log a warning that this failed.
return new NullCapturer();
}
} // test
} // webrtc


@ -12,29 +12,21 @@
#include <stddef.h>
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/video_frame.h"
namespace webrtc {
class Clock;
class VideoCaptureInput;
namespace test {
class VideoCapturer {
class VideoCapturer : public rtc::VideoSourceInterface<VideoFrame> {
public:
static VideoCapturer* Create(VideoCaptureInput* input,
size_t width,
size_t height,
int fps,
Clock* clock);
virtual ~VideoCapturer() {}
virtual void Start() = 0;
virtual void Stop() = 0;
protected:
explicit VideoCapturer(VideoCaptureInput* input);
VideoCaptureInput* input_;
};
} // test
} // webrtc


@ -214,8 +214,10 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
// check that the callbacks are done after processing video.
std::unique_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight));
video_send_stream_->Input()->IncomingCapturedFrame(
*frame_generator->NextFrame());
test::FrameForwarder frame_forwarder;
video_send_stream_->SetSource(&frame_forwarder);
frame_forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
EXPECT_TRUE(pre_render_callback.Wait())
<< "Timed out while waiting for pre-render callback.";
EXPECT_TRUE(renderer.Wait())
@ -259,8 +261,9 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
test::FrameGenerator::CreateChromaGenerator(
video_encoder_config_.streams[0].width,
video_encoder_config_.streams[0].height));
video_send_stream_->Input()->IncomingCapturedFrame(
*frame_generator->NextFrame());
test::FrameForwarder frame_forwarder;
video_send_stream_->SetSource(&frame_forwarder);
frame_forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
EXPECT_TRUE(renderer.Wait())
<< "Timed out while waiting for the frame to render.";
@ -1304,8 +1307,8 @@ class MultiStreamTest {
receive_streams[i]->Start();
frame_generators[i] = test::FrameGeneratorCapturer::Create(
send_streams[i]->Input(), width, height, 30,
Clock::GetRealTimeClock());
width, height, 30, Clock::GetRealTimeClock());
send_streams[i]->SetSource(frame_generators[i]);
frame_generators[i]->Start();
}
@ -1765,8 +1768,9 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
test::FrameGenerator::CreateChromaGenerator(
video_encoder_config_.streams[0].width,
video_encoder_config_.streams[0].height));
video_send_stream_->Input()->IncomingCapturedFrame(
*frame_generator->NextFrame());
test::FrameForwarder forwarder;
video_send_stream_->SetSource(&forwarder);
forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
EXPECT_TRUE(post_encode_observer.Wait())
<< "Timed out while waiting for send-side encoded-frame callback.";


@ -7,8 +7,9 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include "webrtc/video/video_quality_test.h"
#include <stdio.h>
#include <algorithm>
#include <deque>
#include <map>
@ -17,7 +18,6 @@
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/event.h"
#include "webrtc/base/format_macros.h"
@ -32,8 +32,8 @@
#include "webrtc/test/run_loop.h"
#include "webrtc/test/statistics.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/vcm_capturer.h"
#include "webrtc/test/video_renderer.h"
#include "webrtc/video/video_quality_test.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
@ -99,7 +99,6 @@ namespace webrtc {
class VideoAnalyzer : public PacketReceiver,
public Transport,
public rtc::VideoSinkInterface<VideoFrame>,
public VideoCaptureInput,
public EncodedFrameObserver {
public:
VideoAnalyzer(test::LayerFilteringTransport* transport,
@ -110,10 +109,10 @@ class VideoAnalyzer : public PacketReceiver,
FILE* graph_data_output_file,
const std::string& graph_title,
uint32_t ssrc_to_analyze)
: input_(nullptr),
transport_(transport),
: transport_(transport),
receiver_(nullptr),
send_stream_(nullptr),
captured_frame_forwarder_(this),
test_label_(test_label),
graph_data_output_file_(graph_data_output_file),
graph_title_(graph_title),
@ -169,6 +168,19 @@ class VideoAnalyzer : public PacketReceiver,
virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
void SetSendStream(VideoSendStream* stream) {
rtc::CritScope lock(&crit_);
RTC_DCHECK(!send_stream_);
send_stream_ = stream;
}
rtc::VideoSinkInterface<VideoFrame>* InputInterface() {
return &captured_frame_forwarder_;
}
rtc::VideoSourceInterface<VideoFrame>* OutputInterface() {
return &captured_frame_forwarder_;
}
DeliveryStatus DeliverPacket(MediaType media_type,
const uint8_t* packet,
size_t length,
@ -198,17 +210,6 @@ class VideoAnalyzer : public PacketReceiver,
samples_encode_time_ms_[ntp_time_ms] = encode_time_ms;
}
void IncomingCapturedFrame(const VideoFrame& video_frame) override {
VideoFrame copy = video_frame;
copy.set_timestamp(copy.ntp_time_ms() * 90);
{
rtc::CritScope lock(&crit_);
frames_.push_back(copy);
}
input_->IncomingCapturedFrame(video_frame);
}
void PreEncodeOnFrame(const VideoFrame& video_frame) {
rtc::CritScope lock(&crit_);
if (!first_send_timestamp_ && rtp_timestamp_delta_ == 0) {
@ -346,10 +347,8 @@ class VideoAnalyzer : public PacketReceiver,
}
EncodedFrameObserver* encode_timing_proxy() { return &encode_timing_proxy_; }
VideoCaptureInput* input_;
test::LayerFilteringTransport* const transport_;
PacketReceiver* receiver_;
VideoSendStream* send_stream_;
private:
struct FrameComparison {
@ -697,6 +696,55 @@ class VideoAnalyzer : public PacketReceiver,
}
}
// Implements VideoSinkInterface to receive captured frames from a
// FrameGeneratorCapturer. Implements VideoSourceInterface to be able to act
// as a source to VideoSendStream.
// It forwards all input frames to the VideoAnalyzer for later comparison and
// forwards the captured frames to the VideoSendStream.
class CapturedFrameForwarder : public rtc::VideoSinkInterface<VideoFrame>,
public rtc::VideoSourceInterface<VideoFrame> {
public:
explicit CapturedFrameForwarder(VideoAnalyzer* analyzer)
: analyzer_(analyzer), send_stream_input_(nullptr) {}
private:
void OnFrame(const VideoFrame& video_frame) override {
VideoFrame copy = video_frame;
copy.set_timestamp(copy.ntp_time_ms() * 90);
analyzer_->AddCapturedFrameForComparison(video_frame);
rtc::CritScope lock(&crit_);
if (send_stream_input_)
send_stream_input_->OnFrame(video_frame);
}
// Called when |send_stream_.SetSource()| is called.
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
rtc::CritScope lock(&crit_);
RTC_DCHECK(!send_stream_input_ || send_stream_input_ == sink);
send_stream_input_ = sink;
}
// Called by |send_stream_| when |send_stream_.SetSource()| is called.
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override {
rtc::CritScope lock(&crit_);
RTC_DCHECK(sink == send_stream_input_);
send_stream_input_ = nullptr;
}
VideoAnalyzer* const analyzer_;
rtc::CriticalSection crit_;
rtc::VideoSinkInterface<VideoFrame>* send_stream_input_ GUARDED_BY(crit_);
};
void AddCapturedFrameForComparison(const VideoFrame& video_frame) {
rtc::CritScope lock(&crit_);
frames_.push_back(video_frame);
}
VideoSendStream* send_stream_;
CapturedFrameForwarder captured_frame_forwarder_;
const std::string test_label_;
FILE* const graph_data_output_file_;
const std::string graph_title_;
@ -1028,21 +1076,20 @@ void VideoQualityTest::SetupScreenshare() {
}
}
void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) {
void VideoQualityTest::CreateCapturer() {
if (params_.screenshare.enabled) {
test::FrameGeneratorCapturer* frame_generator_capturer =
new test::FrameGeneratorCapturer(
clock_, input, frame_generator_.release(), params_.common.fps);
new test::FrameGeneratorCapturer(clock_, frame_generator_.release(),
params_.common.fps);
EXPECT_TRUE(frame_generator_capturer->Init());
capturer_.reset(frame_generator_capturer);
} else {
if (params_.video.clip_name.empty()) {
capturer_.reset(test::VideoCapturer::Create(input, params_.common.width,
params_.common.height,
params_.common.fps, clock_));
capturer_.reset(test::VcmCapturer::Create(
params_.common.width, params_.common.height, params_.common.fps));
} else {
capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile(
input, test::ResourcePath(params_.video.clip_name, "yuv"),
test::ResourcePath(params_.video.clip_name, "yuv"),
params_.common.width, params_.common.height, params_.common.fps,
clock_));
ASSERT_TRUE(capturer_) << "Could not create capturer for "
@ -1127,10 +1174,12 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
SetupScreenshare();
CreateVideoStreams();
analyzer.input_ = video_send_stream_->Input();
analyzer.send_stream_ = video_send_stream_;
analyzer.SetSendStream(video_send_stream_);
video_send_stream_->SetSource(analyzer.OutputInterface());
CreateCapturer(&analyzer);
CreateCapturer();
rtc::VideoSinkWants wants;
capturer_->AddOrUpdateSink(analyzer.InputInterface(), wants);
video_send_stream_->Start();
for (VideoReceiveStream* receive_stream : video_receive_streams_)
@ -1222,7 +1271,8 @@ void VideoQualityTest::RunWithRenderers(const Params& params) {
video_send_config_.Copy(), video_encoder_config_.Copy());
VideoReceiveStream* video_receive_stream =
call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy());
CreateCapturer(video_send_stream_->Input());
CreateCapturer();
video_send_stream_->SetSource(capturer_.get());
AudioReceiveStream* audio_receive_stream = nullptr;
if (params_.audio) {


@ -103,7 +103,7 @@ class VideoQualityTest : public test::CallTest {
static std::vector<int> ParseCSV(const std::string& str);
// Helper methods for setting up the call.
void CreateCapturer(VideoCaptureInput* input);
void CreateCapturer();
void SetupCommon(Transport* send_transport, Transport* recv_transport);
void SetupScreenshare();


@ -545,12 +545,10 @@ void VideoSendStream::Stop() {
worker_queue_->PostTask([send_stream] { send_stream->Stop(); });
}
VideoCaptureInput* VideoSendStream::Input() {
// Input() will be called on the thread that deliverers video frames from
// libjingle.
// TODO(perkj): Refactor ViEEncoder to register directly as a VideoSink to the
// VideoSource.
return vie_encoder_.get();
void VideoSendStream::SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
RTC_DCHECK_RUN_ON(&thread_checker_);
vie_encoder_->SetSource(source);
}
void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {


@ -71,7 +71,10 @@ class VideoSendStream : public webrtc::VideoSendStream {
// webrtc::VideoSendStream implementation.
void Start() override;
void Stop() override;
VideoCaptureInput* Input() override;
void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override;
void ReconfigureVideoEncoder(VideoEncoderConfig) override;
Stats GetStats() override;


@ -33,6 +33,7 @@
#include "webrtc/test/call_test.h"
#include "webrtc/test/configurable_frame_size_encoder.h"
#include "webrtc/test/fake_texture_frame.h"
#include "webrtc/test/frame_generator.h"
#include "webrtc/test/frame_utils.h"
#include "webrtc/test/null_transport.h"
#include "webrtc/test/testsupport/perf_test.h"
@ -1473,8 +1474,10 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
handle3, width, height, 5, 5, kVideoRotation_0));
video_send_stream_->Start();
test::FrameForwarder forwarder;
video_send_stream_->SetSource(&forwarder);
for (size_t i = 0; i < input_frames.size(); i++) {
video_send_stream_->Input()->IncomingCapturedFrame(input_frames[i]);
forwarder.IncomingCapturedFrame(input_frames[i]);
// Do not send the next frame too fast, so the frame dropper won't drop it.
if (i < input_frames.size() - 1)
SleepMs(1000 / video_encoder_config_.streams[0].max_framerate);
@ -1483,6 +1486,7 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
observer.WaitOutputFrame();
}
video_send_stream_->Stop();
video_send_stream_->SetSource(nullptr);
// Test if the input and output frames are the same. render_time_ms and
// timestamp are not compared because capturer sets those values.


@ -241,6 +241,47 @@ class ViEEncoder::EncodeTask : public rtc::QueuedTask {
const bool log_stats_;
};
// VideoSourceProxy is responsible for ensuring thread safety between calls to
// ViEEncoder::SetSource, which happen on libjingle's worker thread when a
// video capturer is connected to the encoder, and the encoder task queue
// (encoder_queue_) where the encoder reports its VideoSinkWants.
class ViEEncoder::VideoSourceProxy {
public:
explicit VideoSourceProxy(ViEEncoder* vie_encoder)
: vie_encoder_(vie_encoder), source_(nullptr) {}
void SetSource(rtc::VideoSourceInterface<VideoFrame>* source) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&main_checker_);
rtc::VideoSourceInterface<VideoFrame>* old_source = nullptr;
{
rtc::CritScope lock(&crit_);
old_source = source_;
source_ = source;
}
if (old_source != source && old_source != nullptr) {
old_source->RemoveSink(vie_encoder_);
}
if (!source) {
return;
}
// TODO(perkj): Let VideoSourceProxy implement LoadObserver and truly send
// CPU load as sink wants.
rtc::VideoSinkWants wants;
source->AddOrUpdateSink(vie_encoder_, wants);
}
private:
rtc::CriticalSection crit_;
rtc::SequencedTaskChecker main_checker_;
ViEEncoder* vie_encoder_;
rtc::VideoSourceInterface<VideoFrame>* source_ GUARDED_BY(&crit_);
RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
};
ViEEncoder::ViEEncoder(uint32_t number_of_cores,
SendStatisticsProxy* stats_proxy,
const VideoSendStream::Config::EncoderSettings& settings,
@ -249,6 +290,7 @@ ViEEncoder::ViEEncoder(uint32_t number_of_cores,
EncodedFrameObserver* encoder_timing)
: shutdown_event_(true /* manual_reset */, false),
number_of_cores_(number_of_cores),
source_proxy_(new VideoSourceProxy(this)),
settings_(settings),
vp_(VideoProcessing::Create()),
video_sender_(Clock::GetRealTimeClock(), this, this),
@ -288,23 +330,27 @@ ViEEncoder::ViEEncoder(uint32_t number_of_cores,
}
ViEEncoder::~ViEEncoder() {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(shutdown_event_.Wait(0))
<< "Must call ::Stop() before destruction.";
}
void ViEEncoder::Stop() {
if (!encoder_queue_.IsCurrent()) {
encoder_queue_.PostTask([this] { Stop(); });
shutdown_event_.Wait(rtc::Event::kForever);
return;
}
RTC_DCHECK_RUN_ON(&encoder_queue_);
video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type, false);
overuse_detector_.StopCheckForOveruse();
shutdown_event_.Set();
RTC_DCHECK_RUN_ON(&thread_checker_);
source_proxy_->SetSource(nullptr);
encoder_queue_.PostTask([this] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type,
false);
overuse_detector_.StopCheckForOveruse();
shutdown_event_.Set();
});
shutdown_event_.Wait(rtc::Event::kForever);
}
void ViEEncoder::RegisterProcessThread(ProcessThread* module_process_thread) {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(!module_process_thread_);
module_process_thread_ = module_process_thread;
module_process_thread_->RegisterModule(&video_sender_);
@ -312,9 +358,15 @@ void ViEEncoder::RegisterProcessThread(ProcessThread* module_process_thread) {
}
void ViEEncoder::DeRegisterProcessThread() {
RTC_DCHECK_RUN_ON(&thread_checker_);
module_process_thread_->DeRegisterModule(&video_sender_);
}
void ViEEncoder::SetSource(rtc::VideoSourceInterface<VideoFrame>* source) {
RTC_DCHECK_RUN_ON(&thread_checker_);
source_proxy_->SetSource(source);
}
void ViEEncoder::SetSink(EncodedImageCallback* sink) {
encoder_queue_.PostTask([this, sink] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
@ -384,7 +436,7 @@ void ViEEncoder::ConfigureEncoderInternal(const VideoCodec& video_codec,
}
}
void ViEEncoder::IncomingCapturedFrame(const VideoFrame& video_frame) {
void ViEEncoder::OnFrame(const VideoFrame& video_frame) {
RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_);
stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height());


@ -40,11 +40,11 @@ class SendStatisticsProxy;
// and produces an encoded bit stream.
// Usage:
// Instantiate.
// Call SetStartRate and SetSink.
// Call SetSink.
// Call SetSource.
// Call ConfigureEncoder with the codec settings.
// Provide frames to encode by calling IncomingCapturedFrame.
// Call Stop() when done.
class ViEEncoder : public VideoCaptureInput,
class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
public EncodedImageCallback,
public VCMSendStatisticsCallback,
public CpuOveruseObserver {
@ -63,6 +63,7 @@ class ViEEncoder : public VideoCaptureInput,
void RegisterProcessThread(ProcessThread* module_process_thread);
void DeRegisterProcessThread();
void SetSource(rtc::VideoSourceInterface<VideoFrame>* source);
void SetSink(EncodedImageCallback* sink);
// TODO(perkj): Can we remove VideoCodec.startBitrate ?
@ -75,11 +76,6 @@ class ViEEncoder : public VideoCaptureInput,
// guaranteed that no encoded frames will be delivered to the sink.
void Stop();
// Implements VideoCaptureInput.
// TODO(perkj): Refactor ViEEncoder to inherit rtc::VideoSink instead of
// VideoCaptureInput.
void IncomingCapturedFrame(const VideoFrame& video_frame) override;
void SendKeyFrame();
// virtual to test EncoderStateFeedback with mocks.
@ -93,10 +89,14 @@ class ViEEncoder : public VideoCaptureInput,
private:
class EncodeTask;
class VideoSourceProxy;
void ConfigureEncoderInternal(const VideoCodec& video_codec,
size_t max_data_payload_length);
// Implements VideoSinkInterface.
void OnFrame(const VideoFrame& video_frame) override;
// Implements VideoSendStatisticsCallback.
void SendStatistics(uint32_t bit_rate,
uint32_t frame_rate) override;
@ -121,6 +121,8 @@ class ViEEncoder : public VideoCaptureInput,
rtc::Event shutdown_event_;
const uint32_t number_of_cores_;
const std::unique_ptr<VideoSourceProxy> source_proxy_;
EncodedImageCallback* sink_;
const VideoSendStream::Config::EncoderSettings settings_;
@ -134,6 +136,9 @@ class ViEEncoder : public VideoCaptureInput,
rtc::VideoSinkInterface<VideoFrame>* const pre_encode_callback_;
ProcessThread* module_process_thread_;
rtc::ThreadChecker module_process_thread_checker_;
// |thread_checker_| checks that public methods that are related to lifetime
// of ViEEncoder are called on the same thread.
rtc::ThreadChecker thread_checker_;
VideoCodec encoder_config_ ACCESS_ON(&encoder_queue_);
@ -160,6 +165,8 @@ class ViEEncoder : public VideoCaptureInput,
// All public methods are proxied to |encoder_queue_|. It must be
// destroyed first to make sure no tasks are run that use other members.
rtc::TaskQueue encoder_queue_;
RTC_DISALLOW_COPY_AND_ASSIGN(ViEEncoder);
};
} // namespace webrtc


@ -12,6 +12,7 @@
#include "webrtc/base/logging.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/frame_generator.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/vie_encoder.h"
@ -42,6 +43,7 @@ class ViEEncoderTest : public ::testing::Test {
video_send_config_.encoder_settings, nullptr /* pre_encode_callback */,
nullptr /* overuse_callback */, nullptr /* encoder_timing */));
vie_encoder_->SetSink(&sink_);
vie_encoder_->SetSource(&video_source_);
vie_encoder_->SetStartBitrate(10000);
vie_encoder_->ConfigureEncoder(video_encoder_config_, 1440);
}
@ -95,7 +97,7 @@ class ViEEncoderTest : public ::testing::Test {
int32_t result =
FakeEncoder::Encode(input_image, codec_specific_info, frame_types);
if (block_encode)
continue_encode_event_.Wait(kDefaultTimeoutMs);
EXPECT_TRUE(continue_encode_event_.Wait(kDefaultTimeoutMs));
return result;
}
@ -138,7 +140,7 @@ class ViEEncoderTest : public ::testing::Test {
void WaitForEncodedFrame(int64_t expected_ntp_time) {
uint32_t timestamp = 0;
encoded_frame_event_.Wait(kDefaultTimeoutMs);
EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
{
rtc::CritScope lock(&crit_);
timestamp = timestamp_;
@ -164,6 +166,7 @@ class ViEEncoderTest : public ::testing::Test {
TestEncoder fake_encoder_;
SendStatisticsProxy stats_proxy_;
TestSink sink_;
test::FrameForwarder video_source_;
std::unique_ptr<ViEEncoder> vie_encoder_;
};
@ -171,22 +174,22 @@ TEST_F(ViEEncoderTest, EncodeOneFrame) {
const int kTargetBitrateBps = 100000;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
rtc::Event frame_destroyed_event(false, false);
vie_encoder_->IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
sink_.WaitForEncodedFrame(1);
frame_destroyed_event.Wait(kDefaultTimeoutMs);
EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs));
vie_encoder_->Stop();
}
TEST_F(ViEEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) {
// Dropped since no target bitrate has been set.
rtc::Event frame_destroyed_event(false, false);
vie_encoder_->IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
frame_destroyed_event.Wait(kDefaultTimeoutMs);
video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs));
const int kTargetBitrateBps = 100000;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
sink_.WaitForEncodedFrame(2);
vie_encoder_->Stop();
}
@ -194,15 +197,15 @@ TEST_F(ViEEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) {
TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) {
const int kTargetBitrateBps = 100000;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
sink_.WaitForEncodedFrame(1);
vie_encoder_->OnBitrateUpdated(0, 0, 0);
// Dropped since bitrate is zero.
vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
vie_encoder_->IncomingCapturedFrame(CreateFrame(3, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
sink_.WaitForEncodedFrame(3);
vie_encoder_->Stop();
}
@ -210,13 +213,13 @@ TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) {
TEST_F(ViEEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) {
const int kTargetBitrateBps = 100000;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
sink_.WaitForEncodedFrame(1);
// This frame will be dropped since it has the same ntp timestamp.
vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
sink_.WaitForEncodedFrame(2);
vie_encoder_->Stop();
}
@ -225,14 +228,14 @@ TEST_F(ViEEncoderTest, DropsFrameAfterStop) {
const int kTargetBitrateBps = 100000;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
sink_.WaitForEncodedFrame(1);
vie_encoder_->Stop();
sink_.SetExpectNoFrames();
rtc::Event frame_destroyed_event(false, false);
vie_encoder_->IncomingCapturedFrame(CreateFrame(2, &frame_destroyed_event));
frame_destroyed_event.Wait(kDefaultTimeoutMs);
video_source_.IncomingCapturedFrame(CreateFrame(2, &frame_destroyed_event));
EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs));
}
TEST_F(ViEEncoderTest, DropsPendingFramesOnSlowEncode) {
@ -240,12 +243,12 @@ TEST_F(ViEEncoderTest, DropsPendingFramesOnSlowEncode) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
fake_encoder_.BlockNextEncode();
vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
sink_.WaitForEncodedFrame(1);
// Here, the encoder thread will be blocked in the TestEncoder waiting for a
// call to ContinueEncode.
vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
vie_encoder_->IncomingCapturedFrame(CreateFrame(3, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
fake_encoder_.ContinueEncode();
sink_.WaitForEncodedFrame(3);


@ -19,6 +19,7 @@
#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/config.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/transport.h"
namespace webrtc {
@ -26,18 +27,6 @@ namespace webrtc {
class LoadObserver;
class VideoEncoder;
// Class to deliver captured frame to the video send stream.
class VideoCaptureInput {
public:
// These methods do not lock internally and must be called sequentially.
// If your application switches input sources synchronization must be done
// externally to make sure that any old frames are not delivered concurrently.
virtual void IncomingCapturedFrame(const VideoFrame& video_frame) = 0;
protected:
virtual ~VideoCaptureInput() {}
};
class VideoSendStream {
public:
struct StreamStats {
@ -193,9 +182,8 @@ class VideoSendStream {
// When a stream is stopped, it can't receive, process or deliver packets.
virtual void Stop() = 0;
// Gets interface used to insert captured frames. Valid as long as the
// VideoSendStream is valid.
virtual VideoCaptureInput* Input() = 0;
virtual void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) = 0;
// Set which streams to send. Must have at least as many SSRCs as configured
// in the config. Encoder settings are passed on to the encoder instance along