Refactor VideoTracks to forward all sinks to its source

This removes the use of VideoTrackRenderers within VideoTrack; instead, all of its sinks are passed on to the VideoSource.
That means that the source will handle all sinks and can (if the source implements it) handle different SinkWants for each sink.
The VideoBroadcaster is updated to produce black frames, taking over that responsibility from the now-deprecated VideoTrackRenderers.

BUG=webrtc:5426
R=nisse@webrtc.org, pthatcher@webrtc.org

Review URL: https://codereview.webrtc.org/1779063003 .

Cr-Commit-Position: refs/heads/master@{#12028}
This commit is contained in:
perkj 2016-03-17 10:35:23 +01:00
parent 292d658b20
commit d6c395441b
14 changed files with 221 additions and 213 deletions

View File

@ -90,14 +90,18 @@ const NSTimeInterval kRTCPeerConnectionTestTimeout = 20;
videoTrackID:(NSString*)videoTrackID
audioTrackID:(NSString*)audioTrackID {
RTCMediaStream* localMediaStream = [factory mediaStreamWithLabel:streamLabel];
RTCVideoTrack* videoTrack =
[factory videoTrackWithID:videoTrackID source:videoSource];
RTCFakeRenderer* videoRenderer = [[RTCFakeRenderer alloc] init];
[videoTrack addRenderer:videoRenderer];
[localMediaStream addVideoTrack:videoTrack];
// Test that removal/re-add works.
[localMediaStream removeVideoTrack:videoTrack];
[localMediaStream addVideoTrack:videoTrack];
// TODO(zeke): Fix this test to create a fake video capturer so that a track
// can be created.
if (videoSource) {
RTCVideoTrack* videoTrack =
[factory videoTrackWithID:videoTrackID source:videoSource];
RTCFakeRenderer* videoRenderer = [[RTCFakeRenderer alloc] init];
[videoTrack addRenderer:videoRenderer];
[localMediaStream addVideoTrack:videoTrack];
// Test that removal/re-add works.
[localMediaStream removeVideoTrack:videoTrack];
[localMediaStream addVideoTrack:videoTrack];
}
RTCAudioTrack* audioTrack = [factory audioTrackWithID:audioTrackID];
[localMediaStream addAudioTrack:audioTrack];
[pc addStream:localMediaStream];

View File

@ -321,8 +321,6 @@
'videosourceproxy.h',
'videotrack.cc',
'videotrack.h',
'videotrackrenderers.cc',
'videotrackrenderers.h',
'videotracksource.cc',
'videotracksource.h',
'webrtcsdp.cc',

View File

@ -20,19 +20,9 @@ VideoTrack::VideoTrack(const std::string& label,
VideoTrackSourceInterface* video_source)
: MediaStreamTrack<VideoTrackInterface>(label),
video_source_(video_source) {
// TODO(perkj): Sinks should register directly to the source so that
// VideoSinkWants can be applied correctly per sink. For now, |renderers_|
// must be able to apply rotation. Note that this is only actual renderers,
// not sinks that connect directly to cricket::VideoCapture.
rtc::VideoSinkWants wants;
wants.rotation_applied = false;
if (video_source_)
video_source_->AddOrUpdateSink(&renderers_, wants);
}
VideoTrack::~VideoTrack() {
if (video_source_)
video_source_->RemoveSink(&renderers_);
}
std::string VideoTrack::kind() const {
@ -42,16 +32,27 @@ std::string VideoTrack::kind() const {
void VideoTrack::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
renderers_.AddOrUpdateSink(sink, wants);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
VideoSourceBase::AddOrUpdateSink(sink, wants);
rtc::VideoSinkWants modified_wants = wants;
modified_wants.black_frames = !enabled();
video_source_->AddOrUpdateSink(sink, modified_wants);
}
void VideoTrack::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
renderers_.RemoveSink(sink);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
VideoSourceBase::RemoveSink(sink);
video_source_->RemoveSink(sink);
}
bool VideoTrack::set_enabled(bool enable) {
renderers_.SetEnabled(enable);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
for (auto& sink_pair : sink_pairs()) {
rtc::VideoSinkWants modified_wants = sink_pair.wants;
modified_wants.black_frames = !enable;
video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants);
}
return MediaStreamTrack<VideoTrackInterface>::set_enabled(enable);
}

View File

@ -12,15 +12,19 @@
#define WEBRTC_API_VIDEOTRACK_H_
#include <string>
#include <vector>
#include "webrtc/api/mediastreamtrack.h"
#include "webrtc/api/videosourceinterface.h"
#include "webrtc/api/videotrackrenderers.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/videosourcebase.h"
namespace webrtc {
class VideoTrack : public MediaStreamTrack<VideoTrackInterface> {
class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
public rtc::VideoSourceBase {
public:
static rtc::scoped_refptr<VideoTrack> Create(
const std::string& label,
@ -41,7 +45,7 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface> {
~VideoTrack();
private:
VideoTrackRenderers renderers_;
rtc::ThreadChecker thread_checker_;
rtc::scoped_refptr<VideoTrackSourceInterface> video_source_;
};

View File

@ -61,9 +61,8 @@ TEST_F(VideoTrackTest, RenderVideo) {
EXPECT_EQ(2, renderer_1->num_rendered_frames());
EXPECT_EQ(1, renderer_2->num_rendered_frames());
video_track_->RemoveSink(renderer_1.get());
renderer_1.reset(nullptr);
capturer_.CaptureFrame();
EXPECT_EQ(2, renderer_1->num_rendered_frames());
EXPECT_EQ(2, renderer_2->num_rendered_frames());
}
@ -86,9 +85,8 @@ TEST_F(VideoTrackTest, RenderVideoOld) {
EXPECT_EQ(2, renderer_1->num_rendered_frames());
EXPECT_EQ(1, renderer_2->num_rendered_frames());
video_track_->RemoveRenderer(renderer_1.get());
renderer_1.reset(nullptr);
capturer_.CaptureFrame();
EXPECT_EQ(2, renderer_1->num_rendered_frames());
EXPECT_EQ(2, renderer_2->num_rendered_frames());
}

View File

@ -9,76 +9,5 @@
*/
#include "webrtc/api/videotrackrenderers.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
namespace webrtc {
// Tracks start out enabled, i.e. real (non-black) frames are forwarded.
VideoTrackRenderers::VideoTrackRenderers() : enabled_(true) {
}
// Nothing to release explicitly; the registered sinks are not owned here.
VideoTrackRenderers::~VideoTrackRenderers() {
}
// Registers |sink| so that it receives future frames. Re-adding an already
// registered sink is a no-op.
void VideoTrackRenderers::AddOrUpdateSink(
    VideoSinkInterface<cricket::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
  // TODO(nisse): Currently ignores wants. We should somehow use
  // VideoBroadcaster, but we need to sort out its threading issues
  // first.
  rtc::CritScope cs(&critical_section_);
  auto existing = std::find(sinks_.begin(), sinks_.end(), sink);
  if (existing == sinks_.end()) {
    sinks_.push_back(sink);
  }
}
// Unregisters |sink|; every occurrence is removed. Unknown sinks are ignored.
void VideoTrackRenderers::RemoveSink(
    VideoSinkInterface<cricket::VideoFrame>* sink) {
  rtc::CritScope cs(&critical_section_);
  auto new_end = std::remove(sinks_.begin(), sinks_.end(), sink);
  sinks_.erase(new_end, sinks_.end());
}
// Switches between forwarding the real frames (enabled) and black frames
// (disabled). Thread safe; may race with RenderFrame, see the re-check there.
void VideoTrackRenderers::SetEnabled(bool enable) {
  rtc::CritScope cs(&critical_section_);
  enabled_ = enable;
}
// Delivers |frame| to every registered sink. When the track is disabled, a
// black frame with the same dimensions, timestamp and rotation is delivered
// instead. Always returns true (the frame is always "handled").
bool VideoTrackRenderers::RenderFrame(const cricket::VideoFrame* frame) {
  {
    rtc::CritScope cs(&critical_section_);
    if (enabled_) {
      RenderFrameToSinks(*frame);
      return true;
    }
  }
  // Generate the black frame outside of the critical section. Note
  // that this may result in unexpected frame order, in the unlikely
  // case that RenderFrame is called from multiple threads without
  // proper serialization, and the track is switched from disabled to
  // enabled in the middle of the first call.
  cricket::WebRtcVideoFrame black(new rtc::RefCountedObject<I420Buffer>(
                                      static_cast<int>(frame->GetWidth()),
                                      static_cast<int>(frame->GetHeight())),
                                  frame->GetTimeStamp(),
                                  frame->GetVideoRotation());
  black.SetToBlack();
  {
    rtc::CritScope cs(&critical_section_);
    // Check enabled_ flag again, since the track might have been
    // enabled while we generated the black frame. I think the
    // enabled-ness ought to be applied at the track output, and hence
    // an enabled track shouldn't send any blacked out frames.
    RenderFrameToSinks(enabled_ ? *frame : black);
    return true;
  }
}
// Called with critical_section_ already locked. Fans |frame| out to each
// registered sink in registration order.
void VideoTrackRenderers::RenderFrameToSinks(const cricket::VideoFrame& frame) {
  for (VideoSinkInterface<cricket::VideoFrame>* sink : sinks_) {
    sink->OnFrame(frame);
  }
}
} // namespace webrtc
// TODO(perkj): Remove this file once Chrome builds no longer depend on it.

View File

@ -11,46 +11,6 @@
#ifndef WEBRTC_API_VIDEOTRACKRENDERERS_H_
#define WEBRTC_API_VIDEOTRACKRENDERERS_H_
#include <set>
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/media/base/videorenderer.h"
namespace webrtc {
// Class used for rendering cricket::VideoFrames to multiple renderers of type
// VideoRendererInterface.
// Each VideoTrack owns a VideoTrackRenderers instance.
// The class is thread safe. Rendering to the added VideoRendererInterfaces is
// done on the same thread as the cricket::VideoRenderer.
class VideoTrackRenderers
    : public cricket::VideoRenderer,
      public rtc::VideoSourceInterface<cricket::VideoFrame> {
 public:
  VideoTrackRenderers();
  ~VideoTrackRenderers();

  // Implements cricket::VideoRenderer. If the track is disabled,
  // incoming frames are replaced by black frames.
  virtual bool RenderFrame(const cricket::VideoFrame* frame);

  // Implements rtc::VideoSourceInterface. |wants| is currently ignored
  // (see the TODO in the implementation).
  void AddOrUpdateSink(VideoSinkInterface<cricket::VideoFrame>* sink,
                       const rtc::VideoSinkWants& wants) override;
  void RemoveSink(VideoSinkInterface<cricket::VideoFrame>* sink) override;

  // Toggles between forwarding real frames (true) and black frames (false).
  void SetEnabled(bool enable);

 private:
  // Pass the frame on to each registered renderer. Requires
  // critical_section_ already locked.
  void RenderFrameToSinks(const cricket::VideoFrame& frame);

  // True while real frames should be forwarded; false means black frames.
  bool enabled_;
  std::vector<VideoSinkInterface<cricket::VideoFrame>*> sinks_;
  rtc::CriticalSection critical_section_;  // Protects the above variables
};
} // namespace webrtc
// TODO(perkj): Remove this file once Chrome builds no longer depend on it.
#endif // WEBRTC_API_VIDEOTRACKRENDERERS_H_

View File

@ -25,9 +25,10 @@ class FakeVideoRenderer : public VideoRenderer {
: errors_(0),
width_(0),
height_(0),
rotation_(webrtc::kVideoRotation_0),
timestamp_(0),
num_rendered_frames_(0),
black_frame_(false) {
}
black_frame_(false) {}
virtual bool RenderFrame(const VideoFrame* frame) {
rtc::CritScope cs(&crit_);
@ -44,6 +45,7 @@ class FakeVideoRenderer : public VideoRenderer {
width_ = static_cast<int>(frame->GetWidth());
height_ = static_cast<int>(frame->GetHeight());
rotation_ = frame->GetVideoRotation();
timestamp_ = frame->GetTimeStamp();
SignalRenderFrame(frame);
return true;
}
@ -61,6 +63,11 @@ class FakeVideoRenderer : public VideoRenderer {
rtc::CritScope cs(&crit_);
return rotation_;
}
int64_t timestamp() const {
rtc::CritScope cs(&crit_);
return timestamp_;
}
int num_rendered_frames() const {
rtc::CritScope cs(&crit_);
return num_rendered_frames_;
@ -129,6 +136,7 @@ class FakeVideoRenderer : public VideoRenderer {
int width_;
int height_;
webrtc::VideoRotation rotation_;
int64_t timestamp_;
int num_rendered_frames_;
bool black_frame_;
rtc::CriticalSection crit_;

View File

@ -26,13 +26,7 @@ void VideoBroadcaster::AddOrUpdateSink(
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(sink != nullptr);
rtc::CritScope cs(&sinks_and_wants_lock_);
SinkPair* sink_pair = FindSinkPair(sink);
if (!sink_pair) {
sinks_.push_back(SinkPair(sink, wants));
} else {
sink_pair->wants = wants;
}
VideoSourceBase::AddOrUpdateSink(sink, wants);
UpdateWants();
}
@ -41,19 +35,14 @@ void VideoBroadcaster::RemoveSink(
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(sink != nullptr);
rtc::CritScope cs(&sinks_and_wants_lock_);
RTC_DCHECK(FindSinkPair(sink));
sinks_.erase(std::remove_if(sinks_.begin(), sinks_.end(),
[sink](const SinkPair& sink_pair) {
return sink_pair.sink == sink;
}),
sinks_.end());
VideoSourceBase::RemoveSink(sink);
UpdateWants();
}
bool VideoBroadcaster::frame_wanted() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
rtc::CritScope cs(&sinks_and_wants_lock_);
return !sinks_.empty();
return !sink_pairs().empty();
}
VideoSinkWants VideoBroadcaster::wants() const {
@ -64,28 +53,21 @@ VideoSinkWants VideoBroadcaster::wants() const {
void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
rtc::CritScope cs(&sinks_and_wants_lock_);
for (auto& sink_pair : sinks_) {
sink_pair.sink->OnFrame(frame);
for (auto& sink_pair : sink_pairs()) {
if (sink_pair.wants.black_frames) {
sink_pair.sink->OnFrame(GetBlackFrame(frame));
} else {
sink_pair.sink->OnFrame(frame);
}
}
}
VideoBroadcaster::SinkPair* VideoBroadcaster::FindSinkPair(
const VideoSinkInterface<cricket::VideoFrame>* sink) {
auto sink_pair_it = std::find_if(
sinks_.begin(), sinks_.end(),
[sink](const SinkPair& sink_pair) { return sink_pair.sink == sink; });
if (sink_pair_it != sinks_.end()) {
return &*sink_pair_it;
}
return nullptr;
}
void VideoBroadcaster::UpdateWants() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
VideoSinkWants wants;
wants.rotation_applied = false;
for (auto& sink : sinks_) {
for (auto& sink : sink_pairs()) {
// wants.rotation_applied == ANY(sink.wants.rotation_applied)
if (sink.wants.rotation_applied) {
wants.rotation_applied = true;
@ -112,4 +94,21 @@ void VideoBroadcaster::UpdateWants() {
current_wants_ = wants;
}
// Returns a black frame with the same width, height, rotation and timestamp
// as |frame|. The black frame is cached in |black_frame_| and reused (with an
// updated timestamp) as long as the incoming geometry does not change.
const cricket::VideoFrame& VideoBroadcaster::GetBlackFrame(
    const cricket::VideoFrame& frame) {
  // Fast path: only the timestamp needs refreshing.
  if (black_frame_ && black_frame_->GetWidth() == frame.GetWidth() &&
      black_frame_->GetHeight() == frame.GetHeight() &&
      black_frame_->GetVideoRotation() == frame.GetVideoRotation()) {
    black_frame_->SetTimeStamp(frame.GetTimeStamp());
    return *black_frame_;
  }
  // Geometry or rotation changed: allocate a matching buffer and black it out.
  black_frame_.reset(new cricket::WebRtcVideoFrame(
      new rtc::RefCountedObject<webrtc::I420Buffer>(
          static_cast<int>(frame.GetWidth()),
          static_cast<int>(frame.GetHeight())),
      frame.GetTimeStamp(), frame.GetVideoRotation()));
  black_frame_->SetToBlack();
  return *black_frame_;
}
} // namespace rtc

View File

@ -18,7 +18,8 @@
#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/media/base/videosourcebase.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
namespace rtc {
@ -28,7 +29,7 @@ namespace rtc {
// Sinks must be added and removed on one and only one thread.
// Video frames can be broadcasted on any thread. I.e VideoBroadcaster::OnFrame
// can be called on any thread.
class VideoBroadcaster : public VideoSourceInterface<cricket::VideoFrame>,
class VideoBroadcaster : public VideoSourceBase,
public VideoSinkInterface<cricket::VideoFrame> {
public:
VideoBroadcaster();
@ -46,22 +47,15 @@ class VideoBroadcaster : public VideoSourceInterface<cricket::VideoFrame>,
void OnFrame(const cricket::VideoFrame& frame) override;
protected:
struct SinkPair {
SinkPair(VideoSinkInterface<cricket::VideoFrame>* sink,
VideoSinkWants wants)
: sink(sink), wants(wants) {}
VideoSinkInterface<cricket::VideoFrame>* sink;
VideoSinkWants wants;
};
SinkPair* FindSinkPair(const VideoSinkInterface<cricket::VideoFrame>* sink)
EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
void UpdateWants() EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
const cricket::VideoFrame& GetBlackFrame(const cricket::VideoFrame& frame)
EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
ThreadChecker thread_checker_;
rtc::CriticalSection sinks_and_wants_lock_;
VideoSinkWants current_wants_ GUARDED_BY(sinks_and_wants_lock_);
std::vector<SinkPair> sinks_ GUARDED_BY(sinks_and_wants_lock_);
rtc::scoped_ptr<cricket::WebRtcVideoFrame> black_frame_;
};
} // namespace rtc

View File

@ -9,31 +9,21 @@
*/
#include "webrtc/base/gunit.h"
#include "webrtc/media/base/fakevideorenderer.h"
#include "webrtc/media/base/videobroadcaster.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
using rtc::VideoBroadcaster;
using rtc::VideoSinkWants;
using cricket::FakeVideoRenderer;
using cricket::WebRtcVideoFrame;
namespace {
// Minimal frame sink that just counts the frames delivered to it.
class TestSink : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 public:
  void OnFrame(const cricket::VideoFrame& frame) override {
    ++number_of_rendered_frames_;
  }

  // Number of OnFrame() calls received so far.
  int number_of_rendered_frames_ = 0;
};
} // namespace
TEST(VideoBroadcasterTest, frame_wanted) {
VideoBroadcaster broadcaster;
EXPECT_FALSE(broadcaster.frame_wanted());
TestSink sink;
FakeVideoRenderer sink;
broadcaster.AddOrUpdateSink(&sink, rtc::VideoSinkWants());
EXPECT_TRUE(broadcaster.frame_wanted());
@ -44,40 +34,40 @@ TEST(VideoBroadcasterTest, frame_wanted) {
TEST(VideoBroadcasterTest, OnFrame) {
VideoBroadcaster broadcaster;
TestSink sink1;
TestSink sink2;
FakeVideoRenderer sink1;
FakeVideoRenderer sink2;
broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
broadcaster.AddOrUpdateSink(&sink2, rtc::VideoSinkWants());
WebRtcVideoFrame frame;
broadcaster.OnFrame(frame);
EXPECT_EQ(1, sink1.number_of_rendered_frames_);
EXPECT_EQ(1, sink2.number_of_rendered_frames_);
EXPECT_EQ(1, sink1.num_rendered_frames());
EXPECT_EQ(1, sink2.num_rendered_frames());
broadcaster.RemoveSink(&sink1);
broadcaster.OnFrame(frame);
EXPECT_EQ(1, sink1.number_of_rendered_frames_);
EXPECT_EQ(2, sink2.number_of_rendered_frames_);
EXPECT_EQ(1, sink1.num_rendered_frames());
EXPECT_EQ(2, sink2.num_rendered_frames());
broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
broadcaster.OnFrame(frame);
EXPECT_EQ(2, sink1.number_of_rendered_frames_);
EXPECT_EQ(3, sink2.number_of_rendered_frames_);
EXPECT_EQ(2, sink1.num_rendered_frames());
EXPECT_EQ(3, sink2.num_rendered_frames());
}
TEST(VideoBroadcasterTest, AppliesRotationIfAnySinkWantsRotationApplied) {
VideoBroadcaster broadcaster;
EXPECT_TRUE(broadcaster.wants().rotation_applied);
TestSink sink1;
FakeVideoRenderer sink1;
VideoSinkWants wants1;
wants1.rotation_applied = false;
broadcaster.AddOrUpdateSink(&sink1, wants1);
EXPECT_FALSE(broadcaster.wants().rotation_applied);
TestSink sink2;
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.rotation_applied = true;
@ -92,14 +82,14 @@ TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCount) {
VideoBroadcaster broadcaster;
EXPECT_TRUE(!broadcaster.wants().max_pixel_count);
TestSink sink1;
FakeVideoRenderer sink1;
VideoSinkWants wants1;
wants1.max_pixel_count = rtc::Optional<int>(1280 * 720);
broadcaster.AddOrUpdateSink(&sink1, wants1);
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count);
TestSink sink2;
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.max_pixel_count = rtc::Optional<int>(640 * 360);
broadcaster.AddOrUpdateSink(&sink2, wants2);
@ -113,14 +103,14 @@ TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCountStepUp) {
VideoBroadcaster broadcaster;
EXPECT_TRUE(!broadcaster.wants().max_pixel_count_step_up);
TestSink sink1;
FakeVideoRenderer sink1;
VideoSinkWants wants1;
wants1.max_pixel_count_step_up = rtc::Optional<int>(1280 * 720);
broadcaster.AddOrUpdateSink(&sink1, wants1);
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count_step_up);
TestSink sink2;
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.max_pixel_count_step_up = rtc::Optional<int>(640 * 360);
broadcaster.AddOrUpdateSink(&sink2, wants2);
@ -129,3 +119,44 @@ TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCountStepUp) {
broadcaster.RemoveSink(&sink2);
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count_step_up);
}
// Verifies that a sink registered with VideoSinkWants::black_frames receives
// blacked-out frames carrying the original timestamp, while a sink without it
// receives the frame unmodified, and that updating each sink's wants swaps
// the behavior per sink.
TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
  VideoBroadcaster broadcaster;
  EXPECT_TRUE(!broadcaster.wants().black_frames);

  FakeVideoRenderer sink1;
  VideoSinkWants wants1;
  wants1.black_frames = true;
  broadcaster.AddOrUpdateSink(&sink1, wants1);

  FakeVideoRenderer sink2;
  VideoSinkWants wants2;
  // Fix: the original assigned |wants1.black_frames| here, leaving |wants2|
  // at its default. Set the field on |wants2|, which is what is actually
  // registered for |sink2|.
  wants2.black_frames = false;
  broadcaster.AddOrUpdateSink(&sink2, wants2);

  cricket::WebRtcVideoFrame frame1;
  frame1.InitToBlack(100, 200, 10 /*ts*/);
  // Make it not all-black.
  frame1.GetUPlane()[0] = 0;
  broadcaster.OnFrame(frame1);
  EXPECT_TRUE(sink1.black_frame());
  EXPECT_EQ(10, sink1.timestamp());
  EXPECT_FALSE(sink2.black_frame());
  EXPECT_EQ(10, sink2.timestamp());

  // Switch the sink wants.
  wants1.black_frames = false;
  broadcaster.AddOrUpdateSink(&sink1, wants1);
  wants2.black_frames = true;
  broadcaster.AddOrUpdateSink(&sink2, wants2);

  cricket::WebRtcVideoFrame frame2;
  frame2.InitToBlack(100, 200, 30 /*ts*/);
  // Make it not all-black.
  frame2.GetUPlane()[0] = 0;
  broadcaster.OnFrame(frame2);
  EXPECT_FALSE(sink1.black_frame());
  EXPECT_EQ(30, sink1.timestamp());
  EXPECT_TRUE(sink2.black_frame());
  EXPECT_EQ(30, sink2.timestamp());
}

View File

@ -10,4 +10,50 @@
#include "webrtc/media/base/videosourcebase.h"
// TODO(perkj): Add implementation.
#include "webrtc/base/checks.h"
namespace rtc {
// Detach the thread checker on construction so the object may be created on
// one thread and then used from another, as long as all subsequent sink
// operations happen on that single thread.
VideoSourceBase::VideoSourceBase() {
  thread_checker_.DetachFromThread();
}
// Registers |sink| with |wants|, or refreshes the wants of an already
// registered sink. Must be called on the thread the checker is attached to.
void VideoSourceBase::AddOrUpdateSink(
    VideoSinkInterface<cricket::VideoFrame>* sink,
    const VideoSinkWants& wants) {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  RTC_DCHECK(sink != nullptr);

  SinkPair* existing = FindSinkPair(sink);
  if (existing != nullptr) {
    // Already registered: only the wants change.
    existing->wants = wants;
    return;
  }
  sinks_.push_back(SinkPair(sink, wants));
}
// Unregisters |sink|. DCHECKs (debug builds) if the sink was never added.
void VideoSourceBase::RemoveSink(
    VideoSinkInterface<cricket::VideoFrame>* sink) {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  RTC_DCHECK(sink != nullptr);
  RTC_DCHECK(FindSinkPair(sink));
  // Erase every pair registered for |sink| (at most one in practice, since
  // AddOrUpdateSink updates in place rather than adding duplicates).
  for (auto it = sinks_.begin(); it != sinks_.end();) {
    if (it->sink == sink) {
      it = sinks_.erase(it);
    } else {
      ++it;
    }
  }
}
// Returns the SinkPair registered for |sink|, or nullptr if none exists.
VideoSourceBase::SinkPair* VideoSourceBase::FindSinkPair(
    const VideoSinkInterface<cricket::VideoFrame>* sink) {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  // Linear scan; the number of sinks per source is expected to be small.
  for (SinkPair& pair : sinks_) {
    if (pair.sink == sink) {
      return &pair;
    }
  }
  return nullptr;
}
} // namespace rtc

View File

@ -11,6 +11,39 @@
#ifndef WEBRTC_MEDIA_BASE_VIDEOSOURCEBASE_H_
#define WEBRTC_MEDIA_BASE_VIDEOSOURCEBASE_H_
// TODO(perkj): Add implementation.
#include <vector>
#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosourceinterface.h"
namespace rtc {
// VideoSourceBase is not thread safe. It keeps a list of registered sinks
// together with the VideoSinkWants each sink was registered with.
class VideoSourceBase : public VideoSourceInterface<cricket::VideoFrame> {
 public:
  VideoSourceBase();
  // Registers |sink|, or updates its wants if it is already registered.
  void AddOrUpdateSink(VideoSinkInterface<cricket::VideoFrame>* sink,
                       const VideoSinkWants& wants) override;
  // Unregisters |sink|. DCHECKs if the sink was not registered.
  void RemoveSink(VideoSinkInterface<cricket::VideoFrame>* sink) override;

 protected:
  // A registered sink together with the wants it was registered with.
  struct SinkPair {
    SinkPair(VideoSinkInterface<cricket::VideoFrame>* sink,
             VideoSinkWants wants)
        : sink(sink), wants(wants) {}
    VideoSinkInterface<cricket::VideoFrame>* sink;
    VideoSinkWants wants;
  };
  // Returns the pair registered for |sink|, or nullptr if none.
  SinkPair* FindSinkPair(const VideoSinkInterface<cricket::VideoFrame>* sink);

  // Read-only access to the registered sinks for subclasses.
  const std::vector<SinkPair>& sink_pairs() const { return sinks_; }
  // All calls above must happen on a single thread (the checker is detached
  // in the constructor and attaches to the first calling thread).
  ThreadChecker thread_checker_;

 private:
  std::vector<SinkPair> sinks_;
};
} // namespace rtc
#endif // WEBRTC_MEDIA_BASE_VIDEOSOURCEBASE_H_

View File

@ -23,6 +23,9 @@ struct VideoSinkWants {
// By default, the rotation is applied by the source.
bool rotation_applied = true;
// Tells the source that the sink only wants black frames.
bool black_frames = false;
// Tells the source the maximum number of pixels the sink wants.
rtc::Optional<int> max_pixel_count;
// Like |max_pixel_count| but relative to the given value. The source is