Move direct use of VideoCapturer::VideoAdapter to VideoSinkWants.
The purpose of this CL is to remove the dependency on cricket::VideoCapturer from WebRtcVideoChannel2. The CL changes CPU adaptation to use the new resolution fields in VideoSinkWants. The CL is WIP and uploaded to start the discussion. Tested on an N5 with HW acceleration turned off.

BUG=webrtc:5426
Review URL: https://codereview.webrtc.org/1695263002
Cr-Commit-Position: refs/heads/master@{#11804}
commit 2d5f0913f2 (parent 50772f1e16)
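Before the diff itself, a minimal standalone sketch of the aggregation rule this CL adds in rtc::VideoBroadcaster::UpdateWants(): every sink publishes a VideoSinkWants, and the broadcaster forwards the strictest combination to the source. The sketch uses std::optional and plain structs instead of the rtc:: types so it compiles on its own; the names SinkWants and Aggregate are illustrative and not part of the CL.

#include <iostream>
#include <optional>
#include <vector>

// Illustrative stand-in for rtc::VideoSinkWants (std::optional replaces
// rtc::Optional so the sketch builds outside the WebRTC tree).
struct SinkWants {
  bool rotation_applied = true;
  std::optional<int> max_pixel_count;
  std::optional<int> max_pixel_count_step_up;
};

// Mirrors the logic of VideoBroadcaster::UpdateWants() in this CL:
// rotation_applied is OR-ed over the sinks, the pixel-count limits take the
// minimum, and a step-up request at or above the cap is dropped.
SinkWants Aggregate(const std::vector<SinkWants>& sinks) {
  SinkWants wants;
  wants.rotation_applied = false;
  for (const auto& sink : sinks) {
    if (sink.rotation_applied)
      wants.rotation_applied = true;
    if (sink.max_pixel_count &&
        (!wants.max_pixel_count ||
         *sink.max_pixel_count < *wants.max_pixel_count))
      wants.max_pixel_count = sink.max_pixel_count;
    if (sink.max_pixel_count_step_up &&
        (!wants.max_pixel_count_step_up ||
         *sink.max_pixel_count_step_up < *wants.max_pixel_count_step_up))
      wants.max_pixel_count_step_up = sink.max_pixel_count_step_up;
  }
  if (wants.max_pixel_count && wants.max_pixel_count_step_up &&
      *wants.max_pixel_count_step_up >= *wants.max_pixel_count)
    wants.max_pixel_count_step_up.reset();
  return wants;
}

int main() {
  SinkWants hd;                 // a sink happy with 720p
  hd.max_pixel_count = 1280 * 720;
  SinkWants small;              // a CPU-restricted sink
  small.max_pixel_count = 640 * 360;
  SinkWants combined = Aggregate({hd, small});
  std::cout << "max_pixel_count = " << *combined.max_pixel_count << "\n";  // 230400
  return 0;
}

In the real code the same rule lives in VideoBroadcaster::UpdateWants(), and the result reaches the capturer through VideoCapturer::OnSinkWantsChanged(), which forwards it to CoordinatedVideoAdapter::OnCpuResolutionRequest().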
@@ -495,12 +495,28 @@ void CoordinatedVideoAdapter::OnEncoderResolutionRequest(
                << " To: " << new_width << "x" << new_height;
}

void CoordinatedVideoAdapter::OnCpuResolutionRequest(
    rtc::Optional<int> max_pixel_count,
    rtc::Optional<int> max_pixel_count_step_up) {
  rtc::CritScope cs(&request_critical_section_);
  // TODO(perkj): We should support taking larger steps up and down and
  // actually look at the values set in max_pixel_count and
  // max_pixel_count_step_up.
  if (max_pixel_count && *max_pixel_count < GetOutputNumPixels()) {
    OnCpuResolutionRequest(DOWNGRADE);
  } else if (max_pixel_count_step_up &&
             *max_pixel_count_step_up >= GetOutputNumPixels()) {
    OnCpuResolutionRequest(UPGRADE);
  }
}

// A Bandwidth GD request for new resolution
void CoordinatedVideoAdapter::OnCpuResolutionRequest(AdaptRequest request) {
  rtc::CritScope cs(&request_critical_section_);
  if (!cpu_adaptation_) {
    return;
  }

  // Update how many times we have downgraded due to the cpu load.
  switch (request) {
    case DOWNGRADE:
@@ -13,6 +13,7 @@

#include "webrtc/base/common.h"  // For ASSERT
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/optional.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/media/base/videocommon.h"

@@ -139,6 +140,9 @@ class CoordinatedVideoAdapter
  void OnEncoderResolutionRequest(int width, int height, AdaptRequest request);
  // Handle the resolution request for CPU overuse.
  void OnCpuResolutionRequest(AdaptRequest request);
  void OnCpuResolutionRequest(rtc::Optional<int> max_pixel_count,
                              rtc::Optional<int> max_pixel_count_step_up);

  // Handle the CPU load provided by a CPU monitor.
  void OnCpuLoadUpdated(int current_cpus, int max_cpus,
                        float process_load, float system_load);
@@ -10,6 +10,8 @@

#include "webrtc/media/base/videobroadcaster.h"

#include <limits>

#include "webrtc/base/checks.h"

namespace rtc {

@@ -30,12 +32,7 @@ void VideoBroadcaster::AddOrUpdateSink(
  } else {
    sink_pair->wants = wants;
  }

  // Rotation must be applied by the source if one sink wants it.
  current_wants_.rotation_applied = false;
  for (auto& sink_pair : sinks_) {
    current_wants_.rotation_applied |= sink_pair.wants.rotation_applied;
  }
  UpdateWants();
}

void VideoBroadcaster::RemoveSink(

@@ -49,6 +46,7 @@ void VideoBroadcaster::RemoveSink(
                   return sink_pair.sink == sink;
                 }),
      sinks_.end());
  UpdateWants();
}

bool VideoBroadcaster::frame_wanted() const {
@@ -79,4 +77,36 @@ VideoBroadcaster::SinkPair* VideoBroadcaster::FindSinkPair(
  return nullptr;
}

void VideoBroadcaster::UpdateWants() {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());

  VideoSinkWants wants;
  wants.rotation_applied = false;
  for (auto& sink : sinks_) {
    // wants.rotation_applied == ANY(sink.wants.rotation_applied)
    if (sink.wants.rotation_applied) {
      wants.rotation_applied = true;
    }
    // wants.max_pixel_count == MIN(sink.wants.max_pixel_count)
    if (sink.wants.max_pixel_count &&
        (!wants.max_pixel_count ||
         (*sink.wants.max_pixel_count < *wants.max_pixel_count))) {
      wants.max_pixel_count = sink.wants.max_pixel_count;
    }
    // wants.max_pixel_count_step_up == MIN(sink.wants.max_pixel_count_step_up)
    if (sink.wants.max_pixel_count_step_up &&
        (!wants.max_pixel_count_step_up ||
         (*sink.wants.max_pixel_count_step_up <
          *wants.max_pixel_count_step_up))) {
      wants.max_pixel_count_step_up = sink.wants.max_pixel_count_step_up;
    }
  }

  if (wants.max_pixel_count && wants.max_pixel_count_step_up &&
      *wants.max_pixel_count_step_up >= *wants.max_pixel_count) {
    wants.max_pixel_count_step_up = Optional<int>();
  }
  current_wants_ = wants;
}

}  // namespace rtc
@@ -47,11 +47,11 @@ class VideoBroadcaster : public VideoSourceInterface<cricket::VideoFrame>,
    VideoSinkWants wants;
  };
  SinkPair* FindSinkPair(const VideoSinkInterface<cricket::VideoFrame>* sink);
  void UpdateWants();

  ThreadChecker thread_checker_;

  VideoSinkWants current_wants_;

  std::vector<SinkPair> sinks_;
};
webrtc/media/base/videobroadcaster_unittest.cc (new file, 131 lines)
@@ -0,0 +1,131 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/base/gunit.h"
#include "webrtc/media/base/videobroadcaster.h"
#include "webrtc/media/engine/webrtcvideoframe.h"

using rtc::VideoBroadcaster;
using rtc::VideoSinkWants;
using cricket::WebRtcVideoFrame;

namespace {

class TestSink : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 public:
  void OnFrame(const cricket::VideoFrame& frame) override {
    ++number_of_rendered_frames_;
  }

  int number_of_rendered_frames_ = 0;
};

}  // namespace
TEST(VideoBroadcasterTest, frame_wanted) {
  VideoBroadcaster broadcaster;
  EXPECT_FALSE(broadcaster.frame_wanted());

  TestSink sink;
  broadcaster.AddOrUpdateSink(&sink, rtc::VideoSinkWants());
  EXPECT_TRUE(broadcaster.frame_wanted());

  broadcaster.RemoveSink(&sink);
  EXPECT_FALSE(broadcaster.frame_wanted());
}

TEST(VideoBroadcasterTest, OnFrame) {
  VideoBroadcaster broadcaster;

  TestSink sink1;
  TestSink sink2;
  broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
  broadcaster.AddOrUpdateSink(&sink2, rtc::VideoSinkWants());

  WebRtcVideoFrame frame;

  broadcaster.OnFrame(frame);
  EXPECT_EQ(1, sink1.number_of_rendered_frames_);
  EXPECT_EQ(1, sink2.number_of_rendered_frames_);

  broadcaster.RemoveSink(&sink1);
  broadcaster.OnFrame(frame);
  EXPECT_EQ(1, sink1.number_of_rendered_frames_);
  EXPECT_EQ(2, sink2.number_of_rendered_frames_);

  broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
  broadcaster.OnFrame(frame);
  EXPECT_EQ(2, sink1.number_of_rendered_frames_);
  EXPECT_EQ(3, sink2.number_of_rendered_frames_);
}

TEST(VideoBroadcasterTest, AppliesRotationIfAnySinkWantsRotationApplied) {
  VideoBroadcaster broadcaster;
  EXPECT_TRUE(broadcaster.wants().rotation_applied);

  TestSink sink1;
  VideoSinkWants wants1;
  wants1.rotation_applied = false;

  broadcaster.AddOrUpdateSink(&sink1, wants1);
  EXPECT_FALSE(broadcaster.wants().rotation_applied);

  TestSink sink2;
  VideoSinkWants wants2;
  wants2.rotation_applied = true;

  broadcaster.AddOrUpdateSink(&sink2, wants2);
  EXPECT_TRUE(broadcaster.wants().rotation_applied);

  broadcaster.RemoveSink(&sink2);
  EXPECT_FALSE(broadcaster.wants().rotation_applied);
}
TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCount) {
  VideoBroadcaster broadcaster;
  EXPECT_TRUE(!broadcaster.wants().max_pixel_count);

  TestSink sink1;
  VideoSinkWants wants1;
  wants1.max_pixel_count = rtc::Optional<int>(1280 * 720);

  broadcaster.AddOrUpdateSink(&sink1, wants1);
  EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count);

  TestSink sink2;
  VideoSinkWants wants2;
  wants2.max_pixel_count = rtc::Optional<int>(640 * 360);
  broadcaster.AddOrUpdateSink(&sink2, wants2);
  EXPECT_EQ(640 * 360, *broadcaster.wants().max_pixel_count);

  broadcaster.RemoveSink(&sink2);
  EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count);
}

TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCountStepUp) {
  VideoBroadcaster broadcaster;
  EXPECT_TRUE(!broadcaster.wants().max_pixel_count_step_up);

  TestSink sink1;
  VideoSinkWants wants1;
  wants1.max_pixel_count_step_up = rtc::Optional<int>(1280 * 720);

  broadcaster.AddOrUpdateSink(&sink1, wants1);
  EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count_step_up);

  TestSink sink2;
  VideoSinkWants wants2;
  wants2.max_pixel_count_step_up = rtc::Optional<int>(640 * 360);
  broadcaster.AddOrUpdateSink(&sink2, wants2);
  EXPECT_EQ(640 * 360, *broadcaster.wants().max_pixel_count_step_up);

  broadcaster.RemoveSink(&sink2);
  EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count_step_up);
}
@@ -320,6 +320,7 @@ void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
void VideoCapturer::RemoveSink(
    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
  broadcaster_.RemoveSink(sink);
  OnSinkWantsChanged(broadcaster_.wants());
}

void VideoCapturer::AddOrUpdateSink(

@@ -334,6 +335,11 @@ void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
  if (frame_factory_) {
    frame_factory_->SetApplyRotation(apply_rotation_);
  }

  if (video_adapter()) {
    video_adapter()->OnCpuResolutionRequest(wants.max_pixel_count,
                                            wants.max_pixel_count_step_up);
  }
}

void VideoCapturer::OnFrameCaptured(VideoCapturer*,
@@ -246,11 +246,6 @@ class VideoCapturer : public sigslot::has_slots<>,
    enable_video_adapter_ = enable_video_adapter;
  }

  CoordinatedVideoAdapter* video_adapter() { return &video_adapter_; }
  const CoordinatedVideoAdapter* video_adapter() const {
    return &video_adapter_;
  }

  // Takes ownership.
  void set_frame_factory(VideoFrameFactory* frame_factory);

@@ -286,6 +281,8 @@ class VideoCapturer : public sigslot::has_slots<>,
  // TODO(perkj): Remove once SignalVideoFrame is removed.
  void OnFrame(VideoCapturer* capturer, const VideoFrame* frame);

  CoordinatedVideoAdapter* video_adapter() { return &video_adapter_; }

  void SetCaptureState(CaptureState state);

  // Marshals SignalStateChange onto thread_.
@@ -327,6 +327,71 @@ TEST_F(VideoCapturerTest, TestRotationAppliedBySourceWhenDifferentWants) {
  EXPECT_EQ(webrtc::kVideoRotation_0, renderer2.rotation());
}

TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
  EXPECT_EQ(cricket::CS_RUNNING,
            capturer_.Start(cricket::VideoFormat(
                1280, 720, cricket::VideoFormat::FpsToInterval(30),
                cricket::FOURCC_I420)));
  EXPECT_TRUE(capturer_.IsRunning());

  EXPECT_EQ(0, renderer_.num_rendered_frames());
  EXPECT_TRUE(capturer_.CaptureFrame());
  EXPECT_EQ(1, renderer_.num_rendered_frames());
  EXPECT_EQ(1280, renderer_.width());
  EXPECT_EQ(720, renderer_.height());

  // Request a lower resolution.
  rtc::VideoSinkWants wants;
  wants.max_pixel_count = rtc::Optional<int>(1280 * 720 / 2);
  capturer_.AddOrUpdateSink(&renderer_, wants);
  EXPECT_TRUE(capturer_.CaptureFrame());
  EXPECT_EQ(2, renderer_.num_rendered_frames());
  EXPECT_EQ(960, renderer_.width());
  EXPECT_EQ(540, renderer_.height());

  // Request a lower resolution.
  wants.max_pixel_count =
      rtc::Optional<int>(renderer_.width() * renderer_.height() / 2);
  capturer_.AddOrUpdateSink(&renderer_, wants);
  EXPECT_TRUE(capturer_.CaptureFrame());
  EXPECT_EQ(3, renderer_.num_rendered_frames());
  EXPECT_EQ(640, renderer_.width());
  EXPECT_EQ(360, renderer_.height());

  // Adding a new renderer should not affect resolution.
  cricket::FakeVideoRenderer renderer2;
  capturer_.AddOrUpdateSink(&renderer2, rtc::VideoSinkWants());
  EXPECT_TRUE(capturer_.CaptureFrame());
  EXPECT_EQ(4, renderer_.num_rendered_frames());
  EXPECT_EQ(640, renderer_.width());
  EXPECT_EQ(360, renderer_.height());
  EXPECT_EQ(1, renderer2.num_rendered_frames());
  EXPECT_EQ(640, renderer2.width());
  EXPECT_EQ(360, renderer2.height());

  // Request higher resolution.
  wants.max_pixel_count_step_up = wants.max_pixel_count;
  wants.max_pixel_count = rtc::Optional<int>();
  capturer_.AddOrUpdateSink(&renderer_, wants);
  EXPECT_TRUE(capturer_.CaptureFrame());
  EXPECT_EQ(5, renderer_.num_rendered_frames());
  EXPECT_EQ(960, renderer_.width());
  EXPECT_EQ(540, renderer_.height());
  EXPECT_EQ(2, renderer2.num_rendered_frames());
  EXPECT_EQ(960, renderer2.width());
  EXPECT_EQ(540, renderer2.height());

  // Updating with no wants should not affect resolution.
  capturer_.AddOrUpdateSink(&renderer2, rtc::VideoSinkWants());
  EXPECT_TRUE(capturer_.CaptureFrame());
  EXPECT_EQ(6, renderer_.num_rendered_frames());
  EXPECT_EQ(960, renderer_.width());
  EXPECT_EQ(540, renderer_.height());
  EXPECT_EQ(3, renderer2.num_rendered_frames());
  EXPECT_EQ(960, renderer2.width());
  EXPECT_EQ(540, renderer2.height());
}

TEST_F(VideoCapturerTest, ScreencastScaledSuperLarge) {
  capturer_.SetScreencast(true);
@@ -13,20 +13,25 @@

#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/base/callback.h"
#include "webrtc/base/optional.h"

namespace rtc {

// VideoSinkWants is used for notifying the source of properties a video frame
// should have when it is delivered to a certain sink.
struct VideoSinkWants {
  bool operator==(const VideoSinkWants& rh) const {
    return rotation_applied == rh.rotation_applied;
  }
  bool operator!=(const VideoSinkWants& rh) const { return !operator==(rh); }

  // Tells the source whether the sink wants frames with rotation applied.
  // By default, the rotation is applied by the source.
  bool rotation_applied = true;

  // Tells the source the maximum number of pixels the sink wants.
  rtc::Optional<int> max_pixel_count;
  // Like |max_pixel_count| but relative to the given value. The source is
  // requested to produce frames with a resolution one "step up" from the given
  // value. In practice, this means that the sink can consume this amount of
  // pixels but wants more and the source should produce a resolution one
  // "step" higher than this but not higher.
  rtc::Optional<int> max_pixel_count_step_up;
};

template <typename VideoFrameT>
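As an aside on the step-up semantics documented above, a hedged sketch of hypothetical sink-side code (not part of this CL, and `source`/`sink` are placeholder pointers) showing how a sink that is comfortably consuming 640x360 but could handle more would fill in the struct:

// Hypothetical sink-side usage of the new fields (illustration only).
// The sink can currently consume 640*360 pixels and asks the source to
// step up one notch from there, but no further.
rtc::VideoSinkWants wants;
wants.max_pixel_count_step_up = rtc::Optional<int>(640 * 360);
source->AddOrUpdateSink(sink, wants);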
@@ -314,6 +314,7 @@ static int GetMaxDefaultVideoBitrateKbps(int width, int height) {
    return 2500;
  }
}

}  // namespace

// Constants defined in webrtc/media/engine/constants.h
@@ -1000,12 +1001,10 @@ bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp) {
    send_ssrcs_.insert(used_ssrc);

  webrtc::VideoSendStream::Config config(this);
  config.overuse_callback = this;

  WebRtcVideoSendStream* stream =
      new WebRtcVideoSendStream(call_, sp, config, external_encoder_factory_,
                                bitrate_config_.max_bitrate_bps, send_codec_,
                                send_rtp_extensions_, send_params_);
  WebRtcVideoSendStream* stream = new WebRtcVideoSendStream(
      call_, sp, config, external_encoder_factory_, signal_cpu_adaptation_,
      bitrate_config_.max_bitrate_bps, send_codec_, send_rtp_extensions_,
      send_params_);

  uint32_t ssrc = sp.first_ssrc();
  RTC_DCHECK(ssrc != 0);

@@ -1283,10 +1282,6 @@ bool WebRtcVideoChannel2::SetCapturer(uint32_t ssrc, VideoCapturer* capturer) {
      return false;
    }
  }
  {
    rtc::CritScope lock(&capturer_crit_);
    capturers_[ssrc] = capturer;
  }
  return true;
}
@@ -1412,26 +1407,6 @@ void WebRtcVideoChannel2::SetInterface(NetworkInterface* iface) {
                                      kVideoRtpBufferSize);
}

void WebRtcVideoChannel2::OnLoadUpdate(Load load) {
  // OnLoadUpdate can not take any locks that are held while creating streams
  // etc. Doing so establishes lock-order inversions between the webrtc process
  // thread on stream creation and locks such as stream_crit_ while calling out.
  rtc::CritScope stream_lock(&capturer_crit_);
  if (!signal_cpu_adaptation_)
    return;
  // Do not adapt resolution for screen content as this will likely result in
  // blurry and unreadable text.
  for (auto& kv : capturers_) {
    if (kv.second != nullptr
        && !kv.second->IsScreencast()
        && kv.second->video_adapter() != nullptr) {
      kv.second->video_adapter()->OnCpuResolutionRequest(
          load == kOveruse ? CoordinatedVideoAdapter::DOWNGRADE
                           : CoordinatedVideoAdapter::UPGRADE);
    }
  }
}

bool WebRtcVideoChannel2::SendRtp(const uint8_t* data,
                                  size_t len,
                                  const webrtc::PacketOptions& options) {
@@ -1495,24 +1470,28 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
    const StreamParams& sp,
    const webrtc::VideoSendStream::Config& config,
    WebRtcVideoEncoderFactory* external_encoder_factory,
    bool enable_cpu_overuse_detection,
    int max_bitrate_bps,
    const rtc::Optional<VideoCodecSettings>& codec_settings,
    const std::vector<webrtc::RtpExtension>& rtp_extensions,
    // TODO(deadbeef): Don't duplicate information between send_params,
    // rtp_extensions, options, etc.
    const VideoSendParameters& send_params)
    : ssrcs_(sp.ssrcs),
    : worker_thread_(rtc::Thread::Current()),
      ssrcs_(sp.ssrcs),
      ssrc_groups_(sp.ssrc_groups),
      call_(call),
      cpu_restricted_counter_(0),
      number_of_cpu_adapt_changes_(0),
      capturer_(nullptr),
      external_encoder_factory_(external_encoder_factory),
      stream_(NULL),
      stream_(nullptr),
      parameters_(config, send_params.options, max_bitrate_bps, codec_settings),
      pending_encoder_reconfiguration_(false),
      allocated_encoder_(NULL, webrtc::kVideoCodecUnknown, false),
      capturer_(NULL),
      allocated_encoder_(nullptr, webrtc::kVideoCodecUnknown, false),
      capturer_is_screencast_(false),
      sending_(false),
      muted_(false),
      old_adapt_changes_(0),
      first_frame_timestamp_ms_(0),
      last_frame_timestamp_ms_(0) {
  parameters_.config.rtp.max_packet_size = kVideoMtu;

@@ -1526,6 +1505,8 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
  parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size
                                         ? webrtc::RtcpMode::kReducedSize
                                         : webrtc::RtcpMode::kCompound;
  parameters_.config.overuse_callback =
      enable_cpu_overuse_detection ? this : nullptr;

  if (codec_settings) {
    SetCodecAndOptions(*codec_settings, parameters_.options);
@@ -1589,7 +1570,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
  video_frame.set_render_time_ms(last_frame_timestamp_ms_);
  // Reconfigure codec if necessary.
  SetDimensions(video_frame.width(), video_frame.height(),
                capturer_->IsScreencast());
                capturer_is_screencast_);
  last_rotation_ = video_frame.rotation();

  stream_->Input()->IncomingCapturedFrame(video_frame);

@@ -1598,6 +1579,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
    VideoCapturer* capturer) {
  TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetCapturer");
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  if (!DisconnectCapturer() && capturer == NULL) {
    return false;
  }

@@ -1630,10 +1612,10 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
      capturer_ = NULL;
      return true;
    }

    capturer_ = capturer;
    capturer_->AddOrUpdateSink(this, sink_wants_);
    capturer_is_screencast_ = capturer->IsScreencast();
  }
  capturer_ = capturer;
  capturer_->AddOrUpdateSink(this, sink_wants_);
  return true;
}
@@ -1643,20 +1625,18 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::MuteStream(bool mute) {
}

bool WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectCapturer() {
  cricket::VideoCapturer* capturer;
  {
    rtc::CritScope cs(&lock_);
    if (capturer_ == NULL)
      return false;

    if (capturer_->video_adapter() != nullptr)
      old_adapt_changes_ += capturer_->video_adapter()->adaptation_changes();

    capturer = capturer_;
    capturer_ = NULL;
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  if (capturer_ == NULL) {
    return false;
  }
  capturer->RemoveSink(this);

  capturer_->RemoveSink(this);
  capturer_ = NULL;
  // Reset |cpu_restricted_counter_| if the capturer is changed. It is not
  // possible to know if the video resolution is restricted by CPU usage after
  // the capturer is changed since the next capturer might be screen capture
  // with another resolution and frame rate.
  cpu_restricted_counter_ = 0;
  return true;
}

@@ -1822,8 +1802,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters(
  } else {
    parameters_.options = *params.options;
  }
  }
  else if (params.conference_mode && parameters_.codec_settings) {
  } else if (params.conference_mode && parameters_.codec_settings) {
    SetCodecAndOptions(*parameters_.codec_settings, parameters_.options);
    return;
  }
@@ -1950,10 +1929,66 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::Stop() {
  sending_ = false;
}

void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
  if (worker_thread_ != rtc::Thread::Current()) {
    invoker_.AsyncInvoke<void>(
        worker_thread_,
        rtc::Bind(&WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate,
                  this, load));
    return;
  }
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  LOG(LS_INFO) << "OnLoadUpdate " << load;
  if (!capturer_) {
    return;
  }
  {
    rtc::CritScope cs(&lock_);
    // Do not adapt resolution for screen content as this will likely result in
    // blurry and unreadable text.
    if (capturer_is_screencast_)
      return;

    rtc::Optional<int> max_pixel_count;
    rtc::Optional<int> max_pixel_count_step_up;
    if (load == kOveruse) {
      max_pixel_count = rtc::Optional<int>(
          (last_dimensions_.height * last_dimensions_.width) / 2);
      // Increase |number_of_cpu_adapt_changes_| if
      // sink_wants_.max_pixel_count will be changed since
      // last time |capturer_->AddOrUpdateSink| was called. That is, this will
      // result in a new request for the capturer to change resolution.
      if (!sink_wants_.max_pixel_count ||
          *sink_wants_.max_pixel_count > *max_pixel_count) {
        ++number_of_cpu_adapt_changes_;
        ++cpu_restricted_counter_;
      }
    } else {
      RTC_DCHECK(load == kUnderuse);
      max_pixel_count_step_up = rtc::Optional<int>(last_dimensions_.height *
                                                   last_dimensions_.width);
      // Increase |number_of_cpu_adapt_changes_| if
      // sink_wants_.max_pixel_count_step_up will be changed since
      // last time |capturer_->AddOrUpdateSink| was called. That is, this will
      // result in a new request for the capturer to change resolution.
      if (sink_wants_.max_pixel_count ||
          (sink_wants_.max_pixel_count_step_up &&
           *sink_wants_.max_pixel_count_step_up < *max_pixel_count_step_up)) {
        ++number_of_cpu_adapt_changes_;
        --cpu_restricted_counter_;
      }
    }
    sink_wants_.max_pixel_count = max_pixel_count;
    sink_wants_.max_pixel_count_step_up = max_pixel_count_step_up;
  }
  capturer_->AddOrUpdateSink(this, sink_wants_);
}

VideoSenderInfo
WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
  VideoSenderInfo info;
  webrtc::VideoSendStream::Stats stats;
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  {
    rtc::CritScope cs(&lock_);
    for (uint32_t ssrc : parameters_.config.rtp.ssrcs)
@@ -1975,23 +2010,20 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
      return info;

    stats = stream_->GetStats();
  }
  info.adapt_changes = number_of_cpu_adapt_changes_;
  info.adapt_reason = cpu_restricted_counter_ <= 0
                          ? CoordinatedVideoAdapter::ADAPTREASON_NONE
                          : CoordinatedVideoAdapter::ADAPTREASON_CPU;

  info.adapt_changes = old_adapt_changes_;
  info.adapt_reason = CoordinatedVideoAdapter::ADAPTREASON_NONE;

  if (capturer_ != NULL) {
    if (!capturer_->IsMuted()) {
      VideoFormat last_captured_frame_format;
      capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops,
                          &info.capturer_frame_time,
                          &last_captured_frame_format);
      info.input_frame_width = last_captured_frame_format.width;
      info.input_frame_height = last_captured_frame_format.height;
    }
    if (capturer_->video_adapter() != nullptr) {
      info.adapt_changes += capturer_->video_adapter()->adaptation_changes();
      info.adapt_reason = capturer_->video_adapter()->adapt_reason();
    }
  if (capturer_) {
    if (!capturer_->IsMuted()) {
      VideoFormat last_captured_frame_format;
      capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops,
                          &info.capturer_frame_time,
                          &last_captured_frame_format);
      info.input_frame_width = last_captured_frame_format.width;
      info.input_frame_height = last_captured_frame_format.height;
    }
  }
@@ -17,6 +17,7 @@

#include <string>
#include <vector>

#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/base/thread_checker.h"
@@ -129,9 +130,7 @@ class WebRtcVideoEngine2 {
  std::unique_ptr<WebRtcVideoEncoderFactory> simulcast_encoder_factory_;
};

class WebRtcVideoChannel2 : public VideoMediaChannel,
                            public webrtc::Transport,
                            public webrtc::LoadObserver {
class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
 public:
  WebRtcVideoChannel2(webrtc::Call* call,
                      const MediaConfig& config,

@@ -168,8 +167,6 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
  void OnReadyToSend(bool ready) override;
  void SetInterface(NetworkInterface* iface) override;

  void OnLoadUpdate(Load load) override;

  // Implemented for VideoMediaChannelTest.
  bool sending() const { return sending_; }
  uint32_t GetDefaultSendChannelSsrc() { return default_send_ssrc_; }
@@ -230,13 +227,15 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
  // Wrapper for the sender part, this is where the capturer is connected and
  // frames are then converted from cricket frames to webrtc frames.
  class WebRtcVideoSendStream
      : public rtc::VideoSinkInterface<cricket::VideoFrame> {
      : public rtc::VideoSinkInterface<cricket::VideoFrame>,
        public webrtc::LoadObserver {
   public:
    WebRtcVideoSendStream(
        webrtc::Call* call,
        const StreamParams& sp,
        const webrtc::VideoSendStream::Config& config,
        WebRtcVideoEncoderFactory* external_encoder_factory,
        bool enable_cpu_overuse_detection,
        int max_bitrate_bps,
        const rtc::Optional<VideoCodecSettings>& codec_settings,
        const std::vector<webrtc::RtpExtension>& rtp_extensions,

@@ -255,6 +254,9 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
    void Start();
    void Stop();

    // Implements webrtc::LoadObserver.
    void OnLoadUpdate(Load load) override;

    const std::vector<uint32_t>& GetSsrcs() const;
    VideoSenderInfo GetVideoSenderInfo();
    void FillBandwidthEstimationInfo(BandwidthEstimationInfo* bwe_info);
@@ -341,10 +343,20 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
    void SetDimensions(int width, int height, bool is_screencast)
        EXCLUSIVE_LOCKS_REQUIRED(lock_);

    rtc::ThreadChecker thread_checker_;
    rtc::AsyncInvoker invoker_;
    rtc::Thread* worker_thread_;
    const std::vector<uint32_t> ssrcs_;
    const std::vector<SsrcGroup> ssrc_groups_;
    webrtc::Call* const call_;
    rtc::VideoSinkWants sink_wants_;
    // Counter used for deciding if the video resolution is currently
    // restricted by CPU usage. It is reset if |capturer_| is changed.
    int cpu_restricted_counter_;
    // Total number of times resolution has been requested to be changed due to
    // CPU adaptation.
    int number_of_cpu_adapt_changes_;
    VideoCapturer* capturer_;
    WebRtcVideoEncoderFactory* const external_encoder_factory_
        GUARDED_BY(lock_);

@@ -358,10 +370,9 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
    webrtc::VideoRotation last_rotation_ GUARDED_BY(lock_) =
        webrtc::kVideoRotation_0;

    VideoCapturer* capturer_ GUARDED_BY(lock_);
    bool capturer_is_screencast_ GUARDED_BY(lock_);
    bool sending_ GUARDED_BY(lock_);
    bool muted_ GUARDED_BY(lock_);
    int old_adapt_changes_ GUARDED_BY(lock_);

    // The timestamp of the first frame received
    // Used to generate the timestamps of subsequent frames
@@ -491,12 +502,6 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
  const bool signal_cpu_adaptation_;
  const bool disable_prerenderer_smoothing_;

  // Separate list of set capturers used to signal CPU adaptation. These should
  // not be locked while calling methods that take other locks to prevent
  // lock-order inversions.
  rtc::CriticalSection capturer_crit_;
  std::map<uint32_t, VideoCapturer*> capturers_ GUARDED_BY(capturer_crit_);

  rtc::CriticalSection stream_crit_;
  // Using primary-ssrc (first ssrc) as key.
  std::map<uint32_t, WebRtcVideoSendStream*> send_streams_
@@ -1491,10 +1491,13 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
  EXPECT_EQ(0, encoder_config.min_transmit_bitrate_bps)
      << "Non-screenshare shouldn't use min-transmit bitrate.";

  capturer.SetScreencast(true);
  EXPECT_TRUE(capturer.CaptureFrame());

  EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, nullptr));
  // Removing a capturer triggers a black frame to be sent.
  EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
  capturer.SetScreencast(true);
  EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
  EXPECT_TRUE(capturer.CaptureFrame());
  EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());

  // Verify screencast settings.
  encoder_config = send_stream->GetEncoderConfig();

@@ -1625,7 +1628,9 @@ TEST_F(WebRtcVideoChannel2Test, VerifyVp8SpecificSettings) {
  EXPECT_TRUE(vp8_settings.frameDroppingOn);

  // In screen-share mode, denoising is forced off and simulcast disabled.
  EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
  capturer.SetScreencast(true);
  EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
  EXPECT_TRUE(capturer.CaptureFrame());
  stream = SetDenoisingOption(parameters, false);
@@ -1704,7 +1709,10 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) {
  EXPECT_TRUE(vp9_settings.frameDroppingOn);

  // In screen-share mode, denoising is forced off.
  EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, nullptr));
  capturer.SetScreencast(true);
  EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));

  EXPECT_TRUE(capturer.CaptureFrame());
  stream = SetDenoisingOption(parameters, false);
@@ -1734,6 +1742,73 @@ TEST_F(WebRtcVideoChannel2Test, DoesNotAdaptOnOveruseWhenScreensharing) {
  TestCpuAdaptation(true, true);
}

TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
  cricket::VideoCodec codec = kVp8Codec720p;
  cricket::VideoSendParameters parameters;
  parameters.codecs.push_back(codec);

  MediaConfig media_config = MediaConfig();
  channel_.reset(
      engine_.CreateChannel(fake_call_.get(), media_config, VideoOptions()));
  ASSERT_TRUE(channel_->SetSendParameters(parameters));

  AddSendStream();

  cricket::FakeVideoCapturer capturer;
  capturer.SetScreencast(false);
  ASSERT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
  ASSERT_EQ(cricket::CS_RUNNING,
            capturer.Start(capturer.GetSupportedFormats()->front()));
  ASSERT_TRUE(channel_->SetSend(true));

  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
  FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
  webrtc::LoadObserver* overuse_callback =
      send_stream->GetConfig().overuse_callback;
  ASSERT_TRUE(overuse_callback != NULL);

  EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
  EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(1280, send_stream->GetLastWidth());
  EXPECT_EQ(720, send_stream->GetLastHeight());

  // Trigger overuse.
  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
  EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
  EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
  EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());

  // Trigger overuse again.
  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
  EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
  EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(1280 * 2 / 4, send_stream->GetLastWidth());
  EXPECT_EQ(720 * 2 / 4, send_stream->GetLastHeight());

  // Change input resolution.
  EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
  EXPECT_EQ(4, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(1284 / 2, send_stream->GetLastWidth());
  EXPECT_EQ(724 / 2, send_stream->GetLastHeight());

  // Trigger underuse which should go back up in resolution.
  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
  EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
  EXPECT_EQ(5, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(1284 * 3 / 4, send_stream->GetLastWidth());
  EXPECT_EQ(724 * 3 / 4, send_stream->GetLastHeight());

  // Trigger underuse which should go back up in resolution.
  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
  EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
  EXPECT_EQ(6, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(1284, send_stream->GetLastWidth());
  EXPECT_EQ(724, send_stream->GetLastHeight());

  EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
}

void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
                                                bool is_screenshare) {
  cricket::VideoCodec codec = kVp8Codec720p;
@@ -1764,25 +1839,41 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
  FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
  webrtc::LoadObserver* overuse_callback =
      send_stream->GetConfig().overuse_callback;

  if (!enable_overuse) {
    ASSERT_TRUE(overuse_callback == NULL);

    EXPECT_TRUE(capturer.CaptureFrame());
    EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());

    EXPECT_EQ(codec.width, send_stream->GetLastWidth());
    EXPECT_EQ(codec.height, send_stream->GetLastHeight());

    EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
    return;
  }

  ASSERT_TRUE(overuse_callback != NULL);
  EXPECT_TRUE(capturer.CaptureFrame());
  EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);

  EXPECT_TRUE(capturer.CaptureFrame());
  EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());

  if (enable_overuse && !is_screenshare) {
    EXPECT_LT(send_stream->GetLastWidth(), codec.width);
    EXPECT_LT(send_stream->GetLastHeight(), codec.height);
  } else {
  if (is_screenshare) {
    // Do not adapt screen share.
    EXPECT_EQ(codec.width, send_stream->GetLastWidth());
    EXPECT_EQ(codec.height, send_stream->GetLastHeight());
  } else {
    EXPECT_LT(send_stream->GetLastWidth(), codec.width);
    EXPECT_LT(send_stream->GetLastHeight(), codec.height);
  }

  // Trigger underuse which should go back to normal resolution.
  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
  EXPECT_TRUE(capturer.CaptureFrame());

  EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
  EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());

  EXPECT_EQ(codec.width, send_stream->GetLastWidth());
  EXPECT_EQ(codec.height, send_stream->GetLastHeight());
||||
|
||||
@ -85,6 +85,7 @@
|
||||
'base/streamparams_unittest.cc',
|
||||
'base/turnutils_unittest.cc',
|
||||
'base/videoadapter_unittest.cc',
|
||||
'base/videobroadcaster_unittest.cc',
|
||||
'base/videocapturer_unittest.cc',
|
||||
'base/videocommon_unittest.cc',
|
||||
'base/videoengine_unittest.h',
|
||||
|
||||