Reland of Add framerate to VideoSinkWants and ability to signal on overuse (patchset #1 id:1 of https://codereview.webrtc.org/2783183003/ )

Reason for revert:
Seems to be a flaky test rather than an issue with this CL. Creating reland; will add code to reduce the flakiness of that test.

Original issue's description:
> Revert of Add framerate to VideoSinkWants and ability to signal on overuse (patchset #8 id:410001 of https://codereview.webrtc.org/2781433002/ )
>
> Reason for revert:
> This has resulted in failure of CallPerfTest.ReceivesCpuOveruseAndUnderuse test on the Win7 build bot https://build.chromium.org/p/client.webrtc.perf/builders/Win7/builds/1780
>
> Original issue's description:
> > Reland of Add framerate to VideoSinkWants and ability to signal on overuse (patchset #1 id:1 of https://codereview.webrtc.org/2764133002/ )
> >
> > Reason for revert:
> > Found issue with test case, will add fix to reland cl.
> >
> > Original issue's description:
> > > Revert of Add framerate to VideoSinkWants and ability to signal on overuse (patchset #14 id:250001 of https://codereview.webrtc.org/2716643002/ )
> > >
> > > Reason for revert:
> > > Breaks perf tests:
> > > https://build.chromium.org/p/client.webrtc.perf/builders/Win7/builds/1679
> > > https://build.chromium.org/p/client.webrtc.perf/builders/Android32%20Tests%20%28L%20Nexus5%29/builds/2325
> > >
> > > Original issue's description:
> > > > Add framerate to VideoSinkWants and ability to signal on overuse
> > > >
> > > > In ViEEncoder, try to reduce framerate instead of resolution if the
> > > > current degradation preference is maintain-resolution rather than
> > > > balanced.
> > > >
> > > > BUG=webrtc:4172
> > > >
> > > > Review-Url: https://codereview.webrtc.org/2716643002
> > > > Cr-Commit-Position: refs/heads/master@{#17327}
> > > > Committed: 72acf25261
> > >
> > > TBR=nisse@webrtc.org,magjed@webrtc.org,kthelgason@webrtc.org,ilnik@webrtc.org,stefan@webrtc.org,sprang@webrtc.org
> > > # Skipping CQ checks because original CL landed less than 1 days ago.
> > > NOPRESUBMIT=true
> > > NOTREECHECKS=true
> > > NOTRY=true
> > > BUG=webrtc:4172
> > >
> > > Review-Url: https://codereview.webrtc.org/2764133002
> > > Cr-Commit-Position: refs/heads/master@{#17331}
> > > Committed: 8b45b11144
> >
> > TBR=nisse@webrtc.org,magjed@webrtc.org,kthelgason@webrtc.org,ilnik@webrtc.org,stefan@webrtc.org,skvlad@webrtc.org
> > # Not skipping CQ checks because original CL landed more than 1 days ago.
> > BUG=webrtc:4172
> >
> > Review-Url: https://codereview.webrtc.org/2781433002
> > Cr-Commit-Position: refs/heads/master@{#17474}
> > Committed: 3ea3c77e93
>
> TBR=ilnik@webrtc.org,stefan@webrtc.org,asapersson@webrtc.org,sprang@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 days ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:4172
>
> Review-Url: https://codereview.webrtc.org/2783183003
> Cr-Commit-Position: refs/heads/master@{#17477}
> Committed: f9ed235c9b

R=ilnik@webrtc.org,stefan@webrtc.org
BUG=webrtc:4172

Review-Url: https://codereview.webrtc.org/2789823002
Cr-Commit-Position: refs/heads/master@{#17498}
This commit is contained in:
sprang 2017-04-02 23:53:04 -07:00 committed by Commit bot
parent 76d9c9c382
commit c5d62e29ca
37 changed files with 1388 additions and 403 deletions

View File

@ -123,6 +123,7 @@ if (rtc_include_tests) {
"../video",
"../voice_engine",
"//testing/gtest",
"//webrtc/test:field_trial",
"//webrtc/test:test_common",
]
if (!build_with_chromium && is_clang) {

View File

@ -172,7 +172,7 @@ class BitrateEstimatorTest : public test::CallTest {
Clock::GetRealTimeClock()));
send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kBalanced);
VideoSendStream::DegradationPreference::kMaintainFramerate);
send_stream_->Start();
frame_generator_capturer_->Start();

View File

@ -30,6 +30,7 @@
#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
#include "webrtc/test/field_trial.h"
#include "webrtc/test/frame_generator.h"
#include "webrtc/test/frame_generator_capturer.h"
#include "webrtc/test/gtest.h"
@ -477,13 +478,15 @@ TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkJitter) {
}
TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
// Minimal normal usage at the start, then 30s overuse to allow filter to
// settle, and then 80s underuse to allow plenty of time for rampup again.
test::ScopedFieldTrials fake_overuse_settings(
"WebRTC-ForceSimulatedOveruseIntervalMs/1-30000-80000/");
class LoadObserver : public test::SendTest,
public test::FrameGeneratorCapturer::SinkWantsObserver {
public:
LoadObserver()
: SendTest(kLongTimeoutMs),
expect_lower_resolution_wants_(true),
encoder_(Clock::GetRealTimeClock(), 60 /* delay_ms */) {}
LoadObserver() : SendTest(kLongTimeoutMs), test_phase_(TestPhase::kStart) {}
void OnFrameGeneratorCapturerCreated(
test::FrameGeneratorCapturer* frame_generator_capturer) override {
@ -494,24 +497,43 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
// OnSinkWantsChanged is called when FrameGeneratorCapturer::AddOrUpdateSink
// is called.
// TODO(sprang): Add integration test for maintain-framerate mode?
void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
// First expect CPU overuse. Then expect CPU underuse when the encoder
// delay has been decreased.
if (wants.target_pixel_count &&
*wants.target_pixel_count <
wants.max_pixel_count.value_or(std::numeric_limits<int>::max())) {
// On adapting up, ViEEncoder::VideoSourceProxy will set the target
// pixel count to a step up from the current and the max value to
// something higher than the target.
EXPECT_FALSE(expect_lower_resolution_wants_);
observation_complete_.Set();
} else if (wants.max_pixel_count) {
// On adapting down, ViEEncoder::VideoSourceProxy will set only the max
// pixel count, leaving the target unset.
EXPECT_TRUE(expect_lower_resolution_wants_);
expect_lower_resolution_wants_ = false;
encoder_.SetDelay(2);
switch (test_phase_) {
case TestPhase::kStart:
if (wants.max_pixel_count < std::numeric_limits<int>::max()) {
// On adapting down, ViEEncoder::VideoSourceProxy will set only the
// max pixel count, leaving the target unset.
test_phase_ = TestPhase::kAdaptedDown;
} else {
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps;
}
break;
case TestPhase::kAdaptedDown:
// On adapting up, the adaptation counter will again be at zero, and
// so all constraints will be reset.
if (wants.max_pixel_count == std::numeric_limits<int>::max() &&
!wants.target_pixel_count) {
test_phase_ = TestPhase::kAdaptedUp;
observation_complete_.Set();
} else {
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps;
}
break;
case TestPhase::kAdaptedUp:
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps;
}
}
@ -519,15 +541,13 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = &encoder_;
}
void PerformTest() override {
EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback.";
}
bool expect_lower_resolution_wants_;
test::DelayedEncoder encoder_;
enum class TestPhase { kStart, kAdaptedDown, kAdaptedUp } test_phase_;
} test;
RunBaseTest(&test);

View File

@ -81,8 +81,8 @@ bool AdaptedVideoTrackSource::apply_rotation() {
void AdaptedVideoTrackSource::OnSinkWantsChanged(
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
video_adapter_.OnResolutionRequest(wants.target_pixel_count,
wants.max_pixel_count);
video_adapter_.OnResolutionFramerateRequest(
wants.target_pixel_count, wants.max_pixel_count, wants.max_framerate_fps);
}
bool AdaptedVideoTrackSource::AdaptFrame(int width,

View File

@ -66,8 +66,9 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
GetCaptureFormat()->fourcc);
}
bool CaptureCustomFrame(int width, int height, uint32_t fourcc) {
// default to 30fps
return CaptureCustomFrame(width, height, 33333333, fourcc);
// Default to 30fps.
return CaptureCustomFrame(width, height, rtc::kNumNanosecsPerSec / 30,
fourcc);
}
bool CaptureCustomFrame(int width,
int height,
@ -92,8 +93,11 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
// AdaptFrame, and the test case
// VideoCapturerTest.SinkWantsMaxPixelAndMaxPixelCountStepUp
// depends on this.
if (AdaptFrame(width, height, 0, 0, &adapted_width, &adapted_height,
&crop_width, &crop_height, &crop_x, &crop_y, nullptr)) {
if (AdaptFrame(width, height,
next_timestamp_ / rtc::kNumNanosecsPerMicrosec,
next_timestamp_ / rtc::kNumNanosecsPerMicrosec,
&adapted_width, &adapted_height, &crop_width, &crop_height,
&crop_x, &crop_y, nullptr)) {
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
webrtc::I420Buffer::Create(adapted_width, adapted_height));
buffer->InitializeData();

View File

@ -12,6 +12,7 @@
#define WEBRTC_MEDIA_BASE_FAKEVIDEORENDERER_H_
#include "webrtc/api/video/video_frame.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/logging.h"
#include "webrtc/media/base/videosinkinterface.h"

View File

@ -106,7 +106,8 @@ VideoAdapter::VideoAdapter(int required_resolution_alignment)
previous_height_(0),
required_resolution_alignment_(required_resolution_alignment),
resolution_request_target_pixel_count_(std::numeric_limits<int>::max()),
resolution_request_max_pixel_count_(std::numeric_limits<int>::max()) {}
resolution_request_max_pixel_count_(std::numeric_limits<int>::max()),
max_framerate_request_(std::numeric_limits<int>::max()) {}
VideoAdapter::VideoAdapter() : VideoAdapter(1) {}
@ -114,21 +115,34 @@ VideoAdapter::~VideoAdapter() {}
bool VideoAdapter::KeepFrame(int64_t in_timestamp_ns) {
rtc::CritScope cs(&critical_section_);
if (!requested_format_ || requested_format_->interval == 0)
if (max_framerate_request_ <= 0)
return false;
int64_t frame_interval_ns =
requested_format_ ? requested_format_->interval : 0;
// If |max_framerate_request_| is not set, it will default to maxint, which
// will lead to a frame_interval_ns rounded to 0.
frame_interval_ns = std::max<int64_t>(
frame_interval_ns, rtc::kNumNanosecsPerSec / max_framerate_request_);
if (frame_interval_ns <= 0) {
// Frame rate throttling not enabled.
return true;
}
if (next_frame_timestamp_ns_) {
// Time until next frame should be outputted.
const int64_t time_until_next_frame_ns =
(*next_frame_timestamp_ns_ - in_timestamp_ns);
// Continue if timestamp is withing expected range.
if (std::abs(time_until_next_frame_ns) < 2 * requested_format_->interval) {
// Continue if timestamp is within expected range.
if (std::abs(time_until_next_frame_ns) < 2 * frame_interval_ns) {
// Drop if a frame shouldn't be outputted yet.
if (time_until_next_frame_ns > 0)
return false;
// Time to output new frame.
*next_frame_timestamp_ns_ += requested_format_->interval;
*next_frame_timestamp_ns_ += frame_interval_ns;
return true;
}
}
@ -137,7 +151,7 @@ bool VideoAdapter::KeepFrame(int64_t in_timestamp_ns) {
// reset. Set first timestamp target to just half the interval to prefer
// keeping frames in case of jitter.
next_frame_timestamp_ns_ =
rtc::Optional<int64_t>(in_timestamp_ns + requested_format_->interval / 2);
rtc::Optional<int64_t>(in_timestamp_ns + frame_interval_ns / 2);
return true;
}
@ -249,14 +263,15 @@ void VideoAdapter::OnOutputFormatRequest(const VideoFormat& format) {
next_frame_timestamp_ns_ = rtc::Optional<int64_t>();
}
void VideoAdapter::OnResolutionRequest(
void VideoAdapter::OnResolutionFramerateRequest(
const rtc::Optional<int>& target_pixel_count,
const rtc::Optional<int>& max_pixel_count) {
int max_pixel_count,
int max_framerate_fps) {
rtc::CritScope cs(&critical_section_);
resolution_request_max_pixel_count_ =
max_pixel_count.value_or(std::numeric_limits<int>::max());
resolution_request_max_pixel_count_ = max_pixel_count;
resolution_request_target_pixel_count_ =
target_pixel_count.value_or(resolution_request_max_pixel_count_);
max_framerate_request_ = max_framerate_fps;
}
} // namespace cricket

View File

@ -25,7 +25,9 @@ namespace cricket {
class VideoAdapter {
public:
VideoAdapter();
VideoAdapter(int required_resolution_alignment);
// The output frames will have height and width that is divisible by
// |required_resolution_alignment|.
explicit VideoAdapter(int required_resolution_alignment);
virtual ~VideoAdapter();
// Return the adapted resolution and cropping parameters given the
@ -49,12 +51,16 @@ class VideoAdapter {
void OnOutputFormatRequest(const VideoFormat& format);
// Requests the output frame size from |AdaptFrameResolution| to have as close
// as possible to |target_pixel_count|, but no more than |max_pixel_count|
// pixels. If |target_pixel_count| is not set, treat it as being equal to
// |max_pixel_count|. If |max_pixel_count| is not set, treat is as being the
// highest resolution available.
void OnResolutionRequest(const rtc::Optional<int>& target_pixel_count,
const rtc::Optional<int>& max_pixel_count);
// as possible to |target_pixel_count| pixels (if set) but no more than
// |max_pixel_count|.
// |max_framerate_fps| is essentially analogous to |max_pixel_count|, but for
// framerate rather than resolution.
// Set |max_pixel_count| and/or |max_framerate_fps| to
// std::numeric_limit<int>::max() if no upper limit is desired.
void OnResolutionFramerateRequest(
const rtc::Optional<int>& target_pixel_count,
int max_pixel_count,
int max_framerate_fps);
private:
// Determine if frame should be dropped based on input fps and requested fps.
@ -77,6 +83,7 @@ class VideoAdapter {
rtc::Optional<VideoFormat> requested_format_ GUARDED_BY(critical_section_);
int resolution_request_target_pixel_count_ GUARDED_BY(critical_section_);
int resolution_request_max_pixel_count_ GUARDED_BY(critical_section_);
int max_framerate_request_ GUARDED_BY(critical_section_);
// The critical section to protect the above variables.
rtc::CriticalSection critical_section_;

View File

@ -22,13 +22,16 @@
#include "webrtc/media/base/videoadapter.h"
namespace cricket {
namespace {
const int kDefaultFps = 30;
} // namespace
class VideoAdapterTest : public testing::Test {
public:
virtual void SetUp() {
capturer_.reset(new FakeVideoCapturer);
capture_format_ = capturer_->GetSupportedFormats()->at(0);
capture_format_.interval = VideoFormat::FpsToInterval(30);
capture_format_.interval = VideoFormat::FpsToInterval(kDefaultFps);
listener_.reset(new VideoCapturerListener(&adapter_));
capturer_->AddOrUpdateSink(listener_.get(), rtc::VideoSinkWants());
@ -290,7 +293,7 @@ TEST_F(VideoAdapterTest, AdaptFramerateHighLimit) {
// the adapter is conservative and resets to the new offset and does not drop
// any frame.
TEST_F(VideoAdapterTest, AdaptFramerateTimestampOffset) {
const int64_t capture_interval = VideoFormat::FpsToInterval(30);
const int64_t capture_interval = VideoFormat::FpsToInterval(kDefaultFps);
adapter_.OnOutputFormatRequest(
VideoFormat(640, 480, capture_interval, cricket::FOURCC_ANY));
@ -319,7 +322,7 @@ TEST_F(VideoAdapterTest, AdaptFramerateTimestampOffset) {
// Request 30 fps and send 30 fps with jitter. Expect that no frame is dropped.
TEST_F(VideoAdapterTest, AdaptFramerateTimestampJitter) {
const int64_t capture_interval = VideoFormat::FpsToInterval(30);
const int64_t capture_interval = VideoFormat::FpsToInterval(kDefaultFps);
adapter_.OnOutputFormatRequest(
VideoFormat(640, 480, capture_interval, cricket::FOURCC_ANY));
@ -384,6 +387,56 @@ TEST_F(VideoAdapterTest, AdaptFramerateOntheFly) {
EXPECT_GT(listener_->GetStats().dropped_frames, 0);
}
// Do not adapt the frame rate or the resolution. Expect no frame drop, no
// cropping, and no resolution change.
TEST_F(VideoAdapterTest, OnFramerateRequestMax) {
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(),
std::numeric_limits<int>::max(),
std::numeric_limits<int>::max());
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
// Verify no frame drop and no resolution change.
VideoCapturerListener::Stats stats = listener_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(0, stats.dropped_frames);
VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
capture_format_.width, capture_format_.height);
EXPECT_TRUE(stats.last_adapt_was_no_op);
}
TEST_F(VideoAdapterTest, OnFramerateRequestZero) {
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(),
std::numeric_limits<int>::max(), 0);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
// Verify no crash and that frames aren't dropped.
VideoCapturerListener::Stats stats = listener_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(10, stats.dropped_frames);
}
// Adapt the frame rate to be half of the capture rate at the beginning. Expect
// the number of dropped frames to be half of the number the captured frames.
TEST_F(VideoAdapterTest, OnFramerateRequestHalf) {
adapter_.OnResolutionFramerateRequest(
rtc::Optional<int>(), std::numeric_limits<int>::max(), kDefaultFps / 2);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
// Verify no crash and that frames aren't dropped.
VideoCapturerListener::Stats stats = listener_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(5, stats.dropped_frames);
VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
capture_format_.width, capture_format_.height);
}
// Set a very high output pixel resolution. Expect no cropping or resolution
// change.
TEST_F(VideoAdapterTest, AdaptFrameResolutionHighLimit) {
@ -696,8 +749,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(720, out_height_);
// Adapt down one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(1280 * 720 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 1280 * 720 - 1,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -707,8 +760,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(540, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 540 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 960 * 540 - 1,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -718,8 +771,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(360, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 640 * 360 - 1,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -729,8 +782,9 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(270, out_height_);
// Adapt up one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360),
rtc::Optional<int>(960 * 540));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(640 * 360),
960 * 540,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -740,8 +794,9 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(360, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540),
rtc::Optional<int>(1280 * 720));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(960 * 540),
1280 * 720,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -751,8 +806,9 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(540, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(1280 * 720),
rtc::Optional<int>(1920 * 1080));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(1280 * 720),
1920 * 1080,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -771,7 +827,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestMaxZero) {
EXPECT_EQ(1280, out_width_);
EXPECT_EQ(720, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(), rtc::Optional<int>(0));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 0,
std::numeric_limits<int>::max());
EXPECT_FALSE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -779,8 +836,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestMaxZero) {
TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
// Large step down.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 640 * 360 - 1,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -790,8 +847,9 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
EXPECT_EQ(270, out_height_);
// Large step up.
adapter_.OnResolutionRequest(rtc::Optional<int>(1280 * 720),
rtc::Optional<int>(1920 * 1080));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(1280 * 720),
1920 * 1080,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -802,8 +860,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
}
TEST_F(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) {
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 640 * 360 - 1,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -822,8 +880,8 @@ TEST_F(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) {
EXPECT_EQ(480, out_width_);
EXPECT_EQ(270, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 960 * 720,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -842,8 +900,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestReset) {
EXPECT_EQ(1280, out_width_);
EXPECT_EQ(720, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 640 * 360 - 1,
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -852,7 +910,9 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestReset) {
EXPECT_EQ(480, out_width_);
EXPECT_EQ(270, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(), rtc::Optional<int>());
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(),
std::numeric_limits<int>::max(),
std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -876,8 +936,8 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(360, out_height_);
// Adapt down one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 640 * 360 - 1,
std::numeric_limits<int>::max());
// Expect cropping to 16:9 format and 3/4 scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -888,8 +948,8 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(270, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(480 * 270 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 480 * 270 - 1,
std::numeric_limits<int>::max());
// Expect cropping to 16:9 format and 1/2 scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -900,8 +960,9 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(180, out_height_);
// Adapt up one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(480 * 270),
rtc::Optional<int>(640 * 360));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(480 * 270),
640 * 360,
std::numeric_limits<int>::max());
// Expect cropping to 16:9 format and 3/4 scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -912,8 +973,9 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(270, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360),
rtc::Optional<int>(960 * 540));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(640 * 360),
960 * 540,
std::numeric_limits<int>::max());
// Expect cropping to 16:9 format and no scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -924,8 +986,9 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(360, out_height_);
// Try to adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540),
rtc::Optional<int>(1280 * 720));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(960 * 540),
1280 * 720,
std::numeric_limits<int>::max());
// Expect cropping to 16:9 format and no scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -940,8 +1003,9 @@ TEST_F(VideoAdapterTest, TestCroppingOddResolution) {
// Ask for 640x360 (16:9 aspect), with 3/16 scaling.
adapter_.OnOutputFormatRequest(
VideoFormat(640, 360, 0, FOURCC_I420));
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 * 3 / 16 * 3 / 16));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(),
640 * 360 * 3 / 16 * 3 / 16,
std::numeric_limits<int>::max());
// Send 640x480 (4:3 aspect).
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
@ -961,8 +1025,9 @@ TEST_F(VideoAdapterTest, TestAdaptToVerySmallResolution) {
const int w = 1920;
const int h = 1080;
adapter_.OnOutputFormatRequest(VideoFormat(w, h, 0, FOURCC_I420));
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(w * h * 1 / 16 * 1 / 16));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(),
w * h * 1 / 16 * 1 / 16,
std::numeric_limits<int>::max());
// Send 1920x1080 (16:9 aspect).
EXPECT_TRUE(adapter_.AdaptFrameResolution(
@ -976,8 +1041,9 @@ TEST_F(VideoAdapterTest, TestAdaptToVerySmallResolution) {
EXPECT_EQ(67, out_height_);
// Adapt back up one step to 3/32.
adapter_.OnResolutionRequest(rtc::Optional<int>(w * h * 3 / 32 * 3 / 32),
rtc::Optional<int>(w * h * 1 / 8 * 1 / 8));
adapter_.OnResolutionFramerateRequest(
rtc::Optional<int>(w * h * 3 / 32 * 3 / 32), w * h * 1 / 8 * 1 / 8,
std::numeric_limits<int>::max());
// Send 1920x1080 (16:9 aspect).
EXPECT_TRUE(adapter_.AdaptFrameResolution(
@ -997,8 +1063,9 @@ TEST_F(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) {
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540),
rtc::Optional<int>());
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(960 * 540),
std::numeric_limits<int>::max(),
std::numeric_limits<int>::max());
// Still expect all frames to be dropped
EXPECT_FALSE(adapter_.AdaptFrameResolution(
@ -1006,8 +1073,8 @@ TEST_F(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) {
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 480 - 1));
adapter_.OnResolutionFramerateRequest(rtc::Optional<int>(), 640 * 480 - 1,
std::numeric_limits<int>::max());
// Still expect all frames to be dropped
EXPECT_FALSE(adapter_.AdaptFrameResolution(
@ -1019,8 +1086,9 @@ TEST_F(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) {
// Test that we will adapt to max given a target pixel count close to max.
TEST_F(VideoAdapterTest, TestAdaptToMax) {
adapter_.OnOutputFormatRequest(VideoFormat(640, 360, 0, FOURCC_I420));
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1) /* target */,
rtc::Optional<int>());
adapter_.OnResolutionFramerateRequest(
rtc::Optional<int>(640 * 360 - 1) /* target */,
std::numeric_limits<int>::max(), std::numeric_limits<int>::max());
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0, &cropped_width_,
&cropped_height_, &out_width_,
@ -1028,5 +1096,4 @@ TEST_F(VideoAdapterTest, TestAdaptToMax) {
EXPECT_EQ(640, out_width_);
EXPECT_EQ(360, out_height_);
}
} // namespace cricket

View File

@ -84,9 +84,7 @@ void VideoBroadcaster::UpdateWants() {
wants.rotation_applied = true;
}
// wants.max_pixel_count == MIN(sink.wants.max_pixel_count)
if (sink.wants.max_pixel_count &&
(!wants.max_pixel_count ||
(*sink.wants.max_pixel_count < *wants.max_pixel_count))) {
if (sink.wants.max_pixel_count < wants.max_pixel_count) {
wants.max_pixel_count = sink.wants.max_pixel_count;
}
// Select the minimum requested target_pixel_count, if any, of all sinks so
@ -98,11 +96,15 @@ void VideoBroadcaster::UpdateWants() {
(*sink.wants.target_pixel_count < *wants.target_pixel_count))) {
wants.target_pixel_count = sink.wants.target_pixel_count;
}
// Select the minimum for the requested max framerates.
if (sink.wants.max_framerate_fps < wants.max_framerate_fps) {
wants.max_framerate_fps = sink.wants.max_framerate_fps;
}
}
if (wants.max_pixel_count && wants.target_pixel_count &&
*wants.target_pixel_count >= *wants.max_pixel_count) {
wants.target_pixel_count = wants.max_pixel_count;
if (wants.target_pixel_count &&
*wants.target_pixel_count >= wants.max_pixel_count) {
wants.target_pixel_count.emplace(wants.max_pixel_count);
}
current_wants_ = wants;
}

View File

@ -87,23 +87,24 @@ TEST(VideoBroadcasterTest, AppliesRotationIfAnySinkWantsRotationApplied) {
TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCount) {
VideoBroadcaster broadcaster;
EXPECT_TRUE(!broadcaster.wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
broadcaster.wants().max_pixel_count);
FakeVideoRenderer sink1;
VideoSinkWants wants1;
wants1.max_pixel_count = rtc::Optional<int>(1280 * 720);
wants1.max_pixel_count = 1280 * 720;
broadcaster.AddOrUpdateSink(&sink1, wants1);
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count);
EXPECT_EQ(1280 * 720, broadcaster.wants().max_pixel_count);
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.max_pixel_count = rtc::Optional<int>(640 * 360);
wants2.max_pixel_count = 640 * 360;
broadcaster.AddOrUpdateSink(&sink2, wants2);
EXPECT_EQ(640 * 360, *broadcaster.wants().max_pixel_count);
EXPECT_EQ(640 * 360, broadcaster.wants().max_pixel_count);
broadcaster.RemoveSink(&sink2);
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count);
EXPECT_EQ(1280 * 720, broadcaster.wants().max_pixel_count);
}
TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxAndTargetPixelCount) {
@ -127,6 +128,28 @@ TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxAndTargetPixelCount) {
EXPECT_EQ(1280 * 720, *broadcaster.wants().target_pixel_count);
}
TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxFramerate) {
VideoBroadcaster broadcaster;
EXPECT_EQ(std::numeric_limits<int>::max(),
broadcaster.wants().max_framerate_fps);
FakeVideoRenderer sink1;
VideoSinkWants wants1;
wants1.max_framerate_fps = 30;
broadcaster.AddOrUpdateSink(&sink1, wants1);
EXPECT_EQ(30, broadcaster.wants().max_framerate_fps);
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.max_framerate_fps = 15;
broadcaster.AddOrUpdateSink(&sink2, wants2);
EXPECT_EQ(15, broadcaster.wants().max_framerate_fps);
broadcaster.RemoveSink(&sink2);
EXPECT_EQ(30, broadcaster.wants().max_framerate_fps);
}
TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
VideoBroadcaster broadcaster;
EXPECT_TRUE(!broadcaster.wants().black_frames);

View File

@ -149,8 +149,9 @@ void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
apply_rotation_ = wants.rotation_applied;
if (video_adapter()) {
video_adapter()->OnResolutionRequest(wants.target_pixel_count,
wants.max_pixel_count);
video_adapter()->OnResolutionFramerateRequest(wants.target_pixel_count,
wants.max_pixel_count,
wants.max_framerate_fps);
}
}

View File

@ -266,7 +266,7 @@ TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
// with less than or equal to |wants.max_pixel_count| depending on how the
// capturer can scale the input frame size.
rtc::VideoSinkWants wants;
wants.max_pixel_count = rtc::Optional<int>(1280 * 720 * 3 / 5);
wants.max_pixel_count = 1280 * 720 * 3 / 5;
capturer_->AddOrUpdateSink(&renderer_, wants);
EXPECT_TRUE(capturer_->CaptureFrame());
EXPECT_EQ(2, renderer_.num_rendered_frames());
@ -274,8 +274,7 @@ TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
EXPECT_EQ(540, renderer_.height());
// Request a lower resolution.
wants.max_pixel_count =
rtc::Optional<int>((renderer_.width() * renderer_.height() * 3) / 5);
wants.max_pixel_count = (renderer_.width() * renderer_.height() * 3) / 5;
capturer_->AddOrUpdateSink(&renderer_, wants);
EXPECT_TRUE(capturer_->CaptureFrame());
EXPECT_EQ(3, renderer_.num_rendered_frames());
@ -294,8 +293,8 @@ TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
EXPECT_EQ(360, renderer2.height());
// Request higher resolution.
wants.target_pixel_count.emplace((*wants.max_pixel_count * 5) / 3);
wants.max_pixel_count.emplace(*wants.max_pixel_count * 4);
wants.target_pixel_count.emplace((wants.max_pixel_count * 5) / 3);
wants.max_pixel_count = wants.max_pixel_count * 4;
capturer_->AddOrUpdateSink(&renderer_, wants);
EXPECT_TRUE(capturer_->CaptureFrame());
EXPECT_EQ(5, renderer_.num_rendered_frames());

View File

@ -27,13 +27,15 @@ struct VideoSinkWants {
bool black_frames = false;
// Tells the source the maximum number of pixels the sink wants.
rtc::Optional<int> max_pixel_count;
int max_pixel_count = std::numeric_limits<int>::max();
// Tells the source the desired number of pixels the sinks wants. This will
// typically be used when stepping the resolution up again when conditions
// have improved after an earlier downgrade. The source should select the
// closest resolution to this pixel count, but if max_pixel_count is set, it
// still sets the absolute upper bound.
rtc::Optional<int> target_pixel_count;
// Tells the source the maximum framerate the sink wants.
int max_framerate_fps = std::numeric_limits<int>::max();
};
template <typename VideoFrameT>

View File

@ -108,6 +108,7 @@ FakeVideoSendStream::FakeVideoSendStream(
config_(std::move(config)),
codec_settings_set_(false),
resolution_scaling_enabled_(false),
framerate_scaling_enabled_(false),
source_(nullptr),
num_swapped_frames_(0) {
RTC_DCHECK(config.encoder_settings.encoder != NULL);
@ -252,9 +253,24 @@ void FakeVideoSendStream::SetSource(
if (source_)
source_->RemoveSink(this);
source_ = source;
resolution_scaling_enabled_ =
degradation_preference !=
webrtc::VideoSendStream::DegradationPreference::kMaintainResolution;
switch (degradation_preference) {
case DegradationPreference::kMaintainFramerate:
resolution_scaling_enabled_ = true;
framerate_scaling_enabled_ = false;
break;
case DegradationPreference::kMaintainResolution:
resolution_scaling_enabled_ = false;
framerate_scaling_enabled_ = true;
break;
case DegradationPreference::kBalanced:
resolution_scaling_enabled_ = true;
framerate_scaling_enabled_ = true;
break;
case DegradationPreference::kDegradationDisabled:
resolution_scaling_enabled_ = false;
framerate_scaling_enabled_ = false;
break;
}
if (source)
source->AddOrUpdateSink(this, resolution_scaling_enabled_
? sink_wants_
@ -333,7 +349,9 @@ FakeCall::FakeCall(const webrtc::Call::Config& config)
audio_network_state_(webrtc::kNetworkUp),
video_network_state_(webrtc::kNetworkUp),
num_created_send_streams_(0),
num_created_receive_streams_(0) {}
num_created_receive_streams_(0),
audio_transport_overhead_(0),
video_transport_overhead_(0) {}
FakeCall::~FakeCall() {
EXPECT_EQ(0u, video_send_streams_.size());

View File

@ -138,6 +138,7 @@ class FakeVideoSendStream final
bool resolution_scaling_enabled() const {
return resolution_scaling_enabled_;
}
bool framerate_scaling_enabled() const { return framerate_scaling_enabled_; }
void InjectVideoSinkWants(const rtc::VideoSinkWants& wants);
rtc::VideoSourceInterface<webrtc::VideoFrame>* source() const {
@ -169,6 +170,7 @@ class FakeVideoSendStream final
webrtc::VideoCodecVP9 vp9;
} vpx_settings_;
bool resolution_scaling_enabled_;
bool framerate_scaling_enabled_;
rtc::VideoSourceInterface<webrtc::VideoFrame>* source_;
int num_swapped_frames_;
rtc::Optional<webrtc::VideoFrame> last_frame_;

View File

@ -38,9 +38,10 @@
#include "webrtc/video_decoder.h"
#include "webrtc/video_encoder.h"
using DegradationPreference = webrtc::VideoSendStream::DegradationPreference;
namespace cricket {
namespace {
// If this field trial is enabled, we will enable sending FlexFEC and disable
// sending ULPFEC whenever the former has been negotiated. Receiving FlexFEC
// is enabled whenever FlexFEC has been negotiated.
@ -1637,26 +1638,35 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
}
if (source_ && stream_) {
stream_->SetSource(
nullptr, webrtc::VideoSendStream::DegradationPreference::kBalanced);
stream_->SetSource(nullptr, DegradationPreference::kDegradationDisabled);
}
// Switch to the new source.
source_ = source;
if (source && stream_) {
// Do not adapt resolution for screen content as this will likely
// result in blurry and unreadable text.
// |this| acts like a VideoSource to make sure SinkWants are handled on the
// correct thread.
stream_->SetSource(
this, enable_cpu_overuse_detection_ &&
!parameters_.options.is_screencast.value_or(false)
? webrtc::VideoSendStream::DegradationPreference::kBalanced
: webrtc::VideoSendStream::DegradationPreference::
kMaintainResolution);
stream_->SetSource(this, GetDegradationPreference());
}
return true;
}
webrtc::VideoSendStream::DegradationPreference
WebRtcVideoChannel2::WebRtcVideoSendStream::GetDegradationPreference() const {
// Do not adapt resolution for screen content as this will likely
// result in blurry and unreadable text.
// |this| acts like a VideoSource to make sure SinkWants are handled on the
// correct thread.
DegradationPreference degradation_preference;
if (!enable_cpu_overuse_detection_) {
degradation_preference = DegradationPreference::kDegradationDisabled;
} else {
if (parameters_.options.is_screencast.value_or(false)) {
degradation_preference = DegradationPreference::kMaintainResolution;
} else {
degradation_preference = DegradationPreference::kMaintainFramerate;
}
}
return degradation_preference;
}
const std::vector<uint32_t>&
WebRtcVideoChannel2::WebRtcVideoSendStream::GetSsrcs() const {
return ssrcs_;
@ -2095,16 +2105,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
parameters_.encoder_config.encoder_specific_settings = NULL;
if (source_) {
// Do not adapt resolution for screen content as this will likely result in
// blurry and unreadable text.
// |this| acts like a VideoSource to make sure SinkWants are handled on the
// correct thread.
stream_->SetSource(
this, enable_cpu_overuse_detection_ &&
!parameters_.options.is_screencast.value_or(false)
? webrtc::VideoSendStream::DegradationPreference::kBalanced
: webrtc::VideoSendStream::DegradationPreference::
kMaintainResolution);
stream_->SetSource(this, GetDegradationPreference());
}
// Call stream_->Start() if necessary conditions are met.

View File

@ -324,6 +324,9 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
// and whether or not the encoding in |rtp_parameters_| is active.
void UpdateSendState();
webrtc::VideoSendStream::DegradationPreference GetDegradationPreference()
const EXCLUSIVE_LOCKS_REQUIRED(&thread_checker_);
rtc::ThreadChecker thread_checker_;
rtc::AsyncInvoker invoker_;
rtc::Thread* worker_thread_;

View File

@ -2115,8 +2115,8 @@ TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
// Trigger overuse.
rtc::VideoSinkWants wants;
wants.max_pixel_count = rtc::Optional<int>(
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1);
wants.max_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
@ -2124,8 +2124,8 @@ TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
// Trigger overuse again.
wants.max_pixel_count = rtc::Optional<int>(
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1);
wants.max_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
@ -2143,7 +2143,7 @@ TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
send_stream->GetLastWidth() * send_stream->GetLastHeight();
// Cap the max to 4x the pixel count (assuming max 1/2 x 1/2 scale downs)
// of the current stream, so we don't take too large steps.
wants.max_pixel_count = rtc::Optional<int>(current_pixel_count * 4);
wants.max_pixel_count = current_pixel_count * 4;
// Default step down is 3/5 pixel count, so go up by 5/3.
wants.target_pixel_count = rtc::Optional<int>((current_pixel_count * 5) / 3);
send_stream->InjectVideoSinkWants(wants);
@ -2155,7 +2155,7 @@ TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
// Trigger underuse again, should go back up to full resolution.
current_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight();
wants.max_pixel_count = rtc::Optional<int>(current_pixel_count * 4);
wants.max_pixel_count = current_pixel_count * 4;
wants.target_pixel_count = rtc::Optional<int>((current_pixel_count * 5) / 3);
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
@ -2199,8 +2199,8 @@ TEST_F(WebRtcVideoChannel2Test, PreviousAdaptationDoesNotApplyToScreenshare) {
// Trigger overuse.
rtc::VideoSinkWants wants;
wants.max_pixel_count = rtc::Optional<int>(
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1);
wants.max_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
@ -2242,6 +2242,7 @@ TEST_F(WebRtcVideoChannel2Test, PreviousAdaptationDoesNotApplyToScreenshare) {
void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
bool is_screenshare) {
const int kDefaultFps = 30;
cricket::VideoCodec codec = GetEngineCodec("VP8");
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(codec);
@ -2263,14 +2264,17 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
options.is_screencast = rtc::Optional<bool>(is_screenshare);
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
cricket::VideoFormat capture_format = capturer.GetSupportedFormats()->front();
capture_format.interval = rtc::kNumNanosecsPerSec / kDefaultFps;
EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format));
EXPECT_TRUE(channel_->SetSend(true));
FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
if (!enable_overuse || is_screenshare) {
if (!enable_overuse) {
EXPECT_FALSE(send_stream->resolution_scaling_enabled());
EXPECT_FALSE(send_stream->framerate_scaling_enabled());
EXPECT_EQ(is_screenshare, send_stream->framerate_scaling_enabled());
EXPECT_TRUE(capturer.CaptureFrame());
EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
@ -2282,33 +2286,59 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
return;
}
EXPECT_TRUE(send_stream->resolution_scaling_enabled());
if (is_screenshare) {
EXPECT_FALSE(send_stream->resolution_scaling_enabled());
EXPECT_TRUE(send_stream->framerate_scaling_enabled());
} else {
EXPECT_TRUE(send_stream->resolution_scaling_enabled());
EXPECT_FALSE(send_stream->framerate_scaling_enabled());
}
// Trigger overuse.
ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
rtc::VideoSinkWants wants;
wants.max_pixel_count =
rtc::Optional<int>(capture_format.width * capture_format.height - 1);
if (is_screenshare) {
wants.max_framerate_fps = (kDefaultFps * 2) / 3;
} else {
wants.max_pixel_count = capture_format.width * capture_format.height - 1;
}
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureFrame());
EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
for (int i = 0; i < kDefaultFps; ++i)
EXPECT_TRUE(capturer.CaptureFrame());
EXPECT_TRUE(capturer.CaptureFrame());
EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
EXPECT_LT(send_stream->GetLastWidth(), capture_format.width);
EXPECT_LT(send_stream->GetLastHeight(), capture_format.height);
if (is_screenshare) {
// Drops every third frame.
EXPECT_EQ(kDefaultFps * 2 / 3, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(send_stream->GetLastWidth(), capture_format.width);
EXPECT_EQ(send_stream->GetLastHeight(), capture_format.height);
} else {
EXPECT_EQ(kDefaultFps, send_stream->GetNumberOfSwappedFrames());
EXPECT_LT(send_stream->GetLastWidth(), capture_format.width);
EXPECT_LT(send_stream->GetLastHeight(), capture_format.height);
}
// Trigger underuse which should go back to normal resolution.
int last_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight();
wants.max_pixel_count = rtc::Optional<int>(last_pixel_count * 4);
wants.target_pixel_count = rtc::Optional<int>((last_pixel_count * 5) / 3);
if (is_screenshare) {
wants.max_framerate_fps = kDefaultFps;
} else {
wants.max_pixel_count = last_pixel_count * 4;
wants.target_pixel_count.emplace((last_pixel_count * 5) / 3);
}
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureFrame());
EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
for (int i = 0; i < kDefaultFps; ++i)
EXPECT_TRUE(capturer.CaptureFrame());
if (is_screenshare) {
EXPECT_EQ(kDefaultFps + (kDefaultFps * 2 / 3),
send_stream->GetNumberOfSwappedFrames());
} else {
EXPECT_EQ(kDefaultFps * 2, send_stream->GetNumberOfSwappedFrames());
}
EXPECT_EQ(capture_format.width, send_stream->GetLastWidth());
EXPECT_EQ(capture_format.height, send_stream->GetLastHeight());

View File

@ -43,6 +43,7 @@ rtc_source_set("video_test_common") {
"frame_utils.h",
"vcm_capturer.cc",
"vcm_capturer.h",
"video_capturer.cc",
"video_capturer.h",
]
@ -53,6 +54,7 @@ rtc_source_set("video_test_common") {
deps = [
"../common_video",
"../media:rtc_media_base",
"../modules/video_capture:video_capture_module",
]
}

View File

@ -326,7 +326,7 @@ void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock,
width, height, framerate * speed, clock));
video_send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kBalanced);
VideoSendStream::DegradationPreference::kMaintainFramerate);
}
void CallTest::CreateFrameGeneratorCapturer(int framerate,
@ -336,7 +336,7 @@ void CallTest::CreateFrameGeneratorCapturer(int framerate,
test::FrameGeneratorCapturer::Create(width, height, framerate, clock_));
video_send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kBalanced);
VideoSendStream::DegradationPreference::kMaintainFramerate);
}
void CallTest::CreateFakeAudioDevices(

View File

@ -37,30 +37,47 @@ class FrameGeneratorCapturer::InsertFrameTask : public rtc::QueuedTask {
private:
bool Run() override {
bool task_completed = true;
if (repeat_interval_ms_ > 0) {
int64_t delay_ms;
int64_t time_now_ms = rtc::TimeMillis();
if (intended_run_time_ms_ > 0) {
delay_ms = time_now_ms - intended_run_time_ms_;
} else {
delay_ms = 0;
intended_run_time_ms_ = time_now_ms;
}
intended_run_time_ms_ += repeat_interval_ms_;
if (delay_ms < repeat_interval_ms_) {
// This is not a one-off frame. Check if the frame interval for this
// task queue is the same same as the current configured frame rate.
uint32_t current_interval_ms =
1000 / frame_generator_capturer_->GetCurrentConfiguredFramerate();
if (repeat_interval_ms_ != current_interval_ms) {
// Frame rate has changed since task was started, create a new instance.
rtc::TaskQueue::Current()->PostDelayedTask(
std::unique_ptr<rtc::QueuedTask>(this),
repeat_interval_ms_ - delay_ms);
std::unique_ptr<rtc::QueuedTask>(new InsertFrameTask(
frame_generator_capturer_, current_interval_ms)),
current_interval_ms);
} else {
rtc::TaskQueue::Current()->PostDelayedTask(
std::unique_ptr<rtc::QueuedTask>(this), 0);
LOG(LS_ERROR)
<< "Frame Generator Capturer can't keep up with requested fps";
// Schedule the next frame capture event to happen at approximately the
// correct absolute time point.
int64_t delay_ms;
int64_t time_now_ms = rtc::TimeMillis();
if (intended_run_time_ms_ > 0) {
delay_ms = time_now_ms - intended_run_time_ms_;
} else {
delay_ms = 0;
intended_run_time_ms_ = time_now_ms;
}
intended_run_time_ms_ += repeat_interval_ms_;
if (delay_ms < repeat_interval_ms_) {
rtc::TaskQueue::Current()->PostDelayedTask(
std::unique_ptr<rtc::QueuedTask>(this),
repeat_interval_ms_ - delay_ms);
} else {
rtc::TaskQueue::Current()->PostDelayedTask(
std::unique_ptr<rtc::QueuedTask>(this), 0);
LOG(LS_ERROR)
<< "Frame Generator Capturer can't keep up with requested fps";
}
// Repost of this instance, make sure it is not deleted.
task_completed = false;
}
}
frame_generator_capturer_->InsertFrame();
// Task should be deleted only if it's not repeating.
return repeat_interval_ms_ == 0;
return task_completed;
}
webrtc::test::FrameGeneratorCapturer* const frame_generator_capturer_;
@ -72,14 +89,12 @@ FrameGeneratorCapturer* FrameGeneratorCapturer::Create(int width,
int height,
int target_fps,
Clock* clock) {
FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer(
clock, FrameGenerator::CreateSquareGenerator(width, height), target_fps);
if (!capturer->Init()) {
delete capturer;
return NULL;
}
std::unique_ptr<FrameGeneratorCapturer> capturer(new FrameGeneratorCapturer(
clock, FrameGenerator::CreateSquareGenerator(width, height), target_fps));
if (!capturer->Init())
return nullptr;
return capturer;
return capturer.release();
}
FrameGeneratorCapturer* FrameGeneratorCapturer::CreateFromYuvFile(
@ -88,16 +103,15 @@ FrameGeneratorCapturer* FrameGeneratorCapturer::CreateFromYuvFile(
size_t height,
int target_fps,
Clock* clock) {
FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer(
clock, FrameGenerator::CreateFromYuvFile(
std::vector<std::string>(1, file_name), width, height, 1),
target_fps);
if (!capturer->Init()) {
delete capturer;
return NULL;
}
std::unique_ptr<FrameGeneratorCapturer> capturer(new FrameGeneratorCapturer(
clock,
FrameGenerator::CreateFromYuvFile(std::vector<std::string>(1, file_name),
width, height, 1),
target_fps));
if (!capturer->Init())
return nullptr;
return capturer;
return capturer.release();
}
FrameGeneratorCapturer::FrameGeneratorCapturer(
@ -129,29 +143,32 @@ void FrameGeneratorCapturer::SetFakeRotation(VideoRotation rotation) {
bool FrameGeneratorCapturer::Init() {
// This check is added because frame_generator_ might be file based and should
// not crash because a file moved.
if (frame_generator_.get() == NULL)
if (frame_generator_.get() == nullptr)
return false;
int framerate_fps = GetCurrentConfiguredFramerate();
task_queue_.PostDelayedTask(
std::unique_ptr<rtc::QueuedTask>(
new InsertFrameTask(this, 1000 / target_fps_)),
1000 / target_fps_);
new InsertFrameTask(this, 1000 / framerate_fps)),
1000 / framerate_fps);
return true;
}
void FrameGeneratorCapturer::InsertFrame() {
{
rtc::CritScope cs(&lock_);
if (sending_) {
VideoFrame* frame = frame_generator_->NextFrame();
frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
frame->set_rotation(fake_rotation_);
if (first_frame_capture_time_ == -1) {
first_frame_capture_time_ = frame->ntp_time_ms();
}
if (sink_)
sink_->OnFrame(*frame);
rtc::CritScope cs(&lock_);
if (sending_) {
VideoFrame* frame = frame_generator_->NextFrame();
frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
frame->set_rotation(fake_rotation_);
if (first_frame_capture_time_ == -1) {
first_frame_capture_time_ = frame->ntp_time_ms();
}
if (sink_) {
rtc::Optional<VideoFrame> out_frame = AdaptFrame(*frame);
if (out_frame)
sink_->OnFrame(*out_frame);
}
}
}
@ -185,6 +202,19 @@ void FrameGeneratorCapturer::AddOrUpdateSink(
sink_ = sink;
if (sink_wants_observer_)
sink_wants_observer_->OnSinkWantsChanged(sink, wants);
// Handle framerate within this class, just pass on resolution for possible
// adaptation.
rtc::VideoSinkWants resolution_wants = wants;
resolution_wants.max_framerate_fps = std::numeric_limits<int>::max();
VideoCapturer::AddOrUpdateSink(sink, resolution_wants);
// Ignore any requests for framerate higher than initially configured.
if (wants.max_framerate_fps < target_fps_) {
wanted_fps_.emplace(wants.max_framerate_fps);
} else {
wanted_fps_.reset();
}
}
void FrameGeneratorCapturer::RemoveSink(
@ -201,5 +231,12 @@ void FrameGeneratorCapturer::ForceFrame() {
std::unique_ptr<rtc::QueuedTask>(new InsertFrameTask(this, 0)));
}
int FrameGeneratorCapturer::GetCurrentConfiguredFramerate() {
rtc::CritScope cs(&lock_);
if (wanted_fps_ && *wanted_fps_ < target_fps_)
return *wanted_fps_;
return target_fps_;
}
} // namespace test
} // namespace webrtc

View File

@ -77,6 +77,7 @@ class FrameGeneratorCapturer : public VideoCapturer {
void InsertFrame();
static bool Run(void* obj);
int GetCurrentConfiguredFramerate();
Clock* const clock_;
bool sending_;
@ -86,7 +87,8 @@ class FrameGeneratorCapturer : public VideoCapturer {
rtc::CriticalSection lock_;
std::unique_ptr<FrameGenerator> frame_generator_;
int target_fps_;
int target_fps_ GUARDED_BY(&lock_);
rtc::Optional<int> wanted_fps_ GUARDED_BY(&lock_);
VideoRotation fake_rotation_ = kVideoRotation_0;
int64_t first_frame_capture_time_;

View File

@ -10,17 +10,18 @@
#include "webrtc/test/vcm_capturer.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
namespace test {
VcmCapturer::VcmCapturer() : started_(false), sink_(nullptr), vcm_(NULL) {}
VcmCapturer::VcmCapturer() : started_(false), sink_(nullptr), vcm_(nullptr) {}
bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
VideoCaptureModule::DeviceInfo* device_info =
VideoCaptureFactory::CreateDeviceInfo();
std::unique_ptr<VideoCaptureModule::DeviceInfo> device_info(
VideoCaptureFactory::CreateDeviceInfo());
char device_name[256];
char unique_name[256];
@ -35,7 +36,6 @@ bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
vcm_->RegisterCaptureDataCallback(this);
device_info->GetCapability(vcm_->CurrentDeviceName(), 0, capability_);
delete device_info;
capability_.width = static_cast<int32_t>(width);
capability_.height = static_cast<int32_t>(height);
@ -47,7 +47,7 @@ bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
return false;
}
assert(vcm_->CaptureStarted());
RTC_CHECK(vcm_->CaptureStarted());
return true;
}
@ -55,13 +55,13 @@ bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
VcmCapturer* VcmCapturer::Create(size_t width,
size_t height,
size_t target_fps) {
VcmCapturer* vcm_capturer = new VcmCapturer();
std::unique_ptr<VcmCapturer> vcm_capturer(new VcmCapturer());
if (!vcm_capturer->Init(width, height, target_fps)) {
// TODO(pbos): Log a warning that this failed.
delete vcm_capturer;
return NULL;
LOG(LS_WARNING) << "Failed to create VcmCapturer(w = " << width
<< ", h = " << height << ", fps = " << target_fps << ")";
return nullptr;
}
return vcm_capturer;
return vcm_capturer.release();
}
@ -80,6 +80,7 @@ void VcmCapturer::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
rtc::CritScope lock(&crit_);
RTC_CHECK(!sink_ || sink_ == sink);
sink_ = sink;
VideoCapturer::AddOrUpdateSink(sink, wants);
}
void VcmCapturer::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
@ -102,8 +103,11 @@ VcmCapturer::~VcmCapturer() { Destroy(); }
void VcmCapturer::OnFrame(const VideoFrame& frame) {
rtc::CritScope lock(&crit_);
if (started_ && sink_)
sink_->OnFrame(frame);
if (started_ && sink_) {
rtc::Optional<VideoFrame> out_frame = AdaptFrame(frame);
if (out_frame)
sink_->OnFrame(*out_frame);
}
}
} // test

View File

@ -10,6 +10,8 @@
#ifndef WEBRTC_TEST_VCM_CAPTURER_H_
#define WEBRTC_TEST_VCM_CAPTURER_H_
#include <memory>
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_types.h"

View File

@ -0,0 +1,58 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/test/video_capturer.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/constructormagic.h"
namespace webrtc {
namespace test {
VideoCapturer::VideoCapturer() : video_adapter_(new cricket::VideoAdapter()) {}
VideoCapturer::~VideoCapturer() {}
rtc::Optional<VideoFrame> VideoCapturer::AdaptFrame(const VideoFrame& frame) {
int cropped_width = 0;
int cropped_height = 0;
int out_width = 0;
int out_height = 0;
if (!video_adapter_->AdaptFrameResolution(
frame.width(), frame.height(), frame.timestamp_us() * 1000,
&cropped_width, &cropped_height, &out_width, &out_height)) {
// Drop frame in order to respect frame rate constraint.
return rtc::Optional<VideoFrame>();
}
rtc::Optional<VideoFrame> out_frame;
if (out_height != frame.height() || out_width != frame.width()) {
// Video adapter has requested a down-scale. Allocate a new buffer and
// return scaled version.
rtc::scoped_refptr<I420Buffer> scaled_buffer =
I420Buffer::Create(out_width, out_height);
scaled_buffer->ScaleFrom(*frame.video_frame_buffer().get());
out_frame.emplace(
VideoFrame(scaled_buffer, kVideoRotation_0, frame.timestamp_us()));
} else {
// No adaptations needed, just return the frame as is.
out_frame.emplace(frame);
}
return out_frame;
}
void VideoCapturer::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
video_adapter_->OnResolutionFramerateRequest(
wants.target_pixel_count, wants.max_pixel_count, wants.max_framerate_fps);
}
} // namespace test
} // namespace webrtc

View File

@ -12,23 +12,42 @@
#include <stddef.h>
#include <memory>
#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/api/video/video_frame.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/optional.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/media/base/videosourceinterface.h"
namespace cricket {
class VideoAdapter;
} // namespace cricket
namespace webrtc {
class Clock;
namespace test {
class VideoCapturer : public rtc::VideoSourceInterface<VideoFrame> {
public:
virtual ~VideoCapturer() {}
VideoCapturer();
virtual ~VideoCapturer();
virtual void Start() = 0;
virtual void Stop() = 0;
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
protected:
rtc::Optional<VideoFrame> AdaptFrame(const VideoFrame& frame);
rtc::VideoSinkWants GetSinkWants();
private:
const std::unique_ptr<cricket::VideoAdapter> video_adapter_;
};
} // test
} // webrtc
} // namespace test
} // namespace webrtc
#endif // WEBRTC_TEST_VIDEO_CAPTURER_H_

View File

@ -231,7 +231,8 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
test::FrameGenerator::CreateSquareGenerator(kWidth, kHeight));
test::FrameForwarder frame_forwarder;
video_send_stream_->SetSource(
&frame_forwarder, VideoSendStream::DegradationPreference::kBalanced);
&frame_forwarder,
VideoSendStream::DegradationPreference::kMaintainFramerate);
frame_forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
EXPECT_TRUE(renderer.Wait())
@ -278,7 +279,8 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
kDefaultHeight));
test::FrameForwarder frame_forwarder;
video_send_stream_->SetSource(
&frame_forwarder, VideoSendStream::DegradationPreference::kBalanced);
&frame_forwarder,
VideoSendStream::DegradationPreference::kMaintainFramerate);
frame_forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
EXPECT_TRUE(renderer.Wait())
@ -1517,7 +1519,7 @@ class MultiStreamTest {
width, height, 30, Clock::GetRealTimeClock());
send_streams[i]->SetSource(
frame_generators[i],
VideoSendStream::DegradationPreference::kBalanced);
VideoSendStream::DegradationPreference::kMaintainFramerate);
frame_generators[i]->Start();
}
@ -1966,7 +1968,7 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
kDefaultHeight));
test::FrameForwarder forwarder;
video_send_stream_->SetSource(
&forwarder, VideoSendStream::DegradationPreference::kBalanced);
&forwarder, VideoSendStream::DegradationPreference::kMaintainFramerate);
forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
EXPECT_TRUE(post_encode_observer.Wait())

View File

@ -16,12 +16,15 @@
#include <algorithm>
#include <list>
#include <map>
#include <string>
#include <utility>
#include "webrtc/api/video/video_frame.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/numerics/exp_filter.h"
#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
#include <mach/mach.h>
@ -116,7 +119,7 @@ class OveruseFrameDetector::SendProcessingUsage {
filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
Reset();
}
~SendProcessingUsage() {}
virtual ~SendProcessingUsage() {}
void Reset() {
count_ = 0;
@ -139,7 +142,7 @@ class OveruseFrameDetector::SendProcessingUsage {
filtered_processing_ms_->Apply(exp, processing_ms);
}
int Value() const {
virtual int Value() {
if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
return static_cast<int>(InitialUsageInPercent() + 0.5f);
}
@ -171,6 +174,117 @@ class OveruseFrameDetector::SendProcessingUsage {
std::unique_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
};
// Class used for manual testing of overuse, enabled via field trial flag.
class OveruseFrameDetector::OverdoseInjector
    : public OveruseFrameDetector::SendProcessingUsage {
 public:
  // Cycles kNormal -> kOveruse -> kUnderuse -> kNormal, each state lasting
  // the corresponding period. While in kOveruse/kUnderuse, Value() reports a
  // forced usage figure instead of the measured one.
  OverdoseInjector(const CpuOveruseOptions& options,
                   int64_t normal_period_ms,
                   int64_t overuse_period_ms,
                   int64_t underuse_period_ms)
      : OveruseFrameDetector::SendProcessingUsage(options),
        normal_period_ms_(normal_period_ms),
        overuse_period_ms_(overuse_period_ms),
        underuse_period_ms_(underuse_period_ms),
        state_(State::kNormal),
        last_toggling_ms_(-1) {
    RTC_DCHECK_GT(overuse_period_ms, 0);
    RTC_DCHECK_GT(normal_period_ms, 0);
    // The underuse period must also be positive, otherwise the state machine
    // below would leave State::kUnderuse immediately (or never, for negative
    // values combined with clock anomalies).
    RTC_DCHECK_GT(underuse_period_ms, 0);
    LOG(LS_INFO) << "Simulating overuse with intervals " << normal_period_ms
                 << "ms normal mode, " << overuse_period_ms
                 << "ms overuse mode.";
  }

  ~OverdoseInjector() override {}

  int Value() override {
    int64_t now_ms = rtc::TimeMillis();
    if (last_toggling_ms_ == -1) {
      // First call: anchor the state machine at the current time.
      last_toggling_ms_ = now_ms;
    } else {
      switch (state_) {
        case State::kNormal:
          if (now_ms > last_toggling_ms_ + normal_period_ms_) {
            state_ = State::kOveruse;
            last_toggling_ms_ = now_ms;
            LOG(LS_INFO) << "Simulating CPU overuse.";
          }
          break;
        case State::kOveruse:
          if (now_ms > last_toggling_ms_ + overuse_period_ms_) {
            state_ = State::kUnderuse;
            last_toggling_ms_ = now_ms;
            LOG(LS_INFO) << "Simulating CPU underuse.";
          }
          break;
        case State::kUnderuse:
          if (now_ms > last_toggling_ms_ + underuse_period_ms_) {
            state_ = State::kNormal;
            last_toggling_ms_ = now_ms;
            LOG(LS_INFO) << "Actual CPU overuse measurements in effect.";
          }
          break;
      }
    }

    // Force a high value during simulated overuse and a low one during
    // simulated underuse; otherwise report the real measurement.
    rtc::Optional<int> overridden_usage_value;
    switch (state_) {
      case State::kNormal:
        break;
      case State::kOveruse:
        overridden_usage_value.emplace(250);
        break;
      case State::kUnderuse:
        overridden_usage_value.emplace(5);
        break;
    }

    return overridden_usage_value.value_or(SendProcessingUsage::Value());
  }

 private:
  const int64_t normal_period_ms_;
  const int64_t overuse_period_ms_;
  const int64_t underuse_period_ms_;
  enum class State { kNormal, kOveruse, kUnderuse } state_;
  int64_t last_toggling_ms_;
};
std::unique_ptr<OveruseFrameDetector::SendProcessingUsage>
OveruseFrameDetector::CreateSendProcessingUsage(
    const CpuOveruseOptions& options) {
  // If the simulated-overuse field trial is set and parses as three positive
  // "<normal>-<overuse>-<underuse>" millisecond periods, create the injector
  // variant; in every other case fall back to the regular usage estimator.
  const std::string toggling_interval =
      field_trial::FindFullName("WebRTC-ForceSimulatedOveruseIntervalMs");
  std::unique_ptr<SendProcessingUsage> usage;
  if (!toggling_interval.empty()) {
    int normal_period_ms = 0;
    int overuse_period_ms = 0;
    int underuse_period_ms = 0;
    const bool parsed =
        sscanf(toggling_interval.c_str(), "%d-%d-%d", &normal_period_ms,
               &overuse_period_ms, &underuse_period_ms) == 3;
    if (!parsed) {
      LOG(LS_WARNING) << "Malformed toggling interval: " << toggling_interval;
    } else if (normal_period_ms <= 0 || overuse_period_ms <= 0 ||
               underuse_period_ms <= 0) {
      LOG(LS_WARNING)
          << "Invalid (non-positive) normal/overuse/underuse periods: "
          << normal_period_ms << " / " << overuse_period_ms << " / "
          << underuse_period_ms;
    } else {
      usage.reset(new OverdoseInjector(
          options, normal_period_ms, overuse_period_ms, underuse_period_ms));
    }
  }

  if (!usage) {
    // No valid overuse simulation parameters set, use normal usage class.
    usage.reset(new SendProcessingUsage(options));
  }

  return usage;
}
class OveruseFrameDetector::CheckOveruseTask : public rtc::QueuedTask {
public:
explicit CheckOveruseTask(OveruseFrameDetector* overuse_detector)
@ -222,7 +336,7 @@ OveruseFrameDetector::OveruseFrameDetector(
last_rampup_time_ms_(-1),
in_quick_rampup_(false),
current_rampup_delay_ms_(kStandardRampUpDelayMs),
usage_(new SendProcessingUsage(options)) {
usage_(CreateSendProcessingUsage(options)) {
task_checker_.Detach();
}
@ -320,8 +434,9 @@ void OveruseFrameDetector::FrameSent(uint32_t timestamp,
while (!frame_timing_.empty()) {
FrameTiming timing = frame_timing_.front();
if (time_sent_in_us - timing.capture_us <
kEncodingTimeMeasureWindowMs * rtc::kNumMicrosecsPerMillisec)
kEncodingTimeMeasureWindowMs * rtc::kNumMicrosecsPerMillisec) {
break;
}
if (timing.last_send_us != -1) {
int encode_duration_us =
static_cast<int>(timing.last_send_us - timing.capture_us);
@ -396,6 +511,7 @@ void OveruseFrameDetector::CheckForOveruse() {
bool OveruseFrameDetector::IsOverusing(const CpuOveruseMetrics& metrics) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
if (metrics.encode_usage_percent >=
options_.high_encode_usage_threshold_percent) {
++checks_above_threshold_;

View File

@ -87,6 +87,7 @@ class OveruseFrameDetector {
void CheckForOveruse(); // Protected for test purposes.
private:
class OverdoseInjector;
class SendProcessingUsage;
class CheckOveruseTask;
struct FrameTiming {
@ -110,6 +111,9 @@ class OveruseFrameDetector {
void ResetAll(int num_pixels);
static std::unique_ptr<SendProcessingUsage> CreateSendProcessingUsage(
const CpuOveruseOptions& options);
rtc::SequencedTaskChecker task_checker_;
// Owned by the task queue from where StartCheckForOveruse is called.
CheckOveruseTask* check_overuse_task_;

View File

@ -50,7 +50,7 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
VideoEncoderConfig::ContentType content_type);
virtual ~SendStatisticsProxy();
VideoSendStream::Stats GetStats();
virtual VideoSendStream::Stats GetStats();
virtual void OnSendEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_info);
@ -211,6 +211,7 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
TargetRateUpdates target_rate_updates_;
ReportBlockStats report_block_stats_;
const VideoSendStream::Stats start_stats_;
std::map<int, QpCounters>
qp_counters_; // QP counters mapped by spatial idx.
};

View File

@ -146,7 +146,7 @@ class VideoQualityTest : public test::CallTest {
int send_logs_;
VideoSendStream::DegradationPreference degradation_preference_ =
VideoSendStream::DegradationPreference::kBalanced;
VideoSendStream::DegradationPreference::kMaintainFramerate;
Params params_;
};

View File

@ -1943,7 +1943,7 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
video_send_stream_->Start();
test::FrameForwarder forwarder;
video_send_stream_->SetSource(
&forwarder, VideoSendStream::DegradationPreference::kBalanced);
&forwarder, VideoSendStream::DegradationPreference::kMaintainFramerate);
for (size_t i = 0; i < input_frames.size(); i++) {
forwarder.IncomingCapturedFrame(input_frames[i]);
// Wait until the output frame is received before sending the next input
@ -1952,7 +1952,7 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
}
video_send_stream_->Stop();
video_send_stream_->SetSource(
nullptr, VideoSendStream::DegradationPreference::kBalanced);
nullptr, VideoSendStream::DegradationPreference::kMaintainFramerate);
// Test if the input and output frames are the same. render_time_ms and
// timestamp are not compared because capturer sets those values.
@ -3201,7 +3201,7 @@ void VideoSendStreamTest::TestRequestSourceRotateVideo(
CreateVideoStreams();
test::FrameForwarder forwarder;
video_send_stream_->SetSource(
&forwarder, VideoSendStream::DegradationPreference::kBalanced);
&forwarder, VideoSendStream::DegradationPreference::kMaintainFramerate);
EXPECT_TRUE(forwarder.sink_wants().rotation_applied !=
support_orientation_ext);

View File

@ -12,6 +12,7 @@
#include <algorithm>
#include <limits>
#include <numeric>
#include <utility>
#include "webrtc/base/arraysize.h"
@ -42,6 +43,7 @@ const int64_t kFrameLogIntervalMs = 60000;
// on MediaCodec and fallback implementations are in place.
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=7206
const int kMinPixelsPerFrame = 320 * 180;
const int kMinFramerateFps = 2;
// The maximum number of frames to drop at beginning of stream
// to try and achieve desired bitrate.
@ -150,7 +152,7 @@ class ViEEncoder::VideoSourceProxy {
public:
explicit VideoSourceProxy(ViEEncoder* vie_encoder)
: vie_encoder_(vie_encoder),
degradation_preference_(DegradationPreference::kMaintainResolution),
degradation_preference_(DegradationPreference::kDegradationDisabled),
source_(nullptr) {}
void SetSource(rtc::VideoSourceInterface<VideoFrame>* source,
@ -161,10 +163,10 @@ class ViEEncoder::VideoSourceProxy {
rtc::VideoSinkWants wants;
{
rtc::CritScope lock(&crit_);
degradation_preference_ = degradation_preference;
old_source = source_;
source_ = source;
degradation_preference_ = degradation_preference;
wants = current_wants();
wants = GetActiveSinkWants();
}
if (old_source != source && old_source != nullptr) {
@ -181,10 +183,30 @@ class ViEEncoder::VideoSourceProxy {
void SetWantsRotationApplied(bool rotation_applied) {
rtc::CritScope lock(&crit_);
sink_wants_.rotation_applied = rotation_applied;
disabled_scaling_sink_wants_.rotation_applied = rotation_applied;
if (source_) {
source_->AddOrUpdateSink(vie_encoder_, current_wants());
if (source_)
source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
}
rtc::VideoSinkWants GetActiveSinkWants() EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
  // Start from the accumulated sink wants, then clear any constraint that
  // the current degradation preference is not allowed to impose.
  rtc::VideoSinkWants wants = sink_wants_;
  // Only kMaintainResolution adapts by capping framerate.
  const bool framerate_constraint_applies =
      degradation_preference_ == DegradationPreference::kMaintainResolution;
  // kBalanced and kMaintainFramerate adapt by capping resolution.
  const bool resolution_constraint_applies =
      degradation_preference_ == DegradationPreference::kBalanced ||
      degradation_preference_ == DegradationPreference::kMaintainFramerate;
  if (!framerate_constraint_applies)
    wants.max_framerate_fps = std::numeric_limits<int>::max();
  if (!resolution_constraint_applies) {
    wants.max_pixel_count = std::numeric_limits<int>::max();
    wants.target_pixel_count.reset();
  }
  return wants;
}
void RequestResolutionLowerThan(int pixel_count) {
@ -202,10 +224,28 @@ class ViEEncoder::VideoSourceProxy {
const int pixels_wanted = (pixel_count * 3) / 5;
if (pixels_wanted < kMinPixelsPerFrame)
return;
sink_wants_.max_pixel_count = rtc::Optional<int>(pixels_wanted);
sink_wants_.max_pixel_count = pixels_wanted;
sink_wants_.target_pixel_count = rtc::Optional<int>();
if (source_)
source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
}
// Asks the source for a framerate strictly lower than |framerate_fps|,
// clamped so we never request less than kMinFramerateFps.
void RequestFramerateLowerThan(int framerate_fps) {
  // Called on the encoder task queue.
  rtc::CritScope lock(&crit_);
  if (!IsFramerateScalingEnabledLocked()) {
    // This can happen since |degradation_preference_| is set on
    // libjingle's worker thread but the adaptation is done on the encoder
    // task queue.
    return;
  }
  // Scale the requested input frame rate to 2/3 of the current fps (integer
  // division, i.e. rounding down), but never below the minimum.
  sink_wants_.max_framerate_fps =
      std::max(kMinFramerateFps, (framerate_fps * 2) / 3);
  if (source_)
    source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
}
void RequestHigherResolutionThan(int pixel_count) {
@ -216,36 +256,67 @@ class ViEEncoder::VideoSourceProxy {
// task queue.
return;
}
// On step down we request at most 3/5 the pixel count of the previous
// resolution, so in order to take "one step up" we request a resolution as
// close as possible to 5/3 of the current resolution. The actual pixel
// count selected depends on the capabilities of the source. In order to not
// take a too large step up, we cap the requested pixel count to be at most
// four time the current number of pixels.
sink_wants_.target_pixel_count = rtc::Optional<int>((pixel_count * 5) / 3);
sink_wants_.max_pixel_count = rtc::Optional<int>(pixel_count * 4);
if (pixel_count == std::numeric_limits<int>::max()) {
// Remove any constraints.
sink_wants_.target_pixel_count.reset();
sink_wants_.max_pixel_count = std::numeric_limits<int>::max();
} else {
// On step down we request at most 3/5 the pixel count of the previous
// resolution, so in order to take "one step up" we request a resolution
// as close as possible to 5/3 of the current resolution. The actual pixel
// count selected depends on the capabilities of the source. In order to
// not take a too large step up, we cap the requested pixel count to be at
// most four time the current number of pixels.
sink_wants_.target_pixel_count =
rtc::Optional<int>((pixel_count * 5) / 3);
sink_wants_.max_pixel_count = pixel_count * 4;
}
if (source_)
source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
}
// Asks the source for a framerate higher than |framerate_fps|; passing
// std::numeric_limits<int>::max() removes the framerate constraint entirely.
void RequestHigherFramerateThan(int framerate_fps) {
  // Called on the encoder task queue.
  rtc::CritScope lock(&crit_);
  if (!IsFramerateScalingEnabledLocked()) {
    // This can happen since |degradation_preference_| is set on
    // libjingle's worker thread but the adaptation is done on the encoder
    // task queue.
    return;
  }
  if (framerate_fps == std::numeric_limits<int>::max()) {
    // Remove any framerate restraints.
    sink_wants_.max_framerate_fps = std::numeric_limits<int>::max();
  } else {
    // Step the frame rate back up to 3/2 of the current fps (the inverse of
    // the 2/3 down-scaling step, with integer rounding).
    sink_wants_.max_framerate_fps = (framerate_fps * 3) / 2;
  }
  if (source_)
    source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
}
private:
bool IsResolutionScalingEnabledLocked() const
EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
return degradation_preference_ !=
DegradationPreference::kMaintainResolution;
return degradation_preference_ ==
DegradationPreference::kMaintainFramerate ||
degradation_preference_ == DegradationPreference::kBalanced;
}
const rtc::VideoSinkWants& current_wants() const
// True iff the current degradation preference adapts load by changing the
// framerate (keeping resolution); currently only kMaintainResolution does.
bool IsFramerateScalingEnabledLocked() const
EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
// TODO(sprang): Also accept kBalanced here?
return degradation_preference_ ==
DegradationPreference::kMaintainResolution;
}
rtc::CriticalSection crit_;
rtc::SequencedTaskChecker main_checker_;
ViEEncoder* const vie_encoder_;
rtc::VideoSinkWants sink_wants_ GUARDED_BY(&crit_);
rtc::VideoSinkWants disabled_scaling_sink_wants_ GUARDED_BY(&crit_);
DegradationPreference degradation_preference_ GUARDED_BY(&crit_);
rtc::VideoSourceInterface<VideoFrame>* source_ GUARDED_BY(&crit_);
@ -280,8 +351,7 @@ ViEEncoder::ViEEncoder(uint32_t number_of_cores,
last_observed_bitrate_bps_(0),
encoder_paused_and_dropped_frame_(false),
clock_(Clock::GetRealTimeClock()),
scale_counter_(kScaleReasonSize, 0),
degradation_preference_(DegradationPreference::kMaintainResolution),
degradation_preference_(DegradationPreference::kDegradationDisabled),
last_captured_timestamp_(0),
delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
clock_->TimeInMilliseconds()),
@ -352,12 +422,16 @@ void ViEEncoder::SetSource(
source_proxy_->SetSource(source, degradation_preference);
encoder_queue_.PostTask([this, degradation_preference] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
if (degradation_preference_ != degradation_preference) {
// Reset adaptation state, so that we're not tricked into thinking there's
// an already pending request of the same type.
last_adaptation_request_.reset();
}
degradation_preference_ = degradation_preference;
initial_rampup_ =
degradation_preference_ != DegradationPreference::kMaintainResolution
? 0
: kMaxInitialFramedrop;
bool allow_scaling =
degradation_preference_ == DegradationPreference::kMaintainFramerate ||
degradation_preference_ == DegradationPreference::kBalanced;
initial_rampup_ = allow_scaling ? 0 : kMaxInitialFramedrop;
ConfigureQualityScaler();
});
}
@ -460,14 +534,16 @@ void ViEEncoder::ConfigureQualityScaler() {
RTC_DCHECK_RUN_ON(&encoder_queue_);
const auto scaling_settings = settings_.encoder->GetScalingSettings();
const bool degradation_preference_allows_scaling =
degradation_preference_ != DegradationPreference::kMaintainResolution;
degradation_preference_ == DegradationPreference::kMaintainFramerate ||
degradation_preference_ == DegradationPreference::kBalanced;
const bool quality_scaling_allowed =
degradation_preference_allows_scaling && scaling_settings.enabled;
const std::vector<int>& scale_counters = GetScaleCounters();
stats_proxy_->SetCpuScalingStats(
degradation_preference_allows_scaling ? scale_counter_[kCpu] > 0 : false);
degradation_preference_allows_scaling ? scale_counters[kCpu] > 0 : false);
stats_proxy_->SetQualityScalingStats(
quality_scaling_allowed ? scale_counter_[kQuality] : -1);
quality_scaling_allowed ? scale_counters[kQuality] : -1);
if (quality_scaling_allowed) {
// Abort if quality scaler has already been configured.
@ -712,79 +788,193 @@ void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps,
void ViEEncoder::AdaptDown(AdaptReason reason) {
RTC_DCHECK_RUN_ON(&encoder_queue_);
if (degradation_preference_ != DegradationPreference::kBalanced)
return;
RTC_DCHECK(static_cast<bool>(last_frame_info_));
int current_pixel_count = last_frame_info_->pixel_count();
if (last_adaptation_request_ &&
last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptDown &&
current_pixel_count >= last_adaptation_request_->input_pixel_count_) {
// Don't request lower resolution if the current resolution is not lower
// than the last time we asked for the resolution to be lowered.
return;
AdaptationRequest adaptation_request = {
last_frame_info_->pixel_count(),
stats_proxy_->GetStats().input_frame_rate,
AdaptationRequest::Mode::kAdaptDown};
bool downgrade_requested =
last_adaptation_request_ &&
last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptDown;
int max_downgrades = 0;
switch (degradation_preference_) {
case DegradationPreference::kBalanced:
FALLTHROUGH();
case DegradationPreference::kMaintainFramerate:
max_downgrades = kMaxCpuResolutionDowngrades;
if (downgrade_requested &&
adaptation_request.input_pixel_count_ >=
last_adaptation_request_->input_pixel_count_) {
// Don't request lower resolution if the current resolution is not
// lower than the last time we asked for the resolution to be lowered.
return;
}
break;
case DegradationPreference::kMaintainResolution:
max_downgrades = kMaxCpuFramerateDowngrades;
if (adaptation_request.framerate_fps_ <= 0 ||
(downgrade_requested &&
adaptation_request.framerate_fps_ < kMinFramerateFps)) {
// If no input fps estimate available, can't determine how to scale down
// framerate. Otherwise, don't request lower framerate if we don't have
// a valid frame rate. Since framerate, unlike resolution, is a measure
// we have to estimate, and can fluctuate naturally over time, don't
// make the same kind of limitations as for resolution, but trust the
// overuse detector to not trigger too often.
return;
}
break;
case DegradationPreference::kDegradationDisabled:
return;
}
last_adaptation_request_.emplace(AdaptationRequest{
current_pixel_count, AdaptationRequest::Mode::kAdaptDown});
last_adaptation_request_.emplace(adaptation_request);
const std::vector<int>& scale_counter = GetScaleCounters();
switch (reason) {
case kQuality:
stats_proxy_->OnQualityRestrictedResolutionChanged(
scale_counter_[reason] + 1);
stats_proxy_->OnQualityRestrictedResolutionChanged(scale_counter[reason] +
1);
break;
case kCpu:
if (scale_counter_[reason] >= kMaxCpuDowngrades)
if (scale_counter[reason] >= max_downgrades)
return;
// Update stats accordingly.
stats_proxy_->OnCpuRestrictedResolutionChanged(true);
break;
}
++scale_counter_[reason];
source_proxy_->RequestResolutionLowerThan(current_pixel_count);
LOG(LS_INFO) << "Scaling down resolution.";
IncrementScaleCounter(reason, 1);
switch (degradation_preference_) {
case DegradationPreference::kBalanced:
FALLTHROUGH();
case DegradationPreference::kMaintainFramerate:
source_proxy_->RequestResolutionLowerThan(
adaptation_request.input_pixel_count_);
LOG(LS_INFO) << "Scaling down resolution.";
break;
case DegradationPreference::kMaintainResolution:
source_proxy_->RequestFramerateLowerThan(
adaptation_request.framerate_fps_);
LOG(LS_INFO) << "Scaling down framerate.";
break;
case DegradationPreference::kDegradationDisabled:
RTC_NOTREACHED();
}
for (size_t i = 0; i < kScaleReasonSize; ++i) {
LOG(LS_INFO) << "Scaled " << scale_counter_[i]
LOG(LS_INFO) << "Scaled " << GetScaleCounters()[i]
<< " times for reason: " << (i ? "cpu" : "quality");
}
}
void ViEEncoder::AdaptUp(AdaptReason reason) {
RTC_DCHECK_RUN_ON(&encoder_queue_);
if (scale_counter_[reason] == 0 ||
degradation_preference_ != DegradationPreference::kBalanced) {
int scale_counter = GetScaleCounters()[reason];
if (scale_counter == 0)
return;
RTC_DCHECK_GT(scale_counter, 0);
AdaptationRequest adaptation_request = {
last_frame_info_->pixel_count(),
stats_proxy_->GetStats().input_frame_rate,
AdaptationRequest::Mode::kAdaptUp};
bool adapt_up_requested =
last_adaptation_request_ &&
last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptUp;
switch (degradation_preference_) {
case DegradationPreference::kBalanced:
FALLTHROUGH();
case DegradationPreference::kMaintainFramerate:
if (adapt_up_requested &&
adaptation_request.input_pixel_count_ <=
last_adaptation_request_->input_pixel_count_) {
// Don't request higher resolution if the current resolution is not
// higher than the last time we asked for the resolution to be higher.
return;
}
break;
case DegradationPreference::kMaintainResolution:
// TODO(sprang): Don't request higher framerate if we are already at
// max requested fps?
break;
case DegradationPreference::kDegradationDisabled:
return;
}
// Only scale if resolution is higher than last time we requested higher
// resolution.
RTC_DCHECK(static_cast<bool>(last_frame_info_));
int current_pixel_count = last_frame_info_->pixel_count();
if (last_adaptation_request_ &&
last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptUp &&
current_pixel_count <= last_adaptation_request_->input_pixel_count_) {
// Don't request higher resolution if the current resolution is not higher
// than the last time we asked for the resolution to be higher.
return;
}
last_adaptation_request_.emplace(AdaptationRequest{
current_pixel_count, AdaptationRequest::Mode::kAdaptUp});
last_adaptation_request_.emplace(adaptation_request);
switch (reason) {
case kQuality:
stats_proxy_->OnQualityRestrictedResolutionChanged(
scale_counter_[reason] - 1);
stats_proxy_->OnQualityRestrictedResolutionChanged(scale_counter - 1);
break;
case kCpu:
// Update stats accordingly.
stats_proxy_->OnCpuRestrictedResolutionChanged(scale_counter_[reason] >
1);
stats_proxy_->OnCpuRestrictedResolutionChanged(scale_counter > 1);
break;
}
--scale_counter_[reason];
source_proxy_->RequestHigherResolutionThan(current_pixel_count);
LOG(LS_INFO) << "Scaling up resolution.";
// Decrease counter of how many times we have scaled down, for this
// degradation preference mode and reason.
IncrementScaleCounter(reason, -1);
// Get a sum of how many times have scaled down, in total, for this
// degradation preference mode. If it is 0, remove any restraints.
const std::vector<int>& current_scale_counters = GetScaleCounters();
const int scale_sum = std::accumulate(current_scale_counters.begin(),
current_scale_counters.end(), 0);
switch (degradation_preference_) {
case DegradationPreference::kBalanced:
FALLTHROUGH();
case DegradationPreference::kMaintainFramerate:
if (scale_sum == 0) {
LOG(LS_INFO) << "Removing resolution down-scaling setting.";
source_proxy_->RequestHigherResolutionThan(
std::numeric_limits<int>::max());
} else {
source_proxy_->RequestHigherResolutionThan(
adaptation_request.input_pixel_count_);
LOG(LS_INFO) << "Scaling up resolution.";
}
break;
case DegradationPreference::kMaintainResolution:
if (scale_sum == 0) {
LOG(LS_INFO) << "Removing framerate down-scaling setting.";
source_proxy_->RequestHigherFramerateThan(
std::numeric_limits<int>::max());
} else {
source_proxy_->RequestHigherFramerateThan(
adaptation_request.framerate_fps_);
LOG(LS_INFO) << "Scaling up framerate.";
}
break;
case DegradationPreference::kDegradationDisabled:
RTC_NOTREACHED();
}
for (size_t i = 0; i < kScaleReasonSize; ++i) {
LOG(LS_INFO) << "Scaled " << scale_counter_[i]
LOG(LS_INFO) << "Scaled " << current_scale_counters[i]
<< " times for reason: " << (i ? "cpu" : "quality");
}
}
// Returns the scale counters (one entry per AdaptReason) for the currently
// active degradation preference, lazily creating and zero-initializing them
// the first time that preference is seen. Single map lookup instead of the
// previous find + two operator[] accesses.
const std::vector<int>& ViEEncoder::GetScaleCounters() {
  std::vector<int>& counters = scale_counters_[degradation_preference_];
  if (counters.empty()) {
    // Newly default-inserted entry: size it so there is one zeroed counter
    // per scale reason.
    counters.resize(kScaleReasonSize);
  }
  return counters;
}
// Adjusts the scale counter for |reason| under the current degradation
// preference by |delta| (negative when scaling back up).
void ViEEncoder::IncrementScaleCounter(int reason, int delta) {
  // Get the counters and validate. This may also lazily initialize the state.
  const std::vector<int>& counter = GetScaleCounters();
  if (delta < 0) {
    // The counter must hold at least |-delta| prior downgrades so it cannot
    // go negative. (The previous check against |delta| itself was vacuously
    // true for any non-negative counter.)
    RTC_DCHECK_GE(counter[reason], -delta);
  }
  scale_counters_[degradation_preference_][reason] += delta;
}
} // namespace webrtc

View File

@ -11,6 +11,7 @@
#ifndef WEBRTC_VIDEO_VIE_ENCODER_H_
#define WEBRTC_VIDEO_VIE_ENCODER_H_
#include <map>
#include <memory>
#include <string>
#include <vector>
@ -62,7 +63,9 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
};
// Downscale resolution at most 2 times for CPU reasons.
static const int kMaxCpuDowngrades = 2;
static const int kMaxCpuResolutionDowngrades = 2;
// Downscale framerate at most 4 times.
static const int kMaxCpuFramerateDowngrades = 4;
ViEEncoder(uint32_t number_of_cores,
SendStatisticsProxy* stats_proxy,
@ -172,6 +175,11 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
void TraceFrameDropStart();
void TraceFrameDropEnd();
const std::vector<int>& GetScaleCounters()
EXCLUSIVE_LOCKS_REQUIRED(&encoder_queue_);
void IncrementScaleCounter(int reason, int delta)
EXCLUSIVE_LOCKS_REQUIRED(&encoder_queue_);
rtc::Event shutdown_event_;
const uint32_t number_of_cores_;
@ -210,8 +218,11 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
bool encoder_paused_and_dropped_frame_ ACCESS_ON(&encoder_queue_);
Clock* const clock_;
// Counters used for deciding if the video resolution is currently
// restricted, and if so, why.
std::vector<int> scale_counter_ ACCESS_ON(&encoder_queue_);
// restricted, and if so, why, on a per degradation preference basis.
// TODO(sprang): Replace this with a state holding a relative overuse measure
// instead, that can be translated into suitable down-scale or fps limit.
std::map<const VideoSendStream::DegradationPreference, std::vector<int>>
scale_counters_ ACCESS_ON(&encoder_queue_);
// Set depending on degradation preferences
VideoSendStream::DegradationPreference degradation_preference_
ACCESS_ON(&encoder_queue_);
@ -219,6 +230,8 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
struct AdaptationRequest {
// The pixel count produced by the source at the time of the adaptation.
int input_pixel_count_;
// Framerate received from the source at the time of the adaptation.
int framerate_fps_;
// Indicates if request was to adapt up or down.
enum class Mode { kAdaptUp, kAdaptDown } mode_;
};

View File

@ -13,6 +13,7 @@
#include <utility>
#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/base/fakeclock.h"
#include "webrtc/base/logging.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
@ -35,7 +36,9 @@ const int kMinPixelsPerFrame = 320 * 180;
#else
const int kMinPixelsPerFrame = 120 * 90;
#endif
}
const int kMinFramerateFps = 2;
const int64_t kFrameTimeoutMs = 100;
} // namespace
namespace webrtc {
@ -145,16 +148,17 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
int cropped_height = 0;
int out_width = 0;
int out_height = 0;
if (adaption_enabled() &&
adapter_.AdaptFrameResolution(video_frame.width(), video_frame.height(),
video_frame.timestamp_us() * 1000,
&cropped_width, &cropped_height,
&out_width, &out_height)) {
VideoFrame adapted_frame(
new rtc::RefCountedObject<TestBuffer>(nullptr, out_width, out_height),
99, 99, kVideoRotation_0);
adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
test::FrameForwarder::IncomingCapturedFrame(adapted_frame);
if (adaption_enabled()) {
if (adapter_.AdaptFrameResolution(
video_frame.width(), video_frame.height(),
video_frame.timestamp_us() * 1000, &cropped_width,
&cropped_height, &out_width, &out_height)) {
VideoFrame adapted_frame(new rtc::RefCountedObject<TestBuffer>(
nullptr, out_width, out_height),
99, 99, kVideoRotation_0);
adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
test::FrameForwarder::IncomingCapturedFrame(adapted_frame);
}
} else {
test::FrameForwarder::IncomingCapturedFrame(video_frame);
}
@ -163,14 +167,45 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
rtc::CritScope cs(&crit_);
adapter_.OnResolutionRequest(wants.target_pixel_count,
wants.max_pixel_count);
adapter_.OnResolutionFramerateRequest(wants.target_pixel_count,
wants.max_pixel_count,
wants.max_framerate_fps);
test::FrameForwarder::AddOrUpdateSink(sink, wants);
}
cricket::VideoAdapter adapter_;
bool adaptation_enabled_ GUARDED_BY(crit_);
};
// SendStatisticsProxy wrapper allowing tests to inject a canned Stats value;
// while no mock stats are set, calls fall through to the real implementation.
class MockableSendStatisticsProxy : public SendStatisticsProxy {
 public:
  MockableSendStatisticsProxy(Clock* clock,
                              const VideoSendStream::Config& config,
                              VideoEncoderConfig::ContentType content_type)
      : SendStatisticsProxy(clock, config, content_type) {}

  // Returns the injected stats if any have been set, otherwise the real ones.
  VideoSendStream::Stats GetStats() override {
    rtc::CritScope cs(&lock_);
    return mock_stats_ ? *mock_stats_ : SendStatisticsProxy::GetStats();
  }

  // Makes subsequent GetStats() calls return |stats|.
  void SetMockStats(const VideoSendStream::Stats& stats) {
    rtc::CritScope cs(&lock_);
    mock_stats_.emplace(stats);
  }

  // Reverts GetStats() to the real implementation.
  void ResetMockStats() {
    rtc::CritScope cs(&lock_);
    mock_stats_.reset();
  }

 private:
  rtc::CriticalSection lock_;
  rtc::Optional<VideoSendStream::Stats> mock_stats_ GUARDED_BY(lock_);
};
} // namespace
class ViEEncoderTest : public ::testing::Test {
@ -182,7 +217,7 @@ class ViEEncoderTest : public ::testing::Test {
codec_width_(320),
codec_height_(240),
fake_encoder_(),
stats_proxy_(new SendStatisticsProxy(
stats_proxy_(new MockableSendStatisticsProxy(
Clock::GetRealTimeClock(),
video_send_config_,
webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo)),
@ -208,8 +243,9 @@ class ViEEncoderTest : public ::testing::Test {
vie_encoder_.reset(new ViEEncoderUnderTest(
stats_proxy_.get(), video_send_config_.encoder_settings));
vie_encoder_->SetSink(&sink_, false /* rotation_applied */);
vie_encoder_->SetSource(&video_source_,
VideoSendStream::DegradationPreference::kBalanced);
vie_encoder_->SetSource(
&video_source_,
VideoSendStream::DegradationPreference::kMaintainFramerate);
vie_encoder_->SetStartBitrate(kTargetBitrateBps);
vie_encoder_->ConfigureEncoder(std::move(video_encoder_config),
kMaxPayloadLength, nack_enabled);
@ -244,6 +280,7 @@ class ViEEncoderTest : public ::testing::Test {
new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99,
kVideoRotation_0);
frame.set_ntp_time_ms(ntp_time_ms);
frame.set_timestamp_us(ntp_time_ms * 1000);
return frame;
}
@ -366,9 +403,14 @@ class ViEEncoderTest : public ::testing::Test {
void WaitForEncodedFrame(uint32_t expected_width,
uint32_t expected_height) {
EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
CheckLastFrameSizeMathces(expected_width, expected_height);
}
void CheckLastFrameSizeMathces(uint32_t expected_width,
uint32_t expected_height) {
uint32_t width = 0;
uint32_t height = 0;
EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
{
rtc::CritScope lock(&crit_);
width = last_width_;
@ -380,6 +422,10 @@ class ViEEncoderTest : public ::testing::Test {
void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(100)); }
bool WaitForFrame(int64_t timeout_ms) {
return encoded_frame_event_.Wait(timeout_ms);
}
void SetExpectNoFrames() {
rtc::CritScope lock(&crit_);
expect_frames_ = false;
@ -432,7 +478,7 @@ class ViEEncoderTest : public ::testing::Test {
int codec_width_;
int codec_height_;
TestEncoder fake_encoder_;
std::unique_ptr<SendStatisticsProxy> stats_proxy_;
std::unique_ptr<MockableSendStatisticsProxy> stats_proxy_;
TestSink sink_;
AdaptingFrameForwarder video_source_;
std::unique_ptr<ViEEncoderUnderTest> vie_encoder_;
@ -650,8 +696,9 @@ TEST_F(ViEEncoderTest, Vp8ResilienceIsOnFor1S2TlWithNackEnabled) {
TEST_F(ViEEncoderTest, SwitchSourceDeregisterEncoderAsSink) {
EXPECT_TRUE(video_source_.has_sinks());
test::FrameForwarder new_video_source;
vie_encoder_->SetSource(&new_video_source,
VideoSendStream::DegradationPreference::kBalanced);
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainFramerate);
EXPECT_FALSE(video_source_.has_sinks());
EXPECT_TRUE(new_video_source.has_sinks());
@ -669,14 +716,15 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
video_source_.sink_wants().max_pixel_count);
int frame_width = 1280;
int frame_height = 720;
// Trigger CPU overuse kMaxCpuDowngrades times. Every time, ViEEncoder should
// request lower resolution.
for (int i = 1; i <= ViEEncoder::kMaxCpuDowngrades; ++i) {
for (int i = 1; i <= ViEEncoder::kMaxCpuResolutionDowngrades; ++i) {
video_source_.IncomingCapturedFrame(
CreateFrame(i, frame_width, frame_height));
sink_.WaitForEncodedFrame(i);
@ -684,8 +732,7 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
vie_encoder_->TriggerCpuOveruse();
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or(
std::numeric_limits<int>::max()),
EXPECT_LT(video_source_.sink_wants().max_pixel_count,
frame_width * frame_height);
frame_width /= 2;
@ -696,8 +743,8 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
// lower resolution.
rtc::VideoSinkWants current_wants = video_source_.sink_wants();
video_source_.IncomingCapturedFrame(CreateFrame(
ViEEncoder::kMaxCpuDowngrades + 1, frame_width, frame_height));
sink_.WaitForEncodedFrame(ViEEncoder::kMaxCpuDowngrades + 1);
ViEEncoder::kMaxCpuResolutionDowngrades + 1, frame_width, frame_height));
sink_.WaitForEncodedFrame(ViEEncoder::kMaxCpuResolutionDowngrades + 1);
vie_encoder_->TriggerCpuOveruse();
EXPECT_EQ(video_source_.sink_wants().target_pixel_count,
current_wants.target_pixel_count);
@ -709,57 +756,120 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
EXPECT_EQ(frame_width * frame_height * 5 / 3,
video_source_.sink_wants().target_pixel_count.value_or(0));
EXPECT_EQ(frame_width * frame_height * 4,
video_source_.sink_wants().max_pixel_count.value_or(0));
video_source_.sink_wants().max_pixel_count);
vie_encoder_->Stop();
}
TEST_F(ViEEncoderTest,
ResolutionSinkWantsResetOnSetSourceWithDisabledResolutionScaling) {
TEST_F(ViEEncoderTest, SinkWantsStoredByDegradationPreference) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
video_source_.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
video_source_.sink_wants().max_framerate_fps);
int frame_width = 1280;
int frame_height = 720;
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
const int kFrameIntervalMs = 1000 / 30;
int frame_timestamp = 1;
video_source_.IncomingCapturedFrame(
CreateFrame(1, frame_width, frame_height));
sink_.WaitForEncodedFrame(1);
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Trigger CPU overuse.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
CreateFrame(2, frame_width, frame_height));
sink_.WaitForEncodedFrame(2);
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or(
std::numeric_limits<int>::max()),
frame_width * frame_height);
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Set new source.
// Default degradation preference is maintain-framerate, so will lower max
// wanted resolution.
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count,
kFrameWidth * kFrameHeight);
EXPECT_EQ(std::numeric_limits<int>::max(),
video_source_.sink_wants().max_framerate_fps);
// Set new source, switch to maintain-resolution.
test::FrameForwarder new_video_source;
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainResolution);
// Initially no degradation registered.
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_framerate_fps);
// Force an input frame rate to be available, or the adaptation call won't
// know what framerate to adapt from.
VideoSendStream::Stats stats = stats_proxy_->GetStats();
stats.input_frame_rate = 30;
stats_proxy_->SetMockStats(stats);
vie_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
CreateFrame(3, frame_width, frame_height));
sink_.WaitForEncodedFrame(3);
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Some framerate constraint should be set.
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_pixel_count);
EXPECT_TRUE(new_video_source.sink_wants().max_framerate_fps);
// Turn off degradation completely.
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kDegradationDisabled);
// Initially no degradation registered.
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_framerate_fps);
vie_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Still no degradation.
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_framerate_fps);
// Calling SetSource with resolution scaling enabled apply the old SinkWants.
vie_encoder_->SetSource(&new_video_source,
VideoSendStream::DegradationPreference::kBalanced);
EXPECT_LT(new_video_source.sink_wants().max_pixel_count.value_or(
std::numeric_limits<int>::max()),
frame_width * frame_height);
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainFramerate);
EXPECT_LT(new_video_source.sink_wants().max_pixel_count,
kFrameWidth * kFrameHeight);
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_framerate_fps);
// Calling SetSource with framerate scaling enabled apply the old SinkWants.
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainResolution);
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_pixel_count);
EXPECT_TRUE(new_video_source.sink_wants().max_framerate_fps);
vie_encoder_->Stop();
}
@ -824,8 +934,9 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) {
// Set new source with adaptation still enabled.
test::FrameForwarder new_video_source;
vie_encoder_->SetSource(&new_video_source,
VideoSendStream::DegradationPreference::kBalanced);
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainFramerate);
new_video_source.IncomingCapturedFrame(
CreateFrame(3, frame_width, frame_height));
@ -837,7 +948,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) {
// Set adaptation disabled.
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainResolution);
VideoSendStream::DegradationPreference::kDegradationDisabled);
new_video_source.IncomingCapturedFrame(
CreateFrame(4, frame_width, frame_height));
@ -847,8 +958,9 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) {
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
// Set adaptation back to enabled.
vie_encoder_->SetSource(&new_video_source,
VideoSendStream::DegradationPreference::kBalanced);
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainFramerate);
new_video_source.IncomingCapturedFrame(
CreateFrame(5, frame_width, frame_height));
@ -960,8 +1072,9 @@ TEST_F(ViEEncoderTest, QualityAdaptationStatsAreResetWhenScalerIsDisabled) {
// Set source with adaptation still enabled but quality scaler is off.
fake_encoder_.SetQualityScaling(false);
vie_encoder_->SetSource(&video_source_,
VideoSendStream::DegradationPreference::kBalanced);
vie_encoder_->SetSource(
&video_source_,
VideoSendStream::DegradationPreference::kMaintainFramerate);
video_source_.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
sink_.WaitForEncodedFrame(4);
@ -999,8 +1112,9 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
// Set new source with adaptation still enabled.
test::FrameForwarder new_video_source;
vie_encoder_->SetSource(&new_video_source,
VideoSendStream::DegradationPreference::kBalanced);
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainFramerate);
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, frame_width, frame_height));
@ -1009,7 +1123,7 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
// Set adaptation disabled.
// Set cpu adaptation by frame dropping.
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainResolution);
@ -1017,18 +1131,58 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
// Not adapted at first.
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
// Switch back the source with adaptation enabled.
vie_encoder_->SetSource(&video_source_,
VideoSendStream::DegradationPreference::kBalanced);
// Force an input frame rate to be available, or the adaptation call won't
// know what framerate to adapt from.
VideoSendStream::Stats mock_stats = stats_proxy_->GetStats();
mock_stats.input_frame_rate = 30;
stats_proxy_->SetMockStats(mock_stats);
vie_encoder_->TriggerCpuOveruse();
stats_proxy_->ResetMockStats();
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
// Framerate now adapted.
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
// Disable CPU adaptation.
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kDegradationDisabled);
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
// Try to trigger overuse. Should not succeed.
stats_proxy_->SetMockStats(mock_stats);
vie_encoder_->TriggerCpuOveruse();
stats_proxy_->ResetMockStats();
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
// Switch back the source with resolution adaptation enabled.
vie_encoder_->SetSource(
&video_source_,
VideoSendStream::DegradationPreference::kMaintainFramerate);
video_source_.IncomingCapturedFrame(
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
// Trigger CPU normal usage.
vie_encoder_->TriggerCpuNormalUsage();
@ -1037,7 +1191,28 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
EXPECT_EQ(3, stats.number_of_cpu_adapt_changes);
// Back to the source with adaptation off, set it back to maintain-resolution.
vie_encoder_->SetSource(
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainResolution);
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
// Disabled, since we previously switched the source to disabled.
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(3, stats.number_of_cpu_adapt_changes);
// Trigger CPU normal usage.
vie_encoder_->TriggerCpuNormalUsage();
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(4, stats.number_of_cpu_adapt_changes);
vie_encoder_->Stop();
}
@ -1062,7 +1237,8 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) {
// Expect no scaling to begin with
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
video_source_.sink_wants().max_pixel_count);
video_source_.IncomingCapturedFrame(
CreateFrame(1, frame_width, frame_height));
@ -1077,7 +1253,7 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) {
// Expect a scale down.
EXPECT_TRUE(video_source_.sink_wants().max_pixel_count);
EXPECT_LT(*video_source_.sink_wants().max_pixel_count,
EXPECT_LT(video_source_.sink_wants().max_pixel_count,
frame_width * frame_height);
// Set adaptation disabled.
@ -1093,7 +1269,8 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) {
sink_.WaitForEncodedFrame(3);
// Expect no scaling
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_pixel_count);
// Trigger scale up
vie_encoder_->TriggerQualityHigh();
@ -1102,7 +1279,8 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) {
sink_.WaitForEncodedFrame(4);
// Expect nothing to change, still no scaling
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_pixel_count);
vie_encoder_->Stop();
}
@ -1118,7 +1296,7 @@ TEST_F(ViEEncoderTest, DoesNotScaleBelowSetLimit) {
sink_.WaitForEncodedFrame(i);
// Trigger scale down
vie_encoder_->TriggerQualityLow();
EXPECT_GE(*video_source_.sink_wants().max_pixel_count, kMinPixelsPerFrame);
EXPECT_GE(video_source_.sink_wants().max_pixel_count, kMinPixelsPerFrame);
}
vie_encoder_->Stop();
@ -1211,10 +1389,9 @@ TEST_F(ViEEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
sink_.ExpectDroppedFrame();
// Expect the sink_wants to specify a scaled frame.
EXPECT_TRUE(video_source_.sink_wants().max_pixel_count);
EXPECT_LT(*video_source_.sink_wants().max_pixel_count, 1000 * 1000);
EXPECT_LT(video_source_.sink_wants().max_pixel_count, 1000 * 1000);
int last_pixel_count = *video_source_.sink_wants().max_pixel_count;
int last_pixel_count = video_source_.sink_wants().max_pixel_count;
// Next frame is scaled
video_source_.IncomingCapturedFrame(
@ -1223,7 +1400,7 @@ TEST_F(ViEEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
// Expect to drop this frame, the wait should time out.
sink_.ExpectDroppedFrame();
EXPECT_LT(*video_source_.sink_wants().max_pixel_count, last_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count, last_pixel_count);
vie_encoder_->Stop();
}
@ -1247,8 +1424,7 @@ TEST_F(ViEEncoderTest, NrOfDroppedFramesLimited) {
sink_.WaitForEncodedFrame(i);
// Expect the sink_wants to specify a scaled frame.
EXPECT_TRUE(video_source_.sink_wants().max_pixel_count);
EXPECT_LT(*video_source_.sink_wants().max_pixel_count, 1000 * 1000);
EXPECT_LT(video_source_.sink_wants().max_pixel_count, 1000 * 1000);
vie_encoder_->Stop();
}
@ -1309,7 +1485,7 @@ TEST_F(ViEEncoderTest, AdaptsResolutionOnOveruse) {
CreateFrame(2, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
// Trigger CPU normal use, return to original resoluton;
// Trigger CPU normal use, return to original resolution;
vie_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(
CreateFrame(3, kFrameWidth, kFrameHeight));
@ -1329,4 +1505,158 @@ TEST_F(ViEEncoderTest, FailingInitEncodeDoesntCauseCrash) {
sink_.ExpectDroppedFrame();
vie_encoder_->Stop();
}
// Verifies that with the kMaintainResolution degradation preference, CPU
// overuse is handled by lowering the encoded framerate (dropping input
// frames) while every encoded frame keeps its full original resolution.
// NOTE(review): the drop counts below are statistical; see the TODO about
// the kErrorMargin tolerance.
TEST_F(ViEEncoderTest, AdaptsFrameOnOveruseWithMaintainResolution) {
  const int kDefaultFramerateFps = 30;
  const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerateFps;
  const int kFrameWidth = 1280;
  const int kFrameHeight = 720;
  // Fake clock so the input fps estimator sees exactly the intervals we set.
  rtc::ScopedFakeClock fake_clock;

  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
  vie_encoder_->SetSource(
      &video_source_,
      VideoSendStream::DegradationPreference::kMaintainResolution);
  video_source_.set_adaptation_enabled(true);

  fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
  int64_t timestamp_ms = kFrameIntervalMs;

  video_source_.IncomingCapturedFrame(
      CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
  sink_.WaitForEncodedFrame(timestamp_ms);

  // Try to trigger overuse. No fps estimate available => no effect.
  vie_encoder_->TriggerCpuOveruse();

  // Insert frames for one second to get a stable estimate.
  for (int i = 0; i < kDefaultFramerateFps; ++i) {
    timestamp_ms += kFrameIntervalMs;
    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
    video_source_.IncomingCapturedFrame(
        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
    sink_.WaitForEncodedFrame(timestamp_ms);
  }

  // Trigger CPU overuse, reduce framerate by 2/3.
  vie_encoder_->TriggerCpuOveruse();
  // Feed one more second of frames; roughly a third of them should now be
  // dropped, and the ones that pass must keep the original resolution.
  int num_frames_dropped = 0;
  for (int i = 0; i < kDefaultFramerateFps; ++i) {
    timestamp_ms += kFrameIntervalMs;
    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
    video_source_.IncomingCapturedFrame(
        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
      ++num_frames_dropped;
    } else {
      sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
    }
  }

  // TODO(sprang): Find where there's rounding errors or stuff causing the
  // margin here to be a little larger than we'd like (input fps estimate is
  // off) and the frame dropping is a little too aggressive.
  const int kErrorMargin = 5;
  EXPECT_NEAR(num_frames_dropped,
              kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
              kErrorMargin);

  // Trigger CPU overuse, reduce framerate by 2/3 again.
  vie_encoder_->TriggerCpuOveruse();
  num_frames_dropped = 0;
  for (int i = 0; i < kDefaultFramerateFps; ++i) {
    timestamp_ms += kFrameIntervalMs;
    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
    video_source_.IncomingCapturedFrame(
        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
      ++num_frames_dropped;
    } else {
      sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
    }
  }
  // Two cumulative 2/3 reductions => ~4/9 of the input rate gets through.
  EXPECT_NEAR(num_frames_dropped,
              kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9),
              kErrorMargin);

  // Go back up one step.
  vie_encoder_->TriggerCpuNormalUsage();
  num_frames_dropped = 0;
  for (int i = 0; i < kDefaultFramerateFps; ++i) {
    timestamp_ms += kFrameIntervalMs;
    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
    video_source_.IncomingCapturedFrame(
        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
      ++num_frames_dropped;
    } else {
      sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
    }
  }
  // Back at a single 2/3 reduction.
  EXPECT_NEAR(num_frames_dropped,
              kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
              kErrorMargin);

  // Go back up to original mode.
  vie_encoder_->TriggerCpuNormalUsage();
  num_frames_dropped = 0;
  for (int i = 0; i < kDefaultFramerateFps; ++i) {
    timestamp_ms += kFrameIntervalMs;
    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
    video_source_.IncomingCapturedFrame(
        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
      ++num_frames_dropped;
    } else {
      sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
    }
  }
  // No framerate restriction left => no drops expected.
  EXPECT_NEAR(num_frames_dropped, 0, kErrorMargin);

  vie_encoder_->Stop();
}
// Verifies that framerate adaptation never pushes the encoder below the
// minimum allowed framerate: with input already at/below that minimum, any
// number of overuse signals must leave every captured frame encoded.
TEST_F(ViEEncoderTest, DoesntAdaptDownPastMinFramerate) {
  const int kInputFps = 5;
  const int kInputFrameIntervalMs = rtc::kNumMillisecsPerSec / kInputFps;
  const int kMinFpsFrameInterval = rtc::kNumMillisecsPerSec / kMinFramerateFps;
  const int kFrameWidth = 1280;
  const int kFrameHeight = 720;

  // Fake clock keeps the input fps estimator in sync with our frame pacing.
  rtc::ScopedFakeClock fake_clock;
  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
  vie_encoder_->SetSource(
      &video_source_,
      VideoSendStream::DegradationPreference::kMaintainResolution);
  video_source_.set_adaptation_enabled(true);
  fake_clock.SetTimeMicros(kInputFrameIntervalMs * 1000);
  int64_t capture_time_ms = kInputFrameIntervalMs;

  // Request as many downgrades as the encoder will ever grant.
  for (int downgrade = 0; downgrade < ViEEncoder::kMaxCpuResolutionDowngrades;
       ++downgrade) {
    // Feed a second's worth of frames so a fresh fps estimate exists...
    for (int frame = 0; frame < kInputFps; ++frame) {
      video_source_.IncomingCapturedFrame(
          CreateFrame(capture_time_ms, kFrameWidth, kFrameHeight));
      capture_time_ms += kInputFrameIntervalMs;
      fake_clock.AdvanceTimeMicros(kInputFrameIntervalMs * 1000);
    }
    // ...and then signal overuse once more.
    vie_encoder_->TriggerCpuOveruse();
  }

  // Flush any frame still in flight through the encoder.
  sink_.WaitForFrame(kDefaultTimeoutMs);

  // At the minimum framerate, every captured frame must still get encoded.
  for (int frame = 0; frame < 10; ++frame) {
    capture_time_ms += kMinFpsFrameInterval;
    fake_clock.AdvanceTimeMicros(kMinFpsFrameInterval * 1000);
    video_source_.IncomingCapturedFrame(
        CreateFrame(capture_time_ms, kFrameWidth, kFrameHeight));
    sink_.WaitForEncodedFrame(capture_time_ms);
  }

  vie_encoder_->Stop();
}
} // namespace webrtc

View File

@ -214,12 +214,21 @@ class VideoSendStream {
// Based on the spec in
// https://w3c.github.io/webrtc-pc/#idl-def-rtcdegradationpreference.
// These options are enforced on a best-effort basis. For instance, all of
// these options may suffer some frame drops in order to avoid queuing.
// TODO(sprang): Look into possibility of more strictly enforcing the
// maintain-framerate option.
enum class DegradationPreference {
// Don't take any actions based on over-utilization signals.
kDegradationDisabled,
// On over-use, request lower resolution, possibly causing down-scaling.
kMaintainResolution,
// TODO(perkj): Implement kMaintainFrameRate. kBalanced will drop frames
// if the encoder overshoots or the encoder can not encode fast enough.
// On over-use, request lower frame rate, possibly causing frame drops.
kMaintainFramerate,
// Try to strike a "pleasing" balance between frame rate or resolution.
kBalanced,
};
virtual void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) = 0;