Reland "Add initial support for RtpEncodingParameters max_framerate."
This reverts commit 948b7e37557af68b3bc9b81b29ae2daffb2784ad.

Reason for revert: downstream project fixed.

Original change's description:
> Revert "Add initial support for RtpEncodingParameters max_framerate."
>
> This reverts commit ced5cfdb35a20c684df927eab37e16d35979555f.
>
> Reason for revert: Breaks downstream project.
>
> Original change's description:
> > Add initial support for RtpEncodingParameters max_framerate.
> >
> > Add support to set the framerate to the maximum of |max_framerate|.
> > Different framerates are currently not supported per stream for video.
> >
> > Bug: webrtc:9597
> > Change-Id: Ie326617b66bd97be387f809a7f82b97b8f3ff5fe
> > Reviewed-on: https://webrtc-review.googlesource.com/92392
> > Reviewed-by: Sebastian Jansson <srte@webrtc.org>
> > Reviewed-by: Erik Språng <sprang@webrtc.org>
> > Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> > Reviewed-by: Steve Anton <steveanton@webrtc.org>
> > Commit-Queue: Åsa Persson <asapersson@webrtc.org>
> > Cr-Commit-Position: refs/heads/master@{#24270}
>
> TBR=steveanton@webrtc.org,magjed@webrtc.org,asapersson@webrtc.org,sprang@webrtc.org,srte@webrtc.org
>
> Change-Id: I508fe48e0c53996654f657357913ac307dc256bd
> No-Presubmit: true
> No-Tree-Checks: true
> No-Try: true
> Bug: webrtc:9597
> Reviewed-on: https://webrtc-review.googlesource.com/94060
> Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
> Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#24277}

TBR=steveanton@webrtc.org,mbonadei@webrtc.org,magjed@webrtc.org,asapersson@webrtc.org,sprang@webrtc.org,srte@webrtc.org

Bug: webrtc:9597
Change-Id: Ieed9d62787f3e9dcb439399bfe7529012292381e
Reviewed-on: https://webrtc-review.googlesource.com/100080
Reviewed-by: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24720}
Parent: 1417ae8662
Commit: 8c1bf9595a
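For context before the diff: the new field is read and written through the existing RtpParameters flow, the same way the tests below drive it via GetRtpSendParameters()/SetRtpSendParameters(). A minimal sketch of how an application might cap a video encoding at 15 fps; the |sender| variable here is a hypothetical rtc::scoped_refptr<webrtc::RtpSenderInterface> obtained elsewhere (e.g. from PeerConnection::AddTrack), not something defined in this change:

  // Sketch only; |sender| is assumed to be a valid RtpSenderInterface for a
  // video track. max_framerate is an absl::optional<int>, in frames per second.
  webrtc::RtpParameters parameters = sender->GetParameters();
  if (!parameters.encodings.empty()) {
    parameters.encodings[0].max_framerate = 15;
    webrtc::RTCError result = sender->SetParameters(parameters);
    RTC_DCHECK(result.ok());
  }

For simulcast, the encoder currently runs at the maximum |max_framerate| across all encodings, which is what the MaxSimulcastFrameratePropagatedToEncoder test below asserts.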
@@ -421,7 +421,10 @@ struct RtpEncodingParameters {
// TODO(asapersson): Not implemented for ORTC API.
absl::optional<int> min_bitrate_bps;

// TODO(deadbeef): Not implemented.
// Specifies the maximum framerate in fps for video.
// TODO(asapersson): Different framerates are not supported per stream.
// If set, the maximum |max_framerate| is currently used.
// Not supported for screencast.
absl::optional<int> max_framerate;

// For video, scale the resolution down by this factor.
@@ -455,6 +458,7 @@ struct RtpEncodingParameters {
fec == o.fec && rtx == o.rtx && dtx == o.dtx &&
bitrate_priority == o.bitrate_priority && ptime == o.ptime &&
max_bitrate_bps == o.max_bitrate_bps &&
min_bitrate_bps == o.min_bitrate_bps &&
max_framerate == o.max_framerate &&
scale_resolution_down_by == o.scale_resolution_down_by &&
scale_framerate_down_by == o.scale_framerate_down_by &&

@@ -510,7 +510,7 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
class LoadObserver : public test::SendTest,
public test::FrameGeneratorCapturer::SinkWantsObserver {
public:
LoadObserver() : SendTest(kLongTimeoutMs), test_phase_(TestPhase::kStart) {}
LoadObserver() : SendTest(kLongTimeoutMs), test_phase_(TestPhase::kInit) {}

void OnFrameGeneratorCapturerCreated(
test::FrameGeneratorCapturer* frame_generator_capturer) override {
@@ -524,9 +524,21 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
// TODO(sprang): Add integration test for maintain-framerate mode?
void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
// First expect CPU overuse. Then expect CPU underuse when the encoder
// At kStart expect CPU overuse. Then expect CPU underuse when the encoder
// delay has been decreased.
switch (test_phase_) {
case TestPhase::kInit:
// Max framerate should be set initially.
if (wants.max_framerate_fps != std::numeric_limits<int>::max() &&
wants.max_pixel_count == std::numeric_limits<int>::max()) {
test_phase_ = TestPhase::kStart;
} else {
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps;
}
break;
case TestPhase::kStart:
if (wants.max_pixel_count < std::numeric_limits<int>::max()) {
// On adapting down, VideoStreamEncoder::VideoSourceProxy will set
@@ -570,7 +582,12 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback.";
}

enum class TestPhase { kStart, kAdaptedDown, kAdaptedUp } test_phase_;
enum class TestPhase {
kInit,
kStart,
kAdaptedDown,
kAdaptedUp
} test_phase_;
} test;

RunBaseTest(&test);

@@ -12,6 +12,7 @@
#include <algorithm>
#include <string>

#include "media/base/mediaconstants.h"
#include "media/base/streamparams.h"
#include "media/engine/constants.h"
#include "media/engine/simulcast.h"
@@ -199,17 +200,16 @@ std::vector<webrtc::VideoStream> GetSimulcastConfig(
int /*max_bitrate_bps*/,
double bitrate_priority,
int max_qp,
int max_framerate,
int /*max_framerate*/,
bool is_screenshare,
bool temporal_layers_supported) {
if (is_screenshare) {
return GetScreenshareLayers(
max_layers, width, height, bitrate_priority, max_qp, max_framerate,
ScreenshareSimulcastFieldTrialEnabled(), temporal_layers_supported);
return GetScreenshareLayers(max_layers, width, height, bitrate_priority,
max_qp, ScreenshareSimulcastFieldTrialEnabled(),
temporal_layers_supported);
} else {
return GetNormalSimulcastLayers(max_layers, width, height, bitrate_priority,
max_qp, max_framerate,
temporal_layers_supported);
max_qp, temporal_layers_supported);
}
}

@@ -219,7 +219,6 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
int height,
double bitrate_priority,
int max_qp,
int max_framerate,
bool temporal_layers_supported) {
// TODO(bugs.webrtc.org/8785): Currently if the resolution isn't large enough
// (defined in kSimulcastFormats) we scale down the number of simulcast
@@ -279,7 +278,7 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
static_cast<int>(layers[s].target_bitrate_bps * rate_factor);
}
layers[s].min_bitrate_bps = FindSimulcastMinBitrateBps(width, height);
layers[s].max_framerate = max_framerate;
layers[s].max_framerate = kDefaultVideoMaxFramerate;

width /= 2;
height /= 2;
@@ -303,7 +302,6 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
int height,
double bitrate_priority,
int max_qp,
int max_framerate,
bool screenshare_simulcast_enabled,
bool temporal_layers_supported) {
auto max_screenshare_layers =
@@ -358,7 +356,7 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
layers[1].width = width;
layers[1].height = height;
layers[1].max_qp = max_qp;
layers[1].max_framerate = max_framerate;
layers[1].max_framerate = kDefaultVideoMaxFramerate;
layers[1].num_temporal_layers =
temporal_layers_supported ? DefaultNumberOfTemporalLayers(1, true) : 0;
layers[1].min_bitrate_bps =

@@ -26,7 +26,7 @@ void BoostMaxSimulcastLayer(int max_bitrate_bps,
std::vector<webrtc::VideoStream>* layers);

// Gets simulcast settings.
// TODO(asapersson): Remove max_bitrate_bps.
// TODO(asapersson): Remove max_bitrate_bps and max_framerate.
std::vector<webrtc::VideoStream> GetSimulcastConfig(
size_t max_layers,
int width,
@@ -34,7 +34,7 @@ std::vector<webrtc::VideoStream> GetSimulcastConfig(
int /*max_bitrate_bps*/,
double bitrate_priority,
int max_qp,
int max_framerate,
int /*max_framerate*/,
bool is_screenshare,
bool temporal_layers_supported = true);

@@ -45,7 +45,6 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
int height,
double bitrate_priority,
int max_qp,
int max_framerate,
bool temporal_layers_supported = true);

// Gets simulcast config layers for screenshare settings.
@@ -55,7 +54,6 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
int height,
double bitrate_priority,
int max_qp,
int max_framerate,
bool screenshare_simulcast_enabled,
bool temporal_layers_supported = true);

@@ -10,6 +10,7 @@

#include "media/engine/simulcast.h"

#include "media/base/mediaconstants.h"
#include "media/engine/constants.h"
#include "test/field_trial.h"
#include "test/gtest.h"
@@ -94,7 +95,7 @@ TEST(SimulcastTest, GetConfig) {

for (size_t i = 0; i < streams.size(); ++i) {
EXPECT_EQ(size_t{kDefaultTemporalLayers}, streams[i].num_temporal_layers);
EXPECT_EQ(kMaxFps, streams[i].max_framerate);
EXPECT_EQ(cricket::kDefaultVideoMaxFramerate, streams[i].max_framerate);
EXPECT_EQ(kQpMax, streams[i].max_qp);
EXPECT_EQ(kExpected[i].min_bitrate_bps, streams[i].min_bitrate_bps);
EXPECT_EQ(kExpected[i].target_bitrate_bps, streams[i].target_bitrate_bps);

@@ -168,6 +168,18 @@ std::vector<VideoCodec> AssignPayloadTypesAndDefaultCodecs(
: std::vector<VideoCodec>();
}

int GetMaxFramerate(const webrtc::VideoEncoderConfig& encoder_config,
size_t num_layers) {
int max_fps = -1;
for (size_t i = 0; i < num_layers; ++i) {
int fps = (encoder_config.simulcast_layers[i].max_framerate > 0)
? encoder_config.simulcast_layers[i].max_framerate
: kDefaultVideoMaxFramerate;
max_fps = std::max(fps, max_fps);
}
return max_fps;
}

static std::string CodecVectorToString(const std::vector<VideoCodec>& codecs) {
rtc::StringBuilder out;
out << "{";
@@ -1735,13 +1747,16 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters(
return error;
}

bool new_bitrate = false;
bool new_param = false;
for (size_t i = 0; i < rtp_parameters_.encodings.size(); ++i) {
if ((new_parameters.encodings[i].min_bitrate_bps !=
rtp_parameters_.encodings[i].min_bitrate_bps) ||
(new_parameters.encodings[i].max_bitrate_bps !=
rtp_parameters_.encodings[i].max_bitrate_bps)) {
new_bitrate = true;
rtp_parameters_.encodings[i].max_bitrate_bps) ||
(new_parameters.encodings[i].max_framerate !=
rtp_parameters_.encodings[i].max_framerate)) {
new_param = true;
break;
}
}

@@ -1755,7 +1770,7 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters(
// entire encoder reconfiguration, it just needs to update the bitrate
// allocator.
bool reconfigure_encoder =
new_bitrate || (new_parameters.encodings[0].bitrate_priority !=
new_param || (new_parameters.encodings[0].bitrate_priority !=
rtp_parameters_.encodings[0].bitrate_priority);

// TODO(bugs.webrtc.org/8807): The active field as well should not require
@@ -1919,7 +1934,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(

// Application-controlled state is held in the encoder_config's
// simulcast_layers. Currently this is used to control which simulcast layers
// are active and for configuring the min/max bitrate.
// are active and for configuring the min/max bitrate and max framerate.
// The encoder_config's simulcast_layers is also used for non-simulcast (when
// there is a single layer).
RTC_DCHECK_GE(rtp_parameters_.encodings.size(),
@@ -1937,14 +1952,17 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(
encoder_config.simulcast_layers[i].max_bitrate_bps =
*rtp_parameters_.encodings[i].max_bitrate_bps;
}
if (rtp_parameters_.encodings[i].max_framerate) {
encoder_config.simulcast_layers[i].max_framerate =
*rtp_parameters_.encodings[i].max_framerate;
}
}

int max_qp = kDefaultQpMax;
codec.GetParam(kCodecParamMaxQuantization, &max_qp);
encoder_config.video_stream_factory =
new rtc::RefCountedObject<EncoderStreamFactory>(
codec.name, max_qp, kDefaultVideoMaxFramerate, is_screencast,
parameters_.conference_mode);
codec.name, max_qp, is_screencast, parameters_.conference_mode);
return encoder_config;
}

@@ -2637,19 +2655,17 @@ WebRtcVideoChannel::MapCodecs(const std::vector<VideoCodec>& codecs) {
return video_codecs;
}

// TODO(bugs.webrtc.org/8785): Consider removing max_qp and max_framerate
// as members of EncoderStreamFactory and instead set these values individually
// for each stream in the VideoEncoderConfig.simulcast_layers.
// TODO(bugs.webrtc.org/8785): Consider removing max_qp as member of
// EncoderStreamFactory and instead set this value individually for each stream
// in the VideoEncoderConfig.simulcast_layers.
EncoderStreamFactory::EncoderStreamFactory(
std::string codec_name,
int max_qp,
int max_framerate,
bool is_screenshare,
bool screenshare_config_explicitly_enabled)

: codec_name_(codec_name),
max_qp_(max_qp),
max_framerate_(max_framerate),
is_screenshare_(is_screenshare),
screenshare_config_explicitly_enabled_(
screenshare_config_explicitly_enabled) {}
@@ -2676,12 +2692,18 @@ std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams(
bool temporal_layers_supported = CodecNamesEq(codec_name_, kVp8CodecName);
layers = GetSimulcastConfig(encoder_config.number_of_streams, width, height,
0 /*not used*/, encoder_config.bitrate_priority,
max_qp_, max_framerate_, is_screenshare_,
max_qp_, 0 /*not_used*/, is_screenshare_,
temporal_layers_supported);
// The maximum |max_framerate| is currently used for video.
int max_framerate = GetMaxFramerate(encoder_config, layers.size());
// Update the active simulcast layers and configured bitrates.
bool is_highest_layer_max_bitrate_configured = false;
for (size_t i = 0; i < layers.size(); ++i) {
layers[i].active = encoder_config.simulcast_layers[i].active;
if (!is_screenshare_) {
// Update simulcast framerates with max configured max framerate.
layers[i].max_framerate = max_framerate;
}
// Update simulcast bitrates with configured min and max bitrate.
if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) {
layers[i].min_bitrate_bps =
@@ -2738,11 +2760,14 @@ std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams(
if (encoder_config.max_bitrate_bps <= 0)
max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps);
}
int max_framerate = (encoder_config.simulcast_layers[0].max_framerate > 0)
? encoder_config.simulcast_layers[0].max_framerate
: kDefaultVideoMaxFramerate;

webrtc::VideoStream layer;
layer.width = width;
layer.height = height;
layer.max_framerate = max_framerate_;
layer.max_framerate = max_framerate;

// In the case that the application sets a max bitrate that's lower than the
// min bitrate, we adjust it down (see bugs.webrtc.org/9141).

@@ -504,7 +504,6 @@ class EncoderStreamFactory
public:
EncoderStreamFactory(std::string codec_name,
int max_qp,
int max_framerate,
bool is_screenshare,
bool screenshare_config_explicitly_enabled);

@@ -516,7 +515,6 @@ class EncoderStreamFactory

const std::string codec_name_;
const int max_qp_;
const int max_framerate_;
const bool is_screenshare_;
// Allows a screenshare specific configuration, which enables temporal
// layering and allows simulcast.

@@ -5402,6 +5402,32 @@ TEST_F(WebRtcVideoChannelTest,
stream->GetVideoStreams()[0].max_bitrate_bps);
}

TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) {
FakeVideoSendStream* stream = AddSendStream();

webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(1UL, parameters.encodings.size());
EXPECT_FALSE(parameters.encodings[0].max_framerate.has_value());
EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());

// Note that this is testing the behavior of the FakeVideoSendStream, which
// also calls to CreateEncoderStreams to get the VideoStreams, so essentially
// we are just testing the behavior of
// EncoderStreamFactory::CreateEncoderStreams.
ASSERT_EQ(1UL, stream->GetVideoStreams().size());
EXPECT_EQ(kDefaultVideoMaxFramerate,
stream->GetVideoStreams()[0].max_framerate);

// Set max framerate and check that VideoStream.max_framerate is set.
const int kNewMaxFramerate = kDefaultVideoMaxFramerate - 1;
parameters = channel_->GetRtpSendParameters(last_ssrc_);
parameters.encodings[0].max_framerate = kNewMaxFramerate;
EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());

ASSERT_EQ(1UL, stream->GetVideoStreams().size());
EXPECT_EQ(kNewMaxFramerate, stream->GetVideoStreams()[0].max_framerate);
}

TEST_F(WebRtcVideoChannelTest,
CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) {
AddSendStream();
@@ -5554,6 +5580,120 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPrioritySimulcastStreams) {
EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, nullptr, nullptr));
}

TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) {
const size_t kNumSimulcastStreams = 3;
SetUpSimulcast(true, false);

// Get and set the rtp encoding parameters.
webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
for (const auto& encoding : parameters.encodings) {
EXPECT_FALSE(encoding.max_framerate);
}

// Change the value and set it on the VideoChannel.
parameters.encodings[0].max_framerate = 10;
parameters.encodings[1].max_framerate = 20;
parameters.encodings[2].max_framerate = 25;
EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());

// Verify that the bitrates are set on the VideoChannel.
parameters = channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
EXPECT_EQ(10, parameters.encodings[0].max_framerate);
EXPECT_EQ(20, parameters.encodings[1].max_framerate);
EXPECT_EQ(25, parameters.encodings[2].max_framerate);
}

TEST_F(WebRtcVideoChannelTest, MaxSimulcastFrameratePropagatedToEncoder) {
const size_t kNumSimulcastStreams = 3;
FakeVideoSendStream* stream = SetUpSimulcast(true, false);

// Send a full size frame so all simulcast layers are used when reconfiguring.
FakeVideoCapturerWithTaskQueue capturer;
VideoOptions options;
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &capturer));
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
channel_->SetSend(true);
EXPECT_TRUE(capturer.CaptureFrame());

// Get and set the rtp encoding parameters.
// Change the value and set it on the VideoChannel.
webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
parameters.encodings[0].max_framerate = 15;
parameters.encodings[1].max_framerate = 25;
parameters.encodings[2].max_framerate = 20;
EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());

// Verify that the new value propagated down to the encoder.
// Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly.
EXPECT_EQ(2, stream->num_encoder_reconfigurations());
webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy();
EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams);
EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size());
EXPECT_EQ(15, encoder_config.simulcast_layers[0].max_framerate);
EXPECT_EQ(25, encoder_config.simulcast_layers[1].max_framerate);
EXPECT_EQ(20, encoder_config.simulcast_layers[2].max_framerate);

// FakeVideoSendStream calls CreateEncoderStreams, test that the vector of
// VideoStreams are created appropriately for the simulcast case.
// Currently the maximum |max_framerate| is used.
EXPECT_EQ(kNumSimulcastStreams, stream->GetVideoStreams().size());
EXPECT_EQ(25, stream->GetVideoStreams()[0].max_framerate);
EXPECT_EQ(25, stream->GetVideoStreams()[1].max_framerate);
EXPECT_EQ(25, stream->GetVideoStreams()[2].max_framerate);

EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr));
}

TEST_F(WebRtcVideoChannelTest,
DefaultValuePropagatedToEncoderForUnsetFramerate) {
const size_t kNumSimulcastStreams = 3;
const std::vector<webrtc::VideoStream> kDefault = GetSimulcastBitrates720p();
FakeVideoSendStream* stream = SetUpSimulcast(true, false);

// Send a full size frame so all simulcast layers are used when reconfiguring.
FakeVideoCapturerWithTaskQueue capturer;
VideoOptions options;
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &capturer));
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
channel_->SetSend(true);
EXPECT_TRUE(capturer.CaptureFrame());

// Get and set the rtp encoding parameters.
// Change the value and set it on the VideoChannel.
webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
parameters.encodings[0].max_framerate = 15;
parameters.encodings[2].max_framerate = 20;
EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());

// Verify that the new value propagated down to the encoder.
// Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly.
webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy();
EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams);
EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size());
EXPECT_EQ(15, encoder_config.simulcast_layers[0].max_framerate);
EXPECT_EQ(-1, encoder_config.simulcast_layers[1].max_framerate);
EXPECT_EQ(20, encoder_config.simulcast_layers[2].max_framerate);

// FakeVideoSendStream calls CreateEncoderStreams, test that the vector of
// VideoStreams are created appropriately for the simulcast case.
// The maximum |max_framerate| is used, kDefaultVideoMaxFramerate: 60.
EXPECT_EQ(kNumSimulcastStreams, stream->GetVideoStreams().size());
EXPECT_EQ(kDefaultVideoMaxFramerate,
stream->GetVideoStreams()[0].max_framerate);
EXPECT_EQ(kDefaultVideoMaxFramerate,
stream->GetVideoStreams()[1].max_framerate);
EXPECT_EQ(kDefaultVideoMaxFramerate,
stream->GetVideoStreams()[2].max_framerate);

EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr));
}

TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) {
const size_t kNumSimulcastStreams = 3;
SetUpSimulcast(true, false);

@@ -38,7 +38,6 @@ bool UnimplementedRtpEncodingParameterHasValue(
if (encoding_params.codec_payload_type.has_value() ||
encoding_params.fec.has_value() || encoding_params.rtx.has_value() ||
encoding_params.dtx.has_value() || encoding_params.ptime.has_value() ||
encoding_params.max_framerate.has_value() ||
!encoding_params.rid.empty() ||
encoding_params.scale_resolution_down_by.has_value() ||
encoding_params.scale_framerate_down_by.has_value() ||

@@ -691,8 +691,7 @@ TEST_F(RtpSenderReceiverTest,
EXPECT_EQ(1u, params.encodings.size());

// Unimplemented RtpParameters: codec_payload_type, fec, rtx, dtx, ptime,
// max_framerate, scale_resolution_down_by, scale_framerate_down_by, rid,
// dependency_rids.
// scale_resolution_down_by, scale_framerate_down_by, rid, dependency_rids.
params.encodings[0].codec_payload_type = 1;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
audio_rtp_sender_->SetParameters(params).type());
@@ -718,11 +717,6 @@ TEST_F(RtpSenderReceiverTest,
audio_rtp_sender_->SetParameters(params).type());
params = audio_rtp_sender_->GetParameters();

params.encodings[0].max_framerate = 1;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
audio_rtp_sender_->SetParameters(params).type());
params = audio_rtp_sender_->GetParameters();

params.encodings[0].scale_resolution_down_by = 2.0;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
audio_rtp_sender_->SetParameters(params).type());
@@ -878,8 +872,7 @@ TEST_F(RtpSenderReceiverTest,
EXPECT_EQ(1u, params.encodings.size());

// Unimplemented RtpParameters: codec_payload_type, fec, rtx, dtx, ptime,
// max_framerate, scale_resolution_down_by, scale_framerate_down_by, rid,
// dependency_rids.
// scale_resolution_down_by, scale_framerate_down_by, rid, dependency_rids.
params.encodings[0].codec_payload_type = 1;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
video_rtp_sender_->SetParameters(params).type());
@@ -905,11 +898,6 @@ TEST_F(RtpSenderReceiverTest,
video_rtp_sender_->SetParameters(params).type());
params = video_rtp_sender_->GetParameters();

params.encodings[0].max_framerate = 1;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
video_rtp_sender_->SetParameters(params).type());
params = video_rtp_sender_->GetParameters();

params.encodings[0].scale_resolution_down_by = 2.0;
EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
video_rtp_sender_->SetParameters(params).type());

@@ -100,7 +100,7 @@ void QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory,
// Called when FrameGeneratorCapturer::AddOrUpdateSink is called.
void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
EXPECT_LT(wants.max_pixel_count, kWidth * kHeight) << "Not a downscale.";
if (wants.max_pixel_count < kWidth * kHeight)
observation_complete_.Set();
}
void ModifySenderCallConfig(Call::Config* config) override {

@@ -423,7 +423,6 @@ void VideoQualityTest::FillScalabilitySettings(
encoder_config.video_stream_factory =
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params->video[video_idx].codec, kDefaultMaxQp,
params->video[video_idx].fps,
params->screenshare[video_idx].enabled, true);
params->ss[video_idx].streams =
encoder_config.video_stream_factory->CreateEncoderStreams(
@@ -579,7 +578,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params_.video[video_idx].codec,
params_.ss[video_idx].streams[0].max_qp,
params_.video[video_idx].fps,
params_.screenshare[video_idx].enabled, true);
} else {
video_encoder_configs_[video_idx].video_stream_factory =
@@ -733,7 +731,7 @@ void VideoQualityTest::SetupThumbnails(Transport* send_transport,
thumbnail_encoder_config.video_stream_factory =
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params_.video[0].codec, params_.ss[0].streams[0].max_qp,
params_.video[0].fps, params_.screenshare[0].enabled, true);
params_.screenshare[0].enabled, true);
}
thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers;

@@ -38,7 +38,6 @@ namespace {
// Time interval for logging frame counts.
const int64_t kFrameLogIntervalMs = 60000;
const int kMinFramerateFps = 2;
const int kMaxFramerateFps = 120;

// Time to keep a single cached pending frame in paused state.
const int64_t kPendingFrameTimeoutMs = 1000;
@@ -120,7 +119,8 @@ class VideoStreamEncoder::VideoSourceProxy {
explicit VideoSourceProxy(VideoStreamEncoder* video_stream_encoder)
: video_stream_encoder_(video_stream_encoder),
degradation_preference_(DegradationPreference::DISABLED),
source_(nullptr) {}
source_(nullptr),
max_framerate_(std::numeric_limits<int>::max()) {}

void SetSource(rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) {
@@ -147,11 +147,27 @@ class VideoStreamEncoder::VideoSourceProxy {
source->AddOrUpdateSink(video_stream_encoder_, wants);
}

void SetMaxFramerate(int max_framerate) {
RTC_DCHECK_GT(max_framerate, 0);
rtc::CritScope lock(&crit_);
if (max_framerate == max_framerate_)
return;

RTC_LOG(LS_INFO) << "Set max framerate: " << max_framerate;
max_framerate_ = max_framerate;
if (source_) {
source_->AddOrUpdateSink(video_stream_encoder_,
GetActiveSinkWantsInternal());
}
}

void SetWantsRotationApplied(bool rotation_applied) {
rtc::CritScope lock(&crit_);
sink_wants_.rotation_applied = rotation_applied;
if (source_)
source_->AddOrUpdateSink(video_stream_encoder_, sink_wants_);
if (source_) {
source_->AddOrUpdateSink(video_stream_encoder_,
GetActiveSinkWantsInternal());
}
}

rtc::VideoSinkWants GetActiveSinkWants() {
@@ -165,7 +181,8 @@ class VideoStreamEncoder::VideoSourceProxy {
sink_wants_.target_pixel_count.reset();
sink_wants_.max_framerate_fps = std::numeric_limits<int>::max();
if (source_)
source_->AddOrUpdateSink(video_stream_encoder_, sink_wants_);
source_->AddOrUpdateSink(video_stream_encoder_,
GetActiveSinkWantsInternal());
}

bool RequestResolutionLowerThan(int pixel_count,
@@ -308,6 +325,8 @@ class VideoStreamEncoder::VideoSourceProxy {
wants.target_pixel_count.reset();
wants.max_framerate_fps = std::numeric_limits<int>::max();
}
// Limit to configured max framerate.
wants.max_framerate_fps = std::min(max_framerate_, wants.max_framerate_fps);
return wants;
}

@@ -317,6 +336,7 @@ class VideoStreamEncoder::VideoSourceProxy {
rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(&crit_);
DegradationPreference degradation_preference_ RTC_GUARDED_BY(&crit_);
rtc::VideoSourceInterface<VideoFrame>* source_ RTC_GUARDED_BY(&crit_);
int max_framerate_ RTC_GUARDED_BY(&crit_);

RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
};
@@ -543,7 +563,13 @@ void VideoStreamEncoder::ReconfigureEncoder() {
codec.startBitrate = std::min(codec.startBitrate, codec.maxBitrate);
codec.expect_encode_from_texture = last_frame_info_->is_texture;
max_framerate_ = codec.maxFramerate;
RTC_DCHECK_LE(max_framerate_, kMaxFramerateFps);

// Inform source about max configured framerate.
int max_framerate = 0;
for (const auto& stream : streams) {
max_framerate = std::max(stream.max_framerate, max_framerate);
}
source_proxy_->SetMaxFramerate(max_framerate);

// Keep the same encoder, as long as the video_format is unchanged.
if (pending_encoder_creation_) {

@@ -32,13 +32,6 @@
#include "video/send_statistics_proxy.h"
#include "video/video_stream_encoder.h"

namespace {
const int kMinPixelsPerFrame = 320 * 180;
const int kMinFramerateFps = 2;
const int kMinBalancedFramerateFps = 7;
const int64_t kFrameTimeoutMs = 100;
} // namespace

namespace webrtc {

using ScaleReason = AdaptationObserverInterface::AdaptReason;
@@ -46,11 +39,16 @@ using ::testing::_;
using ::testing::Return;

namespace {
const int kMinPixelsPerFrame = 320 * 180;
const int kMinFramerateFps = 2;
const int kMinBalancedFramerateFps = 7;
const int64_t kFrameTimeoutMs = 100;
const size_t kMaxPayloadLength = 1440;
const int kTargetBitrateBps = 1000000;
const int kLowTargetBitrateBps = kTargetBitrateBps / 10;
const int kMaxInitialFramedrop = 4;
const int kDefaultFramerate = 30;
const int64_t kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerate;

class TestBuffer : public webrtc::I420Buffer {
public:
@@ -276,7 +274,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
: video_send_config_(VideoSendStream::Config(nullptr)),
codec_width_(320),
codec_height_(240),
max_framerate_(30),
max_framerate_(kDefaultFramerate),
fake_encoder_(),
encoder_factory_(&fake_encoder_),
stats_proxy_(new MockableSendStatisticsProxy(
@@ -379,22 +377,28 @@ class VideoStreamEncoderTest : public ::testing::Test {
EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
}

void VerifyFpsMaxResolutionMax(const rtc::VideoSinkWants& wants) {
EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
EXPECT_FALSE(wants.target_pixel_count);
}

void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants1,
const rtc::VideoSinkWants& wants2) {
EXPECT_EQ(std::numeric_limits<int>::max(), wants1.max_framerate_fps);
EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count);
EXPECT_GT(wants1.max_pixel_count, 0);
}

void VerifyFpsMaxResolutionGt(const rtc::VideoSinkWants& wants1,
const rtc::VideoSinkWants& wants2) {
EXPECT_EQ(std::numeric_limits<int>::max(), wants1.max_framerate_fps);
EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count);
}

void VerifyFpsMaxResolutionEq(const rtc::VideoSinkWants& wants1,
const rtc::VideoSinkWants& wants2) {
EXPECT_EQ(std::numeric_limits<int>::max(), wants1.max_framerate_fps);
EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
}

@@ -425,7 +429,7 @@ class VideoStreamEncoderTest : public ::testing::Test {

void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants,
int pixel_count) {
EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_framerate_fps);
EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
EXPECT_LT(wants.max_pixel_count, pixel_count);
EXPECT_GT(wants.max_pixel_count, 0);
}
@@ -455,7 +459,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
} else if (last_frame_pixels <= 640 * 480) {
EXPECT_LE(15, fps_limit);
} else {
EXPECT_EQ(std::numeric_limits<int>::max(), fps_limit);
EXPECT_EQ(kDefaultFramerate, fps_limit);
}
}

@@ -936,7 +940,7 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
last_wants.max_framerate_fps);
}

VerifyNoLimitation(video_source_.sink_wants());
VerifyFpsMaxResolutionMax(video_source_.sink_wants());
stats_proxy_->ResetMockStats();
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
@@ -951,9 +955,8 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {

const int kFrameWidth = 1280;
const int kFrameHeight = 720;
const int kFrameIntervalMs = 1000 / 30;

int frame_timestamp = 1;
int64_t frame_timestamp = 1;

video_source_.IncomingCapturedFrame(
CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight));
@@ -972,8 +975,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count,
kFrameWidth * kFrameHeight);
EXPECT_EQ(std::numeric_limits<int>::max(),
video_source_.sink_wants().max_framerate_fps);
EXPECT_EQ(kDefaultFramerate, video_source_.sink_wants().max_framerate_fps);

// Set new source, switch to maintain-resolution.
test::FrameForwarder new_video_source;
@@ -981,7 +983,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);

// Initially no degradation registered.
VerifyNoLimitation(new_video_source.sink_wants());
VerifyFpsMaxResolutionMax(new_video_source.sink_wants());

// Force an input frame rate to be available, or the adaptation call won't
// know what framerate to adapt form.
@@ -1005,7 +1007,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
// Turn off degradation completely.
video_stream_encoder_->SetSource(&new_video_source,
webrtc::DegradationPreference::DISABLED);
VerifyNoLimitation(new_video_source.sink_wants());
VerifyFpsMaxResolutionMax(new_video_source.sink_wants());

video_stream_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
@@ -1014,7 +1016,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
frame_timestamp += kFrameIntervalMs;

// Still no degradation.
VerifyNoLimitation(new_video_source.sink_wants());
VerifyFpsMaxResolutionMax(new_video_source.sink_wants());

// Calling SetSource with resolution scaling enabled apply the old SinkWants.
video_stream_encoder_->SetSource(
@@ -1022,8 +1024,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
EXPECT_LT(new_video_source.sink_wants().max_pixel_count,
kFrameWidth * kFrameHeight);
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_EQ(std::numeric_limits<int>::max(),
new_video_source.sink_wants().max_framerate_fps);
EXPECT_EQ(kDefaultFramerate, new_video_source.sink_wants().max_framerate_fps);

// Calling SetSource with framerate scaling enabled apply the old SinkWants.
video_stream_encoder_->SetSource(
@@ -1235,25 +1236,31 @@ TEST_F(VideoStreamEncoderTest,

const int kWidth = 1280;
const int kHeight = 720;
int64_t timestamp_ms = kFrameIntervalMs;
video_source_.set_adaptation_enabled(true);
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger adapt down.
video_stream_encoder_->TriggerQualityLow();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
WaitForEncodedFrame(2);
timestamp_ms += kFrameIntervalMs;
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger overuse.
video_stream_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
WaitForEncodedFrame(3);
timestamp_ms += kFrameIntervalMs;
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1267,9 +1274,10 @@ TEST_F(VideoStreamEncoderTest,
video_encoder_config.video_format.parameters["foo"] = "foo";
video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
kMaxPayloadLength);

video_source_.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
WaitForEncodedFrame(4);
timestamp_ms += kFrameIntervalMs;
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1472,7 +1480,7 @@ TEST_F(VideoStreamEncoderTest,

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

@@ -1503,7 +1511,7 @@ TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) {
webrtc::DegradationPreference::BALANCED);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());

// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
@@ -1544,13 +1552,13 @@ TEST_F(VideoStreamEncoderTest,

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuNormalUsage();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

@@ -1570,13 +1578,13 @@ TEST_F(VideoStreamEncoderTest,

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuNormalUsage();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

@@ -1595,14 +1603,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) {

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);

// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1622,14 +1630,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) {

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);

// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -1651,7 +1659,7 @@ TEST_F(VideoStreamEncoderTest,

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);

@@ -1665,7 +1673,7 @@ TEST_F(VideoStreamEncoderTest,

// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1687,7 +1695,7 @@ TEST_F(VideoStreamEncoderTest,
// Expect no scaling to begin with (preference: MAINTAIN_FRAMERATE).
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
VerifyNoLimitation(video_source_.sink_wants());
VerifyFpsMaxResolutionMax(video_source_.sink_wants());

// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
@@ -1699,7 +1707,7 @@ TEST_F(VideoStreamEncoderTest,
test::FrameForwarder new_video_source;
video_stream_encoder_->SetSource(
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
VerifyNoLimitation(new_video_source.sink_wants());
VerifyFpsMaxResolutionMax(new_video_source.sink_wants());

// Trigger adapt down, expect reduced framerate.
video_stream_encoder_->TriggerQualityLow();
@@ -1709,7 +1717,7 @@ TEST_F(VideoStreamEncoderTest,

// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
VerifyNoLimitation(new_video_source.sink_wants());
VerifyFpsMaxResolutionMax(new_video_source.sink_wants());

video_stream_encoder_->Stop();
}
@@ -1730,8 +1738,9 @@ TEST_F(VideoStreamEncoderTest, DoesNotScaleBelowSetResolutionLimit) {

int downscales = 0;
for (size_t i = 1; i <= kNumFrames; i++) {
video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
WaitForEncodedFrame(i);
video_source_.IncomingCapturedFrame(
CreateFrame(i * kFrameIntervalMs, kWidth, kHeight));
WaitForEncodedFrame(i * kFrameIntervalMs);

// Trigger scale down.
rtc::VideoSinkWants last_wants = video_source_.sink_wants();
@@ -1761,41 +1770,46 @@ TEST_F(VideoStreamEncoderTest,
video_stream_encoder_->SetSource(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
WaitForEncodedFrame(2);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
WaitForEncodedFrame(4);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

@@ -1814,41 +1828,46 @@ TEST_F(VideoStreamEncoderTest,
video_stream_encoder_->SetSource(&source,
webrtc::DegradationPreference::BALANCED);

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);

// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
sink_.WaitForEncodedFrame(2);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);

// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);

// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
sink_.WaitForEncodedFrame(4);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);

// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);

@ -1867,9 +1886,10 @@ TEST_F(VideoStreamEncoderTest,
video_stream_encoder_->SetSource(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);

source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@ -1877,8 +1897,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger cpu adapt down, expect scaled down resolution (960x540).
video_stream_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
WaitForEncodedFrame(2);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@ -1887,8 +1908,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger cpu adapt down, expect scaled down resolution (640x360).
video_stream_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
WaitForEncodedFrame(3);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@ -1897,8 +1919,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger cpu adapt down, expect scaled down resolution (480x270).
video_stream_encoder_->TriggerCpuOveruse();
source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
WaitForEncodedFrame(4);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
@ -1907,8 +1930,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger quality adapt down, expect scaled down resolution (320x180).
video_stream_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight));
WaitForEncodedFrame(5);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
rtc::VideoSinkWants last_wants = source.sink_wants();
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@ -1918,8 +1942,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger quality adapt down, expect no change (min resolution reached).
video_stream_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
WaitForEncodedFrame(6);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionEq(source.sink_wants(), last_wants);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@ -1928,8 +1953,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger cpu adapt up, expect upscaled resolution (480x270).
video_stream_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(7, kWidth, kHeight));
WaitForEncodedFrame(7);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@ -1938,8 +1964,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger cpu adapt up, expect upscaled resolution (640x360).
video_stream_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(8, kWidth, kHeight));
WaitForEncodedFrame(8);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@ -1948,8 +1975,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger cpu adapt up, expect upscaled resolution (960x540).
video_stream_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(9, kWidth, kHeight));
WaitForEncodedFrame(9);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
last_wants = source.sink_wants();
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@ -1959,8 +1987,9 @@ TEST_F(VideoStreamEncoderTest,

// Trigger cpu adapt up, no cpu downgrades, expect no change (960x540).
video_stream_encoder_->TriggerCpuNormalUsage();
source.IncomingCapturedFrame(CreateFrame(10, kWidth, kHeight));
WaitForEncodedFrame(10);
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@ -1969,10 +1998,11 @@ TEST_F(VideoStreamEncoderTest,

// Trigger quality adapt up, expect no restriction (1280x720).
video_stream_encoder_->TriggerQualityHigh();
source.IncomingCapturedFrame(CreateFrame(11, kWidth, kHeight));
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@ -2376,7 +2406,7 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(1);
video_stream_encoder_->TriggerCpuOveruse();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);

@ -2444,19 +2474,19 @@ TEST_F(VideoStreamEncoderTest,
video_source_.set_adaptation_enabled(true);

video_source_.IncomingCapturedFrame(
CreateFrame(1, kFrameWidth, kFrameHeight));
CreateFrame(1 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
WaitForEncodedFrame(kFrameWidth, kFrameHeight);

// Trigger CPU overuse, downscale by 3/4.
video_stream_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
CreateFrame(2, kFrameWidth, kFrameHeight));
CreateFrame(2 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);

// Trigger CPU normal use, return to original resolution.
video_stream_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(
CreateFrame(3, kFrameWidth, kFrameHeight));
CreateFrame(3 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
WaitForEncodedFrame(kFrameWidth, kFrameHeight);

video_stream_encoder_->Stop();
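A note on the hunk above: switching from CreateFrame(1, ...) to CreateFrame(1 * kFrameIntervalMs, ...) spaces the synthetic capture timestamps by a full frame interval rather than by one millisecond, so the framerate-aware adaptation path sees realistically timed frames. A minimal standalone sketch of the arithmetic, assuming the fixture's max_framerate_ is 30 fps (that value is an assumption, not visible in this hunk):

// Hedged sketch only: frame interval at an assumed 30 fps source rate.
// rtc::kNumMillisecsPerSec is 1000, so the interval is ~33 ms.
constexpr int kAssumedMaxFramerate = 30;
constexpr int kFrameIntervalMs = 1000 / kAssumedMaxFramerate;  // 33 ms
static_assert(1 * kFrameIntervalMs == 33, "frame 1 is stamped at ~33 ms, not 1 ms");
static_assert(3 * kFrameIntervalMs == 99, "frame 3 is stamped at ~99 ms, not 3 ms");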
@ -2466,7 +2496,6 @@ TEST_F(VideoStreamEncoderTest,
AdaptsFramerateOnOveruse_MaintainResolutionMode) {
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
int kFrameIntervalMs = rtc::kNumMillisecsPerSec / max_framerate_;

video_stream_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
video_stream_encoder_->SetSource(
@ -2512,7 +2541,7 @@ TEST_F(VideoStreamEncoderTest,
// Trigger CPU overuse, reduce framerate by 2/3 again.
video_stream_encoder_->TriggerCpuOveruse();
num_frames_dropped = 0;
for (int i = 0; i < max_framerate_; ++i) {
for (int i = 0; i <= max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
video_source_.IncomingCapturedFrame(
CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
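For context on the loop-bound change above (i < max_framerate_ becomes i <= max_framerate_): the test counts how many of the frames fed during a window of capture are dropped, and the extra iteration stretches that window to at least a full second. A hedged, self-contained sketch of the timing argument (not the test's code; the 30 fps value is an assumption standing in for max_framerate_):

#include <cstdint>

// Sketch: feeding max_framerate + 1 frames at kFrameIntervalMs spacing spans
// slightly more than one second, whereas the old bound covered only ~990 ms.
int64_t CapturedSpanMs() {
  constexpr int kAssumedMaxFramerate = 30;
  constexpr int kFrameIntervalMs = 1000 / kAssumedMaxFramerate;  // 33 ms
  int64_t timestamp_ms = 0;
  for (int i = 0; i <= kAssumedMaxFramerate; ++i) {  // <=, as in the new loop
    timestamp_ms += kFrameIntervalMs;  // one synthetic frame per interval
  }
  return timestamp_ms;  // 31 frames * 33 ms = 1023 ms, i.e. >= one second
}

The dropped-frame count accumulated inside the real loop is then checked against the expected reduction ("reduce framerate by 2/3 again") in assertions outside the visible hunk.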
@ -2618,7 +2647,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@ -2770,14 +2799,14 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);

// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);

video_stream_encoder_->Stop();
@ -2798,7 +2827,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@ -2877,7 +2906,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@ -2887,7 +2916,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {

// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);

@ -2911,7 +2940,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@ -2963,7 +2992,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@ -2973,7 +3002,7 @@ TEST_F(VideoStreamEncoderTest,

// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
VerifyNoLimitation(source.sink_wants());
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);