Reland "Pass true stream resolutions to GetSimulcastConfig()"

This is a reland of commit 09f03be54804e81f626c26e8fde8c86cc952545f

Use max_num_layers instead of encoder_config.number_of_streams when calculating stream resolutions in EncoderStreamFactory::GetStreamResolutions().

Original change's description:
> Pass true stream resolutions to GetSimulcastConfig()
>
> Before this change, GetSimulcastConfig() received only the maximum
> resolution as input and derived the resolutions of the lower-quality
> simulcast streams assuming a 1/2 scaling factor [1]. These days
> applications can configure resolution scaling factors via
> RtpEncodingParameters. If the configured scaling factors differed from
> 1/2, GetSimulcastConfig() produced wrong bitrate limits. Now the
> resolutions of all streams are calculated in
> EncoderStreamFactory::CreateEncoderStreams() using the scaling factors
> from RtpEncodingParameters (or the default 1/2) and are then passed to
> GetSimulcastConfig().
>
> Moved tests from simulcast_unittest.cc to encoder_stream_factory_unittest.cc. Mapping of old to new tests:
> * GetConfigWithLimitedMaxLayersForResolution -> ReducesStreamCountWhenResolutionIsLow
> * GetConfigWithLowResolutionScreenshare -> ReducesLegacyScreencastStreamCountWhenResolutionIsLow
> * GetConfigWithNotLimitedMaxLayersForResolution -> KeepsStreamCountUnchangedWhenLegacyLimitIsDisabled
> * GetConfigWithNormalizedResolution -> AdjustsResolutionWhenUnaligned
> * GetConfigWithNormalizedResolutionDivisibleBy4 -> MakesResolutionDivisibleBy4
> * GetConfigWithNormalizedResolutionDivisibleBy8 -> not needed (MakesResolutionDivisibleBy4 should be enough).
> * GetConfigForLegacyLayerLimit -> KeepsStreamCountUnchangedWhenResolutionIsHigh and ReducesStreamCountWhenResolutionIsLow
> * GetConfigForLegacyLayerLimitWithRequiredHD -> KeepsStreamCountUnchangedWhenLegacyLimitIsDisabled
>
> [1] https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/video/config/simulcast.cc;l=297-298;drc=1b78a7eb3f418460da03672b1d1af1d9488bb544
>
> Bug: webrtc:351644568, b/352504711
> Change-Id: I0028904ab0bb1e27b9c1b7cd3fb9a8ccf447fa35
> No-Try: true
> Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/357280
> Commit-Queue: Sergey Silkin <ssilkin@webrtc.org>
> Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
> Cr-Commit-Position: refs/heads/main@{#42651}
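
For illustration only, a minimal standalone sketch of the idea behind the change; the type and function names here (LayerConfig, Res, DeriveResolutions) are hypothetical and not part of the WebRTC API. Per-stream resolutions are derived from the configured scale factors when present, falling back to the default 1/2 per step:

#include <cstddef>
#include <vector>

struct LayerConfig {
  double scale_resolution_down_by = -1.0;  // -1 means "not configured".
};

struct Res {
  int width;
  int height;
};

// Lowest-resolution stream first, matching the new GetStreamResolutions().
std::vector<Res> DeriveResolutions(int width, int height,
                                   const std::vector<LayerConfig>& layers) {
  std::vector<Res> out(layers.size());
  for (size_t i = 0; i < layers.size(); ++i) {
    // Use the configured factor when present, otherwise the default 1/2 per
    // step counted from the highest-resolution stream.
    const double scale =
        layers[i].scale_resolution_down_by > 0
            ? layers[i].scale_resolution_down_by
            : static_cast<double>(1 << (layers.size() - i - 1));
    out[i] = {static_cast<int>(width / scale),
              static_cast<int>(height / scale)};
  }
  return out;
}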

Bug: webrtc:351644568, b/352504711
Change-Id: Ib3fd859257b61c2a5d695b8b8f45c95495117c0e
No-Try: true
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/357520
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Commit-Queue: Sergey Silkin <ssilkin@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#42654}
This commit is contained in:
Sergey Silkin 2024-07-19 13:18:55 +02:00 committed by WebRTC LUCI CQ
parent ede05c35e4
commit 5fe85d23a2
8 changed files with 472 additions and 404 deletions


@@ -18,8 +18,10 @@ rtc_library("streams_config") {
   deps = [
     ":encoder_config",
+    "../../api:array_view",
     "../../api:field_trials_view",
     "../../api/units:data_rate",
+    "../../api/video:resolution",
     "../../api/video:video_codec_constants",
     "../../api/video_codecs:video_codecs_api",
     "../../call/adaptation:resource_adaptation",
@@ -69,6 +71,7 @@ if (rtc_include_tests) {
     ]
     deps = [
       ":streams_config",
+      "../../api/video_codecs:scalability_mode",
      "../../call/adaptation:resource_adaptation",
      "../../media:media_constants",
      "../../rtc_base/experiments:min_video_bitrate_experiment",


@@ -111,6 +111,22 @@ int GetDefaultMaxQp(webrtc::VideoCodecType codec_type) {
   }
 }
 
+// Round size to nearest simulcast-friendly size.
+// Simulcast stream width and height must both be dividable by
+// |2 ^ (simulcast_layers - 1)|.
+int NormalizeSimulcastSize(const FieldTrialsView& field_trials,
+                           int size,
+                           size_t simulcast_layers) {
+  int base2_exponent = static_cast<int>(simulcast_layers) - 1;
+  const absl::optional<int> experimental_base2_exponent =
+      webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials);
+  if (experimental_base2_exponent &&
+      (size > (1 << *experimental_base2_exponent))) {
+    base2_exponent = *experimental_base2_exponent;
+  }
+  return ((size >> base2_exponent) << base2_exponent);
+}
+
 }  // namespace
 
 EncoderStreamFactory::EncoderStreamFactory(
@@ -322,16 +338,17 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams(
       webrtc::VideoEncoderConfig::ContentType::kScreen;
   const bool is_legacy_screencast =
       webrtc::SimulcastUtility::IsConferenceModeScreenshare(encoder_config);
-  std::vector<webrtc::VideoStream> layers;
+  std::vector<webrtc::Resolution> resolutions =
+      GetStreamResolutions(trials, width, height, encoder_config);
   const bool temporal_layers_supported =
       IsTemporalLayersSupported(encoder_config.codec_type);
 
   // Use legacy simulcast screenshare if conference mode is explicitly enabled
   // or use the regular simulcast configuration path which is generic.
-  layers = GetSimulcastConfig(FindRequiredActiveLayers(encoder_config),
-                              encoder_config.number_of_streams, width, height,
-                              is_legacy_screencast, temporal_layers_supported,
-                              trials, encoder_config.codec_type);
+  std::vector<webrtc::VideoStream> layers = GetSimulcastConfig(
+      resolutions, is_legacy_screencast, temporal_layers_supported, trials,
+      encoder_config.codec_type);
 
   // Allow an experiment to override the minimum bitrate for the lowest
   // spatial layer. The experiment's configuration has the lowest priority.
   layers[0].min_bitrate_bps = experimental_min_bitrate
@@ -339,31 +356,6 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams(
                                      webrtc::kDefaultMinVideoBitrateBps))
                                  .bps<int>();
 
-  const bool has_scale_resolution_down_by = absl::c_any_of(
-      encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) {
-        return layer.scale_resolution_down_by != -1.;
-      });
-
-  bool default_scale_factors_used = true;
-  if (has_scale_resolution_down_by) {
-    default_scale_factors_used = IsScaleFactorsPowerOfTwo(encoder_config);
-  }
-  const bool norm_size_configured =
-      webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent(trials)
-          .has_value();
-  const int normalized_width =
-      (default_scale_factors_used || norm_size_configured) &&
-              (width >= kMinLayerSize)
-          ? NormalizeSimulcastSize(trials, width,
-                                   encoder_config.number_of_streams)
-          : width;
-  const int normalized_height =
-      (default_scale_factors_used || norm_size_configured) &&
-              (height >= kMinLayerSize)
-          ? NormalizeSimulcastSize(trials, height,
-                                   encoder_config.number_of_streams)
-          : height;
-
   // Update the active simulcast layers and configured bitrates.
   for (size_t i = 0; i < layers.size(); ++i) {
     layers[i].active = encoder_config.simulcast_layers[i].active;
@@ -381,20 +373,6 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams(
       layers[i].max_framerate =
           encoder_config.simulcast_layers[i].max_framerate;
     }
-    if (encoder_config.simulcast_layers[i].requested_resolution.has_value()) {
-      auto res = GetLayerResolutionFromRequestedResolution(
-          normalized_width, normalized_height,
-          *encoder_config.simulcast_layers[i].requested_resolution);
-      layers[i].width = res.width;
-      layers[i].height = res.height;
-    } else if (has_scale_resolution_down_by) {
-      const double scale_resolution_down_by = std::max(
-          encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0);
-      layers[i].width = ScaleDownResolution(
-          normalized_width, scale_resolution_down_by, kMinLayerSize);
-      layers[i].height = ScaleDownResolution(
-          normalized_height, scale_resolution_down_by, kMinLayerSize);
-    }
     // Update simulcast bitrates with configured min and max bitrate.
     if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) {
       layers[i].min_bitrate_bps =
@@ -516,4 +494,69 @@ EncoderStreamFactory::GetLayerResolutionFromRequestedResolution(
   return {.width = out_width, .height = out_height};
 }
 
+std::vector<webrtc::Resolution> EncoderStreamFactory::GetStreamResolutions(
+    const webrtc::FieldTrialsView& trials,
+    int width,
+    int height,
+    const webrtc::VideoEncoderConfig& encoder_config) const {
+  std::vector<webrtc::Resolution> resolutions;
+  if (webrtc::SimulcastUtility::IsConferenceModeScreenshare(encoder_config)) {
+    for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+      resolutions.push_back({.width = width, .height = height});
+    }
+  } else {
+    size_t min_num_layers = FindRequiredActiveLayers(encoder_config);
+    size_t max_num_layers = LimitSimulcastLayerCount(
+        min_num_layers, encoder_config.number_of_streams, width, height,
+        trials, encoder_config.codec_type);
+    RTC_DCHECK_LE(max_num_layers, encoder_config.number_of_streams);
+
+    const bool has_scale_resolution_down_by = absl::c_any_of(
+        encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) {
+          return layer.scale_resolution_down_by != -1.;
+        });
+
+    bool default_scale_factors_used = true;
+    if (has_scale_resolution_down_by) {
+      default_scale_factors_used = IsScaleFactorsPowerOfTwo(encoder_config);
+    }
+    const bool norm_size_configured =
+        webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent(trials)
+            .has_value();
+    const int normalized_width =
+        (default_scale_factors_used || norm_size_configured) &&
+                (width >= kMinLayerSize)
+            ? NormalizeSimulcastSize(trials, width, max_num_layers)
+            : width;
+    const int normalized_height =
+        (default_scale_factors_used || norm_size_configured) &&
+                (height >= kMinLayerSize)
+            ? NormalizeSimulcastSize(trials, height, max_num_layers)
+            : height;
+
+    resolutions.resize(max_num_layers);
+    for (size_t i = 0; i < max_num_layers; i++) {
+      if (encoder_config.simulcast_layers[i].requested_resolution.has_value()) {
+        resolutions[i] = GetLayerResolutionFromRequestedResolution(
+            normalized_width, normalized_height,
+            *encoder_config.simulcast_layers[i].requested_resolution);
+      } else if (has_scale_resolution_down_by) {
+        const double scale_resolution_down_by = std::max(
+            encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0);
+        resolutions[i].width = ScaleDownResolution(
+            normalized_width, scale_resolution_down_by, kMinLayerSize);
+        resolutions[i].height = ScaleDownResolution(
+            normalized_height, scale_resolution_down_by, kMinLayerSize);
+      } else {
+        // Resolutions with default 1/2 scale factor, from low to high.
+        resolutions[i].width = normalized_width >> (max_num_layers - i - 1);
+        resolutions[i].height = normalized_height >> (max_num_layers - i - 1);
+      }
+    }
+  }
+  return resolutions;
+}
+
 }  // namespace cricket
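
As a quick sanity check of the default branch above, a standalone sketch (not part of the patch, assuming 3 layers, no explicit scale factors, and a 1280x720 input) of what the right shift by (max_num_layers - i - 1) produces, ordered from low to high:

#include <cstdio>

int main() {
  const int kNumLayers = 3, kWidth = 1280, kHeight = 720;
  for (int i = 0; i < kNumLayers; ++i) {
    // Prints: 320x180, 640x360, 1280x720.
    std::printf("layer %d: %dx%d\n", i, kWidth >> (kNumLayers - i - 1),
                kHeight >> (kNumLayers - i - 1));
  }
  return 0;
}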


@@ -54,6 +54,12 @@ class EncoderStreamFactory
       int in_frame_height,
       webrtc::Resolution requested_resolution) const;
 
+  std::vector<webrtc::Resolution> GetStreamResolutions(
+      const webrtc::FieldTrialsView& trials,
+      int width,
+      int height,
+      const webrtc::VideoEncoderConfig& encoder_config) const;
+
   const int encoder_info_requested_resolution_alignment_;
   const absl::optional<webrtc::VideoSourceRestrictions> restrictions_;
 };


@@ -10,6 +10,9 @@
 
 #include "video/config/encoder_stream_factory.h"
 
+#include <tuple>
+
+#include "api/video_codecs/scalability_mode.h"
 #include "call/adaptation/video_source_restrictions.h"
 #include "rtc_base/experiments/min_video_bitrate_experiment.h"
 #include "test/explicit_key_value_config.h"
@@ -20,39 +23,71 @@ namespace webrtc {
 namespace {
 
 using ::cricket::EncoderStreamFactory;
 using test::ExplicitKeyValueConfig;
+using ::testing::Combine;
+using ::testing::ElementsAre;
 using ::testing::IsEmpty;
 using ::testing::Not;
+using ::testing::SizeIs;
+using ::testing::Values;
+
+struct CreateVideoStreamParams {
+  int width = 0;
+  int height = 0;
+  int max_framerate_fps = -1;
+  int min_bitrate_bps = -1;
+  int target_bitrate_bps = -1;
+  int max_bitrate_bps = -1;
+  int scale_resolution_down_by = -1;
+  std::optional<ScalabilityMode> scalability_mode;
+};
+
+// A helper function that creates `VideoStream` with given settings.
+VideoStream CreateVideoStream(const CreateVideoStreamParams& params) {
+  VideoStream stream;
+  stream.width = params.width;
+  stream.height = params.height;
+  stream.max_framerate = params.max_framerate_fps;
+  stream.min_bitrate_bps = params.min_bitrate_bps;
+  stream.target_bitrate_bps = params.target_bitrate_bps;
+  stream.max_bitrate_bps = params.max_bitrate_bps;
+  stream.scale_resolution_down_by = params.scale_resolution_down_by;
+  stream.scalability_mode = params.scalability_mode;
+  return stream;
+}
 
 std::vector<Resolution> GetStreamResolutions(
     const std::vector<VideoStream>& streams) {
   std::vector<Resolution> res;
   for (const auto& s : streams) {
-    if (s.active) {
-      res.push_back(
-          {rtc::checked_cast<int>(s.width), rtc::checked_cast<int>(s.height)});
-    }
+    res.push_back(
+        {rtc::checked_cast<int>(s.width), rtc::checked_cast<int>(s.height)});
   }
   return res;
 }
 
-VideoStream LayerWithRequestedResolution(Resolution res) {
-  VideoStream s;
-  s.requested_resolution = res;
-  return s;
+std::vector<VideoStream> CreateEncoderStreams(
+    const FieldTrialsView& field_trials,
+    const Resolution& resolution,
+    const VideoEncoderConfig& encoder_config,
+    absl::optional<VideoSourceRestrictions> restrictions = absl::nullopt) {
+  VideoEncoder::EncoderInfo encoder_info;
+  auto factory =
+      rtc::make_ref_counted<EncoderStreamFactory>(encoder_info, restrictions);
+  return factory->CreateEncoderStreams(field_trials, resolution.width,
+                                       resolution.height, encoder_config);
 }
 
 }  // namespace
 
 TEST(EncoderStreamFactory, SinglecastRequestedResolution) {
   ExplicitKeyValueConfig field_trials("");
-  VideoEncoder::EncoderInfo encoder_info;
-  auto factory = rtc::make_ref_counted<EncoderStreamFactory>(encoder_info);
   VideoEncoderConfig encoder_config;
   encoder_config.number_of_streams = 1;
-  encoder_config.simulcast_layers.push_back(
-      LayerWithRequestedResolution({.width = 640, .height = 360}));
-  auto streams =
-      factory->CreateEncoderStreams(field_trials, 1280, 720, encoder_config);
+  encoder_config.simulcast_layers.resize(1);
+  encoder_config.simulcast_layers[0].requested_resolution = {.width = 640,
+                                                             .height = 360};
+  auto streams = CreateEncoderStreams(
+      field_trials, {.width = 1280, .height = 720}, encoder_config);
   EXPECT_EQ(streams[0].requested_resolution,
             (Resolution{.width = 640, .height = 360}));
   EXPECT_EQ(GetStreamResolutions(streams), (std::vector<Resolution>{
@@ -66,15 +101,14 @@ TEST(EncoderStreamFactory, SinglecastRequestedResolutionWithAdaptation) {
       /* max_pixels_per_frame= */ (320 * 320),
       /* target_pixels_per_frame= */ absl::nullopt,
       /* max_frame_rate= */ absl::nullopt);
-  VideoEncoder::EncoderInfo encoder_info;
-  auto factory =
-      rtc::make_ref_counted<EncoderStreamFactory>(encoder_info, restrictions);
   VideoEncoderConfig encoder_config;
   encoder_config.number_of_streams = 1;
-  encoder_config.simulcast_layers.push_back(
-      LayerWithRequestedResolution({.width = 640, .height = 360}));
+  encoder_config.simulcast_layers.resize(1);
+  encoder_config.simulcast_layers[0].requested_resolution = {.width = 640,
+                                                             .height = 360};
   auto streams =
-      factory->CreateEncoderStreams(field_trials, 1280, 720, encoder_config);
+      CreateEncoderStreams(field_trials, {.width = 1280, .height = 720},
+                           encoder_config, restrictions);
   EXPECT_EQ(streams[0].requested_resolution,
             (Resolution{.width = 640, .height = 360}));
   EXPECT_EQ(GetStreamResolutions(streams), (std::vector<Resolution>{
@@ -84,18 +118,14 @@ TEST(EncoderStreamFactory, SinglecastRequestedResolutionWithAdaptation) {
 
 TEST(EncoderStreamFactory, BitratePriority) {
   constexpr double kBitratePriority = 0.123;
-  ExplicitKeyValueConfig field_trials("");
-  VideoEncoder::EncoderInfo encoder_info;
-  auto factory = rtc::make_ref_counted<EncoderStreamFactory>(encoder_info);
   VideoEncoderConfig encoder_config;
   encoder_config.number_of_streams = 2;
+  encoder_config.simulcast_layers.resize(encoder_config.number_of_streams);
   encoder_config.bitrate_priority = kBitratePriority;
-  encoder_config.simulcast_layers = {
-      LayerWithRequestedResolution({.width = 320, .height = 180}),
-      LayerWithRequestedResolution({.width = 640, .height = 360})};
-  auto streams =
-      factory->CreateEncoderStreams(field_trials, 640, 360, encoder_config);
-  ASSERT_EQ(streams.size(), 2u);
+  auto streams = CreateEncoderStreams(
+      /*field_trials=*/ExplicitKeyValueConfig(""),
+      {.width = 640, .height = 360}, encoder_config);
+  ASSERT_THAT(streams, SizeIs(2));
   EXPECT_EQ(streams[0].bitrate_priority, kBitratePriority);
   EXPECT_FALSE(streams[1].bitrate_priority);
 }
@@ -125,4 +155,182 @@ TEST(EncoderStreamFactory, SetsMinBitrateToExperimentalValue) {
   EXPECT_NE(streams[0].min_bitrate_bps, kDefaultMinVideoBitrateBps);
   EXPECT_EQ(streams[0].min_bitrate_bps, 1000);
 }
 
+struct StreamResolutionTestParams {
+  absl::string_view field_trials;
+  size_t number_of_streams = 1;
+  Resolution resolution = {.width = 640, .height = 480};
+  bool is_legacy_screencast = false;
+  size_t first_active_layer_idx = 0;
+};
+
+std::vector<Resolution> CreateStreamResolutions(
+    const StreamResolutionTestParams& test_params) {
+  VideoEncoderConfig encoder_config;
+  encoder_config.codec_type = VideoCodecType::kVideoCodecVP8;
+  encoder_config.number_of_streams = test_params.number_of_streams;
+  encoder_config.simulcast_layers.resize(test_params.number_of_streams);
+  for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+    encoder_config.simulcast_layers[i].active =
+        (i >= test_params.first_active_layer_idx);
+  }
+  if (test_params.is_legacy_screencast) {
+    encoder_config.content_type = VideoEncoderConfig::ContentType::kScreen;
+    encoder_config.legacy_conference_mode = true;
+  }
+  return GetStreamResolutions(
+      CreateEncoderStreams(ExplicitKeyValueConfig(test_params.field_trials),
+                           test_params.resolution, encoder_config));
+}
+
+TEST(EncoderStreamFactory, KeepsResolutionUnchangedWhenAligned) {
+  EXPECT_THAT(
+      CreateStreamResolutions({.number_of_streams = 2,
+                               .resolution = {.width = 516, .height = 526}}),
+      ElementsAre(Resolution{.width = 516 / 2, .height = 526 / 2},
+                  Resolution{.width = 516, .height = 526}));
+}
+
+TEST(EncoderStreamFactory, AdjustsResolutionWhenUnaligned) {
+  // By default width and height of the smallest simulcast stream are required
+  // to be whole numbers. To achieve that, the resolution of the highest
+  // simulcast stream is adjusted to be multiple of (2 ^ (number_of_streams -
+  // 1)) by rounding down.
+  EXPECT_THAT(
+      CreateStreamResolutions({.number_of_streams = 2,
+                               .resolution = {.width = 515, .height = 517}}),
+      ElementsAre(Resolution{.width = 514 / 2, .height = 516 / 2},
+                  Resolution{.width = 514, .height = 516}));
+}
+
+TEST(EncoderStreamFactory, MakesResolutionDivisibleBy4) {
+  EXPECT_THAT(
+      CreateStreamResolutions(
+          {.field_trials = "WebRTC-NormalizeSimulcastResolution/Enabled-2/",
+           .number_of_streams = 2,
+           .resolution = {.width = 515, .height = 517}}),
+      ElementsAre(Resolution{.width = 512 / 2, .height = 516 / 2},
+                  Resolution{.width = 512, .height = 516}));
+}
+
+TEST(EncoderStreamFactory, KeepsStreamCountUnchangedWhenResolutionIsHigh) {
+  EXPECT_THAT(
+      CreateStreamResolutions({.number_of_streams = 3,
+                               .resolution = {.width = 1000, .height = 1000}}),
+      SizeIs(3));
+}
+
+TEST(EncoderStreamFactory, ReducesStreamCountWhenResolutionIsLow) {
+  EXPECT_THAT(
+      CreateStreamResolutions({.number_of_streams = 3,
+                               .resolution = {.width = 100, .height = 100}}),
+      SizeIs(1));
+}
+
+TEST(EncoderStreamFactory, ReducesStreamCountDownToFirstActiveStream) {
+  EXPECT_THAT(
+      CreateStreamResolutions({.number_of_streams = 3,
+                               .resolution = {.width = 100, .height = 100},
+                               .first_active_layer_idx = 1}),
+      SizeIs(2));
+}
+
+TEST(EncoderStreamFactory,
+     ReducesLegacyScreencastStreamCountWhenResolutionIsLow) {
+  // At least 2 streams are expected to be configured in legacy screencast mode.
+  EXPECT_THAT(
+      CreateStreamResolutions({.number_of_streams = 3,
+                               .resolution = {.width = 100, .height = 100},
+                               .is_legacy_screencast = true}),
+      SizeIs(2));
+}
+
+TEST(EncoderStreamFactory, KeepsStreamCountUnchangedWhenLegacyLimitIsDisabled) {
+  EXPECT_THAT(CreateStreamResolutions(
+                  {.field_trials = "WebRTC-LegacySimulcastLayerLimit/Disabled/",
+                   .number_of_streams = 3,
+                   .resolution = {.width = 100, .height = 100}}),
+              SizeIs(3));
+}
+
+TEST(EncoderStreamFactory, KeepsHighResolutionWhenStreamCountIsReduced) {
+  EXPECT_THAT(
+      CreateStreamResolutions({.number_of_streams = 3,
+                               .resolution = {.width = 640, .height = 360}}),
+      ElementsAre(Resolution{.width = 320, .height = 180},
+                  Resolution{.width = 640, .height = 360}));
+}
+
+struct OverrideStreamSettingsTestParams {
+  std::string field_trials;
+  Resolution input_resolution;
+  VideoEncoderConfig::ContentType content_type;
+  std::vector<VideoStream> requested_streams;
+  std::vector<VideoStream> expected_streams;
+};
+
+class EncoderStreamFactoryOverrideStreamSettinsTest
+    : public ::testing::TestWithParam<
+          std::tuple<OverrideStreamSettingsTestParams, VideoCodecType>> {};
+
+TEST_P(EncoderStreamFactoryOverrideStreamSettinsTest, OverrideStreamSettings) {
+  OverrideStreamSettingsTestParams test_params = std::get<0>(GetParam());
+  VideoEncoderConfig encoder_config;
+  encoder_config.codec_type = std::get<1>(GetParam());
+  encoder_config.number_of_streams = test_params.requested_streams.size();
+  encoder_config.simulcast_layers = test_params.requested_streams;
+  encoder_config.content_type = test_params.content_type;
+  auto streams =
+      CreateEncoderStreams(ExplicitKeyValueConfig(test_params.field_trials),
+                           test_params.input_resolution, encoder_config);
+  ASSERT_EQ(streams.size(), test_params.expected_streams.size());
+  for (size_t i = 0; i < streams.size(); ++i) {
+    SCOPED_TRACE(i);
+    const VideoStream& expected = test_params.expected_streams[i];
+    EXPECT_EQ(streams[i].width, expected.width);
+    EXPECT_EQ(streams[i].height, expected.height);
+    EXPECT_EQ(streams[i].max_framerate, expected.max_framerate);
+    EXPECT_EQ(streams[i].min_bitrate_bps, expected.min_bitrate_bps);
+    EXPECT_EQ(streams[i].target_bitrate_bps, expected.target_bitrate_bps);
+    EXPECT_EQ(streams[i].max_bitrate_bps, expected.max_bitrate_bps);
+    EXPECT_EQ(streams[i].scalability_mode, expected.scalability_mode);
+  }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+    Screencast,
+    EncoderStreamFactoryOverrideStreamSettinsTest,
+    Combine(Values(OverrideStreamSettingsTestParams{
+                .input_resolution = {.width = 1920, .height = 1080},
+                .content_type = VideoEncoderConfig::ContentType::kScreen,
+                .requested_streams =
+                    {CreateVideoStream(
+                         {.max_framerate_fps = 5,
+                          .max_bitrate_bps = 420'000,
+                          .scale_resolution_down_by = 1,
+                          .scalability_mode = ScalabilityMode::kL1T2}),
+                     CreateVideoStream(
+                         {.max_framerate_fps = 30,
+                          .max_bitrate_bps = 2'500'000,
+                          .scale_resolution_down_by = 1,
+                          .scalability_mode = ScalabilityMode::kL1T2})},
+                .expected_streams =
+                    {CreateVideoStream(
+                         {.width = 1920,
+                          .height = 1080,
+                          .max_framerate_fps = 5,
+                          .min_bitrate_bps = 30'000,
+                          .target_bitrate_bps = 420'000,
+                          .max_bitrate_bps = 420'000,
+                          .scalability_mode = ScalabilityMode::kL1T2}),
+                     CreateVideoStream(
+                         {.width = 1920,
+                          .height = 1080,
+                          .max_framerate_fps = 30,
+                          .min_bitrate_bps = 800'000,
+                          .target_bitrate_bps = 2'500'000,
+                          .max_bitrate_bps = 2'500'000,
+                          .scalability_mode = ScalabilityMode::kL1T2})}}),
+            Values(VideoCodecType::kVideoCodecVP8,
+                   VideoCodecType::kVideoCodecAV1)));
+
 }  // namespace webrtc


@@ -229,31 +229,25 @@ SimulcastFormat InterpolateSimulcastFormat(
 }
 
 std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
-    size_t layer_count,
-    int width,
-    int height,
+    rtc::ArrayView<const webrtc::Resolution> resolutions,
     bool temporal_layers_supported,
     bool base_heavy_tl3_rate_alloc,
     const webrtc::FieldTrialsView& trials,
     webrtc::VideoCodecType codec) {
-  std::vector<webrtc::VideoStream> layers(layer_count);
   const bool enable_lowres_bitrate_interpolation =
       EnableLowresBitrateInterpolation(trials);
   const int num_temporal_layers =
       temporal_layers_supported ? kDefaultNumTemporalLayers : 1;
-  // Format width and height has to be divisible by |2 ^ num_simulcast_layers -
-  // 1|.
-  width = NormalizeSimulcastSize(trials, width, layer_count);
-  height = NormalizeSimulcastSize(trials, height, layer_count);
 
   // Add simulcast streams, from highest resolution (`s` = num_simulcast_layers
   // -1) to lowest resolution at `s` = 0.
-  for (size_t s = layer_count - 1;; --s) {
-    layers[s].width = width;
-    layers[s].height = height;
+  std::vector<webrtc::VideoStream> layers(resolutions.size());
+  for (size_t s = 0; s < resolutions.size(); ++s) {
+    layers[s].width = resolutions[s].width;
+    layers[s].height = resolutions[s].height;
     layers[s].num_temporal_layers = num_temporal_layers;
     SimulcastFormat interpolated_format = InterpolateSimulcastFormat(
-        width, height, /*max_roundup_rate=*/absl::nullopt,
+        layers[s].width, layers[s].height, /*max_roundup_rate=*/absl::nullopt,
         enable_lowres_bitrate_interpolation, codec);
     layers[s].max_bitrate_bps = interpolated_format.max_bitrate.bps();
@@ -293,13 +287,6 @@ std::vector<webrtc::VideoStream> GetNormalSimulcastLayers(
         std::max(layers[s].min_bitrate_bps, layers[s].target_bitrate_bps);
     layers[s].max_framerate = kDefaultVideoMaxFramerate;
-
-    width /= 2;
-    height /= 2;
-
-    if (s == 0) {
-      break;
-    }
   }
 
   return layers;
@@ -362,10 +349,12 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
   return layers;
 }
 
-size_t LimitSimulcastLayerCount(int width,
+}  // namespace
+
+size_t LimitSimulcastLayerCount(size_t min_num_layers,
+                                size_t max_num_layers,
+                                int width,
                                 int height,
-                                size_t need_layers,
-                                size_t layer_count,
                                 const webrtc::FieldTrialsView& trials,
                                 webrtc::VideoCodecType codec) {
   if (!absl::StartsWith(trials.Lookup(kUseLegacySimulcastLayerLimitFieldTrial),
@@ -378,36 +367,19 @@ size_t LimitSimulcastLayerCount(size_t min_num_layers,
     webrtc::ParseFieldTrial({&max_ratio},
                             trials.Lookup("WebRTC-SimulcastLayerLimitRoundUp"));
 
-    size_t adaptive_layer_count = std::max(
-        need_layers, InterpolateSimulcastFormat(
-                         width, height, max_ratio.GetOptional(),
-                         /*enable_lowres_bitrate_interpolation=*/false, codec)
-                         .max_layers);
-    if (layer_count > adaptive_layer_count) {
+    size_t reduced_num_layers =
+        std::max(min_num_layers,
+                 InterpolateSimulcastFormat(
+                     width, height, max_ratio.GetOptional(),
+                     /*enable_lowres_bitrate_interpolation=*/false, codec)
+                     .max_layers);
+    if (max_num_layers > reduced_num_layers) {
       RTC_LOG(LS_WARNING) << "Reducing simulcast layer count from "
-                          << layer_count << " to " << adaptive_layer_count;
-      layer_count = adaptive_layer_count;
+                          << max_num_layers << " to " << reduced_num_layers;
+      return reduced_num_layers;
     }
   }
-  return layer_count;
-}
-
-}  // namespace
-
-// Round size to nearest simulcast-friendly size.
-// Simulcast stream width and height must both be dividable by
-// |2 ^ (simulcast_layers - 1)|.
-int NormalizeSimulcastSize(const FieldTrialsView& field_trials,
-                           int size,
-                           size_t simulcast_layers) {
-  int base2_exponent = static_cast<int>(simulcast_layers) - 1;
-  const absl::optional<int> experimental_base2_exponent =
-      webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials);
-  if (experimental_base2_exponent &&
-      (size > (1 << *experimental_base2_exponent))) {
-    base2_exponent = *experimental_base2_exponent;
-  }
-  return ((size >> base2_exponent) << base2_exponent);
+  return max_num_layers;
 }
 
 void BoostMaxSimulcastLayer(webrtc::DataRate max_bitrate,
@@ -439,32 +411,21 @@ webrtc::DataRate GetTotalMaxBitrate(
 }
 
 std::vector<webrtc::VideoStream> GetSimulcastConfig(
-    size_t min_layers,
-    size_t max_layers,
-    int width,
-    int height,
+    rtc::ArrayView<const webrtc::Resolution> resolutions,
     bool is_screenshare_with_conference_mode,
    bool temporal_layers_supported,
    const webrtc::FieldTrialsView& trials,
    webrtc::VideoCodecType codec) {
-  RTC_DCHECK_LE(min_layers, max_layers);
-  RTC_DCHECK(max_layers > 1 || is_screenshare_with_conference_mode);
+  RTC_DCHECK(!resolutions.empty());
 
   const bool base_heavy_tl3_rate_alloc =
       webrtc::RateControlSettings(trials).Vp8BaseHeavyTl3RateAllocation();
   if (is_screenshare_with_conference_mode) {
-    return GetScreenshareLayers(max_layers, width, height,
-                                temporal_layers_supported,
-                                base_heavy_tl3_rate_alloc, trials);
+    return GetScreenshareLayers(
+        resolutions.size(), resolutions[0].width, resolutions[0].height,
+        temporal_layers_supported, base_heavy_tl3_rate_alloc, trials);
   } else {
-    // Some applications rely on the old behavior limiting the simulcast layer
-    // count based on the resolution automatically, which they can get through
-    // the WebRTC-LegacySimulcastLayerLimit field trial until they update.
-    max_layers = LimitSimulcastLayerCount(width, height, min_layers,
-                                          max_layers, trials, codec);
-    return GetNormalSimulcastLayers(max_layers, width, height,
-                                    temporal_layers_supported,
+    return GetNormalSimulcastLayers(resolutions, temporal_layers_supported,
                                     base_heavy_tl3_rate_alloc, trials, codec);
   }
 }


@@ -15,8 +15,10 @@
 
 #include <vector>
 
+#include "api/array_view.h"
 #include "api/field_trials_view.h"
 #include "api/units/data_rate.h"
+#include "api/video/resolution.h"
 #include "video/config/video_encoder_config.h"
 
 namespace cricket {
@@ -30,17 +32,19 @@ webrtc::DataRate GetTotalMaxBitrate(
 void BoostMaxSimulcastLayer(webrtc::DataRate max_bitrate,
                             std::vector<webrtc::VideoStream>* layers);
 
-// Round size to nearest simulcast-friendly size
-int NormalizeSimulcastSize(const webrtc::FieldTrialsView& field_trials,
-                           int size,
-                           size_t simulcast_layers);
+// Returns number of simulcast streams. The value depends on the resolution and
+// is restricted to the range from `min_num_layers` to `max_num_layers`,
+// inclusive.
+size_t LimitSimulcastLayerCount(size_t min_num_layers,
+                                size_t max_num_layers,
+                                int width,
+                                int height,
+                                const webrtc::FieldTrialsView& trials,
+                                webrtc::VideoCodecType codec);
 
 // Gets simulcast settings.
 std::vector<webrtc::VideoStream> GetSimulcastConfig(
-    size_t min_layers,
-    size_t max_layers,
-    int width,
-    int height,
+    rtc::ArrayView<const webrtc::Resolution> resolutions,
     bool is_screenshare_with_conference_mode,
     bool temporal_layers_supported,
     const webrtc::FieldTrialsView& trials,
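
A minimal caller-side sketch of the updated GetSimulcastConfig() signature, mirroring the unit tests below; the trial string and the three resolutions are example values, not part of the change:

  webrtc::test::ExplicitKeyValueConfig trials("");
  std::vector<webrtc::Resolution> resolutions = {
      {.width = 320, .height = 180},
      {.width = 640, .height = 360},
      {.width = 1280, .height = 720}};  // Ordered from lowest to highest.
  std::vector<webrtc::VideoStream> streams = cricket::GetSimulcastConfig(
      resolutions, /*is_screenshare_with_conference_mode=*/false,
      /*temporal_layers_supported=*/true, trials, webrtc::kVideoCodecVP8);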


@@ -12,12 +12,13 @@
 
 #include "media/base/media_constants.h"
 #include "test/explicit_key_value_config.h"
+#include "test/gmock.h"
 #include "test/gtest.h"
 
 namespace webrtc {
 namespace {
 
 using test::ExplicitKeyValueConfig;
+using ::testing::SizeIs;
 
 constexpr bool kScreenshare = true;
 constexpr int kDefaultTemporalLayers = 3;  // Value from simulcast.cc.
@@ -36,6 +37,19 @@ const std::vector<VideoStream> GetSimulcastBitrates720p() {
   streams[2].max_bitrate_bps = 2500000;
   return streams;
 }
 
+// Creates a vector of resolutions scaled down with 1/2 factor ordered from low
+// to high.
+std::vector<Resolution> CreateResolutions(int max_width,
+                                          int max_height,
+                                          int num_streams) {
+  std::vector<webrtc::Resolution> resolutions(num_streams);
+  for (int i = 0; i < num_streams; ++i) {
+    resolutions[i].width = max_width >> (num_streams - i - 1);
+    resolutions[i].height = max_height >> (num_streams - i - 1);
+  }
+  return resolutions;
+}
+
 }  // namespace
 
 TEST(SimulcastTest, TotalMaxBitrateIsZeroForNoStreams) {
@@ -83,13 +97,12 @@ TEST(SimulcastTest, GetConfig) {
   const std::vector<VideoStream> kExpected = GetSimulcastBitrates720p();
 
-  const size_t kMinLayers = 1;
   const size_t kMaxLayers = 3;
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 1280, 720, !kScreenshare, true, trials,
+      CreateResolutions(1280, 720, kMaxLayers), !kScreenshare, true, trials,
       webrtc::kVideoCodecVP8);
 
-  EXPECT_EQ(kMaxLayers, streams.size());
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(320u, streams[0].width);
   EXPECT_EQ(180u, streams[0].height);
   EXPECT_EQ(640u, streams[1].width);
@@ -114,12 +127,12 @@ TEST(SimulcastTest, GetConfigWithBaseHeavyVP8TL3RateAllocation) {
   const std::vector<VideoStream> kExpected = GetSimulcastBitrates720p();
 
-  const size_t kMinLayers = 1;
   const size_t kMaxLayers = 3;
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 1280, 720, !kScreenshare, true, trials,
+      CreateResolutions(1280, 720, kMaxLayers), !kScreenshare, true, trials,
      webrtc::kVideoCodecVP8);
 
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(kExpected[0].min_bitrate_bps, streams[0].min_bitrate_bps);
   EXPECT_EQ(static_cast<int>(0.4 * kExpected[0].target_bitrate_bps / 0.6),
             streams[0].target_bitrate_bps);
@@ -135,176 +148,27 @@ TEST(SimulcastTest, GetConfigWithBaseHeavyVP8TL3RateAllocation) {
 
 TEST(SimulcastTest, GetConfigWithLimitedMaxLayers) {
   ExplicitKeyValueConfig trials("");
 
-  const size_t kMinLayers = 1;
   const size_t kMaxLayers = 2;
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 1280, 720, !kScreenshare, true, trials,
+      CreateResolutions(1280, 720, kMaxLayers), !kScreenshare, true, trials,
       webrtc::kVideoCodecVP8);
 
-  EXPECT_EQ(kMaxLayers, streams.size());
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(640u, streams[0].width);
   EXPECT_EQ(360u, streams[0].height);
   EXPECT_EQ(1280u, streams[1].width);
   EXPECT_EQ(720u, streams[1].height);
 }
 
-TEST(SimulcastTest, GetConfigWithLimitedMaxLayersForResolution) {
-  ExplicitKeyValueConfig trials(
-      "WebRTC-LegacySimulcastLayerLimit/Enabled/");
-
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 3;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 800, 600, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-
-  EXPECT_EQ(2u, streams.size());
-  EXPECT_EQ(400u, streams[0].width);
-  EXPECT_EQ(300u, streams[0].height);
-  EXPECT_EQ(800u, streams[1].width);
-  EXPECT_EQ(600u, streams[1].height);
-}
-
-TEST(SimulcastTest, GetConfigWithLowResolutionScreenshare) {
-  ExplicitKeyValueConfig trials(
-      "WebRTC-LegacySimulcastLayerLimit/Enabled/");
-
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 3;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 100, 100, kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-
-  // Simulcast streams number is never decreased for screenshare,
-  // even for very low resolution.
-  EXPECT_GT(streams.size(), 1u);
-}
-
-TEST(SimulcastTest, GetConfigWithNotLimitedMaxLayersForResolution) {
-  ExplicitKeyValueConfig trials(
-      "WebRTC-LegacySimulcastLayerLimit/Disabled/");
-
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 3;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 800, 600, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-
-  EXPECT_EQ(kMaxLayers, streams.size());
-  EXPECT_EQ(200u, streams[0].width);
-  EXPECT_EQ(150u, streams[0].height);
-  EXPECT_EQ(400u, streams[1].width);
-  EXPECT_EQ(300u, streams[1].height);
-  EXPECT_EQ(800u, streams[2].width);
-  EXPECT_EQ(600u, streams[2].height);
-}
-
-TEST(SimulcastTest, GetConfigWithNormalizedResolution) {
-  ExplicitKeyValueConfig trials("");
-
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 2;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 640 + 1, 360 + 1, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-
-  // Must be divisible by |2 ^ (num_layers - 1)|.
-  EXPECT_EQ(kMaxLayers, streams.size());
-  EXPECT_EQ(320u, streams[0].width);
-  EXPECT_EQ(180u, streams[0].height);
-  EXPECT_EQ(640u, streams[1].width);
-  EXPECT_EQ(360u, streams[1].height);
-}
-
-TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy4) {
-  ExplicitKeyValueConfig trials(
-      "WebRTC-NormalizeSimulcastResolution/Enabled-2/");
-
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 2;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 709, 501, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-
-  // Must be divisible by |2 ^ 2|.
-  EXPECT_EQ(kMaxLayers, streams.size());
-  EXPECT_EQ(354u, streams[0].width);
-  EXPECT_EQ(250u, streams[0].height);
-  EXPECT_EQ(708u, streams[1].width);
-  EXPECT_EQ(500u, streams[1].height);
-}
-
-TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy8) {
-  ExplicitKeyValueConfig trials(
-      "WebRTC-NormalizeSimulcastResolution/Enabled-3/");
-
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 2;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 709, 501, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-
-  // Must be divisible by |2 ^ 3|.
-  EXPECT_EQ(kMaxLayers, streams.size());
-  EXPECT_EQ(352u, streams[0].width);
-  EXPECT_EQ(248u, streams[0].height);
-  EXPECT_EQ(704u, streams[1].width);
-  EXPECT_EQ(496u, streams[1].height);
-}
-
-TEST(SimulcastTest, GetConfigForLegacyLayerLimit) {
-  ExplicitKeyValueConfig trials(
-      "WebRTC-LegacySimulcastLayerLimit/Enabled/");
-
-  const size_t kMinLayers = 1;
-  const int kMaxLayers = 3;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 320, 180, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-  EXPECT_EQ(1u, streams.size());
-
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 640, 360,
-                                         !kScreenshare, true, trials,
-                                         webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
-
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1920, 1080,
-                                         !kScreenshare, true, trials,
-                                         webrtc::kVideoCodecVP8);
-  EXPECT_EQ(3u, streams.size());
-}
-
-TEST(SimulcastTest, GetConfigForLegacyLayerLimitWithRequiredHD) {
-  ExplicitKeyValueConfig trials(
-      "WebRTC-LegacySimulcastLayerLimit/Enabled/");
-
-  const size_t kMinLayers = 3;  // "HD" layer must be present!
-  const int kMaxLayers = 3;
-  std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 320, 180, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
-  EXPECT_EQ(3u, streams.size());
-
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 640, 360,
-                                         !kScreenshare, true, trials,
-                                         webrtc::kVideoCodecVP8);
-  EXPECT_EQ(3u, streams.size());
-
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1920, 1080,
-                                         !kScreenshare, true, trials,
-                                         webrtc::kVideoCodecVP8);
-  EXPECT_EQ(3u, streams.size());
-}
-
 TEST(SimulcastTest, GetConfigForScreenshareSimulcast) {
   ExplicitKeyValueConfig trials("");
 
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 3;
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 1400, 800, kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
+      std::vector<Resolution>{{.width = 1400, .height = 800},
+                              {.width = 1400, .height = 800},
+                              {.width = 1400, .height = 800}},
+      kScreenshare, true, trials, webrtc::kVideoCodecVP8);
 
-  EXPECT_GT(streams.size(), 1u);
+  EXPECT_THAT(streams, SizeIs(2));
   for (size_t i = 0; i < streams.size(); ++i) {
     EXPECT_EQ(1400u, streams[i].width) << "Screen content never scaled.";
     EXPECT_EQ(800u, streams[i].height) << "Screen content never scaled.";
@@ -320,35 +184,29 @@ TEST(SimulcastTest, GetConfigForScreenshareSimulcast) {
 
 TEST(SimulcastTest, GetConfigForScreenshareSimulcastWithLimitedMaxLayers) {
   ExplicitKeyValueConfig trials("");
 
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 1;
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 1400, 800, kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
+      std::vector<Resolution>{{.width = 1400, .height = 800}}, kScreenshare,
+      true, trials, webrtc::kVideoCodecVP8);
 
-  EXPECT_EQ(kMaxLayers, streams.size());
+  EXPECT_THAT(streams, SizeIs(1));
 }
 
 TEST(SimulcastTest, AveragesBitratesForNonStandardResolution) {
   ExplicitKeyValueConfig trials("");
 
-  const size_t kMinLayers = 1;
-  const size_t kMaxLayers = 3;
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, 900, 800, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
+      std::vector<Resolution>{{.width = 900, .height = 800}}, !kScreenshare,
+      true, trials, webrtc::kVideoCodecVP8);
 
-  EXPECT_EQ(kMaxLayers, streams.size());
-  EXPECT_EQ(900u, streams[2].width);
-  EXPECT_EQ(800u, streams[2].height);
-  EXPECT_EQ(1850000, streams[2].max_bitrate_bps);
-  EXPECT_EQ(1850000, streams[2].target_bitrate_bps);
-  EXPECT_EQ(475000, streams[2].min_bitrate_bps);
+  ASSERT_THAT(streams, SizeIs(1));
+  EXPECT_EQ(900u, streams[0].width);
+  EXPECT_EQ(800u, streams[0].height);
+  EXPECT_EQ(1850000, streams[0].max_bitrate_bps);
+  EXPECT_EQ(1850000, streams[0].target_bitrate_bps);
+  EXPECT_EQ(475000, streams[0].min_bitrate_bps);
 }
 
 TEST(SimulcastTest, BitratesForCloseToStandardResolution) {
   ExplicitKeyValueConfig trials("");
 
-  const size_t kMinLayers = 1;
   const size_t kMaxLayers = 3;
   // Resolution very close to 720p in number of pixels
   const size_t kWidth = 1280;
@@ -356,10 +214,10 @@ TEST(SimulcastTest, BitratesForCloseToStandardResolution) {
   const std::vector<VideoStream> kExpectedNear = GetSimulcastBitrates720p();
 
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      kMinLayers, kMaxLayers, kWidth, kHeight, !kScreenshare, true, trials,
-      webrtc::kVideoCodecVP8);
+      CreateResolutions(kWidth, kHeight, kMaxLayers), !kScreenshare, true,
+      trials, webrtc::kVideoCodecVP8);
 
-  EXPECT_EQ(kMaxLayers, streams.size());
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(kWidth, streams[2].width);
   EXPECT_EQ(kHeight, streams[2].height);
   for (size_t i = 0; i < streams.size(); ++i) {
@@ -379,25 +237,20 @@ TEST(SimulcastTest, MaxLayersWithRoundUpDisabled) {
   const size_t kMinLayers = 1;
   const int kMaxLayers = 3;
 
-  std::vector<VideoStream> streams;
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 540,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(3u, streams.size());
+  size_t num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 960, 540, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 3u);
   // <960x540: 2 layers
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 539,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 960, 539, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 2u);
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 270, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 2u);
   // <480x270: 1 layer
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 269,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(1u, streams.size());
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 269, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 1u);
 }
 
 TEST(SimulcastTest, MaxLayersWithDefaultRoundUpRatio) {
@@ -406,33 +259,26 @@ TEST(SimulcastTest, MaxLayersWithDefaultRoundUpRatio) {
   const size_t kMinLayers = 1;
   const int kMaxLayers = 3;
 
-  std::vector<VideoStream> streams;
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 540,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(3u, streams.size());
+  size_t num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 960, 540, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 3u);
   // Lowest cropped height where max layers from higher resolution is used.
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 512,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(3u, streams.size());
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 508,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 960, 512, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 3u);
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 960, 508, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 2u);
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 270, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 2u);
   // Lowest cropped height where max layers from higher resolution is used.
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 256,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 254,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(1u, streams.size());
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 256, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 2u);
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 254, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 1u);
 }
 
 TEST(SimulcastTest, MaxLayersWithRoundUpRatio) {
@@ -442,20 +288,16 @@ TEST(SimulcastTest, MaxLayersWithRoundUpRatio) {
   const size_t kMinLayers = 1;
   const int kMaxLayers = 3;
 
-  std::vector<VideoStream> streams;
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
+  size_t num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 270, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 2u);
   // Lowest cropped height where max layers from higher resolution is used.
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 252,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(2u, streams.size());
-  streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 250,
-                                        !kScreenshare, true, trials,
-                                        webrtc::kVideoCodecVP8);
-  EXPECT_EQ(1u, streams.size());
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 252, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 2u);
+  num_layers = cricket::LimitSimulcastLayerCount(
+      kMinLayers, kMaxLayers, 480, 250, trials, webrtc::kVideoCodecVP8);
+  EXPECT_EQ(num_layers, 1u);
 }
 
 TEST(SimulcastTest, BitratesInterpolatedForResBelow180p) {
@@ -465,10 +307,10 @@ TEST(SimulcastTest, BitratesInterpolatedForResBelow180p) {
   const size_t kMaxLayers = 3;
 
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      /* min_layers = */ 1, kMaxLayers, /* width = */ 960, /* height = */ 540,
+      CreateResolutions(/*max_width=*/960, /*max_height=*/540, kMaxLayers),
       !kScreenshare, true, trials, webrtc::kVideoCodecVP8);
 
-  ASSERT_EQ(streams.size(), kMaxLayers);
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(240u, streams[0].width);
   EXPECT_EQ(135u, streams[0].height);
   EXPECT_EQ(streams[0].max_bitrate_bps, 112500);
@@ -482,10 +324,10 @@ TEST(SimulcastTest, BitratesConsistentForVerySmallRes) {
       "WebRTC-LowresSimulcastBitrateInterpolation/Enabled/");
 
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      /* min_layers = */ 1, /* max_layers = */ 3, /* width = */ 1,
-      /* height = */ 1, !kScreenshare, true, trials, webrtc::kVideoCodecVP8);
+      std::vector<Resolution>{{.width = 1, .height = 1}}, !kScreenshare, true,
+      trials, webrtc::kVideoCodecVP8);
 
-  ASSERT_TRUE(!streams.empty());
+  ASSERT_THAT(streams, SizeIs(1));
   EXPECT_EQ(1u, streams[0].width);
   EXPECT_EQ(1u, streams[0].height);
   EXPECT_EQ(streams[0].max_bitrate_bps, 30000);
@@ -500,10 +342,10 @@ TEST(SimulcastTest,
   const size_t kMaxLayers = 3;
 
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      /* min_layers = */ 1, kMaxLayers, /* width = */ 960, /* height = */ 540,
+      CreateResolutions(/*max_width=*/960, /*max_height=*/540, kMaxLayers),
      !kScreenshare, true, trials, webrtc::kVideoCodecVP8);
 
-  ASSERT_EQ(streams.size(), kMaxLayers);
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(240u, streams[0].width);
   EXPECT_EQ(135u, streams[0].height);
   EXPECT_EQ(streams[0].max_bitrate_bps, 200000);
@@ -516,15 +358,15 @@ TEST(SimulcastTest, BitratesBasedOnCodec) {
   const size_t kMaxLayers = 3;
 
   std::vector<VideoStream> streams_vp8 = cricket::GetSimulcastConfig(
-      /* min_layers = */ 1, /* max_layers = */ 3, /* width = */ 1280,
-      /* height = */ 720, !kScreenshare, true, trials, webrtc::kVideoCodecVP8);
+      CreateResolutions(/*max_width=*/1280, /*max_height=*/720, kMaxLayers),
+      !kScreenshare, true, trials, webrtc::kVideoCodecVP8);
 
   std::vector<VideoStream> streams_vp9 = cricket::GetSimulcastConfig(
-      /* min_layers = */ 1, /* max_layers = */ 3, /* width = */ 1280,
-      /* height = */ 720, !kScreenshare, true, trials, webrtc::kVideoCodecVP9);
+      CreateResolutions(/*max_width=*/1280, /*max_height=*/720, kMaxLayers),
+      !kScreenshare, true, trials, webrtc::kVideoCodecVP9);
 
-  ASSERT_EQ(streams_vp8.size(), kMaxLayers);
-  ASSERT_EQ(streams_vp9.size(), kMaxLayers);
+  ASSERT_THAT(streams_vp8, SizeIs(kMaxLayers));
+  ASSERT_THAT(streams_vp9, SizeIs(kMaxLayers));
 
   EXPECT_EQ(streams_vp9[0].width, streams_vp8[0].width);
   EXPECT_EQ(streams_vp9[0].height, streams_vp8[0].height);
@@ -548,12 +390,11 @@ TEST(SimulcastTest, BitratesForVP9) {
   ExplicitKeyValueConfig trials("");
 
   const size_t kMaxLayers = 3;
   std::vector<VideoStream> streams = cricket::GetSimulcastConfig(
-      /* min_layers = */ 1, kMaxLayers, /* width = */ 1280, /* height = */ 720,
+      CreateResolutions(/*max_width=*/1280, /*max_height=*/720, kMaxLayers),
      !kScreenshare, true, trials, webrtc::kVideoCodecVP9);
 
-  ASSERT_EQ(streams.size(), kMaxLayers);
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(1280u, streams[2].width);
   EXPECT_EQ(720u, streams[2].height);
   EXPECT_EQ(streams[2].max_bitrate_bps, 1524000);
@@ -561,10 +402,10 @@ TEST(SimulcastTest, BitratesForVP9) {
   EXPECT_EQ(streams[2].min_bitrate_bps, 481000);
 
   streams = cricket::GetSimulcastConfig(
-      /* min_layers = */ 1, kMaxLayers, /* width = */ 1276, /* height = */ 716,
+      CreateResolutions(/*max_width=*/1276, /*max_height=*/716, kMaxLayers),
      !kScreenshare, true, trials, webrtc::kVideoCodecVP9);
 
-  ASSERT_EQ(streams.size(), kMaxLayers);
+  ASSERT_THAT(streams, SizeIs(kMaxLayers));
   EXPECT_EQ(1276u, streams[2].width);
   EXPECT_EQ(716u, streams[2].height);
   EXPECT_NEAR(streams[2].max_bitrate_bps, 1524000, 20000);


@@ -2666,9 +2666,11 @@ TEST_P(ResolutionAlignmentTest, SinkWantsAlignmentApplied) {
   config.video_stream_factory = nullptr;
   video_stream_encoder_->ConfigureEncoder(std::move(config), kMaxPayloadLength);
 
+  // We can get up to 3 streams of 1280x720 resolution each in this test. Make
+  // available bitrate large enough to get all streams encoded.
+  const DataRate kAvailableBitrate = 3 * kSimulcastTargetBitrate;
   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
-      kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate,
-      0, 0, 0);
+      kAvailableBitrate, kAvailableBitrate, kAvailableBitrate, 0, 0, 0);
 
   // Wait for all layers before triggering event.
   sink_.SetNumExpectedLayers(num_streams);