Signal requested resolution alignment requirements from sinks to sources.

Bug: webrtc:11218
Change-Id: I593b0515ea389bece472234a3c4082ccc5321ea5
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/162400
Commit-Queue: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#30113}
Authored by Rasmus Brandt on 2019-12-19 09:47:11 +01:00; committed by Commit Bot
parent c04242548c
commit 5cad55b240
19 changed files with 317 additions and 50 deletions

View File

@ -42,6 +42,13 @@ struct RTC_EXPORT VideoSinkWants {
absl::optional<int> target_pixel_count;
// Tells the source the maximum framerate the sink wants.
int max_framerate_fps = std::numeric_limits<int>::max();
// Tells the source that the sink wants width and height of the video frames
// to be divisible by |resolution_alignment|.
// For example: With I420, this value would be a multiple of 2.
// Note that this field is unrelated to any horizontal or vertical stride
// requirements the encoder has on the incoming video frame buffers.
int resolution_alignment = 1;
};
template <typename VideoFrameT>
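The new |resolution_alignment| field lets a sink tell the source that output width and height must be multiples of a given value. As a rough illustration (a standalone sketch, not WebRTC code; AlignTo and the sample values are made up), a source could honor such a request by snapping its output dimensions to the requested multiple:

// Illustrative only: snapping output dimensions to a sink-requested alignment.
#include <cassert>

namespace {

// Round |dimension| down to the nearest multiple of |alignment|, but never
// below |alignment| itself, so the result stays a valid, aligned size.
int AlignTo(int dimension, int alignment) {
  const int aligned = dimension - (dimension % alignment);
  return aligned > 0 ? aligned : alignment;
}

}  // namespace

int main() {
  // A sink consuming I420 would typically ask for an alignment of 2.
  assert(AlignTo(641, 2) == 640);
  // A hardware sink might require multiples of 16.
  assert(AlignTo(270, 16) == 256);
  return 0;
}

(The adapter changes further down actually round the cropped size up within the input bounds rather than down; this sketch only shows the aligned-multiple idea.)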

View File

@ -92,6 +92,7 @@ bool VideoEncoder::ResolutionBitrateLimits::operator==(
VideoEncoder::EncoderInfo::EncoderInfo()
: scaling_settings(VideoEncoder::ScalingSettings::kOff),
requested_resolution_alignment(1),
supports_native_handle(false),
implementation_name("unknown"),
has_trusted_rate_controller(false),
@ -119,7 +120,8 @@ std::string VideoEncoder::EncoderInfo::ToString() const {
}
oss << "min_pixels_per_frame = " << scaling_settings.min_pixels_per_frame
<< " }";
oss << ", supports_native_handle = " << supports_native_handle
oss << ", requested_resolution_alignment = " << requested_resolution_alignment
<< ", supports_native_handle = " << supports_native_handle
<< ", implementation_name = '" << implementation_name << "'"
<< ", has_trusted_rate_controller = " << has_trusted_rate_controller
<< ", is_hardware_accelerated = " << is_hardware_accelerated

View File

@ -166,6 +166,14 @@ class RTC_EXPORT VideoEncoder {
// quality scaler must populate this field.
ScalingSettings scaling_settings;
// The width and height of the incoming video frames should be divisible
// by |requested_resolution_alignment|. If they are not, the encoder may
// drop the incoming frame.
// For example: With I420, this value would be a multiple of 2.
// Note that this field is unrelated to any horizontal or vertical stride
// requirements the encoder has on the incoming video frame buffers.
int requested_resolution_alignment;
// If true, encoder supports working with a native handle (e.g. texture
// handle for hw codecs) rather than requiring a raw I420 buffer.
bool supports_native_handle;

View File

@ -165,6 +165,7 @@ rtc_library("rtc_simulcast_encoder_adapter") {
"engine/simulcast_encoder_adapter.h",
]
deps = [
":rtc_media_base",
"../api:fec_controller_api",
"../api:scoped_refptr",
"../api/video:video_codec_constants",
@ -491,11 +492,11 @@ if (rtc_include_tests) {
rtc_media_unittests_resources = [
"../resources/media/captured-320x240-2s-48.frames",
"../resources/media/faces.1280x720_P420.yuv",
"../resources/media/faces_I400.jpg",
"../resources/media/faces_I411.jpg",
"../resources/media/faces_I420.jpg",
"../resources/media/faces_I422.jpg",
"../resources/media/faces_I444.jpg",
"../resources/media/faces_I411.jpg",
"../resources/media/faces_I400.jpg",
]
if (is_ios) {

View File

@ -24,24 +24,13 @@
#include "system_wrappers/include/field_trial.h"
namespace {
int Gcd(int a, int b) {
RTC_DCHECK_GE(a, 0);
RTC_DCHECK_GT(b, 0);
int c = a % b;
while (c != 0) {
a = b;
b = c;
c = a % b;
}
return b;
}
struct Fraction {
int numerator;
int denominator;
void DivideByGcd() {
int g = Gcd(numerator, denominator);
int g = cricket::GreatestCommonDivisor(numerator, denominator);
numerator /= g;
denominator /= g;
}
@ -136,7 +125,7 @@ Fraction FindScale(int input_width,
namespace cricket {
VideoAdapter::VideoAdapter(int required_resolution_alignment)
VideoAdapter::VideoAdapter(int source_resolution_alignment)
: frames_in_(0),
frames_out_(0),
frames_scaled_(0),
@ -145,7 +134,8 @@ VideoAdapter::VideoAdapter(int required_resolution_alignment)
previous_height_(0),
variable_start_scale_factor_(webrtc::field_trial::IsEnabled(
"WebRTC-Video-VariableStartScaleFactor")),
required_resolution_alignment_(required_resolution_alignment),
source_resolution_alignment_(source_resolution_alignment),
resolution_alignment_(source_resolution_alignment),
resolution_request_target_pixel_count_(std::numeric_limits<int>::max()),
resolution_request_max_pixel_count_(std::numeric_limits<int>::max()),
max_framerate_request_(std::numeric_limits<int>::max()) {}
@ -237,7 +227,8 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
<< " Input: " << in_width << "x" << in_height
<< " timestamp: " << in_timestamp_ns
<< " Output fps: " << max_framerate_request_ << "/"
<< max_fps_.value_or(-1);
<< max_fps_.value_or(-1)
<< " alignment: " << resolution_alignment_;
}
// Drop frame.
@ -261,23 +252,20 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
const Fraction scale =
FindScale(*cropped_width, *cropped_height, target_pixel_count,
max_pixel_count, variable_start_scale_factor_);
// Adjust cropping slightly to get even integer output size and a perfect
// scale factor. Make sure the resulting dimensions are aligned correctly
// to be nice to hardware encoders.
*cropped_width =
roundUp(*cropped_width,
scale.denominator * required_resolution_alignment_, in_width);
*cropped_height =
roundUp(*cropped_height,
scale.denominator * required_resolution_alignment_, in_height);
// Adjust cropping slightly to get correctly aligned output size and a perfect
// scale factor.
*cropped_width = roundUp(*cropped_width,
scale.denominator * resolution_alignment_, in_width);
*cropped_height = roundUp(
*cropped_height, scale.denominator * resolution_alignment_, in_height);
RTC_DCHECK_EQ(0, *cropped_width % scale.denominator);
RTC_DCHECK_EQ(0, *cropped_height % scale.denominator);
// Calculate final output size.
*out_width = *cropped_width / scale.denominator * scale.numerator;
*out_height = *cropped_height / scale.denominator * scale.numerator;
RTC_DCHECK_EQ(0, *out_width % required_resolution_alignment_);
RTC_DCHECK_EQ(0, *out_height % required_resolution_alignment_);
RTC_DCHECK_EQ(0, *out_width % resolution_alignment_);
RTC_DCHECK_EQ(0, *out_height % resolution_alignment_);
++frames_out_;
if (scale.numerator != scale.denominator)
@ -293,7 +281,8 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
<< " Scale: " << scale.numerator << "/"
<< scale.denominator << " Output: " << *out_width << "x"
<< *out_height << " fps: " << max_framerate_request_ << "/"
<< max_fps_.value_or(-1);
<< max_fps_.value_or(-1)
<< " alignment: " << resolution_alignment_;
}
previous_width_ = *out_width;
@ -358,6 +347,8 @@ void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) {
sink_wants.target_pixel_count.value_or(
resolution_request_max_pixel_count_);
max_framerate_request_ = sink_wants.max_framerate_fps;
resolution_alignment_ = cricket::LeastCommonMultiple(
source_resolution_alignment_, sink_wants.resolution_alignment);
}
} // namespace cricket
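The adapter's effective alignment is the least common multiple of the source's fixed requirement and the latest sink request, and the cropped size is then rounded so the scaled output is a multiple of it. A simplified standalone sketch (std::lcm stands in for cricket::LeastCommonMultiple, RoundUpTo for the adapter's internal roundUp helper, and the scale-factor handling is omitted):

// Illustrative only: combining a source alignment of 2 with a sink alignment
// of 7, then nudging a 480x270 request so both requirements hold.
#include <algorithm>
#include <iostream>
#include <numeric>

namespace {

// Round |value| up to a multiple of |multiple|, but never above |max_value|.
int RoundUpTo(int value, int multiple, int max_value) {
  const int rounded = (value + multiple - 1) / multiple * multiple;
  return std::min(rounded, max_value / multiple * multiple);
}

}  // namespace

int main() {
  const int source_alignment = 2;  // Fixed requirement of the source.
  const int sink_alignment = 7;    // Latest VideoSinkWants::resolution_alignment.
  const int alignment = std::lcm(source_alignment, sink_alignment);  // 14

  const int out_width = RoundUpTo(480, alignment, /*max_value=*/1280);  // 490
  const int out_height = RoundUpTo(270, alignment, /*max_value=*/720);  // 280
  std::cout << alignment << ": " << out_width << "x" << out_height << "\n";
  return 0;
}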

View File

@ -31,9 +31,9 @@ namespace cricket {
class VideoAdapter {
public:
VideoAdapter();
// The output frames will have height and width that is divisible by
// |required_resolution_alignment|.
explicit VideoAdapter(int required_resolution_alignment);
// The source requests output frames whose width and height are divisible
// by |source_resolution_alignment|.
explicit VideoAdapter(int source_resolution_alignment);
virtual ~VideoAdapter();
// Return the adapted resolution and cropping parameters given the
@ -90,6 +90,8 @@ class VideoAdapter {
// |sink_wants.max_pixel_count|, but for framerate rather than resolution.
// Set |sink_wants.max_pixel_count| and/or |sink_wants.max_framerate_fps| to
// std::numeric_limit<int>::max() if no upper limit is desired.
// The sink resolution alignment requirement is given by
// |sink_wants.resolution_alignment|.
// Note: Should be called from the sink only.
void OnSinkWants(const rtc::VideoSinkWants& sink_wants);
@ -104,8 +106,14 @@ class VideoAdapter {
int previous_width_; // Previous adapter output width.
int previous_height_; // Previous adapter output height.
const bool variable_start_scale_factor_;
// Resolution must be divisible by this factor.
const int required_resolution_alignment_;
// The fixed source resolution alignment requirement.
const int source_resolution_alignment_;
// The currently applied resolution alignment, as given by the requirements:
// - the fixed |source_resolution_alignment_|; and
// - the latest |sink_wants.resolution_alignment|.
int resolution_alignment_ RTC_GUARDED_BY(critical_section_);
// The target timestamp for the next frame based on requested format.
absl::optional<int64_t> next_frame_timestamp_ns_
RTC_GUARDED_BY(critical_section_);

View File

@ -31,11 +31,13 @@ const int kDefaultFps = 30;
rtc::VideoSinkWants BuildSinkWants(absl::optional<int> target_pixel_count,
int max_pixel_count,
int max_framerate_fps) {
int max_framerate_fps,
int sink_alignment = 1) {
rtc::VideoSinkWants wants;
wants.target_pixel_count = target_pixel_count;
wants.max_pixel_count = max_pixel_count;
wants.max_framerate_fps = max_framerate_fps;
wants.resolution_alignment = sink_alignment;
return wants;
}
@ -44,14 +46,16 @@ rtc::VideoSinkWants BuildSinkWants(absl::optional<int> target_pixel_count,
class VideoAdapterTest : public ::testing::Test,
public ::testing::WithParamInterface<bool> {
public:
VideoAdapterTest() : VideoAdapterTest("") {}
explicit VideoAdapterTest(const std::string& field_trials)
VideoAdapterTest() : VideoAdapterTest("", 1) {}
explicit VideoAdapterTest(const std::string& field_trials,
int source_resolution_alignment)
: override_field_trials_(field_trials),
frame_source_(std::make_unique<FakeFrameSource>(
kWidth,
kHeight,
VideoFormat::FpsToInterval(kDefaultFps) /
rtc::kNumNanosecsPerMicrosec)),
adapter_(source_resolution_alignment),
adapter_wrapper_(std::make_unique<VideoAdapterWrapper>(&adapter_)),
use_new_format_request_(GetParam()) {}
@ -146,7 +150,8 @@ class VideoAdapterTest : public ::testing::Test,
class VideoAdapterTestVariableStartScale : public VideoAdapterTest {
public:
VideoAdapterTestVariableStartScale()
: VideoAdapterTest("WebRTC-Video-VariableStartScaleFactor/Enabled/") {}
: VideoAdapterTest("WebRTC-Video-VariableStartScaleFactor/Enabled/",
/*source_resolution_alignment=*/1) {}
};
INSTANTIATE_TEST_SUITE_P(OnOutputFormatRequests,
@ -673,7 +678,7 @@ TEST_P(VideoAdapterTest, TestViewRequestPlusCameraSwitch) {
}
TEST_P(VideoAdapterTest, TestVgaWidth) {
// Reqeuested Output format is 640x360.
// Requested output format is 640x360.
OnOutputFormatRequest(640, 360, absl::nullopt);
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0, &cropped_width_,
@ -1304,4 +1309,81 @@ TEST_P(VideoAdapterTestVariableStartScale, AdaptResolutionInStepsFirst2x2_3) {
}
}
TEST_P(VideoAdapterTest, AdaptResolutionWithSinkAlignment) {
constexpr int kSourceWidth = 1280;
constexpr int kSourceHeight = 720;
constexpr int kSourceFramerate = 30;
constexpr int kRequestedWidth = 480;
constexpr int kRequestedHeight = 270;
constexpr int kRequestedFramerate = 30;
OnOutputFormatRequest(kRequestedWidth, kRequestedHeight, kRequestedFramerate);
int frame_num = 1;
for (const int sink_alignment : {2, 3, 4, 5}) {
adapter_.OnSinkWants(
BuildSinkWants(absl::nullopt, std::numeric_limits<int>::max(),
std::numeric_limits<int>::max(), sink_alignment));
EXPECT_TRUE(adapter_.AdaptFrameResolution(
kSourceWidth, kSourceHeight,
frame_num * rtc::kNumNanosecsPerSec / kSourceFramerate, &cropped_width_,
&cropped_height_, &out_width_, &out_height_));
EXPECT_EQ(out_width_ % sink_alignment, 0);
EXPECT_EQ(out_height_ % sink_alignment, 0);
++frame_num;
}
}
class VideoAdapterWithSourceAlignmentTest : public VideoAdapterTest {
protected:
static constexpr int kSourceResolutionAlignment = 7;
VideoAdapterWithSourceAlignmentTest()
: VideoAdapterTest(/*field_trials=*/"", kSourceResolutionAlignment) {}
};
TEST_P(VideoAdapterWithSourceAlignmentTest, AdaptResolution) {
constexpr int kSourceWidth = 1280;
constexpr int kSourceHeight = 720;
constexpr int kRequestedWidth = 480;
constexpr int kRequestedHeight = 270;
constexpr int kRequestedFramerate = 30;
OnOutputFormatRequest(kRequestedWidth, kRequestedHeight, kRequestedFramerate);
EXPECT_TRUE(adapter_.AdaptFrameResolution(
kSourceWidth, kSourceHeight, /*in_timestamp_ns=*/0, &cropped_width_,
&cropped_height_, &out_width_, &out_height_));
EXPECT_EQ(out_width_ % kSourceResolutionAlignment, 0);
EXPECT_EQ(out_height_ % kSourceResolutionAlignment, 0);
}
TEST_P(VideoAdapterWithSourceAlignmentTest, AdaptResolutionWithSinkAlignment) {
constexpr int kSourceWidth = 1280;
constexpr int kSourceHeight = 720;
// Neither 7 nor 8 divides both 480 and 270.
constexpr int kRequestedWidth = 480;
constexpr int kRequestedHeight = 270;
constexpr int kRequestedFramerate = 30;
constexpr int kSinkResolutionAlignment = 8;
OnOutputFormatRequest(kRequestedWidth, kRequestedHeight, kRequestedFramerate);
adapter_.OnSinkWants(BuildSinkWants(
absl::nullopt, std::numeric_limits<int>::max(),
std::numeric_limits<int>::max(), kSinkResolutionAlignment));
EXPECT_TRUE(adapter_.AdaptFrameResolution(
kSourceWidth, kSourceHeight, /*in_timestamp_ns=*/0, &cropped_width_,
&cropped_height_, &out_width_, &out_height_));
EXPECT_EQ(out_width_ % kSourceResolutionAlignment, 0);
EXPECT_EQ(out_height_ % kSourceResolutionAlignment, 0);
EXPECT_EQ(out_width_ % kSinkResolutionAlignment, 0);
EXPECT_EQ(out_height_ % kSinkResolutionAlignment, 0);
}
INSTANTIATE_TEST_SUITE_P(OnOutputFormatRequests,
VideoAdapterWithSourceAlignmentTest,
::testing::Values(true, false));
} // namespace cricket

View File

@ -15,6 +15,7 @@
#include "absl/types/optional.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_rotation.h"
#include "media/base/video_common.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
@ -101,6 +102,7 @@ void VideoBroadcaster::OnDiscardedFrame() {
void VideoBroadcaster::UpdateWants() {
VideoSinkWants wants;
wants.rotation_applied = false;
wants.resolution_alignment = 1;
for (auto& sink : sink_pairs()) {
// wants.rotation_applied == ANY(sink.wants.rotation_applied)
if (sink.wants.rotation_applied) {
@ -123,6 +125,8 @@ void VideoBroadcaster::UpdateWants() {
if (sink.wants.max_framerate_fps < wants.max_framerate_fps) {
wants.max_framerate_fps = sink.wants.max_framerate_fps;
}
wants.resolution_alignment = cricket::LeastCommonMultiple(
wants.resolution_alignment, sink.wants.resolution_alignment);
}
if (wants.target_pixel_count &&
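UpdateWants() folds each sink's alignment into a single value with the least common multiple, so one source can satisfy every registered sink at once. A tiny standalone sketch of that fold (std::lcm standing in for cricket::LeastCommonMultiple; the sink values match the unit test below):

// Illustrative only: folding per-sink alignments into one broadcaster value.
#include <iostream>
#include <numeric>
#include <vector>

int main() {
  const std::vector<int> sink_alignments = {2, 3, 4};
  int alignment = 1;  // Default when no sink expresses a requirement.
  for (int a : sink_alignments) {
    alignment = std::lcm(alignment, a);
  }
  std::cout << alignment << "\n";  // Prints 12: lcm(2, 3) = 6, lcm(6, 4) = 12.
  return 0;
}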

View File

@ -158,6 +158,33 @@ TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxFramerate) {
EXPECT_EQ(30, broadcaster.wants().max_framerate_fps);
}
TEST(VideoBroadcasterTest,
AppliesLeastCommonMultipleOfSinkWantsResolutionAlignment) {
VideoBroadcaster broadcaster;
EXPECT_EQ(broadcaster.wants().resolution_alignment, 1);
FakeVideoRenderer sink1;
VideoSinkWants wants1;
wants1.resolution_alignment = 2;
broadcaster.AddOrUpdateSink(&sink1, wants1);
EXPECT_EQ(broadcaster.wants().resolution_alignment, 2);
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.resolution_alignment = 3;
broadcaster.AddOrUpdateSink(&sink2, wants2);
EXPECT_EQ(broadcaster.wants().resolution_alignment, 6);
FakeVideoRenderer sink3;
VideoSinkWants wants3;
wants3.resolution_alignment = 4;
broadcaster.AddOrUpdateSink(&sink3, wants3);
EXPECT_EQ(broadcaster.wants().resolution_alignment, 12);
broadcaster.RemoveSink(&sink2);
EXPECT_EQ(broadcaster.wants().resolution_alignment, 4);
}
TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
VideoBroadcaster broadcaster;
EXPECT_TRUE(!broadcaster.wants().black_frames);

View File

@ -12,6 +12,7 @@
#include "api/array_view.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
namespace cricket {
@ -75,4 +76,22 @@ std::string VideoFormat::ToString() const {
return sb.str();
}
int GreatestCommonDivisor(int a, int b) {
RTC_DCHECK_GE(a, 0);
RTC_DCHECK_GT(b, 0);
int c = a % b;
while (c != 0) {
a = b;
b = c;
c = a % b;
}
return b;
}
int LeastCommonMultiple(int a, int b) {
RTC_DCHECK_GT(a, 0);
RTC_DCHECK_GT(b, 0);
return a * (b / GreatestCommonDivisor(a, b));
}
} // namespace cricket
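A note on the implementation: LeastCommonMultiple computes a * (b / GreatestCommonDivisor(a, b)) rather than (a * b) / gcd, which keeps the intermediate value small and lowers the risk of int overflow. A quick standalone check of that identity (using the <numeric> equivalents rather than the cricket:: helpers):

// Illustrative only: lcm(a, b) == a * (b / gcd(a, b)), avoiding the a * b
// intermediate product.
#include <cassert>
#include <numeric>

int main() {
  const int a = 48000;
  const int b = 36000;
  const int g = std::gcd(a, b);           // 12000
  assert(a * (b / g) == std::lcm(a, b));  // Both are 144000.
  return 0;
}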

View File

@ -213,6 +213,12 @@ struct RTC_EXPORT VideoFormat : VideoFormatPod {
std::string ToString() const;
};
// Returns the largest positive integer that divides both |a| and |b|.
int GreatestCommonDivisor(int a, int b);
// Returns the smallest positive integer that is divisible by both |a| and |b|.
int LeastCommonMultiple(int a, int b);
} // namespace cricket
#endif // MEDIA_BASE_VIDEO_COMMON_H_

View File

@ -92,4 +92,17 @@ TEST(VideoCommonTest, TestVideoFormatCompare) {
EXPECT_TRUE(format.IsPixelRateLess(format2));
}
TEST(VideoCommonTest, GreatestCommonDivisor) {
EXPECT_EQ(GreatestCommonDivisor(0, 1000), 1000);
EXPECT_EQ(GreatestCommonDivisor(1, 1), 1);
EXPECT_EQ(GreatestCommonDivisor(8, 12), 4);
EXPECT_EQ(GreatestCommonDivisor(24, 54), 6);
}
TEST(VideoCommonTest, LeastCommonMultiple) {
EXPECT_EQ(LeastCommonMultiple(1, 1), 1);
EXPECT_EQ(LeastCommonMultiple(2, 3), 6);
EXPECT_EQ(LeastCommonMultiple(16, 32), 32);
}
} // namespace cricket

View File

@ -26,6 +26,7 @@
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
#include "media/base/video_common.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "rtc_base/atomic_ops.h"
@ -626,6 +627,7 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {
VideoEncoder::EncoderInfo encoder_info;
encoder_info.implementation_name = "SimulcastEncoderAdapter";
encoder_info.requested_resolution_alignment = 1;
encoder_info.supports_native_handle = true;
encoder_info.scaling_settings.thresholds = absl::nullopt;
if (streaminfos_.empty()) {
@ -674,6 +676,9 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {
encoder_info.has_internal_source &= encoder_impl_info.has_internal_source;
}
encoder_info.fps_allocation[i] = encoder_impl_info.fps_allocation[0];
encoder_info.requested_resolution_alignment = cricket::LeastCommonMultiple(
encoder_info.requested_resolution_alignment,
encoder_impl_info.requested_resolution_alignment);
}
encoder_info.implementation_name += ")";

View File

@ -27,6 +27,7 @@
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/utility/simulcast_test_fixture_impl.h"
#include "rtc_base/checks.h"
#include "test/gmock.h"
#include "test/gtest.h"
@ -171,6 +172,10 @@ class MockVideoEncoderFactory : public VideoEncoderFactory {
const std::vector<MockVideoEncoder*>& encoders() const;
void SetEncoderNames(const std::vector<const char*>& encoder_names);
void set_init_encode_return_value(int32_t value);
void set_requested_resolution_alignments(
std::vector<int> requested_resolution_alignments) {
requested_resolution_alignments_ = requested_resolution_alignments;
}
void set_supports_simulcast(bool supports_simulcast) {
supports_simulcast_ = supports_simulcast;
}
@ -181,6 +186,8 @@ class MockVideoEncoderFactory : public VideoEncoderFactory {
int32_t init_encode_return_value_ = 0;
std::vector<MockVideoEncoder*> encoders_;
std::vector<const char*> encoder_names_;
// Keep number of entries in sync with |kMaxSimulcastStreams|.
std::vector<int> requested_resolution_alignments_ = {1, 1, 1};
bool supports_simulcast_ = false;
};
@ -226,6 +233,7 @@ class MockVideoEncoder : public VideoEncoder {
info.supports_native_handle = supports_native_handle_;
info.implementation_name = implementation_name_;
info.scaling_settings = scaling_settings_;
info.requested_resolution_alignment = requested_resolution_alignment_;
info.has_trusted_rate_controller = has_trusted_rate_controller_;
info.is_hardware_accelerated = is_hardware_accelerated_;
info.has_internal_source = has_internal_source_;
@ -264,6 +272,10 @@ class MockVideoEncoder : public VideoEncoder {
scaling_settings_ = settings;
}
void set_requested_resolution_alignment(int requested_resolution_alignment) {
requested_resolution_alignment_ = requested_resolution_alignment;
}
void set_has_trusted_rate_controller(bool trusted) {
has_trusted_rate_controller_ = trusted;
}
@ -293,6 +305,7 @@ class MockVideoEncoder : public VideoEncoder {
bool supports_native_handle_ = false;
std::string implementation_name_ = "unknown";
VideoEncoder::ScalingSettings scaling_settings_;
int requested_resolution_alignment_ = 1;
bool has_trusted_rate_controller_ = false;
bool is_hardware_accelerated_ = false;
bool has_internal_source_ = false;
@ -320,6 +333,9 @@ std::unique_ptr<VideoEncoder> MockVideoEncoderFactory::CreateVideoEncoder(
? "codec_implementation_name"
: encoder_names_[encoders_.size()];
encoder->set_implementation_name(encoder_name);
RTC_CHECK_LT(encoders_.size(), requested_resolution_alignments_.size());
encoder->set_requested_resolution_alignment(
requested_resolution_alignments_[encoders_.size()]);
encoder->set_supports_simulcast(supports_simulcast_);
encoders_.push_back(encoder.get());
return encoder;
@ -1176,6 +1192,18 @@ TEST_F(TestSimulcastEncoderAdapterFake, ReportsHardwareAccelerated) {
EXPECT_TRUE(adapter_->GetEncoderInfo().is_hardware_accelerated);
}
TEST_F(TestSimulcastEncoderAdapterFake,
ReportsLeastCommonMultipleOfRequestedResolutionAlignments) {
SimulcastTestFixtureImpl::DefaultSettings(
&codec_, static_cast<const int*>(kTestTemporalLayerProfile),
kVideoCodecVP8);
codec_.numberOfSimulcastStreams = 3;
helper_->factory()->set_requested_resolution_alignments({2, 4, 7});
EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
EXPECT_EQ(adapter_->GetEncoderInfo().requested_resolution_alignment, 28);
}
TEST_F(TestSimulcastEncoderAdapterFake, ReportsInternalSource) {
SimulcastTestFixtureImpl::DefaultSettings(
&codec_, static_cast<const int*>(kTestTemporalLayerProfile),

View File

@ -380,6 +380,7 @@ rtc_library("webrtc_multiplex") {
"../../api/video:video_rtp_headers",
"../../api/video_codecs:video_codecs_api",
"../../common_video",
"../../media:rtc_media_base",
"../../rtc_base",
"../../rtc_base:checks",
"../rtp_rtcp:rtp_rtcp_format",
@ -656,8 +657,8 @@ if (rtc_include_tests) {
}
if (!is_android) {
video_coding_modules_tests_resources += [
"../../resources/FourPeople_1280x720_30.yuv",
"../../resources/ConferenceMotion_1280_720_50.yuv",
"../../resources/FourPeople_1280x720_30.yuv",
]
}

View File

@ -12,6 +12,9 @@ specific_include_rules = {
"android_codec_factory_helper\.cc": [
"+base/android",
],
"multiplex_encoder_adapter\.cc": [
"+media/base",
],
".*test.*\.cc": [
"+media/base",
"+media/engine",

View File

@ -16,6 +16,7 @@
#include "api/video_codecs/video_encoder.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/video_common.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
#include "rtc_base/keep_ref_until_done.h"
@ -101,6 +102,7 @@ int MultiplexEncoderAdapter::InitEncode(
encoder_info_ = EncoderInfo();
encoder_info_.implementation_name = "MultiplexEncoderAdapter (";
encoder_info_.requested_resolution_alignment = 1;
// This needs to be false so that we can do the split in Encode().
encoder_info_.supports_native_handle = false;
@ -132,6 +134,11 @@ int MultiplexEncoderAdapter::InitEncode(
encoder_info_.is_hardware_accelerated |=
encoder_impl_info.is_hardware_accelerated;
}
encoder_info_.requested_resolution_alignment = cricket::LeastCommonMultiple(
encoder_info_.requested_resolution_alignment,
encoder_impl_info.requested_resolution_alignment);
encoder_info_.has_internal_source = false;
encoders_.emplace_back(std::move(encoder));

View File

@ -225,7 +225,8 @@ class VideoStreamEncoder::VideoSourceProxy {
degradation_preference_(DegradationPreference::DISABLED),
source_(nullptr),
max_framerate_(std::numeric_limits<int>::max()),
max_pixels_(std::numeric_limits<int>::max()) {}
max_pixels_(std::numeric_limits<int>::max()),
resolution_alignment_(1) {}
void SetSource(rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) {
@ -252,14 +253,19 @@ class VideoStreamEncoder::VideoSourceProxy {
source->AddOrUpdateSink(video_stream_encoder_, wants);
}
void SetMaxFramerate(int max_framerate) {
void SetMaxFramerateAndAlignment(int max_framerate,
int resolution_alignment) {
RTC_DCHECK_GT(max_framerate, 0);
rtc::CritScope lock(&crit_);
if (max_framerate == max_framerate_)
if (max_framerate == max_framerate_ &&
resolution_alignment == resolution_alignment_) {
return;
}
RTC_LOG(LS_INFO) << "Set max framerate: " << max_framerate;
RTC_LOG(LS_INFO) << "Set max framerate: " << max_framerate
<< " and resolution alignment: " << resolution_alignment;
max_framerate_ = max_framerate;
resolution_alignment_ = resolution_alignment;
if (source_) {
source_->AddOrUpdateSink(video_stream_encoder_,
GetActiveSinkWantsInternal());
@ -454,7 +460,7 @@ class VideoStreamEncoder::VideoSourceProxy {
wants.max_framerate_fps = std::min(max_framerate_, wants.max_framerate_fps);
// Limit resolution due to automatic animation detection for screenshare.
wants.max_pixel_count = std::min(max_pixels_, wants.max_pixel_count);
wants.resolution_alignment = resolution_alignment_;
return wants;
}
@ -466,6 +472,7 @@ class VideoStreamEncoder::VideoSourceProxy {
rtc::VideoSourceInterface<VideoFrame>* source_ RTC_GUARDED_BY(&crit_);
int max_framerate_ RTC_GUARDED_BY(&crit_);
int max_pixels_ RTC_GUARDED_BY(&crit_);
int resolution_alignment_ RTC_GUARDED_BY(&crit_);
RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
};
@ -881,7 +888,8 @@ void VideoStreamEncoder::ReconfigureEncoder() {
for (const auto& stream : streams) {
max_framerate = std::max(stream.max_framerate, max_framerate);
}
source_proxy_->SetMaxFramerate(max_framerate);
source_proxy_->SetMaxFramerateAndAlignment(
max_framerate, encoder_->GetEncoderInfo().requested_resolution_alignment);
if (codec.maxBitrate == 0) {
// max is one bit per pixel

View File

@ -703,6 +703,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
}
info.resolution_bitrate_limits = resolution_bitrate_limits_;
info.requested_resolution_alignment = requested_resolution_alignment_;
return info;
}
@ -727,6 +728,11 @@ class VideoStreamEncoderTest : public ::testing::Test {
quality_scaling_ = b;
}
void SetRequestedResolutionAlignment(int requested_resolution_alignment) {
rtc::CritScope lock(&local_crit_sect_);
requested_resolution_alignment_ = requested_resolution_alignment;
}
void SetIsHardwareAccelerated(bool is_hardware_accelerated) {
rtc::CritScope lock(&local_crit_sect_);
is_hardware_accelerated_ = is_hardware_accelerated;
@ -914,6 +920,7 @@ class VideoStreamEncoderTest : public ::testing::Test {
int last_input_width_ RTC_GUARDED_BY(local_crit_sect_) = 0;
int last_input_height_ RTC_GUARDED_BY(local_crit_sect_) = 0;
bool quality_scaling_ RTC_GUARDED_BY(local_crit_sect_) = true;
int requested_resolution_alignment_ RTC_GUARDED_BY(local_crit_sect_) = 1;
bool is_hardware_accelerated_ RTC_GUARDED_BY(local_crit_sect_) = false;
std::unique_ptr<Vp8FrameBufferController> frame_buffer_controller_
RTC_GUARDED_BY(local_crit_sect_);
@ -980,6 +987,18 @@ class VideoStreamEncoderTest : public ::testing::Test {
EXPECT_EQ(expected_width, width);
}
void CheckLastFrameSizeIsMultipleOf(int resolution_alignment) {
int width = 0;
int height = 0;
{
rtc::CritScope lock(&crit_);
width = last_width_;
height = last_height_;
}
EXPECT_EQ(width % resolution_alignment, 0);
EXPECT_EQ(height % resolution_alignment, 0);
}
void CheckLastFrameRotationMatches(VideoRotation expected_rotation) {
VideoRotation rotation;
{
@ -1623,6 +1642,33 @@ TEST_F(VideoStreamEncoderTest, SinkWantsRotationApplied) {
video_stream_encoder_->Stop();
}
TEST_F(VideoStreamEncoderTest, SinkWantsResolutionAlignment) {
constexpr int kRequestedResolutionAlignment = 7;
video_source_.set_adaptation_enabled(true);
fake_encoder_.SetRequestedResolutionAlignment(kRequestedResolutionAlignment);
video_stream_encoder_->OnBitrateUpdated(
DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps),
DataRate::bps(kTargetBitrateBps), 0, 0);
// On the 1st frame, we should have initialized the encoder and
// asked for its resolution requirements.
video_source_.IncomingCapturedFrame(
CreateFrame(1, codec_width_, codec_height_));
WaitForEncodedFrame(1);
EXPECT_EQ(video_source_.sink_wants().resolution_alignment,
kRequestedResolutionAlignment);
// On the 2nd frame, we should be receiving a correctly aligned resolution.
// (It's up to the encoder to potentially drop the previous frame,
// to avoid coding back-to-back keyframes.)
video_source_.IncomingCapturedFrame(
CreateFrame(2, codec_width_, codec_height_));
WaitForEncodedFrame(2);
sink_.CheckLastFrameSizeIsMultipleOf(kRequestedResolutionAlignment);
video_stream_encoder_->Stop();
}
TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
const int kFramerateFps = 30;
const int kWidth = 1280;
@ -1716,6 +1762,7 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
video_stream_encoder_->Stop();
}
TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
video_stream_encoder_->OnBitrateUpdated(
DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps),
@ -5011,7 +5058,7 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) {
video_stream_encoder_->Stop();
}
TEST_F(VideoStreamEncoderTest, EncoderRatesPropegatedOnReconfigure) {
TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) {
video_stream_encoder_->OnBitrateUpdated(
DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps),
DataRate::bps(kTargetBitrateBps), 0, 0);
@ -5138,7 +5185,7 @@ TEST_F(VideoStreamEncoderTest, ResolutionEncoderSwitch) {
}
TEST_F(VideoStreamEncoderTest,
AllocationPropegratedToEncoderWhenTargetRateChanged) {
AllocationPropagatedToEncoderWhenTargetRateChanged) {
const int kFrameWidth = 320;
const int kFrameHeight = 180;
@ -5173,7 +5220,7 @@ TEST_F(VideoStreamEncoderTest,
}
TEST_F(VideoStreamEncoderTest,
AllocationNotPropegratedToEncoderWhenTargetRateUnchanged) {
AllocationNotPropagatedToEncoderWhenTargetRateUnchanged) {
const int kFrameWidth = 320;
const int kFrameHeight = 180;