Delete support for has_internal_source
Bug: webrtc:12875
Change-Id: I9683e71e1fe5b24802033ffcb32a531ca685fc6f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/179220
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35353}
Parent: efe46b6bee
Commit: 13d163654a
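For downstream code, the practical effect of this change can be summarized by the sketch below. It is illustrative only and simply restates the C++ call sites touched in the diff that follows; names such as `codec_settings` and `encode_timer` are taken from those call sites.

    // Before this change: encoders and factories could advertise an internal
    // source, and FrameEncodeMetadataWriter had to be told about it.
    info.has_internal_source = false;  // field on VideoEncoder::EncoderInfo
    encode_timer.OnEncoderInit(codec_settings, /*internal_source=*/false);

    // After this change: the flag is gone, frames are always delivered via
    // webrtc::VideoEncoder::Encode, and OnEncoderInit takes only the codec.
    encode_timer.OnEncoderInit(codec_settings);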
@ -28,10 +28,6 @@ class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory {
|
|||||||
GetSupportedFormats,
|
GetSupportedFormats,
|
||||||
(),
|
(),
|
||||||
(const, override));
|
(const, override));
|
||||||
MOCK_METHOD(CodecInfo,
|
|
||||||
QueryVideoEncoder,
|
|
||||||
(const SdpVideoFormat&),
|
|
||||||
(const, override));
|
|
||||||
MOCK_METHOD(std::unique_ptr<VideoEncoder>,
|
MOCK_METHOD(std::unique_ptr<VideoEncoder>,
|
||||||
CreateVideoEncoder,
|
CreateVideoEncoder,
|
||||||
(const SdpVideoFormat&),
|
(const SdpVideoFormat&),
|
||||||
|
|||||||
@ -32,15 +32,6 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory {
|
|||||||
BuiltinVideoEncoderFactory()
|
BuiltinVideoEncoderFactory()
|
||||||
: internal_encoder_factory_(new InternalEncoderFactory()) {}
|
: internal_encoder_factory_(new InternalEncoderFactory()) {}
|
||||||
|
|
||||||
VideoEncoderFactory::CodecInfo QueryVideoEncoder(
|
|
||||||
const SdpVideoFormat& format) const override {
|
|
||||||
// Format must be one of the internal formats.
|
|
||||||
RTC_DCHECK(
|
|
||||||
format.IsCodecInList(internal_encoder_factory_->GetSupportedFormats()));
|
|
||||||
VideoEncoderFactory::CodecInfo info;
|
|
||||||
return info;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
||||||
const SdpVideoFormat& format) override {
|
const SdpVideoFormat& format) override {
|
||||||
// Try creating internal encoder.
|
// Try creating internal encoder.
|
||||||
|
|||||||
@ -71,13 +71,6 @@ VideoEncoder::EncoderInfo GetEncoderInfoWithHardwareAccelerated(
|
|||||||
return info;
|
return info;
|
||||||
}
|
}
|
||||||
|
|
||||||
VideoEncoder::EncoderInfo GetEncoderInfoWithInternalSource(
|
|
||||||
bool internal_source) {
|
|
||||||
VideoEncoder::EncoderInfo info;
|
|
||||||
info.has_internal_source = internal_source;
|
|
||||||
return info;
|
|
||||||
}
|
|
||||||
|
|
||||||
class FakeEncodedImageCallback : public EncodedImageCallback {
|
class FakeEncodedImageCallback : public EncodedImageCallback {
|
||||||
public:
|
public:
|
||||||
Result OnEncodedImage(const EncodedImage& encoded_image,
|
Result OnEncodedImage(const EncodedImage& encoded_image,
|
||||||
@ -803,35 +796,6 @@ TEST(SoftwareFallbackEncoderTest, ReportsHardwareAccelerated) {
|
|||||||
EXPECT_FALSE(wrapper->GetEncoderInfo().is_hardware_accelerated);
|
EXPECT_FALSE(wrapper->GetEncoderInfo().is_hardware_accelerated);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(SoftwareFallbackEncoderTest, ReportsInternalSource) {
|
|
||||||
auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
|
|
||||||
auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
|
|
||||||
EXPECT_CALL(*sw_encoder, GetEncoderInfo())
|
|
||||||
.WillRepeatedly(Return(GetEncoderInfoWithInternalSource(false)));
|
|
||||||
EXPECT_CALL(*hw_encoder, GetEncoderInfo())
|
|
||||||
.WillRepeatedly(Return(GetEncoderInfoWithInternalSource(true)));
|
|
||||||
|
|
||||||
std::unique_ptr<VideoEncoder> wrapper =
|
|
||||||
CreateVideoEncoderSoftwareFallbackWrapper(
|
|
||||||
std::unique_ptr<VideoEncoder>(sw_encoder),
|
|
||||||
std::unique_ptr<VideoEncoder>(hw_encoder));
|
|
||||||
EXPECT_TRUE(wrapper->GetEncoderInfo().has_internal_source);
|
|
||||||
|
|
||||||
VideoCodec codec_ = {};
|
|
||||||
codec_.width = 100;
|
|
||||||
codec_.height = 100;
|
|
||||||
wrapper->InitEncode(&codec_, kSettings);
|
|
||||||
|
|
||||||
// Trigger fallback to software.
|
|
||||||
EXPECT_CALL(*hw_encoder, Encode)
|
|
||||||
.WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
|
|
||||||
VideoFrame frame = VideoFrame::Builder()
|
|
||||||
.set_video_frame_buffer(I420Buffer::Create(100, 100))
|
|
||||||
.build();
|
|
||||||
wrapper->Encode(frame, nullptr);
|
|
||||||
EXPECT_FALSE(wrapper->GetEncoderInfo().has_internal_source);
|
|
||||||
}
|
|
||||||
|
|
||||||
class PreferTemporalLayersFallbackTest : public ::testing::Test {
|
class PreferTemporalLayersFallbackTest : public ::testing::Test {
|
||||||
public:
|
public:
|
||||||
PreferTemporalLayersFallbackTest() {}
|
PreferTemporalLayersFallbackTest() {}
|
||||||
|
|||||||
@ -99,7 +99,6 @@ VideoEncoder::EncoderInfo::EncoderInfo()
|
|||||||
implementation_name("unknown"),
|
implementation_name("unknown"),
|
||||||
has_trusted_rate_controller(false),
|
has_trusted_rate_controller(false),
|
||||||
is_hardware_accelerated(true),
|
is_hardware_accelerated(true),
|
||||||
has_internal_source(false),
|
|
||||||
fps_allocation{absl::InlinedVector<uint8_t, kMaxTemporalStreams>(
|
fps_allocation{absl::InlinedVector<uint8_t, kMaxTemporalStreams>(
|
||||||
1,
|
1,
|
||||||
kMaxFramerateFraction)},
|
kMaxFramerateFraction)},
|
||||||
@ -133,7 +132,6 @@ std::string VideoEncoder::EncoderInfo::ToString() const {
|
|||||||
", has_trusted_rate_controller = "
|
", has_trusted_rate_controller = "
|
||||||
<< has_trusted_rate_controller
|
<< has_trusted_rate_controller
|
||||||
<< ", is_hardware_accelerated = " << is_hardware_accelerated
|
<< ", is_hardware_accelerated = " << is_hardware_accelerated
|
||||||
<< ", has_internal_source = " << has_internal_source
|
|
||||||
<< ", fps_allocation = [";
|
<< ", fps_allocation = [";
|
||||||
size_t num_spatial_layer_with_fps_allocation = 0;
|
size_t num_spatial_layer_with_fps_allocation = 0;
|
||||||
for (size_t i = 0; i < kMaxSpatialLayers; ++i) {
|
for (size_t i = 0; i < kMaxSpatialLayers; ++i) {
|
||||||
@ -214,8 +212,7 @@ bool VideoEncoder::EncoderInfo::operator==(const EncoderInfo& rhs) const {
|
|||||||
if (supports_native_handle != rhs.supports_native_handle ||
|
if (supports_native_handle != rhs.supports_native_handle ||
|
||||||
implementation_name != rhs.implementation_name ||
|
implementation_name != rhs.implementation_name ||
|
||||||
has_trusted_rate_controller != rhs.has_trusted_rate_controller ||
|
has_trusted_rate_controller != rhs.has_trusted_rate_controller ||
|
||||||
is_hardware_accelerated != rhs.is_hardware_accelerated ||
|
is_hardware_accelerated != rhs.is_hardware_accelerated) {
|
||||||
has_internal_source != rhs.has_internal_source) {
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -207,13 +207,6 @@ class RTC_EXPORT VideoEncoder {
|
|||||||
// thresholds will be used in CPU adaptation.
|
// thresholds will be used in CPU adaptation.
|
||||||
bool is_hardware_accelerated;
|
bool is_hardware_accelerated;
|
||||||
|
|
||||||
// If this field is true, the encoder uses internal camera sources, meaning
|
|
||||||
// that it does not require/expect frames to be delivered via
|
|
||||||
// webrtc::VideoEncoder::Encode.
|
|
||||||
// Internal source encoders are deprecated and support for them will be
|
|
||||||
// phased out.
|
|
||||||
bool has_internal_source;
|
|
||||||
|
|
||||||
// For each spatial layer (simulcast stream or SVC layer), represented as an
|
// For each spatial layer (simulcast stream or SVC layer), represented as an
|
||||||
// element in `fps_allocation` a vector indicates how many temporal layers
|
// element in `fps_allocation` a vector indicates how many temporal layers
|
||||||
// the encoder is using for that spatial layer.
|
// the encoder is using for that spatial layer.
|
||||||
|
|||||||
@ -29,12 +29,6 @@ class VideoEncoderFactory {
|
|||||||
public:
|
public:
|
||||||
// TODO(magjed): Try to get rid of this struct.
|
// TODO(magjed): Try to get rid of this struct.
|
||||||
struct CodecInfo {
|
struct CodecInfo {
|
||||||
// `has_internal_source` is true if encoders created by this factory of the
|
|
||||||
// given codec will use internal camera sources, meaning that they don't
|
|
||||||
// require/expect frames to be delivered via webrtc::VideoEncoder::Encode.
|
|
||||||
// This flag is used as the internal_source parameter to
|
|
||||||
// webrtc::ViEExternalCodec::RegisterExternalSendCodec.
|
|
||||||
bool has_internal_source = false;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
struct CodecSupport {
|
struct CodecSupport {
|
||||||
|
|||||||
@ -184,35 +184,5 @@ TEST(EncoderSimulcastProxy, ForwardsHardwareAccelerated) {
|
|||||||
EXPECT_TRUE(simulcast_enabled_proxy.GetEncoderInfo().is_hardware_accelerated);
|
EXPECT_TRUE(simulcast_enabled_proxy.GetEncoderInfo().is_hardware_accelerated);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(EncoderSimulcastProxy, ForwardsInternalSource) {
|
|
||||||
auto mock_encoder_owned = std::make_unique<NiceMock<MockVideoEncoder>>();
|
|
||||||
NiceMock<MockVideoEncoder>* mock_encoder = mock_encoder_owned.get();
|
|
||||||
NiceMock<MockVideoEncoderFactory> simulcast_factory;
|
|
||||||
|
|
||||||
EXPECT_CALL(*mock_encoder, InitEncode(_, _))
|
|
||||||
.WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
|
|
||||||
|
|
||||||
EXPECT_CALL(simulcast_factory, CreateVideoEncoder)
|
|
||||||
.Times(1)
|
|
||||||
.WillOnce(Return(ByMove(std::move(mock_encoder_owned))));
|
|
||||||
|
|
||||||
EncoderSimulcastProxy simulcast_enabled_proxy(&simulcast_factory,
|
|
||||||
SdpVideoFormat("VP8"));
|
|
||||||
VideoCodec codec_settings;
|
|
||||||
webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
|
|
||||||
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
|
|
||||||
simulcast_enabled_proxy.InitEncode(&codec_settings, kSettings));
|
|
||||||
|
|
||||||
VideoEncoder::EncoderInfo info;
|
|
||||||
|
|
||||||
info.has_internal_source = false;
|
|
||||||
EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
|
|
||||||
EXPECT_FALSE(simulcast_enabled_proxy.GetEncoderInfo().has_internal_source);
|
|
||||||
|
|
||||||
info.has_internal_source = true;
|
|
||||||
EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
|
|
||||||
EXPECT_TRUE(simulcast_enabled_proxy.GetEncoderInfo().has_internal_source);
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace testing
|
} // namespace testing
|
||||||
} // namespace webrtc
|
} // namespace webrtc
|
||||||
|
|||||||
@ -167,7 +167,6 @@ webrtc::VideoEncoder::EncoderInfo FakeWebRtcVideoEncoder::GetEncoderInfo()
|
|||||||
const {
|
const {
|
||||||
EncoderInfo info;
|
EncoderInfo info;
|
||||||
info.is_hardware_accelerated = true;
|
info.is_hardware_accelerated = true;
|
||||||
info.has_internal_source = false;
|
|
||||||
return info;
|
return info;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -188,7 +187,6 @@ int FakeWebRtcVideoEncoder::GetNumEncodedFrames() {
|
|||||||
// Video encoder factory.
|
// Video encoder factory.
|
||||||
FakeWebRtcVideoEncoderFactory::FakeWebRtcVideoEncoderFactory()
|
FakeWebRtcVideoEncoderFactory::FakeWebRtcVideoEncoderFactory()
|
||||||
: num_created_encoders_(0),
|
: num_created_encoders_(0),
|
||||||
encoders_have_internal_sources_(false),
|
|
||||||
vp8_factory_mode_(false) {}
|
vp8_factory_mode_(false) {}
|
||||||
|
|
||||||
std::vector<webrtc::SdpVideoFormat>
|
std::vector<webrtc::SdpVideoFormat>
|
||||||
@ -227,14 +225,6 @@ FakeWebRtcVideoEncoderFactory::CreateVideoEncoder(
|
|||||||
return encoder;
|
return encoder;
|
||||||
}
|
}
|
||||||
|
|
||||||
webrtc::VideoEncoderFactory::CodecInfo
|
|
||||||
FakeWebRtcVideoEncoderFactory::QueryVideoEncoder(
|
|
||||||
const webrtc::SdpVideoFormat& format) const {
|
|
||||||
webrtc::VideoEncoderFactory::CodecInfo info;
|
|
||||||
info.has_internal_source = encoders_have_internal_sources_;
|
|
||||||
return info;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool FakeWebRtcVideoEncoderFactory::WaitForCreatedVideoEncoders(
|
bool FakeWebRtcVideoEncoderFactory::WaitForCreatedVideoEncoders(
|
||||||
int num_encoders) {
|
int num_encoders) {
|
||||||
int64_t start_offset_ms = rtc::TimeMillis();
|
int64_t start_offset_ms = rtc::TimeMillis();
|
||||||
@ -254,11 +244,6 @@ void FakeWebRtcVideoEncoderFactory::EncoderDestroyed(
|
|||||||
encoders_.end());
|
encoders_.end());
|
||||||
}
|
}
|
||||||
|
|
||||||
void FakeWebRtcVideoEncoderFactory::set_encoders_have_internal_sources(
|
|
||||||
bool internal_source) {
|
|
||||||
encoders_have_internal_sources_ = internal_source;
|
|
||||||
}
|
|
||||||
|
|
||||||
void FakeWebRtcVideoEncoderFactory::AddSupportedVideoCodec(
|
void FakeWebRtcVideoEncoderFactory::AddSupportedVideoCodec(
|
||||||
const webrtc::SdpVideoFormat& format) {
|
const webrtc::SdpVideoFormat& format) {
|
||||||
formats_.push_back(format);
|
formats_.push_back(format);
|
||||||
|
|||||||
@ -116,8 +116,6 @@ class FakeWebRtcVideoEncoderFactory : public webrtc::VideoEncoderFactory {
|
|||||||
std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override;
|
std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override;
|
||||||
std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
|
std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
|
||||||
const webrtc::SdpVideoFormat& format) override;
|
const webrtc::SdpVideoFormat& format) override;
|
||||||
CodecInfo QueryVideoEncoder(
|
|
||||||
const webrtc::SdpVideoFormat& format) const override;
|
|
||||||
|
|
||||||
bool WaitForCreatedVideoEncoders(int num_encoders);
|
bool WaitForCreatedVideoEncoders(int num_encoders);
|
||||||
void EncoderDestroyed(FakeWebRtcVideoEncoder* encoder);
|
void EncoderDestroyed(FakeWebRtcVideoEncoder* encoder);
|
||||||
@ -133,7 +131,6 @@ class FakeWebRtcVideoEncoderFactory : public webrtc::VideoEncoderFactory {
|
|||||||
std::vector<webrtc::SdpVideoFormat> formats_;
|
std::vector<webrtc::SdpVideoFormat> formats_;
|
||||||
std::vector<FakeWebRtcVideoEncoder*> encoders_ RTC_GUARDED_BY(mutex_);
|
std::vector<FakeWebRtcVideoEncoder*> encoders_ RTC_GUARDED_BY(mutex_);
|
||||||
int num_created_encoders_ RTC_GUARDED_BY(mutex_);
|
int num_created_encoders_ RTC_GUARDED_BY(mutex_);
|
||||||
bool encoders_have_internal_sources_;
|
|
||||||
bool vp8_factory_mode_;
|
bool vp8_factory_mode_;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@ -896,7 +896,6 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {
|
|||||||
encoder_impl_info.has_trusted_rate_controller;
|
encoder_impl_info.has_trusted_rate_controller;
|
||||||
encoder_info.is_hardware_accelerated =
|
encoder_info.is_hardware_accelerated =
|
||||||
encoder_impl_info.is_hardware_accelerated;
|
encoder_impl_info.is_hardware_accelerated;
|
||||||
encoder_info.has_internal_source = encoder_impl_info.has_internal_source;
|
|
||||||
encoder_info.is_qp_trusted = encoder_impl_info.is_qp_trusted;
|
encoder_info.is_qp_trusted = encoder_impl_info.is_qp_trusted;
|
||||||
} else {
|
} else {
|
||||||
encoder_info.implementation_name += ", ";
|
encoder_info.implementation_name += ", ";
|
||||||
@ -917,9 +916,6 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {
|
|||||||
encoder_info.is_hardware_accelerated |=
|
encoder_info.is_hardware_accelerated |=
|
||||||
encoder_impl_info.is_hardware_accelerated;
|
encoder_impl_info.is_hardware_accelerated;
|
||||||
|
|
||||||
// Has internal source only if all encoders have it.
|
|
||||||
encoder_info.has_internal_source &= encoder_impl_info.has_internal_source;
|
|
||||||
|
|
||||||
// Treat QP from frame/slice/tile header as average QP only if all
|
// Treat QP from frame/slice/tile header as average QP only if all
|
||||||
// encoders report it as average QP.
|
// encoders report it as average QP.
|
||||||
encoder_info.is_qp_trusted =
|
encoder_info.is_qp_trusted =
|
||||||
|
|||||||
@ -242,7 +242,6 @@ class MockVideoEncoder : public VideoEncoder {
|
|||||||
apply_alignment_to_all_simulcast_layers_;
|
apply_alignment_to_all_simulcast_layers_;
|
||||||
info.has_trusted_rate_controller = has_trusted_rate_controller_;
|
info.has_trusted_rate_controller = has_trusted_rate_controller_;
|
||||||
info.is_hardware_accelerated = is_hardware_accelerated_;
|
info.is_hardware_accelerated = is_hardware_accelerated_;
|
||||||
info.has_internal_source = has_internal_source_;
|
|
||||||
info.fps_allocation[0] = fps_allocation_;
|
info.fps_allocation[0] = fps_allocation_;
|
||||||
info.supports_simulcast = supports_simulcast_;
|
info.supports_simulcast = supports_simulcast_;
|
||||||
info.is_qp_trusted = is_qp_trusted_;
|
info.is_qp_trusted = is_qp_trusted_;
|
||||||
@ -295,10 +294,6 @@ class MockVideoEncoder : public VideoEncoder {
|
|||||||
is_hardware_accelerated_ = is_hardware_accelerated;
|
is_hardware_accelerated_ = is_hardware_accelerated;
|
||||||
}
|
}
|
||||||
|
|
||||||
void set_has_internal_source(bool has_internal_source) {
|
|
||||||
has_internal_source_ = has_internal_source;
|
|
||||||
}
|
|
||||||
|
|
||||||
void set_fps_allocation(const FramerateFractions& fps_allocation) {
|
void set_fps_allocation(const FramerateFractions& fps_allocation) {
|
||||||
fps_allocation_ = fps_allocation;
|
fps_allocation_ = fps_allocation;
|
||||||
}
|
}
|
||||||
@ -330,7 +325,6 @@ class MockVideoEncoder : public VideoEncoder {
|
|||||||
bool apply_alignment_to_all_simulcast_layers_ = false;
|
bool apply_alignment_to_all_simulcast_layers_ = false;
|
||||||
bool has_trusted_rate_controller_ = false;
|
bool has_trusted_rate_controller_ = false;
|
||||||
bool is_hardware_accelerated_ = false;
|
bool is_hardware_accelerated_ = false;
|
||||||
bool has_internal_source_ = false;
|
|
||||||
int32_t init_encode_return_value_ = 0;
|
int32_t init_encode_return_value_ = 0;
|
||||||
VideoEncoder::RateControlParameters last_set_rates_;
|
VideoEncoder::RateControlParameters last_set_rates_;
|
||||||
FramerateFractions fps_allocation_;
|
FramerateFractions fps_allocation_;
|
||||||
@ -1379,28 +1373,6 @@ TEST_F(TestSimulcastEncoderAdapterFake,
|
|||||||
VideoEncoder::ResolutionBitrateLimits{789, 33000, 66000, 99000}));
|
VideoEncoder::ResolutionBitrateLimits{789, 33000, 66000, 99000}));
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(TestSimulcastEncoderAdapterFake, ReportsInternalSource) {
|
|
||||||
SimulcastTestFixtureImpl::DefaultSettings(
|
|
||||||
&codec_, static_cast<const int*>(kTestTemporalLayerProfile),
|
|
||||||
kVideoCodecVP8);
|
|
||||||
codec_.numberOfSimulcastStreams = 3;
|
|
||||||
adapter_->RegisterEncodeCompleteCallback(this);
|
|
||||||
EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
|
|
||||||
ASSERT_EQ(3u, helper_->factory()->encoders().size());
|
|
||||||
|
|
||||||
// All encoders have internal source, simulcast adapter reports true.
|
|
||||||
for (MockVideoEncoder* encoder : helper_->factory()->encoders()) {
|
|
||||||
encoder->set_has_internal_source(true);
|
|
||||||
}
|
|
||||||
EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
|
|
||||||
EXPECT_TRUE(adapter_->GetEncoderInfo().has_internal_source);
|
|
||||||
|
|
||||||
// One encoder does not have internal source, simulcast adapter reports false.
|
|
||||||
helper_->factory()->encoders()[2]->set_has_internal_source(false);
|
|
||||||
EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
|
|
||||||
EXPECT_FALSE(adapter_->GetEncoderInfo().has_internal_source);
|
|
||||||
}
|
|
||||||
|
|
||||||
TEST_F(TestSimulcastEncoderAdapterFake, ReportsIsQpTrusted) {
|
TEST_F(TestSimulcastEncoderAdapterFake, ReportsIsQpTrusted) {
|
||||||
SimulcastTestFixtureImpl::DefaultSettings(
|
SimulcastTestFixtureImpl::DefaultSettings(
|
||||||
&codec_, static_cast<const int*>(kTestTemporalLayerProfile),
|
&codec_, static_cast<const int*>(kTestTemporalLayerProfile),
|
||||||
|
|||||||
@ -1211,11 +1211,7 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
|
|||||||
/*lntf_expected=*/false);
|
/*lntf_expected=*/false);
|
||||||
|
|
||||||
// Mock encoder creation. `engine` take ownership of the encoder.
|
// Mock encoder creation. `engine` take ownership of the encoder.
|
||||||
webrtc::VideoEncoderFactory::CodecInfo codec_info;
|
|
||||||
codec_info.has_internal_source = false;
|
|
||||||
const webrtc::SdpVideoFormat format("VP8");
|
const webrtc::SdpVideoFormat format("VP8");
|
||||||
EXPECT_CALL(*encoder_factory, QueryVideoEncoder(format))
|
|
||||||
.WillRepeatedly(Return(codec_info));
|
|
||||||
EXPECT_CALL(*encoder_factory, CreateVideoEncoder(format)).WillOnce([&] {
|
EXPECT_CALL(*encoder_factory, CreateVideoEncoder(format)).WillOnce([&] {
|
||||||
return std::make_unique<FakeWebRtcVideoEncoder>(nullptr);
|
return std::make_unique<FakeWebRtcVideoEncoder>(nullptr);
|
||||||
});
|
});
|
||||||
|
|||||||
@ -635,7 +635,6 @@ VideoEncoder::EncoderInfo H264EncoderImpl::GetEncoderInfo() const {
|
|||||||
info.scaling_settings =
|
info.scaling_settings =
|
||||||
VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
|
VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
|
||||||
info.is_hardware_accelerated = false;
|
info.is_hardware_accelerated = false;
|
||||||
info.has_internal_source = false;
|
|
||||||
info.supports_simulcast = true;
|
info.supports_simulcast = true;
|
||||||
info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420};
|
info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420};
|
||||||
return info;
|
return info;
|
||||||
|
|||||||
@ -141,8 +141,6 @@ int MultiplexEncoderAdapter::InitEncode(
|
|||||||
encoder_info_.apply_alignment_to_all_simulcast_layers = true;
|
encoder_info_.apply_alignment_to_all_simulcast_layers = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
encoder_info_.has_internal_source = false;
|
|
||||||
|
|
||||||
encoders_.emplace_back(std::move(encoder));
|
encoders_.emplace_back(std::move(encoder));
|
||||||
}
|
}
|
||||||
encoder_info_.implementation_name += ")";
|
encoder_info_.implementation_name += ")";
|
||||||
|
|||||||
@ -1215,7 +1215,6 @@ VideoEncoder::EncoderInfo LibvpxVp8Encoder::GetEncoderInfo() const {
|
|||||||
info.has_trusted_rate_controller =
|
info.has_trusted_rate_controller =
|
||||||
rate_control_settings_.LibvpxVp8TrustedRateController();
|
rate_control_settings_.LibvpxVp8TrustedRateController();
|
||||||
info.is_hardware_accelerated = false;
|
info.is_hardware_accelerated = false;
|
||||||
info.has_internal_source = false;
|
|
||||||
info.supports_simulcast = true;
|
info.supports_simulcast = true;
|
||||||
if (!resolution_bitrate_limits_.empty()) {
|
if (!resolution_bitrate_limits_.empty()) {
|
||||||
info.resolution_bitrate_limits = resolution_bitrate_limits_;
|
info.resolution_bitrate_limits = resolution_bitrate_limits_;
|
||||||
|
|||||||
@ -520,7 +520,6 @@ TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) {
|
|||||||
|
|
||||||
EXPECT_FALSE(info.supports_native_handle);
|
EXPECT_FALSE(info.supports_native_handle);
|
||||||
EXPECT_FALSE(info.is_hardware_accelerated);
|
EXPECT_FALSE(info.is_hardware_accelerated);
|
||||||
EXPECT_FALSE(info.has_internal_source);
|
|
||||||
EXPECT_TRUE(info.supports_simulcast);
|
EXPECT_TRUE(info.supports_simulcast);
|
||||||
EXPECT_EQ(info.implementation_name, "libvpx");
|
EXPECT_EQ(info.implementation_name, "libvpx");
|
||||||
EXPECT_EQ(info.requested_resolution_alignment, 1);
|
EXPECT_EQ(info.requested_resolution_alignment, 1);
|
||||||
|
|||||||
@ -1735,7 +1735,6 @@ VideoEncoder::EncoderInfo LibvpxVp9Encoder::GetEncoderInfo() const {
|
|||||||
}
|
}
|
||||||
info.has_trusted_rate_controller = trusted_rate_controller_;
|
info.has_trusted_rate_controller = trusted_rate_controller_;
|
||||||
info.is_hardware_accelerated = false;
|
info.is_hardware_accelerated = false;
|
||||||
info.has_internal_source = false;
|
|
||||||
if (inited_) {
|
if (inited_) {
|
||||||
// Find the max configured fps of any active spatial layer.
|
// Find the max configured fps of any active spatial layer.
|
||||||
float max_fps = 0.0;
|
float max_fps = 0.0;
|
||||||
|
|||||||
@ -101,7 +101,6 @@ int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
|
|||||||
|
|
||||||
void VideoEncoderWrapper::UpdateEncoderInfo(JNIEnv* jni) {
|
void VideoEncoderWrapper::UpdateEncoderInfo(JNIEnv* jni) {
|
||||||
encoder_info_.supports_native_handle = true;
|
encoder_info_.supports_native_handle = true;
|
||||||
encoder_info_.has_internal_source = false;
|
|
||||||
|
|
||||||
encoder_info_.implementation_name = JavaToStdString(
|
encoder_info_.implementation_name = JavaToStdString(
|
||||||
jni, Java_VideoEncoder_getImplementationName(jni, encoder_));
|
jni, Java_VideoEncoder_getImplementationName(jni, encoder_));
|
||||||
|
|||||||
@ -101,7 +101,6 @@ class ObjCVideoEncoder : public VideoEncoder {
|
|||||||
info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers;
|
info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers;
|
||||||
info.supports_native_handle = encoder_.supportsNativeHandle;
|
info.supports_native_handle = encoder_.supportsNativeHandle;
|
||||||
info.is_hardware_accelerated = true;
|
info.is_hardware_accelerated = true;
|
||||||
info.has_internal_source = false;
|
|
||||||
return info;
|
return info;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -372,12 +372,6 @@ QualityAnalyzingVideoEncoderFactory::GetSupportedFormats() const {
|
|||||||
return delegate_->GetSupportedFormats();
|
return delegate_->GetSupportedFormats();
|
||||||
}
|
}
|
||||||
|
|
||||||
VideoEncoderFactory::CodecInfo
|
|
||||||
QualityAnalyzingVideoEncoderFactory::QueryVideoEncoder(
|
|
||||||
const SdpVideoFormat& format) const {
|
|
||||||
return delegate_->QueryVideoEncoder(format);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::unique_ptr<VideoEncoder>
|
std::unique_ptr<VideoEncoder>
|
||||||
QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder(
|
QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder(
|
||||||
const SdpVideoFormat& format) {
|
const SdpVideoFormat& format) {
|
||||||
|
|||||||
@ -176,8 +176,6 @@ class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory {
|
|||||||
|
|
||||||
// Methods of VideoEncoderFactory interface.
|
// Methods of VideoEncoderFactory interface.
|
||||||
std::vector<SdpVideoFormat> GetSupportedFormats() const override;
|
std::vector<SdpVideoFormat> GetSupportedFormats() const override;
|
||||||
VideoEncoderFactory::CodecInfo QueryVideoEncoder(
|
|
||||||
const SdpVideoFormat& format) const override;
|
|
||||||
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
||||||
const SdpVideoFormat& format) override;
|
const SdpVideoFormat& format) override;
|
||||||
|
|
||||||
|
|||||||
@ -172,11 +172,6 @@ class FakeVideoEncoderFactory : public VideoEncoderFactory {
|
|||||||
std::vector<SdpVideoFormat> GetSupportedFormats() const override {
|
std::vector<SdpVideoFormat> GetSupportedFormats() const override {
|
||||||
return {SdpVideoFormat("VP8")};
|
return {SdpVideoFormat("VP8")};
|
||||||
}
|
}
|
||||||
CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override {
|
|
||||||
RTC_CHECK_EQ(format.name, "VP8");
|
|
||||||
CodecInfo info;
|
|
||||||
return info;
|
|
||||||
}
|
|
||||||
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
||||||
const SdpVideoFormat& format) override {
|
const SdpVideoFormat& format) override {
|
||||||
return std::make_unique<FakeVp8Encoder>(clock_);
|
return std::make_unique<FakeVp8Encoder>(clock_);
|
||||||
|
|||||||
@ -38,7 +38,6 @@ class VideoEncoderProxyFactory final : public VideoEncoderFactory {
|
|||||||
encoder_selector_(encoder_selector),
|
encoder_selector_(encoder_selector),
|
||||||
num_simultaneous_encoder_instances_(0),
|
num_simultaneous_encoder_instances_(0),
|
||||||
max_num_simultaneous_encoder_instances_(0) {
|
max_num_simultaneous_encoder_instances_(0) {
|
||||||
codec_info_.has_internal_source = false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Unused by tests.
|
// Unused by tests.
|
||||||
@ -47,10 +46,6 @@ class VideoEncoderProxyFactory final : public VideoEncoderFactory {
|
|||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override {
|
|
||||||
return codec_info_;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
|
||||||
const SdpVideoFormat& format) override {
|
const SdpVideoFormat& format) override {
|
||||||
++num_simultaneous_encoder_instances_;
|
++num_simultaneous_encoder_instances_;
|
||||||
@ -69,10 +64,6 @@ class VideoEncoderProxyFactory final : public VideoEncoderFactory {
|
|||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
void SetHasInternalSource(bool has_internal_source) {
|
|
||||||
codec_info_.has_internal_source = has_internal_source;
|
|
||||||
}
|
|
||||||
|
|
||||||
int GetMaxNumberOfSimultaneousEncoderInstances() {
|
int GetMaxNumberOfSimultaneousEncoderInstances() {
|
||||||
return max_num_simultaneous_encoder_instances_;
|
return max_num_simultaneous_encoder_instances_;
|
||||||
}
|
}
|
||||||
@ -151,7 +142,6 @@ class VideoEncoderProxyFactory final : public VideoEncoderFactory {
|
|||||||
|
|
||||||
VideoEncoder* const encoder_;
|
VideoEncoder* const encoder_;
|
||||||
EncoderSelectorInterface* const encoder_selector_;
|
EncoderSelectorInterface* const encoder_selector_;
|
||||||
CodecInfo codec_info_;
|
|
||||||
|
|
||||||
int num_simultaneous_encoder_instances_;
|
int num_simultaneous_encoder_instances_;
|
||||||
int max_num_simultaneous_encoder_instances_;
|
int max_num_simultaneous_encoder_instances_;
|
||||||
|
|||||||
@ -50,7 +50,6 @@ FrameEncodeMetadataWriter::TimingFramesLayerInfo::~TimingFramesLayerInfo() =
|
|||||||
FrameEncodeMetadataWriter::FrameEncodeMetadataWriter(
|
FrameEncodeMetadataWriter::FrameEncodeMetadataWriter(
|
||||||
EncodedImageCallback* frame_drop_callback)
|
EncodedImageCallback* frame_drop_callback)
|
||||||
: frame_drop_callback_(frame_drop_callback),
|
: frame_drop_callback_(frame_drop_callback),
|
||||||
internal_source_(false),
|
|
||||||
framerate_fps_(0),
|
framerate_fps_(0),
|
||||||
last_timing_frame_time_ms_(-1),
|
last_timing_frame_time_ms_(-1),
|
||||||
reordered_frames_logged_messages_(0),
|
reordered_frames_logged_messages_(0),
|
||||||
@ -59,12 +58,9 @@ FrameEncodeMetadataWriter::FrameEncodeMetadataWriter(
|
|||||||
}
|
}
|
||||||
FrameEncodeMetadataWriter::~FrameEncodeMetadataWriter() {}
|
FrameEncodeMetadataWriter::~FrameEncodeMetadataWriter() {}
|
||||||
|
|
||||||
void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec,
|
void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec) {
|
||||||
bool internal_source) {
|
|
||||||
MutexLock lock(&lock_);
|
MutexLock lock(&lock_);
|
||||||
codec_settings_ = codec;
|
codec_settings_ = codec;
|
||||||
internal_source_ = internal_source;
|
|
||||||
|
|
||||||
size_t num_spatial_layers = codec_settings_.numberOfSimulcastStreams;
|
size_t num_spatial_layers = codec_settings_.numberOfSimulcastStreams;
|
||||||
if (codec_settings_.codecType == kVideoCodecVP9) {
|
if (codec_settings_.codecType == kVideoCodecVP9) {
|
||||||
num_spatial_layers = std::max(
|
num_spatial_layers = std::max(
|
||||||
@ -96,9 +92,6 @@ void FrameEncodeMetadataWriter::OnSetRates(
|
|||||||
|
|
||||||
void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) {
|
void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) {
|
||||||
MutexLock lock(&lock_);
|
MutexLock lock(&lock_);
|
||||||
if (internal_source_) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
timing_frames_info_.resize(num_spatial_layers_);
|
timing_frames_info_.resize(num_spatial_layers_);
|
||||||
FrameMetadata metadata;
|
FrameMetadata metadata;
|
||||||
@ -148,12 +141,8 @@ void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx,
|
|||||||
|
|
||||||
int64_t encode_done_ms = rtc::TimeMillis();
|
int64_t encode_done_ms = rtc::TimeMillis();
|
||||||
|
|
||||||
// Encoders with internal sources do not call OnEncodeStarted
|
encode_start_ms =
|
||||||
// `timing_frames_info_` may be not filled here.
|
ExtractEncodeStartTimeAndFillMetadata(simulcast_svc_idx, encoded_image);
|
||||||
if (!internal_source_) {
|
|
||||||
encode_start_ms =
|
|
||||||
ExtractEncodeStartTimeAndFillMetadata(simulcast_svc_idx, encoded_image);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (timing_frames_info_.size() > simulcast_svc_idx) {
|
if (timing_frames_info_.size() > simulcast_svc_idx) {
|
||||||
size_t target_bitrate =
|
size_t target_bitrate =
|
||||||
@ -187,21 +176,6 @@ void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx,
|
|||||||
last_timing_frame_time_ms_ = encoded_image->capture_time_ms_;
|
last_timing_frame_time_ms_ = encoded_image->capture_time_ms_;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Workaround for chromoting encoder: it passes encode start and finished
|
|
||||||
// timestamps in `timing_` field, but they (together with capture timestamp)
|
|
||||||
// are not in the WebRTC clock.
|
|
||||||
if (internal_source_ && encoded_image->timing_.encode_finish_ms > 0 &&
|
|
||||||
encoded_image->timing_.encode_start_ms > 0) {
|
|
||||||
int64_t clock_offset_ms =
|
|
||||||
encode_done_ms - encoded_image->timing_.encode_finish_ms;
|
|
||||||
// Translate capture timestamp to local WebRTC clock.
|
|
||||||
encoded_image->capture_time_ms_ += clock_offset_ms;
|
|
||||||
encoded_image->SetTimestamp(
|
|
||||||
static_cast<uint32_t>(encoded_image->capture_time_ms_ * 90));
|
|
||||||
encode_start_ms.emplace(encoded_image->timing_.encode_start_ms +
|
|
||||||
clock_offset_ms);
|
|
||||||
}
|
|
||||||
|
|
||||||
// If encode start is not available that means that encoder uses internal
|
// If encode start is not available that means that encoder uses internal
|
||||||
// source. In that case capture timestamp may be from a different clock with a
|
// source. In that case capture timestamp may be from a different clock with a
|
||||||
// drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
|
// drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
|
||||||
|
|||||||
@ -28,7 +28,7 @@ class FrameEncodeMetadataWriter {
|
|||||||
explicit FrameEncodeMetadataWriter(EncodedImageCallback* frame_drop_callback);
|
explicit FrameEncodeMetadataWriter(EncodedImageCallback* frame_drop_callback);
|
||||||
~FrameEncodeMetadataWriter();
|
~FrameEncodeMetadataWriter();
|
||||||
|
|
||||||
void OnEncoderInit(const VideoCodec& codec, bool internal_source = false);
|
void OnEncoderInit(const VideoCodec& codec);
|
||||||
void OnSetRates(const VideoBitrateAllocation& bitrate_allocation,
|
void OnSetRates(const VideoBitrateAllocation& bitrate_allocation,
|
||||||
uint32_t framerate_fps);
|
uint32_t framerate_fps);
|
||||||
|
|
||||||
@ -67,7 +67,6 @@ class FrameEncodeMetadataWriter {
|
|||||||
Mutex lock_;
|
Mutex lock_;
|
||||||
EncodedImageCallback* const frame_drop_callback_;
|
EncodedImageCallback* const frame_drop_callback_;
|
||||||
VideoCodec codec_settings_ RTC_GUARDED_BY(&lock_);
|
VideoCodec codec_settings_ RTC_GUARDED_BY(&lock_);
|
||||||
bool internal_source_ RTC_GUARDED_BY(&lock_);
|
|
||||||
uint32_t framerate_fps_ RTC_GUARDED_BY(&lock_);
|
uint32_t framerate_fps_ RTC_GUARDED_BY(&lock_);
|
||||||
|
|
||||||
size_t num_spatial_layers_ RTC_GUARDED_BY(&lock_);
|
size_t num_spatial_layers_ RTC_GUARDED_BY(&lock_);
|
||||||
|
|||||||
@ -78,7 +78,7 @@ std::vector<std::vector<FrameType>> GetTimingFrames(
|
|||||||
codec_settings.numberOfSimulcastStreams = num_streams;
|
codec_settings.numberOfSimulcastStreams = num_streams;
|
||||||
codec_settings.timing_frame_thresholds = {delay_ms,
|
codec_settings.timing_frame_thresholds = {delay_ms,
|
||||||
kDefaultOutlierFrameSizePercent};
|
kDefaultOutlierFrameSizePercent};
|
||||||
encode_timer.OnEncoderInit(codec_settings, false);
|
encode_timer.OnEncoderInit(codec_settings);
|
||||||
const size_t kFramerate = 30;
|
const size_t kFramerate = 30;
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
for (int si = 0; si < num_streams; ++si) {
|
for (int si = 0; si < num_streams; ++si) {
|
||||||
@ -205,7 +205,7 @@ TEST(FrameEncodeMetadataWriterTest, NoTimingFrameIfNoEncodeStartTime) {
|
|||||||
VideoCodec codec_settings;
|
VideoCodec codec_settings;
|
||||||
// Make all frames timing frames.
|
// Make all frames timing frames.
|
||||||
codec_settings.timing_frame_thresholds.delay_ms = 1;
|
codec_settings.timing_frame_thresholds.delay_ms = 1;
|
||||||
encode_timer.OnEncoderInit(codec_settings, false);
|
encode_timer.OnEncoderInit(codec_settings);
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
bitrate_allocation.SetBitrate(0, 0, 500000);
|
||||||
encode_timer.OnSetRates(bitrate_allocation, 30);
|
encode_timer.OnSetRates(bitrate_allocation, 30);
|
||||||
@ -228,51 +228,6 @@ TEST(FrameEncodeMetadataWriterTest, NoTimingFrameIfNoEncodeStartTime) {
|
|||||||
EXPECT_FALSE(IsTimingFrame(image));
|
EXPECT_FALSE(IsTimingFrame(image));
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(FrameEncodeMetadataWriterTest,
|
|
||||||
AdjustsCaptureTimeForInternalSourceEncoder) {
|
|
||||||
const int64_t kEncodeStartDelayMs = 2;
|
|
||||||
const int64_t kEncodeFinishDelayMs = 10;
|
|
||||||
constexpr size_t kFrameSize = 500;
|
|
||||||
|
|
||||||
int64_t timestamp = 1;
|
|
||||||
EncodedImage image;
|
|
||||||
image.SetEncodedData(EncodedImageBuffer::Create(kFrameSize));
|
|
||||||
image.capture_time_ms_ = timestamp;
|
|
||||||
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
|
|
||||||
|
|
||||||
FakeEncodedImageCallback sink;
|
|
||||||
FrameEncodeMetadataWriter encode_timer(&sink);
|
|
||||||
|
|
||||||
VideoCodec codec_settings;
|
|
||||||
// Make all frames timing frames.
|
|
||||||
codec_settings.timing_frame_thresholds.delay_ms = 1;
|
|
||||||
encode_timer.OnEncoderInit(codec_settings, true);
|
|
||||||
|
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
|
||||||
encode_timer.OnSetRates(bitrate_allocation, 30);
|
|
||||||
|
|
||||||
// Verify a single frame without encode timestamps isn't a timing frame.
|
|
||||||
encode_timer.FillTimingInfo(0, &image);
|
|
||||||
EXPECT_FALSE(IsTimingFrame(image));
|
|
||||||
|
|
||||||
// New frame, but this time with encode timestamps set in timing_.
|
|
||||||
// This should be a timing frame.
|
|
||||||
image.capture_time_ms_ = ++timestamp;
|
|
||||||
image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
|
|
||||||
image.timing_ = EncodedImage::Timing();
|
|
||||||
image.timing_.encode_start_ms = timestamp + kEncodeStartDelayMs;
|
|
||||||
image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs;
|
|
||||||
|
|
||||||
encode_timer.FillTimingInfo(0, &image);
|
|
||||||
EXPECT_TRUE(IsTimingFrame(image));
|
|
||||||
|
|
||||||
// Frame is captured kEncodeFinishDelayMs before it's encoded, so restored
|
|
||||||
// capture timestamp should be kEncodeFinishDelayMs in the past.
|
|
||||||
EXPECT_NEAR(image.capture_time_ms_, rtc::TimeMillis() - kEncodeFinishDelayMs,
|
|
||||||
1);
|
|
||||||
}
|
|
||||||
|
|
||||||
TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
|
TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
|
||||||
const int64_t kTimestampMs1 = 47721840;
|
const int64_t kTimestampMs1 = 47721840;
|
||||||
const int64_t kTimestampMs2 = 47721850;
|
const int64_t kTimestampMs2 = 47721850;
|
||||||
@ -281,7 +236,7 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
|
|||||||
|
|
||||||
FakeEncodedImageCallback sink;
|
FakeEncodedImageCallback sink;
|
||||||
FrameEncodeMetadataWriter encode_timer(&sink);
|
FrameEncodeMetadataWriter encode_timer(&sink);
|
||||||
encode_timer.OnEncoderInit(VideoCodec(), false);
|
encode_timer.OnEncoderInit(VideoCodec());
|
||||||
// Any non-zero bitrate needed to be set before the first frame.
|
// Any non-zero bitrate needed to be set before the first frame.
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
bitrate_allocation.SetBitrate(0, 0, 500000);
|
||||||
@ -338,7 +293,7 @@ TEST(FrameEncodeMetadataWriterTest, RestoresCaptureTimestamps) {
|
|||||||
FakeEncodedImageCallback sink;
|
FakeEncodedImageCallback sink;
|
||||||
|
|
||||||
FrameEncodeMetadataWriter encode_timer(&sink);
|
FrameEncodeMetadataWriter encode_timer(&sink);
|
||||||
encode_timer.OnEncoderInit(VideoCodec(), false);
|
encode_timer.OnEncoderInit(VideoCodec());
|
||||||
// Any non-zero bitrate needed to be set before the first frame.
|
// Any non-zero bitrate needed to be set before the first frame.
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
bitrate_allocation.SetBitrate(0, 0, 500000);
|
||||||
@ -363,7 +318,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesRotation) {
|
|||||||
FakeEncodedImageCallback sink;
|
FakeEncodedImageCallback sink;
|
||||||
|
|
||||||
FrameEncodeMetadataWriter encode_timer(&sink);
|
FrameEncodeMetadataWriter encode_timer(&sink);
|
||||||
encode_timer.OnEncoderInit(VideoCodec(), false);
|
encode_timer.OnEncoderInit(VideoCodec());
|
||||||
// Any non-zero bitrate needed to be set before the first frame.
|
// Any non-zero bitrate needed to be set before the first frame.
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
bitrate_allocation.SetBitrate(0, 0, 500000);
|
||||||
@ -389,7 +344,7 @@ TEST(FrameEncodeMetadataWriterTest, SetsContentType) {
|
|||||||
FrameEncodeMetadataWriter encode_timer(&sink);
|
FrameEncodeMetadataWriter encode_timer(&sink);
|
||||||
VideoCodec codec;
|
VideoCodec codec;
|
||||||
codec.mode = VideoCodecMode::kScreensharing;
|
codec.mode = VideoCodecMode::kScreensharing;
|
||||||
encode_timer.OnEncoderInit(codec, false);
|
encode_timer.OnEncoderInit(codec);
|
||||||
// Any non-zero bitrate needed to be set before the first frame.
|
// Any non-zero bitrate needed to be set before the first frame.
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
bitrate_allocation.SetBitrate(0, 0, 500000);
|
||||||
@ -413,7 +368,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesColorSpace) {
|
|||||||
FakeEncodedImageCallback sink;
|
FakeEncodedImageCallback sink;
|
||||||
|
|
||||||
FrameEncodeMetadataWriter encode_timer(&sink);
|
FrameEncodeMetadataWriter encode_timer(&sink);
|
||||||
encode_timer.OnEncoderInit(VideoCodec(), false);
|
encode_timer.OnEncoderInit(VideoCodec());
|
||||||
// Any non-zero bitrate needed to be set before the first frame.
|
// Any non-zero bitrate needed to be set before the first frame.
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
bitrate_allocation.SetBitrate(0, 0, 500000);
|
||||||
@ -440,7 +395,7 @@ TEST(FrameEncodeMetadataWriterTest, CopiesPacketInfos) {
|
|||||||
FakeEncodedImageCallback sink;
|
FakeEncodedImageCallback sink;
|
||||||
|
|
||||||
FrameEncodeMetadataWriter encode_timer(&sink);
|
FrameEncodeMetadataWriter encode_timer(&sink);
|
||||||
encode_timer.OnEncoderInit(VideoCodec(), false);
|
encode_timer.OnEncoderInit(VideoCodec());
|
||||||
// Any non-zero bitrate needed to be set before the first frame.
|
// Any non-zero bitrate needed to be set before the first frame.
|
||||||
VideoBitrateAllocation bitrate_allocation;
|
VideoBitrateAllocation bitrate_allocation;
|
||||||
bitrate_allocation.SetBitrate(0, 0, 500000);
|
bitrate_allocation.SetBitrate(0, 0, 500000);
|
||||||
|
|||||||
@ -105,13 +105,6 @@ enum VideoFormat {
|
|||||||
kVP8,
|
kVP8,
|
||||||
};
|
};
|
||||||
|
|
||||||
VideoFrame CreateVideoFrame(int width, int height, int64_t timestamp_ms) {
|
|
||||||
return webrtc::VideoFrame::Builder()
|
|
||||||
.set_video_frame_buffer(I420Buffer::Create(width, height))
|
|
||||||
.set_rotation(webrtc::kVideoRotation_0)
|
|
||||||
.set_timestamp_ms(timestamp_ms)
|
|
||||||
.build();
|
|
||||||
}
|
|
||||||
} // namespace
|
} // namespace
|
||||||
|
|
||||||
class VideoSendStreamTest : public test::CallTest {
|
class VideoSendStreamTest : public test::CallTest {
|
||||||
@ -2206,145 +2199,6 @@ class StartStopBitrateObserver : public test::FakeEncoder {
|
|||||||
absl::optional<int> bitrate_kbps_ RTC_GUARDED_BY(mutex_);
|
absl::optional<int> bitrate_kbps_ RTC_GUARDED_BY(mutex_);
|
||||||
};
|
};
|
||||||
|
|
||||||
// This test that if the encoder use an internal source, VideoEncoder::SetRates
|
|
||||||
// will be called with zero bitrate during initialization and that
|
|
||||||
// VideoSendStream::Stop also triggers VideoEncoder::SetRates Start to be called
|
|
||||||
// with zero bitrate.
|
|
||||||
TEST_F(VideoSendStreamTest, VideoSendStreamStopSetEncoderRateToZero) {
|
|
||||||
test::NullTransport transport;
|
|
||||||
StartStopBitrateObserver encoder;
|
|
||||||
test::VideoEncoderProxyFactory encoder_factory(&encoder);
|
|
||||||
encoder_factory.SetHasInternalSource(true);
|
|
||||||
test::FrameForwarder forwarder;
|
|
||||||
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(),
|
|
||||||
[this, &transport, &encoder_factory, &forwarder]() {
|
|
||||||
CreateSenderCall();
|
|
||||||
CreateSendConfig(1, 0, 0, &transport);
|
|
||||||
|
|
||||||
sender_call_->SignalChannelNetworkState(MediaType::VIDEO,
|
|
||||||
kNetworkUp);
|
|
||||||
GetVideoSendConfig()->encoder_settings.encoder_factory =
|
|
||||||
&encoder_factory;
|
|
||||||
|
|
||||||
CreateVideoStreams();
|
|
||||||
// Inject a frame, to force encoder creation.
|
|
||||||
GetVideoSendStream()->Start();
|
|
||||||
GetVideoSendStream()->SetSource(&forwarder,
|
|
||||||
DegradationPreference::DISABLED);
|
|
||||||
forwarder.IncomingCapturedFrame(CreateVideoFrame(640, 480, 4));
|
|
||||||
});
|
|
||||||
|
|
||||||
EXPECT_TRUE(encoder.WaitForEncoderInit());
|
|
||||||
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(),
|
|
||||||
[this]() { GetVideoSendStream()->Start(); });
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
|
|
||||||
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(),
|
|
||||||
[this]() { GetVideoSendStream()->Stop(); });
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kZero));
|
|
||||||
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(),
|
|
||||||
[this]() { GetVideoSendStream()->Start(); });
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
|
|
||||||
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
DestroyStreams();
|
|
||||||
DestroyCalls();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tests that when the encoder uses an internal source, the VideoEncoder will
|
|
||||||
// be updated with a new bitrate when turning the VideoSendStream on/off with
|
|
||||||
// VideoSendStream::UpdateActiveSimulcastLayers, and when the VideoStreamEncoder
|
|
||||||
// is reconfigured with new active layers.
|
|
||||||
TEST_F(VideoSendStreamTest, VideoSendStreamUpdateActiveSimulcastLayers) {
|
|
||||||
test::NullTransport transport;
|
|
||||||
StartStopBitrateObserver encoder;
|
|
||||||
test::VideoEncoderProxyFactory encoder_factory(&encoder);
|
|
||||||
encoder_factory.SetHasInternalSource(true);
|
|
||||||
test::FrameForwarder forwarder;
|
|
||||||
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(),
|
|
||||||
[this, &transport, &encoder_factory, &forwarder]() {
|
|
||||||
CreateSenderCall();
|
|
||||||
// Create two simulcast streams.
|
|
||||||
CreateSendConfig(2, 0, 0, &transport);
|
|
||||||
|
|
||||||
sender_call_->SignalChannelNetworkState(MediaType::VIDEO,
|
|
||||||
kNetworkUp);
|
|
||||||
GetVideoSendConfig()->encoder_settings.encoder_factory =
|
|
||||||
&encoder_factory;
|
|
||||||
|
|
||||||
CreateVideoStreams();
|
|
||||||
|
|
||||||
EXPECT_FALSE(GetVideoSendStream()->started());
|
|
||||||
|
|
||||||
// Inject a frame, to force encoder creation.
|
|
||||||
GetVideoSendStream()->Start();
|
|
||||||
GetVideoSendStream()->SetSource(&forwarder,
|
|
||||||
DegradationPreference::DISABLED);
|
|
||||||
forwarder.IncomingCapturedFrame(CreateVideoFrame(640, 480, 4));
|
|
||||||
});
|
|
||||||
|
|
||||||
EXPECT_TRUE(encoder.WaitForEncoderInit());
|
|
||||||
|
|
||||||
// When we turn on the simulcast layers it will update the BitrateAllocator,
|
|
||||||
// which in turn updates the VideoEncoder's bitrate.
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
GetVideoSendStream()->UpdateActiveSimulcastLayers({true, true});
|
|
||||||
EXPECT_TRUE(GetVideoSendStream()->started());
|
|
||||||
});
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
|
|
||||||
|
|
||||||
GetVideoEncoderConfig()->simulcast_layers[0].active = true;
|
|
||||||
GetVideoEncoderConfig()->simulcast_layers[1].active = false;
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
GetVideoSendStream()->ReconfigureVideoEncoder(
|
|
||||||
GetVideoEncoderConfig()->Copy());
|
|
||||||
});
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
|
|
||||||
|
|
||||||
// Turning off both simulcast layers should trigger a bitrate change of 0.
|
|
||||||
GetVideoEncoderConfig()->simulcast_layers[0].active = false;
|
|
||||||
GetVideoEncoderConfig()->simulcast_layers[1].active = false;
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
GetVideoSendStream()->UpdateActiveSimulcastLayers({false, false});
|
|
||||||
EXPECT_FALSE(GetVideoSendStream()->started());
|
|
||||||
});
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kZero));
|
|
||||||
|
|
||||||
// Re-activating a layer should resume sending and trigger a bitrate change.
|
|
||||||
GetVideoEncoderConfig()->simulcast_layers[0].active = true;
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
GetVideoSendStream()->UpdateActiveSimulcastLayers({true, false});
|
|
||||||
EXPECT_TRUE(GetVideoSendStream()->started());
|
|
||||||
});
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
|
|
||||||
|
|
||||||
// Stop the stream and make sure the bit rate goes to zero again.
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
GetVideoSendStream()->Stop();
|
|
||||||
EXPECT_FALSE(GetVideoSendStream()->started());
|
|
||||||
});
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kZero));
|
|
||||||
|
|
||||||
// One last test to verify that after `Stop()` we can still implicitly start
|
|
||||||
// the stream if needed. This is what will happen when a send stream gets
|
|
||||||
// re-used. See crbug.com/1241213.
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
GetVideoSendStream()->UpdateActiveSimulcastLayers({true, true});
|
|
||||||
EXPECT_TRUE(GetVideoSendStream()->started());
|
|
||||||
});
|
|
||||||
EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
|
|
||||||
|
|
||||||
SendTask(RTC_FROM_HERE, task_queue(), [this]() {
|
|
||||||
DestroyStreams();
|
|
||||||
DestroyCalls();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
|
TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
|
||||||
class EncoderStateObserver : public test::SendTest, public VideoEncoder {
|
class EncoderStateObserver : public test::SendTest, public VideoEncoder {
|
||||||
public:
|
public:
|
||||||
|
|||||||
@ -839,21 +839,12 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config,
|
|||||||
max_data_payload_length_ = max_data_payload_length;
|
max_data_payload_length_ = max_data_payload_length;
|
||||||
pending_encoder_reconfiguration_ = true;
|
pending_encoder_reconfiguration_ = true;
|
||||||
|
|
||||||
// Reconfigure the encoder now if the encoder has an internal source or
|
// Reconfigure the encoder now if the frame resolution is known.
|
||||||
// if the frame resolution is known. Otherwise, the reconfiguration is
|
// Otherwise, the reconfiguration is deferred until the next frame to
|
||||||
// deferred until the next frame to minimize the number of
|
// minimize the number of reconfigurations. The codec configuration
|
||||||
// reconfigurations. The codec configuration depends on incoming video
|
// depends on incoming video frame size.
|
||||||
// frame size.
|
|
||||||
if (last_frame_info_) {
|
if (last_frame_info_) {
|
||||||
ReconfigureEncoder();
|
ReconfigureEncoder();
|
||||||
} else {
|
|
||||||
codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
|
|
||||||
encoder_config_.video_format);
|
|
||||||
if (HasInternalSource()) {
|
|
||||||
last_frame_info_ = VideoFrameInfo(kDefaultInputPixelsWidth,
|
|
||||||
kDefaultInputPixelsHeight, false);
|
|
||||||
ReconfigureEncoder();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -884,9 +875,6 @@ void VideoStreamEncoder::ReconfigureEncoder() {
|
|||||||
|
|
||||||
encoder_->SetFecControllerOverride(fec_controller_override_);
|
encoder_->SetFecControllerOverride(fec_controller_override_);
|
||||||
|
|
||||||
codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
|
|
||||||
encoder_config_.video_format);
|
|
||||||
|
|
||||||
encoder_reset_required = true;
|
encoder_reset_required = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1158,8 +1146,7 @@ void VideoStreamEncoder::ReconfigureEncoder() {
|
|||||||
} else {
|
} else {
|
||||||
encoder_initialized_ = true;
|
encoder_initialized_ = true;
|
||||||
encoder_->RegisterEncodeCompleteCallback(this);
|
encoder_->RegisterEncodeCompleteCallback(this);
|
||||||
frame_encode_metadata_writer_.OnEncoderInit(send_codec_,
|
frame_encode_metadata_writer_.OnEncoderInit(send_codec_);
|
||||||
HasInternalSource());
|
|
||||||
next_frame_types_.clear();
|
next_frame_types_.clear();
|
||||||
next_frame_types_.resize(
|
next_frame_types_.resize(
|
||||||
std::max(static_cast<int>(codec.numberOfSimulcastStreams), 1),
|
std::max(static_cast<int>(codec.numberOfSimulcastStreams), 1),
|
||||||
@ -1481,15 +1468,12 @@ void VideoStreamEncoder::SetEncoderRates(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// `bitrate_allocation` is 0 it means that the network is down or the send
|
// `bitrate_allocation` is 0 it means that the network is down or the send
|
||||||
// pacer is full. We currently only report this if the encoder has an internal
|
// pacer is full. We currently don't pass this on to the encoder since it is
|
||||||
// source. If the encoder does not have an internal source, higher levels
|
// unclear how current encoder implementations behave when given a zero target
|
||||||
// are expected to not call AddVideoFrame. We do this since it is unclear
|
|
||||||
// how current encoder implementations behave when given a zero target
|
|
||||||
// bitrate.
|
// bitrate.
|
||||||
// TODO(perkj): Make sure all known encoder implementations handle zero
|
// TODO(perkj): Make sure all known encoder implementations handle zero
|
||||||
// target bitrate and remove this check.
|
// target bitrate and remove this check.
|
||||||
if (!HasInternalSource() &&
|
if (rate_settings.rate_control.bitrate.get_sum_bps() == 0) {
|
||||||
rate_settings.rate_control.bitrate.get_sum_bps() == 0) {
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1839,29 +1823,6 @@ void VideoStreamEncoder::SendKeyFrame() {
   // TODO(webrtc:10615): Map keyframe request to spatial layer.
   std::fill(next_frame_types_.begin(), next_frame_types_.end(),
             VideoFrameType::kVideoFrameKey);
-
-  if (HasInternalSource()) {
-    // Try to request the frame if we have an external encoder with
-    // internal source since AddVideoFrame never will be called.
-
-    // TODO(nisse): Used only with internal source. Delete as soon as
-    // that feature is removed. The only implementation I've been able
-    // to find ignores what's in the frame. With one exception: It seems
-    // a few test cases, e.g.,
-    // VideoSendStreamTest.VideoSendStreamStopSetEncoderRateToZero, set
-    // internal_source to true and use FakeEncoder. And the latter will
-    // happily encode this 1x1 frame and pass it on down the pipeline.
-    if (encoder_->Encode(VideoFrame::Builder()
-                             .set_video_frame_buffer(I420Buffer::Create(1, 1))
-                             .set_rotation(kVideoRotation_0)
-                             .set_timestamp_us(0)
-                             .build(),
-                         &next_frame_types_) == WEBRTC_VIDEO_CODEC_OK) {
-      // Try to remove just-performed keyframe request, if stream still exists.
-      std::fill(next_frame_types_.begin(), next_frame_types_.end(),
-                VideoFrameType::kVideoFrameDelta);
-    }
-  }
 }

 void VideoStreamEncoder::OnLossNotification(
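Note (illustrative sketch only, class and method names are hypothetical): with the internal-source branch and its 1x1 dummy frame removed, SendKeyFrame() only records the request in next_frame_types_; the encoder sees it when the next captured frame reaches the normal Encode() path. The request/consume pattern, reduced to a standalone snippet:

#include <algorithm>
#include <vector>

#include "api/video/video_frame_type.h"

class KeyFrameRequestState {
 public:
  explicit KeyFrameRequestState(size_t num_streams)
      : next_frame_types_(num_streams,
                          webrtc::VideoFrameType::kVideoFrameDelta) {}

  // Rough equivalent of SendKeyFrame(): mark every simulcast stream.
  void RequestKeyFrame() {
    std::fill(next_frame_types_.begin(), next_frame_types_.end(),
              webrtc::VideoFrameType::kVideoFrameKey);
  }

  // Called from the normal frame path; returns the frame types to pass to
  // VideoEncoder::Encode() and resets the request back to delta frames.
  std::vector<webrtc::VideoFrameType> Consume() {
    std::vector<webrtc::VideoFrameType> types = next_frame_types_;
    std::fill(next_frame_types_.begin(), next_frame_types_.end(),
              webrtc::VideoFrameType::kVideoFrameDelta);
    return types;
  }

 private:
  std::vector<webrtc::VideoFrameType> next_frame_types_;
};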
@@ -2204,16 +2165,6 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image,
     frame_dropper_.Fill(frame_size.bytes(), !keyframe);
   }

-  if (HasInternalSource()) {
-    // Update frame dropper after the fact for internal sources.
-    input_framerate_.Update(1u, clock_->TimeInMilliseconds());
-    frame_dropper_.Leak(GetInputFramerateFps());
-    // Signal to encoder to drop next frame.
-    if (frame_dropper_.DropFrame()) {
-      pending_frame_drops_.fetch_add(1);
-    }
-  }
-
   stream_resource_manager_.OnEncodeCompleted(encoded_image, time_sent_us,
                                              encode_duration_us, frame_size);
   if (bitrate_adjuster_) {
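Note (hedged sketch, grounded only in the calls visible in this hunk; the header path and helper name are assumptions): the deleted block was the internal-source variant of the frame-dropper bookkeeping, done after encoding because such encoders never delivered frames through the normal input path. The Fill / Leak / DropFrame leaky-bucket pattern it used looks schematically like this:

#include <cstddef>
#include <cstdint>

#include "modules/video_coding/utility/frame_dropper.h"

// Schematic use of the same FrameDropper calls as the removed block; the
// surrounding bookkeeping (input_framerate_, pending_frame_drops_) is omitted.
bool UpdateDropperAfterEncode(webrtc::FrameDropper& dropper,
                              size_t frame_size_bytes,
                              bool is_keyframe,
                              uint32_t input_framerate_fps) {
  dropper.Fill(frame_size_bytes, /*delta_frame=*/!is_keyframe);
  dropper.Leak(input_framerate_fps);
  // True means the next frame should be dropped to stay within budget.
  return dropper.DropFrame();
}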
@@ -2222,12 +2173,6 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image,
   }
 }

-bool VideoStreamEncoder::HasInternalSource() const {
-  // TODO(sprang): Checking both info from encoder and from encoder factory
-  // until we have deprecated and removed the encoder factory info.
-  return codec_info_.has_internal_source || encoder_info_.has_internal_source;
-}
-
 void VideoStreamEncoder::ReleaseEncoder() {
   if (!encoder_ || !encoder_initialized_) {
     return;
@@ -228,7 +228,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
                      int64_t time_sent_us,
                      int temporal_index,
                      DataSize frame_size);
-  bool HasInternalSource() const RTC_RUN_ON(&encoder_queue_);
   void ReleaseEncoder() RTC_RUN_ON(&encoder_queue_);
   // After calling this function `resource_adaptation_processor_` will be null.
   void ShutdownResourceAdaptationQueue();

@@ -334,7 +333,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
   absl::optional<int64_t> last_encode_info_ms_ RTC_GUARDED_BY(&encoder_queue_);

   VideoEncoder::EncoderInfo encoder_info_ RTC_GUARDED_BY(&encoder_queue_);
-  VideoEncoderFactory::CodecInfo codec_info_ RTC_GUARDED_BY(&encoder_queue_);
   VideoCodec send_codec_ RTC_GUARDED_BY(&encoder_queue_);

   FrameDropper frame_dropper_ RTC_GUARDED_BY(&encoder_queue_);
@@ -7240,68 +7240,6 @@ TEST_F(VideoStreamEncoderTest, SetsFrameTypesSimulcast) {
   video_stream_encoder_->Stop();
 }

-TEST_F(VideoStreamEncoderTest, RequestKeyframeInternalSource) {
-  // Configure internal source factory and setup test again.
-  encoder_factory_.SetHasInternalSource(true);
-  ResetEncoder("VP8", 1, 1, 1, false);
-  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
-      kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
-
-  // Call encoder directly, simulating internal source where encoded frame
-  // callback in VideoStreamEncoder is called despite no OnFrame().
-  fake_encoder_.InjectFrame(CreateFrame(1, nullptr), true);
-  EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
-  EXPECT_THAT(
-      fake_encoder_.LastFrameTypes(),
-      ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameKey}));
-
-  const std::vector<VideoFrameType> kDeltaFrame = {
-      VideoFrameType::kVideoFrameDelta};
-  // Need to set timestamp manually since manually for injected frame.
-  VideoFrame frame = CreateFrame(101, nullptr);
-  frame.set_timestamp(101);
-  fake_encoder_.InjectFrame(frame, false);
-  EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
-  EXPECT_THAT(
-      fake_encoder_.LastFrameTypes(),
-      ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameDelta}));
-
-  // Request key-frame. The forces a dummy frame down into the encoder.
-  fake_encoder_.ExpectNullFrame();
-  video_stream_encoder_->SendKeyFrame();
-  EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
-  EXPECT_THAT(
-      fake_encoder_.LastFrameTypes(),
-      ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameKey}));
-
-  video_stream_encoder_->Stop();
-}
-
-TEST_F(VideoStreamEncoderTest, AdjustsTimestampInternalSource) {
-  // Configure internal source factory and setup test again.
-  encoder_factory_.SetHasInternalSource(true);
-  ResetEncoder("VP8", 1, 1, 1, false);
-  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
-      kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
-
-  int64_t timestamp = 1;
-  EncodedImage image;
-  image.capture_time_ms_ = ++timestamp;
-  image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
-  const int64_t kEncodeFinishDelayMs = 10;
-  image.timing_.encode_start_ms = timestamp;
-  image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs;
-  fake_encoder_.InjectEncodedImage(image, /*codec_specific_info=*/nullptr);
-  // Wait for frame without incrementing clock.
-  EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeoutMs));
-  // Frame is captured kEncodeFinishDelayMs before it's encoded, so restored
-  // capture timestamp should be kEncodeFinishDelayMs in the past.
-  EXPECT_EQ(sink_.GetLastCaptureTimeMs(),
-            CurrentTimeMs() - kEncodeFinishDelayMs);
-
-  video_stream_encoder_->Stop();
-}
-
 TEST_F(VideoStreamEncoderTest, DoesNotRewriteH264BitstreamWithOptimalSps) {
   // SPS contains VUI with restrictions on the maximum number of reordered
   // pictures, there is no need to rewrite the bitstream to enable faster