Remove WebRTC-AutomaticAnimationDetectionScreenshare experiment

The experiment has been disabled for several years and the code
is not maintained.

Bug: webrtc:42221141
Change-Id: I631e4bd476ca01eb5312d4077c9467e77c42ff78
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/351143
Commit-Queue: Johannes Kron <kron@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#42364}
Johannes Kron <kron@webrtc.org> authored 2024-05-22 08:33:09 +00:00, committed by WebRTC LUCI CQ
parent a1ba309ad0
commit 1d7d0e6e2c
4 changed files with 5 additions and 241 deletions
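
For context (not part of the diff): the experiment being deleted was driven entirely by a field trial string that was parsed into a small settings struct with StructParametersParser. A minimal sketch of how that configuration was read, condensed from the removed code (include paths are assumed and the free function is illustrative, not the original method):

#include "api/field_trials_view.h"
#include "rtc_base/experiments/struct_parameters_parser.h"

namespace {

// Defaults mirror the removed AutomaticAnimationDetectionExperiment struct.
struct AnimationDetectionSettings {
  bool enabled = false;
  int min_duration_ms = 2000;
  double min_area_ratio = 0.8;
  int min_fps = 10;
};

AnimationDetectionSettings ParseAnimationDetectionSettings(
    const webrtc::FieldTrialsView& trials) {
  AnimationDetectionSettings s;
  // The trial string looked like
  // "enabled:true,min_fps:20,min_duration_ms:1000,min_area_ratio:0.8",
  // as in the removed unit test at the bottom of this change.
  webrtc::StructParametersParser::Create(
      "enabled", &s.enabled,                  //
      "min_duration_ms", &s.min_duration_ms,  //
      "min_area_ratio", &s.min_area_ratio,    //
      "min_fps", &s.min_fps)
      ->Parse(trials.Lookup("WebRTC-AutomaticAnimationDetectionScreenshare"));
  return s;
}

}  // namespace

Because the trial has been disabled for years, the parser effectively always saw an empty string and left enabled at its default of false, which is why the code paths below are dead.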

File: experiments/field_trials.py

@@ -478,9 +478,6 @@ POLICY_EXEMPT_FIELD_TRIALS: FrozenSet[FieldTrial] = frozenset([
FieldTrial('WebRTC-AudioDevicePlayoutBufferSizeFactor',
42221006,
date(2024, 4, 1)),
FieldTrial('WebRTC-AutomaticAnimationDetectionScreenshare',
42221141,
date(2024, 4, 1)),
FieldTrial('WebRTC-BindUsingInterfaceName',
42220770,
date(2024, 4, 1)),
@@ -875,7 +872,7 @@ POLICY_EXEMPT_FIELD_TRIALS: FrozenSet[FieldTrial] = frozenset([
]) # yapf: disable
POLICY_EXEMPT_FIELD_TRIALS_DIGEST: str = \
'b0b0cce63bfdf327229b569576a07b716db186b7'
'd6beac9eb318c70cd1695598b3d3c069cd17b42f'
REGISTERED_FIELD_TRIALS: FrozenSet[FieldTrial] = ACTIVE_FIELD_TRIALS.union(
POLICY_EXEMPT_FIELD_TRIALS)

File: video/video_stream_encoder.cc

@@ -78,9 +78,6 @@ const size_t kDefaultPayloadSize = 1440;
const int64_t kParameterUpdateIntervalMs = 1000;
// Animation is capped to 720p.
constexpr int kMaxAnimationPixels = 1280 * 720;
constexpr int kDefaultMinScreenSharebps = 1200000;
int GetNumSpatialLayers(const VideoCodec& codec) {
@@ -542,22 +539,6 @@ absl::optional<int> ParseEncoderThreadLimit(const FieldTrialsView& trials) {
return encoder_thread_limit.GetOptional();
}
absl::optional<VideoSourceRestrictions> MergeRestrictions(
const std::vector<absl::optional<VideoSourceRestrictions>>& list) {
absl::optional<VideoSourceRestrictions> return_value;
for (const auto& res : list) {
if (!res) {
continue;
}
if (!return_value) {
return_value = *res;
continue;
}
return_value->UpdateMin(*res);
}
return return_value;
}
} // namespace
VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings()
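
For context (not part of the diff): the removed MergeRestrictions() helper combined two optional restriction sets by taking the component-wise minimum through VideoSourceRestrictions::UpdateMin(). A minimal usage sketch with illustrative values (the include path is assumed):

#include "absl/types/optional.h"
#include "call/adaptation/video_source_restrictions.h"

void MergeRestrictionsExample() {
  using webrtc::VideoSourceRestrictions;
  // Restrictions from the regular adaptation path (illustrative values).
  absl::optional<VideoSourceRestrictions> latest_restrictions =
      VideoSourceRestrictions(/*max_pixels_per_frame=*/1920 * 1080,
                              /*target_pixels_per_frame=*/absl::nullopt,
                              /*max_frame_rate=*/30.0);
  // Restrictions the animation detector used to contribute: a 720p cap.
  absl::optional<VideoSourceRestrictions> animate_restrictions =
      VideoSourceRestrictions(/*max_pixels_per_frame=*/1280 * 720,
                              /*target_pixels_per_frame=*/absl::nullopt,
                              /*max_frame_rate=*/absl::nullopt);
  // UpdateMin() keeps the stricter limit per field, so the merged result is
  // capped at 1280*720 pixels and 30 fps.
  VideoSourceRestrictions merged = *latest_restrictions;
  merged.UpdateMin(*animate_restrictions);
}

With animate_restrictions_ gone there is only one source of restrictions left, so ReconfigureEncoder() below passes latest_restrictions_ to the stream factory directly instead of merging.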
@@ -675,8 +656,6 @@ VideoStreamEncoder::VideoStreamEncoder(
env_.clock().TimeInMilliseconds()),
last_frame_log_ms_(env_.clock().TimeInMilliseconds()),
next_frame_types_(1, VideoFrameType::kVideoFrameDelta),
automatic_animation_detection_experiment_(
ParseAutomatincAnimationDetectionFieldTrial()),
input_state_provider_(encoder_stats_observer),
video_stream_adapter_(
std::make_unique<VideoStreamAdapter>(&input_state_provider_,
@@ -1025,7 +1004,7 @@ void VideoStreamEncoder::ReconfigureEncoder() {
encoder_config_.content_type ==
webrtc::VideoEncoderConfig::ContentType::kScreen,
encoder_config_.legacy_conference_mode, encoder_->GetEncoderInfo(),
MergeRestrictions({latest_restrictions_, animate_restrictions_}));
latest_restrictions_);
streams = factory->CreateEncoderStreams(
env_.field_trials(), last_frame_info_->width, last_frame_info_->height,
@@ -1555,7 +1534,6 @@ void VideoStreamEncoder::OnFrame(Timestamp post_time,
encoder_stats_observer_->OnIncomingFrame(incoming_frame.width(),
incoming_frame.height());
++captured_frame_count_;
CheckForAnimatedContent(incoming_frame, post_time.us());
bool cwnd_frame_drop =
cwnd_frame_drop_interval_ &&
(cwnd_frame_counter_++ % cwnd_frame_drop_interval_.value() == 0);
@@ -2451,114 +2429,6 @@ void VideoStreamEncoder::ReleaseEncoder() {
TRACE_EVENT0("webrtc", "VCMGenericEncoder::Release");
}
VideoStreamEncoder::AutomaticAnimationDetectionExperiment
VideoStreamEncoder::ParseAutomatincAnimationDetectionFieldTrial() const {
AutomaticAnimationDetectionExperiment result;
result.Parser()->Parse(env_.field_trials().Lookup(
"WebRTC-AutomaticAnimationDetectionScreenshare"));
if (!result.enabled) {
RTC_LOG(LS_INFO) << "Automatic animation detection experiment is disabled.";
return result;
}
RTC_LOG(LS_INFO) << "Automatic animation detection experiment settings:"
" min_duration_ms="
<< result.min_duration_ms
<< " min_area_ration=" << result.min_area_ratio
<< " min_fps=" << result.min_fps;
return result;
}
void VideoStreamEncoder::CheckForAnimatedContent(
const VideoFrame& frame,
int64_t time_when_posted_in_us) {
if (!automatic_animation_detection_experiment_.enabled ||
encoder_config_.content_type !=
VideoEncoderConfig::ContentType::kScreen ||
stream_resource_manager_.degradation_preference() !=
DegradationPreference::BALANCED) {
return;
}
if (expect_resize_state_ == ExpectResizeState::kResize && last_frame_info_ &&
last_frame_info_->width != frame.width() &&
last_frame_info_->height != frame.height()) {
// On applying resolution cap there will be one frame with no/different
// update, which should be skipped.
// It can be delayed by several frames.
expect_resize_state_ = ExpectResizeState::kFirstFrameAfterResize;
return;
}
if (expect_resize_state_ == ExpectResizeState::kFirstFrameAfterResize) {
// The first frame after resize should have new, scaled update_rect.
if (frame.has_update_rect()) {
last_update_rect_ = frame.update_rect();
} else {
last_update_rect_ = absl::nullopt;
}
expect_resize_state_ = ExpectResizeState::kNoResize;
}
bool should_cap_resolution = false;
if (!frame.has_update_rect()) {
last_update_rect_ = absl::nullopt;
animation_start_time_ = Timestamp::PlusInfinity();
} else if ((!last_update_rect_ ||
frame.update_rect() != *last_update_rect_)) {
last_update_rect_ = frame.update_rect();
animation_start_time_ = Timestamp::Micros(time_when_posted_in_us);
} else {
TimeDelta animation_duration =
Timestamp::Micros(time_when_posted_in_us) - animation_start_time_;
float area_ratio = static_cast<float>(last_update_rect_->width *
last_update_rect_->height) /
(frame.width() * frame.height());
if (animation_duration.ms() >=
automatic_animation_detection_experiment_.min_duration_ms &&
area_ratio >=
automatic_animation_detection_experiment_.min_area_ratio &&
encoder_stats_observer_->GetInputFrameRate() >=
automatic_animation_detection_experiment_.min_fps) {
should_cap_resolution = true;
}
}
if (cap_resolution_due_to_video_content_ != should_cap_resolution) {
expect_resize_state_ = should_cap_resolution ? ExpectResizeState::kResize
: ExpectResizeState::kNoResize;
cap_resolution_due_to_video_content_ = should_cap_resolution;
if (should_cap_resolution) {
RTC_LOG(LS_INFO) << "Applying resolution cap due to animation detection.";
} else {
RTC_LOG(LS_INFO) << "Removing resolution cap due to no consistent "
"animation detection.";
}
// TODO(webrtc:14451) Split video_source_sink_controller_
// so that ownership on restrictions/wants is kept on &encoder_queue_
if (should_cap_resolution) {
animate_restrictions_ =
VideoSourceRestrictions(kMaxAnimationPixels,
/* target_pixels_per_frame= */ absl::nullopt,
/* max_frame_rate= */ absl::nullopt);
} else {
animate_restrictions_.reset();
}
worker_queue_->PostTask(
SafeTask(task_safety_.flag(), [this, should_cap_resolution]() {
RTC_DCHECK_RUN_ON(worker_queue_);
video_source_sink_controller_.SetPixelsPerFrameUpperLimit(
should_cap_resolution
? absl::optional<size_t>(kMaxAnimationPixels)
: absl::nullopt);
video_source_sink_controller_.PushSourceSinkSettings();
}));
}
}
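
For context (not part of the diff): stripped of the resize bookkeeping above, the removed heuristic reduces to a single predicate: cap the resolution once the same update rectangle has covered most of the frame at a sufficient input frame rate for long enough. A condensed, self-contained sketch of that predicate (types and names simplified; the thresholds are the experiment's defaults):

#include <cstdint>

struct UpdateRect {
  int offset_x = 0;
  int offset_y = 0;
  int width = 0;
  int height = 0;
};

// Returns true once the same damage rectangle has covered at least
// `min_area_ratio` of the frame for `min_duration_ms` while the input frame
// rate is at least `min_fps`.
bool ShouldCapResolutionForAnimation(const UpdateRect& stable_update_rect,
                                     int frame_width,
                                     int frame_height,
                                     int64_t animation_duration_ms,
                                     int input_fps,
                                     int min_duration_ms = 2000,
                                     double min_area_ratio = 0.8,
                                     int min_fps = 10) {
  const double area_ratio =
      static_cast<double>(stable_update_rect.width *
                          stable_update_rect.height) /
      (frame_width * frame_height);
  return animation_duration_ms >= min_duration_ms &&
         area_ratio >= min_area_ratio && input_fps >= min_fps;
}

When the predicate held, the encoder applied a 720p cap (kMaxAnimationPixels = 1280 * 720) through the source sink controller; as soon as a frame arrived with no or a different update rect, the cap was lifted again, which is exactly what the deleted test at the bottom of this change exercises.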
void VideoStreamEncoder::InjectAdaptationResource(
rtc::scoped_refptr<Resource> resource,
VideoAdaptationReason reason) {

File: video/video_stream_encoder.h

@@ -252,10 +252,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
// After calling this function `resource_adaptation_processor_` will be null.
void ShutdownResourceAdaptationQueue();
void CheckForAnimatedContent(const VideoFrame& frame,
int64_t time_when_posted_in_ms)
RTC_RUN_ON(encoder_queue_);
void RequestEncoderSwitch() RTC_RUN_ON(encoder_queue_);
// Augments an EncodedImage received from an encoder with parsable
@@ -346,22 +342,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
RTC_GUARDED_BY(encoder_queue_);
bool accumulated_update_rect_is_valid_ RTC_GUARDED_BY(encoder_queue_) = true;
// Used for automatic content type detection.
absl::optional<VideoFrame::UpdateRect> last_update_rect_
RTC_GUARDED_BY(encoder_queue_);
Timestamp animation_start_time_ RTC_GUARDED_BY(encoder_queue_) =
Timestamp::PlusInfinity();
bool cap_resolution_due_to_video_content_ RTC_GUARDED_BY(encoder_queue_) =
false;
// Used to correctly ignore changes in update_rect introduced by
// resize triggered by animation detection.
enum class ExpectResizeState {
kNoResize, // Normal operation.
kResize, // Resize was triggered by the animation detection.
kFirstFrameAfterResize // Resize observed.
} expect_resize_state_ RTC_GUARDED_BY(encoder_queue_) =
ExpectResizeState::kNoResize;
FecControllerOverride* fec_controller_override_
RTC_GUARDED_BY(encoder_queue_) = nullptr;
absl::optional<int64_t> last_parameters_update_ms_
@@ -398,26 +378,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
FrameEncodeMetadataWriter frame_encode_metadata_writer_{this};
struct AutomaticAnimationDetectionExperiment {
bool enabled = false;
int min_duration_ms = 2000;
double min_area_ratio = 0.8;
int min_fps = 10;
std::unique_ptr<StructParametersParser> Parser() {
return StructParametersParser::Create(
"enabled", &enabled, //
"min_duration_ms", &min_duration_ms, //
"min_area_ratio", &min_area_ratio, //
"min_fps", &min_fps);
}
};
AutomaticAnimationDetectionExperiment
ParseAutomatincAnimationDetectionFieldTrial() const;
AutomaticAnimationDetectionExperiment
automatic_animation_detection_experiment_ RTC_GUARDED_BY(encoder_queue_);
// Provides video stream input states: current resolution and frame rate.
VideoStreamInputStateProvider input_state_provider_;
@@ -464,19 +424,15 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
const absl::optional<int> vp9_low_tier_core_threshold_;
const absl::optional<int> experimental_encoder_thread_limit_;
// These are copies of restrictions (glorified max_pixel_count) set by
// a) OnVideoSourceRestrictionsUpdated
// b) CheckForAnimatedContent
// They are used to scale down encoding resolution if needed when using
// requested_resolution.
// This is a copy of restrictions (glorified max_pixel_count) set by
// OnVideoSourceRestrictionsUpdated. It is used to scale down encoding
// resolution if needed when using requested_resolution.
//
// TODO(webrtc:14451) Split video_source_sink_controller_
// so that ownership on restrictions/wants is kept on &encoder_queue_, that
// these extra copies would not be needed.
absl::optional<VideoSourceRestrictions> latest_restrictions_
RTC_GUARDED_BY(encoder_queue_);
absl::optional<VideoSourceRestrictions> animate_restrictions_
RTC_GUARDED_BY(encoder_queue_);
// Used to cancel any potentially pending tasks to the worker thread.
// Refrenced by tasks running on `encoder_queue_` so need to be destroyed

File: video/video_stream_encoder_unittest.cc

@@ -8015,65 +8015,6 @@ TEST_F(VideoStreamEncoderTest,
video_stream_encoder_->Stop();
}
TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
test::ScopedKeyValueConfig field_trials(
field_trials_,
"WebRTC-AutomaticAnimationDetectionScreenshare/"
"enabled:true,min_fps:20,min_duration_ms:1000,min_area_ratio:0.8/");
const int kFramerateFps = 30;
const int kWidth = 1920;
const int kHeight = 1080;
const int kNumFrames = 2 * kFramerateFps; // >1 seconds of frames.
ASSERT_EQ(video_encoder_config_.simulcast_layers.size(), 1u);
// Works on screenshare mode.
ResetEncoder("VP8", 1, 1, 1, /*screenshare*/ true,
/*max_frame_rate=*/kFramerateFps);
// We rely on the automatic resolution adaptation, but we handle framerate
// adaptation manually by mocking the stats proxy.
video_source_.set_adaptation_enabled(true);
// BALANCED degradation preference is required for this feature.
video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
video_stream_encoder_->SetSource(&video_source_,
webrtc::DegradationPreference::BALANCED);
EXPECT_THAT(video_source_.sink_wants(), WantsFps(Eq(kFramerateFps)));
VideoFrame frame = CreateFrame(1, kWidth, kHeight);
frame.set_update_rect(VideoFrame::UpdateRect{0, 0, kWidth, kHeight});
// Pass enough frames with the full update to trigger animation detection.
for (int i = 0; i < kNumFrames; ++i) {
int64_t timestamp_ms = CurrentTimeMs();
frame.set_ntp_time_ms(timestamp_ms);
frame.set_timestamp_us(timestamp_ms * 1000);
video_source_.IncomingCapturedFrame(frame);
WaitForEncodedFrame(timestamp_ms);
}
// Resolution should be limited.
rtc::VideoSinkWants expected;
expected.max_framerate_fps = kFramerateFps;
expected.max_pixel_count = 1280 * 720 + 1;
EXPECT_THAT(video_source_.sink_wants(), FpsEqResolutionLt(expected));
// Pass one frame with no known update.
// Resolution cap should be removed immediately.
int64_t timestamp_ms = CurrentTimeMs();
frame.set_ntp_time_ms(timestamp_ms);
frame.set_timestamp_us(timestamp_ms * 1000);
frame.clear_update_rect();
video_source_.IncomingCapturedFrame(frame);
WaitForEncodedFrame(timestamp_ms);
// Resolution should be unlimited now.
EXPECT_THAT(video_source_.sink_wants(),
FpsMatchesResolutionMax(Eq(kFramerateFps)));
video_stream_encoder_->Stop();
}
TEST_F(VideoStreamEncoderTest, ConfiguresVp9SvcAtOddResolutions) {
const int kWidth = 720; // 540p adapted down.
const int kHeight = 405;