diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 0c1c0ccfaa..2b4e3a4e39 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -2483,27 +2483,13 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::RemoveSink( void WebRtcVideoChannel::WebRtcVideoSendStream::AddOrUpdateSink( rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) { - if (worker_thread_ == rtc::Thread::Current()) { - // AddOrUpdateSink is called on |worker_thread_| if this is the first - // registration of |sink|. - RTC_DCHECK_RUN_ON(&thread_checker_); - encoder_sink_ = sink; - source_->AddOrUpdateSink(encoder_sink_, wants); - } else { - // Subsequent calls to AddOrUpdateSink will happen on the encoder task - // queue. - invoker_.AsyncInvoke( - RTC_FROM_HERE, worker_thread_, [this, sink, wants] { - RTC_DCHECK_RUN_ON(&thread_checker_); - // |sink| may be invalidated after this task was posted since - // RemoveSink is called on the worker thread. - bool encoder_sink_valid = (sink == encoder_sink_); - if (source_ && encoder_sink_valid) { - source_->AddOrUpdateSink(encoder_sink_, wants); - } - }); - } + // AddOrUpdateSink is called on |worker_thread_| if this is the first + // registration of |sink|. + RTC_DCHECK_RUN_ON(&thread_checker_); + encoder_sink_ = sink; + source_->AddOrUpdateSink(encoder_sink_, wants); } + std::vector WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( bool log_stats) { diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h index 3705ebf012..b9f27b4eec 100644 --- a/media/engine/webrtc_video_engine.h +++ b/media/engine/webrtc_video_engine.h @@ -429,11 +429,6 @@ class WebRtcVideoChannel : public VideoMediaChannel, bool sending_ RTC_GUARDED_BY(&thread_checker_); - // In order for the |invoker_| to protect other members from being - // destructed as they are used in asynchronous tasks it has to be destructed - // first. - rtc::AsyncInvoker invoker_; - // TODO(asapersson): investigate why setting // DegrationPreferences::MAINTAIN_RESOLUTION isn't sufficient to disable // downscaling everywhere in the pipeline. diff --git a/video/BUILD.gn b/video/BUILD.gn index 06ef027851..96d0faa769 100644 --- a/video/BUILD.gn +++ b/video/BUILD.gn @@ -256,6 +256,7 @@ rtc_library("video_stream_encoder_impl") { "../rtc_base/experiments:rate_control_settings", "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", + "../rtc_base/task_utils:pending_task_safety_flag", "../rtc_base/task_utils:repeating_task", "../system_wrappers", "../system_wrappers:field_trial", diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc index e84aa44343..82f8aa8942 100644 --- a/video/video_send_stream_impl.cc +++ b/video/video_send_stream_impl.cc @@ -300,17 +300,6 @@ VideoSendStreamImpl::VideoSendStreamImpl( video_stream_encoder_->SetStartBitrate( bitrate_allocator_->GetStartBitrate(this)); - - // Only request rotation at the source when we positively know that the remote - // side doesn't support the rotation extension. This allows us to prepare the - // encoder in the expectation that rotation is supported - which is the common - // case. 
-  bool rotation_applied = absl::c_none_of(
-      config_->rtp.extensions, [](const RtpExtension& extension) {
-        return extension.uri == RtpExtension::kVideoRotationUri;
-      });
-
-  video_stream_encoder_->SetSink(this, rotation_applied);
 }
 
 VideoSendStreamImpl::~VideoSendStreamImpl() {
@@ -323,6 +312,21 @@ VideoSendStreamImpl::~VideoSendStreamImpl() {
 
 void VideoSendStreamImpl::RegisterProcessThread(
     ProcessThread* module_process_thread) {
+  // Called on libjingle's worker thread (not worker_queue_), as part of the
+  // initialization steps. That's also the correct thread/queue for setting the
+  // state for |video_stream_encoder_|.
+
+  // Only request rotation at the source when we positively know that the remote
+  // side doesn't support the rotation extension. This allows us to prepare the
+  // encoder in the expectation that rotation is supported - which is the common
+  // case.
+  bool rotation_applied = absl::c_none_of(
+      config_->rtp.extensions, [](const RtpExtension& extension) {
+        return extension.uri == RtpExtension::kVideoRotationUri;
+      });
+
+  video_stream_encoder_->SetSink(this, rotation_applied);
+
   rtp_video_sender_->RegisterProcessThread(module_process_thread);
 }
diff --git a/video/video_source_sink_controller.cc b/video/video_source_sink_controller.cc
index a5c0941e02..376eb85eae 100644
--- a/video/video_source_sink_controller.cc
+++ b/video/video_source_sink_controller.cc
@@ -43,25 +43,33 @@ VideoSourceSinkController::VideoSourceSinkController(
   RTC_DCHECK(sink_);
 }
 
+VideoSourceSinkController::~VideoSourceSinkController() {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+}
+
 void VideoSourceSinkController::SetSource(
     rtc::VideoSourceInterface<VideoFrame>* source) {
-  rtc::VideoSourceInterface<VideoFrame>* old_source;
-  rtc::VideoSinkWants wants;
-  {
-    MutexLock lock(&mutex_);
-    old_source = source_;
-    source_ = source;
-    wants = CurrentSettingsToSinkWants();
-  }
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+
+  rtc::VideoSourceInterface<VideoFrame>* old_source = source_;
+  source_ = source;
+
   if (old_source != source && old_source)
     old_source->RemoveSink(sink_);
+
   if (!source)
     return;
-  source->AddOrUpdateSink(sink_, wants);
+
+  source->AddOrUpdateSink(sink_, CurrentSettingsToSinkWants());
+}
+
+bool VideoSourceSinkController::HasSource() const {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  return source_ != nullptr;
 }
 
 void VideoSourceSinkController::PushSourceSinkSettings() {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   if (!source_)
     return;
   rtc::VideoSinkWants wants = CurrentSettingsToSinkWants();
@@ -70,62 +78,62 @@ void VideoSourceSinkController::PushSourceSinkSettings() {
 }
 
 VideoSourceRestrictions VideoSourceSinkController::restrictions() const {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   return restrictions_;
 }
 
 absl::optional<size_t> VideoSourceSinkController::pixels_per_frame_upper_limit()
     const {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   return pixels_per_frame_upper_limit_;
 }
 
 absl::optional<double> VideoSourceSinkController::frame_rate_upper_limit()
     const {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   return frame_rate_upper_limit_;
 }
 
 bool VideoSourceSinkController::rotation_applied() const {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   return rotation_applied_;
 }
 
 int VideoSourceSinkController::resolution_alignment() const {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   return resolution_alignment_;
 }
 
 void VideoSourceSinkController::SetRestrictions(
     VideoSourceRestrictions restrictions) {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   restrictions_ = std::move(restrictions);
 }
 
 void VideoSourceSinkController::SetPixelsPerFrameUpperLimit(
     absl::optional<size_t> pixels_per_frame_upper_limit) {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   pixels_per_frame_upper_limit_ = std::move(pixels_per_frame_upper_limit);
 }
 
 void VideoSourceSinkController::SetFrameRateUpperLimit(
     absl::optional<double> frame_rate_upper_limit) {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   frame_rate_upper_limit_ = std::move(frame_rate_upper_limit);
 }
 
 void VideoSourceSinkController::SetRotationApplied(bool rotation_applied) {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   rotation_applied_ = rotation_applied;
 }
 
 void VideoSourceSinkController::SetResolutionAlignment(
     int resolution_alignment) {
-  MutexLock lock(&mutex_);
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
   resolution_alignment_ = resolution_alignment;
 }
 
-// RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_)
+// RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_)
 rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants()
     const {
   rtc::VideoSinkWants wants;
diff --git a/video/video_source_sink_controller.h b/video/video_source_sink_controller.h
index 877cf85901..ed8f990970 100644
--- a/video/video_source_sink_controller.h
+++ b/video/video_source_sink_controller.h
@@ -18,7 +18,7 @@
 #include "api/video/video_sink_interface.h"
 #include "api/video/video_source_interface.h"
 #include "call/adaptation/video_source_restrictions.h"
-#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/sequence_checker.h"
 
 namespace webrtc {
 
@@ -31,7 +31,11 @@ class VideoSourceSinkController {
   VideoSourceSinkController(rtc::VideoSinkInterface<VideoFrame>* sink,
                             rtc::VideoSourceInterface<VideoFrame>* source);
 
+  ~VideoSourceSinkController();
+
   void SetSource(rtc::VideoSourceInterface<VideoFrame>* source);
+  bool HasSource() const;
+
   // Must be called in order for changes to settings to have an effect. This
   // allows you to modify multiple properties in a single push to the sink.
   void PushSourceSinkSettings();
@@ -53,20 +57,27 @@ class VideoSourceSinkController {
  private:
   rtc::VideoSinkWants CurrentSettingsToSinkWants() const
-      RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_);
+
+  // Used to ensure that this class is called on threads/sequences that it and
+  // downstream implementations were designed for.
+  // In practice, this represents libjingle's worker thread.
+  SequenceChecker sequence_checker_;
 
-  mutable Mutex mutex_;
   rtc::VideoSinkInterface<VideoFrame>* const sink_;
-  rtc::VideoSourceInterface<VideoFrame>* source_ RTC_GUARDED_BY(&mutex_);
+  rtc::VideoSourceInterface<VideoFrame>* source_
+      RTC_GUARDED_BY(&sequence_checker_);
   // Pixel and frame rate restrictions.
-  VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&mutex_);
+  VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_);
   // Ensures that even if we are not restricted, the sink is never configured
   // above this limit. Example: We are not CPU limited (no |restrictions_|) but
   // our encoder is capped at 30 fps (= |frame_rate_upper_limit_|).
- absl::optional pixels_per_frame_upper_limit_ RTC_GUARDED_BY(&mutex_); - absl::optional frame_rate_upper_limit_ RTC_GUARDED_BY(&mutex_); - bool rotation_applied_ RTC_GUARDED_BY(&mutex_) = false; - int resolution_alignment_ RTC_GUARDED_BY(&mutex_) = 1; + absl::optional pixels_per_frame_upper_limit_ + RTC_GUARDED_BY(&sequence_checker_); + absl::optional frame_rate_upper_limit_ + RTC_GUARDED_BY(&sequence_checker_); + bool rotation_applied_ RTC_GUARDED_BY(&sequence_checker_) = false; + int resolution_alignment_ RTC_GUARDED_BY(&sequence_checker_) = 1; }; } // namespace webrtc diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index 988816e4ee..f8ca4dce3c 100644 --- a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -40,10 +40,8 @@ #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/field_trial.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" @@ -274,7 +272,7 @@ VideoStreamEncoder::VideoStreamEncoder( const VideoStreamEncoderSettings& settings, std::unique_ptr overuse_detector, TaskQueueFactory* task_queue_factory) - : shutdown_event_(true /* manual_reset */, false), + : main_queue_(TaskQueueBase::Current()), number_of_cores_(number_of_cores), quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), sink_(nullptr), @@ -343,6 +341,7 @@ VideoStreamEncoder::VideoStreamEncoder( encoder_queue_(task_queue_factory->CreateTaskQueue( "EncoderQueue", TaskQueueFactory::Priority::NORMAL)) { + RTC_DCHECK(main_queue_); RTC_DCHECK(encoder_stats_observer); RTC_DCHECK_GE(number_of_cores, 1); @@ -370,16 +369,18 @@ VideoStreamEncoder::VideoStreamEncoder( } VideoStreamEncoder::~VideoStreamEncoder() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(shutdown_event_.Wait(0)) + RTC_DCHECK_RUN_ON(main_queue_); + RTC_DCHECK(!video_source_sink_controller_.HasSource()) << "Must call ::Stop() before destruction."; } void VideoStreamEncoder::Stop() { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(main_queue_); video_source_sink_controller_.SetSource(nullptr); - encoder_queue_.PostTask([this] { + rtc::Event shutdown_event; + + encoder_queue_.PostTask([this, &shutdown_event] { RTC_DCHECK_RUN_ON(&encoder_queue_); if (resource_adaptation_processor_) { stream_resource_manager_.StopManagedResources(); @@ -401,14 +402,14 @@ void VideoStreamEncoder::Stop() { rate_allocator_ = nullptr; bitrate_observer_ = nullptr; ReleaseEncoder(); - shutdown_event_.Set(); + shutdown_event.Set(); }); - shutdown_event_.Wait(rtc::Event::kForever); + shutdown_event.Wait(rtc::Event::kForever); } void VideoStreamEncoder::SetBitrateAllocationObserver( VideoBitrateAllocationObserver* bitrate_observer) { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(main_queue_); encoder_queue_.PostTask([this, bitrate_observer] { RTC_DCHECK_RUN_ON(&encoder_queue_); RTC_DCHECK(!bitrate_observer_); @@ -430,6 +431,7 @@ void VideoStreamEncoder::SetFecControllerOverride( void VideoStreamEncoder::AddAdaptationResource( rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(main_queue_); // Map any externally added resources as kCpu for the sake of stats reporting. // TODO(hbos): Make the manager map any unknown resources to kCpu and get rid // of this MapResourceToReason() call. 
@@ -445,13 +447,14 @@ void VideoStreamEncoder::AddAdaptationResource( std::vector> VideoStreamEncoder::GetAdaptationResources() { + RTC_DCHECK_RUN_ON(main_queue_); return resource_adaptation_processor_->GetResources(); } void VideoStreamEncoder::SetSource( rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(main_queue_); video_source_sink_controller_.SetSource(source); input_state_provider_.OnHasInputChanged(source); @@ -469,8 +472,10 @@ void VideoStreamEncoder::SetSource( } void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) { + RTC_DCHECK_RUN_ON(main_queue_); video_source_sink_controller_.SetRotationApplied(rotation_applied); video_source_sink_controller_.PushSourceSinkSettings(); + encoder_queue_.PostTask([this, sink] { RTC_DCHECK_RUN_ON(&encoder_queue_); sink_ = sink; @@ -527,6 +532,7 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, // the VideoBitrateAllocator and call OnEncoderConfigurationChanged with a // "soft" reconfiguration. void VideoStreamEncoder::ReconfigureEncoder() { + // Running on the encoder queue. RTC_DCHECK(pending_encoder_reconfiguration_); if (!encoder_selector_ && @@ -711,12 +717,18 @@ void VideoStreamEncoder::ReconfigureEncoder() { for (const auto& stream : streams) { max_framerate = std::max(stream.max_framerate, max_framerate); } - if (max_framerate != video_source_sink_controller_.frame_rate_upper_limit() || - alignment != video_source_sink_controller_.resolution_alignment()) { - video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); - video_source_sink_controller_.SetResolutionAlignment(alignment); - video_source_sink_controller_.PushSourceSinkSettings(); - } + + main_queue_->PostTask( + ToQueuedTask(task_safety_, [this, max_framerate, alignment]() { + RTC_DCHECK_RUN_ON(main_queue_); + if (max_framerate != + video_source_sink_controller_.frame_rate_upper_limit() || + alignment != video_source_sink_controller_.resolution_alignment()) { + video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); + video_source_sink_controller_.SetResolutionAlignment(alignment); + video_source_sink_controller_.PushSourceSinkSettings(); + } + })); if (codec.maxBitrate == 0) { // max is one bit per pixel @@ -891,14 +903,14 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { VideoFrame incoming_frame = video_frame; // Local time in webrtc time base. - int64_t current_time_us = clock_->TimeInMicroseconds(); - int64_t current_time_ms = current_time_us / rtc::kNumMicrosecsPerMillisec; + Timestamp now = clock_->CurrentTime(); + // In some cases, e.g., when the frame from decoder is fed to encoder, // the timestamp may be set to the future. As the encoding pipeline assumes // capture time to be less than present time, we should reset the capture // timestamps here. Otherwise there may be issues with RTP send stream. - if (incoming_frame.timestamp_us() > current_time_us) - incoming_frame.set_timestamp_us(current_time_us); + if (incoming_frame.timestamp_us() > now.us()) + incoming_frame.set_timestamp_us(now.us()); // Capture time may come from clock with an offset and drift from clock_. 
int64_t capture_ntp_time_ms; @@ -907,7 +919,7 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { } else if (video_frame.render_time_ms() != 0) { capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_; } else { - capture_ntp_time_ms = current_time_ms + delta_ntp_internal_ms_; + capture_ntp_time_ms = now.ms() + delta_ntp_internal_ms_; } incoming_frame.set_ntp_time_ms(capture_ntp_time_ms); @@ -931,14 +943,14 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { } bool log_stats = false; - if (current_time_ms - last_frame_log_ms_ > kFrameLogIntervalMs) { - last_frame_log_ms_ = current_time_ms; + if (now.ms() - last_frame_log_ms_ > kFrameLogIntervalMs) { + last_frame_log_ms_ = now.ms(); log_stats = true; } last_captured_timestamp_ = incoming_frame.ntp_time_ms(); - int64_t post_time_us = rtc::TimeMicros(); + int64_t post_time_us = clock_->CurrentTime().us(); ++posted_frames_waiting_for_encode_; encoder_queue_.PostTask( @@ -1586,7 +1598,8 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( temporal_index = 0; } - RunPostEncode(image_copy, rtc::TimeMicros(), temporal_index, frame_size); + RunPostEncode(image_copy, clock_->CurrentTime().us(), temporal_index, + frame_size); if (result.error == Result::OK) { // In case of an internal encoder running on a separate thread, the @@ -1726,7 +1739,8 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, } if (video_suspension_changed && !video_is_suspended && pending_frame_ && !DropDueToSize(pending_frame_->size())) { - int64_t pending_time_us = rtc::TimeMicros() - pending_frame_post_time_us_; + int64_t pending_time_us = + clock_->CurrentTime().us() - pending_frame_post_time_us_; if (pending_time_us < kPendingFrameTimeoutMs * 1000) EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_); pending_frame_.reset(); @@ -1769,11 +1783,15 @@ void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated( rtc::scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { RTC_DCHECK_RUN_ON(&encoder_queue_); - std::string resource_name = reason ? reason->Name() : ""; - RTC_LOG(INFO) << "Updating sink restrictions from " << resource_name << " to " + RTC_LOG(INFO) << "Updating sink restrictions from " + << (reason ? reason->Name() : std::string("")) << " to " << restrictions.ToString(); - video_source_sink_controller_.SetRestrictions(std::move(restrictions)); - video_source_sink_controller_.PushSourceSinkSettings(); + main_queue_->PostTask(ToQueuedTask( + task_safety_, [this, restrictions = std::move(restrictions)]() { + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetRestrictions(std::move(restrictions)); + video_source_sink_controller_.PushSourceSinkSettings(); + })); } void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, @@ -1794,9 +1812,9 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) { encode_duration_us = // TODO(nisse): Maybe use capture_time_ms_ rather than encode_start_ms_? 
- rtc::kNumMicrosecsPerMillisec * - (encoded_image.timing_.encode_finish_ms - - encoded_image.timing_.encode_start_ms); + TimeDelta::Millis(encoded_image.timing_.encode_finish_ms - + encoded_image.timing_.encode_start_ms) + .us(); } // Run post encode tasks, such as overuse detection and frame rate/drop @@ -2032,10 +2050,14 @@ void VideoStreamEncoder::CheckForAnimatedContent( RTC_LOG(LS_INFO) << "Removing resolution cap due to no consistent " "animation detection."; } - video_source_sink_controller_.SetPixelsPerFrameUpperLimit( - should_cap_resolution ? absl::optional(kMaxAnimationPixels) - : absl::nullopt); - video_source_sink_controller_.PushSourceSinkSettings(); + main_queue_->PostTask(ToQueuedTask(task_safety_, [this, + should_cap_resolution]() { + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetPixelsPerFrameUpperLimit( + should_cap_resolution ? absl::optional(kMaxAnimationPixels) + : absl::nullopt); + video_source_sink_controller_.PushSourceSinkSettings(); + })); } } void VideoStreamEncoder::InjectAdaptationResource( diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h index 3a294a97ee..5ab0840059 100644 --- a/video/video_stream_encoder.h +++ b/video/video_stream_encoder.h @@ -33,13 +33,12 @@ #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state_provider.h" #include "modules/video_coding/utility/frame_dropper.h" -#include "rtc_base/event.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" @@ -215,7 +214,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int64_t time_when_posted_in_ms) RTC_RUN_ON(&encoder_queue_); - rtc::Event shutdown_event_; + TaskQueueBase* const main_queue_; const uint32_t number_of_cores_; @@ -228,9 +227,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, std::unique_ptr const encoder_selector_; VideoStreamEncoderObserver* const encoder_stats_observer_; - // |thread_checker_| checks that public methods that are related to lifetime - // of VideoStreamEncoder are called on the same thread. - rtc::ThreadChecker thread_checker_; VideoEncoderConfig encoder_config_ RTC_GUARDED_BY(&encoder_queue_); std::unique_ptr encoder_ RTC_GUARDED_BY(&encoder_queue_) @@ -403,7 +399,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, bool encoder_switch_requested_ RTC_GUARDED_BY(&encoder_queue_); // Provies video stream input states: current resolution and frame rate. - // This class is thread-safe. VideoStreamInputStateProvider input_state_provider_; std::unique_ptr video_stream_adapter_ @@ -432,12 +427,16 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // ResourceAdaptationProcessor, i.e. reconfigures the source of video frames // to provide us with different resolution or frame rate. // This class is thread-safe. - VideoSourceSinkController video_source_sink_controller_; + VideoSourceSinkController video_source_sink_controller_ + RTC_GUARDED_BY(main_queue_); // Public methods are proxied to the task queues. The queues must be destroyed // first to make sure no tasks run that use other members. 
rtc::TaskQueue encoder_queue_; + // Used to cancel any potentially pending tasks to the main thread. + ScopedTaskSafety task_safety_; + RTC_DISALLOW_COPY_AND_ASSIGN(VideoStreamEncoder); }; diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc index c0ff85ea85..00c22ffdff 100644 --- a/video/video_stream_encoder_unittest.cc +++ b/video/video_stream_encoder_unittest.cc @@ -35,7 +35,7 @@ #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/utility/quality_scaler.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" -#include "rtc_base/fake_clock.h" +#include "rtc_base/event.h" #include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" @@ -49,6 +49,7 @@ #include "test/frame_forwarder.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/time_controller/simulated_time_controller.h" #include "test/video_encoder_proxy_factory.h" #include "video/send_statistics_proxy.h" @@ -281,10 +282,11 @@ auto FpsEqResolutionGt(const rtc::VideoSinkWants& other_wants) { class VideoStreamEncoderUnderTest : public VideoStreamEncoder { public: - VideoStreamEncoderUnderTest(SendStatisticsProxy* stats_proxy, - const VideoStreamEncoderSettings& settings, - TaskQueueFactory* task_queue_factory) - : VideoStreamEncoder(Clock::GetRealTimeClock(), + VideoStreamEncoderUnderTest(TimeController* time_controller, + TaskQueueFactory* task_queue_factory, + SendStatisticsProxy* stats_proxy, + const VideoStreamEncoderSettings& settings) + : VideoStreamEncoder(time_controller->GetClock(), 1 /* number_of_cores */, stats_proxy, settings, @@ -292,6 +294,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { overuse_detector_proxy_ = new CpuOveruseDetectorProxy(stats_proxy)), task_queue_factory), + time_controller_(time_controller), fake_cpu_resource_(FakeResource::Create("FakeResource[CPU]")), fake_quality_resource_(FakeResource::Create("FakeResource[QP]")), fake_adaptation_constraint_("FakeAdaptationConstraint") { @@ -348,7 +351,9 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { event.Set(); }); ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } + void TriggerCpuUnderuse() { rtc::Event event; encoder_queue()->PostTask([this, &event] { @@ -356,6 +361,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { event.Set(); }); ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } // Triggers resource usage measurements on the fake quality resource. 
@@ -366,6 +372,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { event.Set(); }); ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } void TriggerQualityHigh() { rtc::Event event; @@ -374,8 +381,10 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { event.Set(); }); ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } + TimeController* const time_controller_; CpuOveruseDetectorProxy* overuse_detector_proxy_; rtc::scoped_refptr fake_cpu_resource_; rtc::scoped_refptr fake_quality_resource_; @@ -440,7 +449,8 @@ class CroppingVideoStreamFactory class AdaptingFrameForwarder : public test::FrameForwarder { public: - AdaptingFrameForwarder() : adaptation_enabled_(false) {} + explicit AdaptingFrameForwarder(TimeController* time_controller) + : time_controller_(time_controller), adaptation_enabled_(false) {} ~AdaptingFrameForwarder() override {} void set_adaptation_enabled(bool enabled) { @@ -462,11 +472,17 @@ class AdaptingFrameForwarder : public test::FrameForwarder { absl::optional last_sent_height() const { return last_height_; } void IncomingCapturedFrame(const VideoFrame& video_frame) override { + RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent()); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); + int cropped_width = 0; int cropped_height = 0; int out_width = 0; int out_height = 0; if (adaption_enabled()) { + RTC_DLOG(INFO) << "IncomingCapturedFrame: AdaptFrameResolution()" + << "w=" << video_frame.width() + << "h=" << video_frame.height(); if (adapter_.AdaptFrameResolution( video_frame.width(), video_frame.height(), video_frame.timestamp_us() * 1000, &cropped_width, @@ -495,6 +511,7 @@ class AdaptingFrameForwarder : public test::FrameForwarder { last_height_ = absl::nullopt; } } else { + RTC_DLOG(INFO) << "IncomingCapturedFrame: adaptation not enabled"; test::FrameForwarder::IncomingCapturedFrame(video_frame); last_width_.emplace(video_frame.width()); last_height_.emplace(video_frame.height()); @@ -508,6 +525,8 @@ class AdaptingFrameForwarder : public test::FrameForwarder { adapter_.OnSinkWants(wants); test::FrameForwarder::AddOrUpdateSinkLocked(sink, wants); } + + TimeController* const time_controller_; cricket::VideoAdapter adapter_; bool adaptation_enabled_ RTC_GUARDED_BY(mutex_); rtc::VideoSinkWants last_wants_ RTC_GUARDED_BY(mutex_); @@ -546,9 +565,20 @@ class MockableSendStatisticsProxy : public SendStatisticsProxy { mock_stats_.reset(); } + void SetDroppedFrameCallback(std::function callback) { + on_frame_dropped_ = std::move(callback); + } + private: + void OnFrameDropped(DropReason reason) override { + SendStatisticsProxy::OnFrameDropped(reason); + if (on_frame_dropped_) + on_frame_dropped_(reason); + } + mutable Mutex lock_; absl::optional mock_stats_ RTC_GUARDED_BY(lock_); + std::function on_frame_dropped_; }; class MockBitrateObserver : public VideoBitrateAllocationObserver { @@ -577,21 +607,20 @@ class MockEncoderSelector class VideoStreamEncoderTest : public ::testing::Test { public: - static const int kDefaultTimeoutMs = 30 * 1000; + static const int kDefaultTimeoutMs = 1000; VideoStreamEncoderTest() : video_send_config_(VideoSendStream::Config(nullptr)), codec_width_(320), codec_height_(240), max_framerate_(kDefaultFramerate), - task_queue_factory_(CreateDefaultTaskQueueFactory()), - fake_encoder_(), + fake_encoder_(&time_controller_), encoder_factory_(&fake_encoder_), stats_proxy_(new MockableSendStatisticsProxy( - Clock::GetRealTimeClock(), + 
time_controller_.GetClock(), video_send_config_, webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo)), - sink_(&fake_encoder_) {} + sink_(&time_controller_, &fake_encoder_) {} void SetUp() override { metrics::Reset(); @@ -613,7 +642,6 @@ class VideoStreamEncoderTest : public ::testing::Test { video_encoder_config.video_stream_factory->CreateEncoderStreams( codec_width_, codec_height_, video_encoder_config); max_framerate_ = streams[0].max_framerate; - fake_clock_.SetTime(Timestamp::Micros(1234)); ConfigureEncoder(std::move(video_encoder_config)); } @@ -622,8 +650,8 @@ class VideoStreamEncoderTest : public ::testing::Test { if (video_stream_encoder_) video_stream_encoder_->Stop(); video_stream_encoder_.reset(new VideoStreamEncoderUnderTest( - stats_proxy_.get(), video_send_config_.encoder_settings, - task_queue_factory_.get())); + &time_controller_, GetTaskQueueFactory(), stats_proxy_.get(), + video_send_config_.encoder_settings)); video_stream_encoder_->SetSink(&sink_, false /* rotation_applied */); video_stream_encoder_->SetSource( &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -746,34 +774,38 @@ class VideoStreamEncoderTest : public ::testing::Test { void WaitForEncodedFrame(int64_t expected_ntp_time) { sink_.WaitForEncodedFrame(expected_ntp_time); - fake_clock_.AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); } bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, int64_t timeout_ms) { bool ok = sink_.TimedWaitForEncodedFrame(expected_ntp_time, timeout_ms); - fake_clock_.AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); return ok; } void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) { sink_.WaitForEncodedFrame(expected_width, expected_height); - fake_clock_.AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); } void ExpectDroppedFrame() { sink_.ExpectDroppedFrame(); - fake_clock_.AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); } bool WaitForFrame(int64_t timeout_ms) { bool ok = sink_.WaitForFrame(timeout_ms); - fake_clock_.AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); return ok; } class TestEncoder : public test::FakeEncoder { public: - TestEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {} + explicit TestEncoder(TimeController* time_controller) + : FakeEncoder(time_controller->GetClock()), + time_controller_(time_controller) { + RTC_DCHECK(time_controller_); + } VideoCodec codec_config() const { MutexLock lock(&mutex_); @@ -960,6 +992,7 @@ class VideoStreamEncoderTest : public ::testing::Test { int32_t result = FakeEncoder::Encode(input_image, frame_types); if (block_encode) EXPECT_TRUE(continue_encode_event_.Wait(kDefaultTimeoutMs)); + return result; } @@ -1030,6 +1063,7 @@ class VideoStreamEncoderTest : public ::testing::Test { FakeEncoder::SetRates(adjusted_paramters); } + TimeController* const time_controller_; mutable Mutex local_mutex_; enum class EncoderState { kUninitialized, @@ -1075,8 +1109,10 @@ class VideoStreamEncoderTest : public ::testing::Test { class TestSink : public VideoStreamEncoder::EncoderSink { public: - explicit TestSink(TestEncoder* test_encoder) - : test_encoder_(test_encoder) {} + TestSink(TimeController* time_controller, TestEncoder* test_encoder) + : time_controller_(time_controller), 
test_encoder_(test_encoder) { + RTC_DCHECK(time_controller_); + } void WaitForEncodedFrame(int64_t expected_ntp_time) { EXPECT_TRUE( @@ -1086,7 +1122,7 @@ class VideoStreamEncoderTest : public ::testing::Test { bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, int64_t timeout_ms) { uint32_t timestamp = 0; - if (!encoded_frame_event_.Wait(timeout_ms)) + if (!WaitForFrame(timeout_ms)) return false; { MutexLock lock(&mutex_); @@ -1098,7 +1134,7 @@ class VideoStreamEncoderTest : public ::testing::Test { void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) { - EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); + EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs)); CheckLastFrameSizeMatches(expected_width, expected_height); } @@ -1124,10 +1160,13 @@ class VideoStreamEncoderTest : public ::testing::Test { EXPECT_EQ(expected_rotation, rotation); } - void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(100)); } + void ExpectDroppedFrame() { EXPECT_FALSE(WaitForFrame(100)); } bool WaitForFrame(int64_t timeout_ms) { - return encoded_frame_event_.Wait(timeout_ms); + RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent()); + bool ret = encoded_frame_event_.Wait(timeout_ms); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); + return ret; } void SetExpectNoFrames() { @@ -1195,6 +1234,7 @@ class VideoStreamEncoderTest : public ::testing::Test { min_transmit_bitrate_bps_ = min_transmit_bitrate_bps; } + TimeController* const time_controller_; mutable Mutex mutex_; TestEncoder* test_encoder_; rtc::Event encoded_frame_event_; @@ -1237,19 +1277,30 @@ class VideoStreamEncoderTest : public ::testing::Test { VideoCodec codec_config_ RTC_GUARDED_BY(mutex_); }; + Clock* clock() { return time_controller_.GetClock(); } + void AdvanceTime(TimeDelta duration) { + time_controller_.AdvanceTime(duration); + } + + int64_t CurrentTimeMs() { return clock()->CurrentTime().ms(); } + + protected: + virtual TaskQueueFactory* GetTaskQueueFactory() { + return time_controller_.GetTaskQueueFactory(); + } + + GlobalSimulatedTimeController time_controller_{Timestamp::Micros(1234)}; VideoSendStream::Config video_send_config_; VideoEncoderConfig video_encoder_config_; int codec_width_; int codec_height_; int max_framerate_; - rtc::ScopedFakeClock fake_clock_; - const std::unique_ptr task_queue_factory_; TestEncoder fake_encoder_; test::VideoEncoderProxyFactory encoder_factory_; VideoBitrateAllocatorProxyFactory bitrate_allocator_factory_; std::unique_ptr stats_proxy_; TestSink sink_; - AdaptingFrameForwarder video_source_; + AdaptingFrameForwarder video_source_{&time_controller_}; std::unique_ptr video_stream_encoder_; }; @@ -1272,6 +1323,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) { // frames means that the first frame will be dropped and the second frame will // be sent when the encoder is enabled. 
video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); + AdvanceTime(TimeDelta::Millis(10)); video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs)); @@ -1347,12 +1399,31 @@ TEST_F(VideoStreamEncoderTest, DropsFrameAfterStop) { EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs)); } -TEST_F(VideoStreamEncoderTest, DropsPendingFramesOnSlowEncode) { +class VideoStreamEncoderBlockedTest : public VideoStreamEncoderTest { + public: + VideoStreamEncoderBlockedTest() {} + + TaskQueueFactory* GetTaskQueueFactory() override { + return task_queue_factory_.get(); + } + + private: + std::unique_ptr task_queue_factory_ = + CreateDefaultTaskQueueFactory(); +}; + +TEST_F(VideoStreamEncoderBlockedTest, DropsPendingFramesOnSlowEncode) { video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( DataRate::BitsPerSec(kTargetBitrateBps), DataRate::BitsPerSec(kTargetBitrateBps), DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + int dropped_count = 0; + stats_proxy_->SetDroppedFrameCallback( + [&dropped_count](VideoStreamEncoderObserver::DropReason) { + ++dropped_count; + }); + fake_encoder_.BlockNextEncode(); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); WaitForEncodedFrame(1); @@ -1364,6 +1435,8 @@ TEST_F(VideoStreamEncoderTest, DropsPendingFramesOnSlowEncode) { WaitForEncodedFrame(3); video_stream_encoder_->Stop(); + + EXPECT_EQ(1, dropped_count); } TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420Conversion) { @@ -2917,7 +2990,7 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -3046,7 +3119,7 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -3109,7 +3182,7 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); @@ -3173,7 +3246,7 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) { 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -3227,7 +3300,7 @@ TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) { 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. 
- AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -3279,7 +3352,7 @@ class BalancedDegradationTest : public VideoStreamEncoderTest { const int kHeight = 360; const int64_t kFrameIntervalMs = 150; // Use low fps to not drop any frame. int64_t timestamp_ms_ = 0; - AdaptingFrameForwarder source_; + AdaptingFrameForwarder source_{&time_controller_}; }; TEST_F(BalancedDegradationTest, AdaptDownTwiceIfMinFpsDiffLtThreshold) { @@ -3553,7 +3626,7 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -3764,38 +3837,33 @@ TEST_F(VideoStreamEncoderTest, CallsBitrateObserver) { DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame( - CreateFrame(rtc::TimeMillis(), codec_width_, codec_height_)); - WaitForEncodedFrame(rtc::TimeMillis()); + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); VideoBitrateAllocation bitrate_allocation = fake_encoder_.GetAndResetLastRateControlSettings()->bitrate; // Check that encoder has been updated too, not just allocation observer. EXPECT_EQ(bitrate_allocation.get_sum_bps(), kLowTargetBitrateBps); - // TODO(srte): The use of millisecs here looks like an error, but the tests - // fails using seconds, this should be investigated. - fake_clock_.AdvanceTime(TimeDelta::Millis(1) / kDefaultFps); + AdvanceTime(TimeDelta::Seconds(1) / kDefaultFps); // Not called on second frame. EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate)) .Times(0); video_source_.IncomingCapturedFrame( - CreateFrame(rtc::TimeMillis(), codec_width_, codec_height_)); - WaitForEncodedFrame(rtc::TimeMillis()); - fake_clock_.AdvanceTime(TimeDelta::Millis(1) / kDefaultFps); + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + AdvanceTime(TimeDelta::Millis(1) / kDefaultFps); // Called after a process interval. EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate)) .Times(1); - const int64_t start_time_ms = rtc::TimeMillis(); - while (rtc::TimeMillis() - start_time_ms < kProcessIntervalMs) { + const int64_t start_time_ms = CurrentTimeMs(); + while (CurrentTimeMs() - start_time_ms < kProcessIntervalMs) { video_source_.IncomingCapturedFrame( - CreateFrame(rtc::TimeMillis(), codec_width_, codec_height_)); - WaitForEncodedFrame(rtc::TimeMillis()); - fake_clock_.AdvanceTime(TimeDelta::Millis(1) / kDefaultFps); + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + AdvanceTime(TimeDelta::Millis(1) / kDefaultFps); } - // Since rates are unchanged, encoder should not be reconfigured. - EXPECT_FALSE(fake_encoder_.GetAndResetLastRateControlSettings().has_value()); - video_stream_encoder_->Stop(); } @@ -4244,7 +4312,7 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) { fake_encoder_.SetQp(kQpLow); // Enable MAINTAIN_FRAMERATE preference. 
- AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, DegradationPreference::MAINTAIN_FRAMERATE); @@ -4279,7 +4347,7 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) { EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight); - fake_clock_.AdvanceTime(TimeDelta::Millis(2000)); + AdvanceTime(TimeDelta::Millis(2000)); // Insert frame should trigger high BW and release quality limitation. timestamp_ms += kFrameIntervalMs; @@ -4301,7 +4369,7 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) { TEST_F(VideoStreamEncoderTest, QualityScalerAdaptationsRemovedWhenQualityScalingDisabled) { - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, DegradationPreference::MAINTAIN_FRAMERATE); @@ -4338,6 +4406,7 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + AdvanceTime(TimeDelta::Millis(0)); // Since we turned off the quality scaler, the adaptations made by it are // removed. EXPECT_THAT(source.sink_wants(), ResolutionMax()); @@ -4474,7 +4543,7 @@ TEST_F(VideoStreamEncoderTest, &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); video_source_.set_adaptation_enabled(true); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); video_source_.IncomingCapturedFrame( CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); @@ -4578,7 +4647,7 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) { &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); video_source_.set_adaptation_enabled(true); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); // Trigger overuse as much as we can. rtc::VideoSinkWants last_wants; @@ -4593,7 +4662,7 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) { sink_.WaitForEncodedFrame(timestamp_ms); } timestamp_ms += kFrameIntervalMs; - fake_clock_.AdvanceTime(TimeDelta::Millis(kFrameIntervalMs)); + AdvanceTime(TimeDelta::Millis(kFrameIntervalMs)); } // ...and then try to adapt again. video_stream_encoder_->TriggerCpuOveruse(); @@ -4618,7 +4687,7 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); @@ -4804,7 +4873,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); @@ -4939,7 +5008,7 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. 
- AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); @@ -5080,7 +5149,7 @@ TEST_F(VideoStreamEncoderTest, PeriodicallyUpdatesChannelParameters) { DataRate::BitsPerSec(kTargetBitrateBps), DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kLowFps; // Insert 2 seconds of 2fps video. @@ -5139,7 +5208,7 @@ TEST_F(VideoStreamEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) { video_stream_encoder_->WaitUntilTaskQueueIsIdle(); // Insert a first video frame, causes another bitrate update. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(_)).Times(1); video_source_.IncomingCapturedFrame( CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); @@ -5152,7 +5221,7 @@ TEST_F(VideoStreamEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) { // Skip ahead until a new periodic parameter update should have occured. timestamp_ms += kProcessIntervalMs; - fake_clock_.AdvanceTime(TimeDelta::Millis(kProcessIntervalMs)); + AdvanceTime(TimeDelta::Millis(kProcessIntervalMs)); // Bitrate observer should not be called. EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(_)).Times(0); @@ -5221,7 +5290,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { DataRate::BitsPerSec(kTargetBitrateBps), DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kFps; // Insert 3 seconds of video, verify number of drops with normal bitrate. @@ -5291,7 +5360,7 @@ TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) { ASSERT_GT(max_framerate_, kActualInputFps); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kActualInputFps; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( DataRate::BitsPerSec(kTargetBitrateBps), @@ -5312,7 +5381,7 @@ TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) { video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, AccumulatesUpdateRectOnDroppedFrames) { +TEST_F(VideoStreamEncoderBlockedTest, AccumulatesUpdateRectOnDroppedFrames) { VideoFrame::UpdateRect rect; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( DataRate::BitsPerSec(kTargetBitrateBps), @@ -5495,8 +5564,7 @@ TEST_F(VideoStreamEncoderTest, AdjustsTimestampInternalSource) { // Frame is captured kEncodeFinishDelayMs before it's encoded, so restored // capture timestamp should be kEncodeFinishDelayMs in the past. EXPECT_EQ(sink_.GetLastCaptureTimeMs(), - fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec - - kEncodeFinishDelayMs); + CurrentTimeMs() - kEncodeFinishDelayMs); video_stream_encoder_->Stop(); } @@ -5565,7 +5633,7 @@ TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) { // Insert a first video frame. It should be dropped because of downscale in // resolution. 
- int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -5573,7 +5641,7 @@ TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) { ExpectDroppedFrame(); // Second frame is downscaled. - timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + timestamp_ms = CurrentTimeMs(); frame = CreateFrame(timestamp_ms, kFrameWidth / 2, kFrameHeight / 2); frame.set_rotation(kVideoRotation_90); video_source_.IncomingCapturedFrame(frame); @@ -5582,7 +5650,7 @@ TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) { sink_.CheckLastFrameRotationMatches(kVideoRotation_90); // Insert another frame, also downscaled. - timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + timestamp_ms = CurrentTimeMs(); frame = CreateFrame(timestamp_ms, kFrameWidth / 2, kFrameHeight / 2); frame.set_rotation(kVideoRotation_180); video_source_.IncomingCapturedFrame(frame); @@ -5607,7 +5675,7 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) { /*cwnd_reduce_ratio=*/0); // Insert a first video frame so that encoder gets configured. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -5643,7 +5711,7 @@ TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) { DataRate::BitsPerSec(kTargetBitrateBps), DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, nullptr)); WaitForEncodedFrame(1); @@ -5929,7 +5997,7 @@ TEST_F(VideoStreamEncoderTest, /*cwnd_reduce_ratio=*/0); // Insert a first video frame so that encoder gets configured. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -5966,7 +6034,7 @@ TEST_F(VideoStreamEncoderTest, /*cwnd_reduce_ratio=*/0); // Insert a first video frame so that encoder gets configured. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -6016,8 +6084,7 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) { // Pass enough frames with the full update to trigger animation detection. for (int i = 0; i < kNumFrames; ++i) { - int64_t timestamp_ms = - fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); frame.set_ntp_time_ms(timestamp_ms); frame.set_timestamp_us(timestamp_ms * 1000); video_source_.IncomingCapturedFrame(frame); @@ -6032,7 +6099,7 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) { // Pass one frame with no known update. // Resolution cap should be removed immediately. 
- int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); frame.set_ntp_time_ms(timestamp_ms); frame.set_timestamp_us(timestamp_ms * 1000); frame.clear_update_rect(); @@ -6066,8 +6133,7 @@ TEST_F(VideoStreamEncoderTest, ConfiguresVp9SvcAtOddResolutions) { // Pass enough frames with the full update to trigger animation detection. for (int i = 0; i < kNumFrames; ++i) { - int64_t timestamp_ms = - fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); frame.set_ntp_time_ms(timestamp_ms); frame.set_timestamp_us(timestamp_ms * 1000); video_source_.IncomingCapturedFrame(frame);
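// ---------------------------------------------------------------------------
// Reviewer note, not part of the CL: a minimal sketch of the
// ScopedTaskSafety / ToQueuedTask pattern that the VideoStreamEncoder changes
// above rely on when posting from the encoder queue back to |main_queue_|.
// "ExampleOwner", "PostToMain" and "DoOnMainQueue" are illustrative names
// only; the helpers used are the ones added as a dependency in BUILD.gn.
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"

class ExampleOwner {
 public:
  // Assumed to be constructed on the task queue that should run the posted
  // closures, mirroring how VideoStreamEncoder captures |main_queue_|.
  ExampleOwner() : main_queue_(webrtc::TaskQueueBase::Current()) {}

  // May be called from another task queue (e.g. the encoder queue). If |this|
  // is destroyed before the task runs, destroying |task_safety_| clears the
  // flag and the queued closure is dropped instead of touching freed state.
  void PostToMain(int value) {
    main_queue_->PostTask(webrtc::ToQueuedTask(task_safety_, [this, value]() {
      // Runs on |main_queue_| only while |this| is still alive.
      DoOnMainQueue(value);
    }));
  }

 private:
  void DoOnMainQueue(int value) {}  // Touch main-queue-only state here.

  webrtc::TaskQueueBase* const main_queue_;
  // Destroyed on |main_queue_|, like the members it protects.
  webrtc::ScopedTaskSafety task_safety_;
};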