diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc index 4bf80e3bec..ecd7a30234 100644 --- a/webrtc/video/video_capture_input.cc +++ b/webrtc/video/video_capture_input.cc @@ -131,7 +131,6 @@ bool VideoCaptureInput::CaptureProcess() { if (rtc::AtomicOps::Load(&stop_)) return false; - overuse_detector_->FrameProcessingStarted(); int64_t encode_start_time = -1; VideoFrame deliver_frame; { diff --git a/webrtc/video_engine/overuse_frame_detector.cc b/webrtc/video_engine/overuse_frame_detector.cc index 71f8f5764d..6780688207 100644 --- a/webrtc/video_engine/overuse_frame_detector.cc +++ b/webrtc/video_engine/overuse_frame_detector.cc @@ -253,63 +253,6 @@ class OveruseFrameDetector::FrameQueue { int last_processing_time_ms_; }; -// TODO(asapersson): Remove this class. Not used. -// Class for calculating the capture queue delay change. -class OveruseFrameDetector::CaptureQueueDelay { - public: - CaptureQueueDelay() - : kWeightFactor(0.5f), - delay_ms_(0), - filtered_delay_ms_per_s_(new rtc::ExpFilter(kWeightFactor)) { - filtered_delay_ms_per_s_->Apply(1.0f, 0.0f); - } - ~CaptureQueueDelay() {} - - void FrameCaptured(int64_t now) { - const size_t kMaxSize = 200; - if (frames_.size() > kMaxSize) { - frames_.pop_front(); - } - frames_.push_back(now); - } - - void FrameProcessingStarted(int64_t now) { - if (frames_.empty()) { - return; - } - delay_ms_ = now - frames_.front(); - frames_.pop_front(); - } - - void CalculateDelayChange(int64_t diff_last_sample_ms) { - if (diff_last_sample_ms <= 0) { - return; - } - float exp = static_cast<float>(diff_last_sample_ms) / kProcessIntervalMs; - exp = std::min(exp, kMaxExp); - filtered_delay_ms_per_s_->Apply(exp, - delay_ms_ * 1000.0f / diff_last_sample_ms); - ClearFrames(); - } - - void ClearFrames() { - frames_.clear(); - } - - int delay_ms() const { - return delay_ms_; - } - - int Value() const { - return static_cast<int>(filtered_delay_ms_per_s_->filtered() + 0.5); - } - - private: - const float
kWeightFactor; - std::list<int64_t> frames_; - int delay_ms_; - rtc::scoped_ptr<rtc::ExpFilter> filtered_delay_ms_per_s_; -}; OveruseFrameDetector::OveruseFrameDetector( Clock* clock, @@ -335,8 +278,7 @@ OveruseFrameDetector::OveruseFrameDetector( encode_time_(new EncodeTimeAvg()), usage_(new SendProcessingUsage(options)), frame_queue_(new FrameQueue()), - last_sample_time_ms_(0), - capture_queue_delay_(new CaptureQueueDelay()) { + last_sample_time_ms_(0) { DCHECK(metrics_observer != nullptr); // Make sure stats are initially up-to-date. This simplifies unit testing // since we don't have to trigger an update using one of the methods which @@ -348,11 +290,6 @@ OveruseFrameDetector::OveruseFrameDetector( OveruseFrameDetector::~OveruseFrameDetector() { } -int OveruseFrameDetector::CaptureQueueDelayMsPerS() const { - rtc::CritScope cs(&crit_); - return capture_queue_delay_->delay_ms(); -} - int OveruseFrameDetector::LastProcessingTimeMs() const { rtc::CritScope cs(&crit_); return frame_queue_->last_processing_time_ms(); @@ -367,7 +304,6 @@ void OveruseFrameDetector::UpdateCpuOveruseMetrics() { metrics_.capture_jitter_ms = static_cast<int>(capture_deltas_.StdDev() + 0.5); metrics_.avg_encode_time_ms = encode_time_->Value(); metrics_.encode_usage_percent = usage_->Value(); - metrics_.capture_queue_delay_ms_per_s = capture_queue_delay_->Value(); metrics_observer_->CpuOveruseMetricsUpdated(metrics_); } @@ -396,7 +332,6 @@ void OveruseFrameDetector::ResetAll(int num_pixels) { capture_deltas_.Reset(); usage_->Reset(); frame_queue_->Reset(); - capture_queue_delay_->ClearFrames(); last_capture_time_ = 0; num_process_times_ = 0; UpdateCpuOveruseMetrics(); @@ -418,19 +353,12 @@ void OveruseFrameDetector::FrameCaptured(int width, } last_capture_time_ = now; - capture_queue_delay_->FrameCaptured(now); - if (options_.enable_extended_processing_usage) { frame_queue_->Start(capture_time_ms, now); } UpdateCpuOveruseMetrics(); } -void OveruseFrameDetector::FrameProcessingStarted() { - rtc::CritScope cs(&crit_); -
capture_queue_delay_->FrameProcessingStarted(clock_->TimeInMilliseconds()); -} - void OveruseFrameDetector::FrameEncoded(int encode_time_ms) { rtc::CritScope cs(&crit_); int64_t now = clock_->TimeInMilliseconds(); @@ -477,15 +405,11 @@ int32_t OveruseFrameDetector::Process() { if (now < next_process_time_) return 0; - int64_t diff_ms = now - next_process_time_ + kProcessIntervalMs; next_process_time_ = now + kProcessIntervalMs; rtc::CritScope cs(&crit_); ++num_process_times_; - capture_queue_delay_->CalculateDelayChange(diff_ms); - UpdateCpuOveruseMetrics(); - if (num_process_times_ <= options_.min_process_count) { return 0; } diff --git a/webrtc/video_engine/overuse_frame_detector.h b/webrtc/video_engine/overuse_frame_detector.h index 51982933f1..4791cee2f5 100644 --- a/webrtc/video_engine/overuse_frame_detector.h +++ b/webrtc/video_engine/overuse_frame_detector.h @@ -106,18 +106,13 @@ struct CpuOveruseMetrics { CpuOveruseMetrics() : capture_jitter_ms(-1), avg_encode_time_ms(-1), - encode_usage_percent(-1), - capture_queue_delay_ms_per_s(-1) {} + encode_usage_percent(-1) {} int capture_jitter_ms; // The current estimated jitter in ms based on // incoming captured frames. int avg_encode_time_ms; // The average encode time in ms. int encode_usage_percent; // The average encode time divided by the average // time difference between incoming captured frames. - int capture_queue_delay_ms_per_s; // The current time delay between an - // incoming captured frame until the frame - // is being processed. The delay is - // expressed in ms delay per second. }; class CpuOveruseMetricsObserver { @@ -161,9 +156,6 @@ class OveruseFrameDetector : public Module { // Called for each captured frame. void FrameCaptured(int width, int height, int64_t capture_time_ms); - // Called when the processing of a captured frame is started. - void FrameProcessingStarted(); - // Called for each encoded frame. 
void FrameEncoded(int encode_time_ms); @@ -171,7 +163,6 @@ class OveruseFrameDetector : public Module { void FrameSent(int64_t capture_time_ms); // Only public for testing. - int CaptureQueueDelayMsPerS() const; int LastProcessingTimeMs() const; int FramesInQueue() const; @@ -182,7 +173,6 @@ class OveruseFrameDetector : public Module { private: class EncodeTimeAvg; class SendProcessingUsage; - class CaptureQueueDelay; class FrameQueue; void UpdateCpuOveruseMetrics() EXCLUSIVE_LOCKS_REQUIRED(crit_); @@ -247,9 +237,6 @@ class OveruseFrameDetector : public Module { int64_t last_sample_time_ms_; // Only accessed by one thread. - const rtc::scoped_ptr<CaptureQueueDelay> capture_queue_delay_ - GUARDED_BY(crit_); - rtc::ThreadChecker processing_thread_; DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector); diff --git a/webrtc/video_engine/overuse_frame_detector_unittest.cc b/webrtc/video_engine/overuse_frame_detector_unittest.cc index 07306ef0b3..0abce08f9c 100644 --- a/webrtc/video_engine/overuse_frame_detector_unittest.cc +++ b/webrtc/video_engine/overuse_frame_detector_unittest.cc @@ -288,59 +288,6 @@ TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdatingCaptureJitter) { EXPECT_EQ(InitialJitter(), CaptureJitterMs()); } -TEST_F(OveruseFrameDetectorTest, NoCaptureQueueDelay) { - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0); - overuse_detector_->FrameCaptured( - kWidth, kHeight, clock_->TimeInMilliseconds()); - overuse_detector_->FrameProcessingStarted(); - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 0); -} - -TEST_F(OveruseFrameDetectorTest, CaptureQueueDelay) { - overuse_detector_->FrameCaptured( - kWidth, kHeight, clock_->TimeInMilliseconds()); - clock_->AdvanceTimeMilliseconds(100); - overuse_detector_->FrameProcessingStarted(); - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100); -} - -TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayMultipleFrames) { - overuse_detector_->FrameCaptured( - kWidth, kHeight, clock_->TimeInMilliseconds()); -
clock_->AdvanceTimeMilliseconds(10); - overuse_detector_->FrameCaptured( - kWidth, kHeight, clock_->TimeInMilliseconds()); - clock_->AdvanceTimeMilliseconds(20); - - overuse_detector_->FrameProcessingStarted(); - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 30); - overuse_detector_->FrameProcessingStarted(); - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 20); -} - -TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayResetAtResolutionSwitch) { - overuse_detector_->FrameCaptured( - kWidth, kHeight, clock_->TimeInMilliseconds()); - clock_->AdvanceTimeMilliseconds(10); - overuse_detector_->FrameCaptured( - kWidth, kHeight + 1, clock_->TimeInMilliseconds()); - clock_->AdvanceTimeMilliseconds(20); - - overuse_detector_->FrameProcessingStarted(); - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 20); -} - -TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) { - overuse_detector_->FrameCaptured( - kWidth, kHeight, clock_->TimeInMilliseconds()); - clock_->AdvanceTimeMilliseconds(100); - overuse_detector_->FrameProcessingStarted(); - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100); - // No new captured frame. The last delay should be reported. - overuse_detector_->FrameProcessingStarted(); - EXPECT_EQ(overuse_detector_->CaptureQueueDelayMsPerS(), 100); -} - TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrameDisabled) { options_.enable_extended_processing_usage = false; ReinitializeOveruseDetector();