diff --git a/webrtc/video_engine/include/vie_base.h b/webrtc/video_engine/include/vie_base.h index 997ae74e20..1801e7b9cf 100644 --- a/webrtc/video_engine/include/vie_base.h +++ b/webrtc/video_engine/include/vie_base.h @@ -122,8 +122,9 @@ class WEBRTC_DLLEXPORT ViEBase { // Gets the last cpu overuse measure. // TODO(asapersson): Remove default implementation. - virtual int CpuOveruseMeasure(int channel, - int* capture_jitter_ms) { return -1; } + virtual int CpuOveruseMeasures(int channel, + int* capture_jitter_ms, + int* avg_encode_time_ms) { return -1; } // Specifies the VoiceEngine and VideoEngine channel pair to use for // audio/video synchronization. diff --git a/webrtc/video_engine/overuse_frame_detector.cc b/webrtc/video_engine/overuse_frame_detector.cc index 262c76725f..c284f646de 100644 --- a/webrtc/video_engine/overuse_frame_detector.cc +++ b/webrtc/video_engine/overuse_frame_detector.cc @@ -47,6 +47,8 @@ const int kMaxRampUpDelayMs = 120 * 1000; // Expontential back-off factor, to prevent annoying up-down behaviour. const double kRampUpBackoffFactor = 2.0; +// The initial average encode time (set to a fairly small value). 
+const float kInitialAvgEncodeTimeMs = 5.0f; } // namespace Statistics::Statistics() : @@ -117,7 +119,7 @@ OveruseFrameDetector::OveruseFrameDetector(Clock* clock, in_quick_rampup_(false), current_rampup_delay_ms_(kStandardRampUpDelayMs), num_pixels_(0), - last_capture_jitter_ms_(-1) {} + avg_encode_time_ms_(kInitialAvgEncodeTimeMs) {} OveruseFrameDetector::~OveruseFrameDetector() { } @@ -145,9 +147,21 @@ void OveruseFrameDetector::FrameCaptured(int width, int height) { last_capture_time_ = time; } -int OveruseFrameDetector::last_capture_jitter_ms() { +void OveruseFrameDetector::FrameEncoded(int encode_time_ms) { CriticalSectionScoped cs(crit_.get()); - return last_capture_jitter_ms_; + const float kWeight = 0.1f; + avg_encode_time_ms_ = kWeight * encode_time_ms + + (1.0f - kWeight) * avg_encode_time_ms_; +} + +int OveruseFrameDetector::last_capture_jitter_ms() const { + CriticalSectionScoped cs(crit_.get()); + return static_cast<int>(capture_deltas_.StdDev() + 0.5); +} + +int OveruseFrameDetector::avg_encode_time_ms() const { + CriticalSectionScoped cs(crit_.get()); + return static_cast<int>(avg_encode_time_ms_ + 0.5); } int32_t OveruseFrameDetector::TimeUntilNextProcess() { @@ -214,7 +228,6 @@ int32_t OveruseFrameDetector::Process() { overuse_stddev_ms_, normaluse_stddev_ms_); - last_capture_jitter_ms_ = static_cast<int>(capture_deltas_.StdDev()); return 0; } diff --git a/webrtc/video_engine/overuse_frame_detector.h b/webrtc/video_engine/overuse_frame_detector.h index 4c2d1295ff..356e27fee8 100644 --- a/webrtc/video_engine/overuse_frame_detector.h +++ b/webrtc/video_engine/overuse_frame_detector.h @@ -71,7 +71,13 @@ class OveruseFrameDetector : public Module { // Called for each captured frame. void FrameCaptured(int width, int height); - int last_capture_jitter_ms(); + void FrameEncoded(int encode_time_ms); + + int last_capture_jitter_ms() const; + + // Running average of reported encode time (FrameEncoded()). // Only used for stats. 
+ int avg_encode_time_ms() const; // Implements Module. virtual int32_t TimeUntilNextProcess() OVERRIDE; @@ -107,7 +113,7 @@ class OveruseFrameDetector : public Module { // Number of pixels of last captured frame. int num_pixels_; - int last_capture_jitter_ms_; + float avg_encode_time_ms_; DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector); }; diff --git a/webrtc/video_engine/overuse_frame_detector_unittest.cc b/webrtc/video_engine/overuse_frame_detector_unittest.cc index 8b25294442..131bbfd695 100644 --- a/webrtc/video_engine/overuse_frame_detector_unittest.cc +++ b/webrtc/video_engine/overuse_frame_detector_unittest.cc @@ -96,9 +96,17 @@ TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) { } TEST_F(OveruseFrameDetectorTest, LastCaptureJitter) { - EXPECT_EQ(-1, overuse_detector_->last_capture_jitter_ms()); + EXPECT_EQ(0, overuse_detector_->last_capture_jitter_ms()); TriggerOveruse(); EXPECT_GT(overuse_detector_->last_capture_jitter_ms(), 0); } +TEST_F(OveruseFrameDetectorTest, EncodedFrame) { + const int kInitialAvgEncodeTimeInMs = 5; + EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->avg_encode_time_ms()); + for (int i = 0; i < 30; i++) + overuse_detector_->FrameEncoded(2); + EXPECT_EQ(2, overuse_detector_->avg_encode_time_ms()); +} + } // namespace webrtc diff --git a/webrtc/video_engine/vie_base_impl.cc b/webrtc/video_engine/vie_base_impl.cc index 873327ab3d..8d6aeb2193 100644 --- a/webrtc/video_engine/vie_base_impl.cc +++ b/webrtc/video_engine/vie_base_impl.cc @@ -117,7 +117,9 @@ int ViEBaseImpl::RegisterCpuOveruseObserver(int video_channel, return 0; } -int ViEBaseImpl::CpuOveruseMeasure(int video_channel, int* capture_jitter_ms) { +int ViEBaseImpl::CpuOveruseMeasures(int video_channel, + int* capture_jitter_ms, + int* avg_encode_time_ms) { ViEChannelManagerScoped cs(*(shared_data_.channel_manager())); ViEChannel* vie_channel = cs.Channel(video_channel); if (!vie_channel) { @@ -138,7 +140,7 @@ int ViEBaseImpl::CpuOveruseMeasure(int 
video_channel, int* capture_jitter_ms) { if (provider) { ViECapturer* capturer = is.Capture(provider->Id()); if (capturer) { - *capture_jitter_ms = capturer->CpuOveruseMeasure(); + capturer->CpuOveruseMeasures(capture_jitter_ms, avg_encode_time_ms); return 0; } } diff --git a/webrtc/video_engine/vie_base_impl.h b/webrtc/video_engine/vie_base_impl.h index b44754a2c0..1411cafce1 100644 --- a/webrtc/video_engine/vie_base_impl.h +++ b/webrtc/video_engine/vie_base_impl.h @@ -33,7 +33,9 @@ class ViEBaseImpl virtual int SetVoiceEngine(VoiceEngine* voice_engine); virtual int RegisterCpuOveruseObserver(int channel, CpuOveruseObserver* observer); - virtual int CpuOveruseMeasure(int channel, int* capture_jitter_ms); + virtual int CpuOveruseMeasures(int channel, + int* capture_jitter_ms, + int* avg_encode_time_ms); virtual int CreateChannel(int& video_channel); // NOLINT virtual int CreateChannel(int& video_channel, // NOLINT int original_channel); diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc index b8de7cd25f..c5844e2293 100644 --- a/webrtc/video_engine/vie_capturer.cc +++ b/webrtc/video_engine/vie_capturer.cc @@ -266,8 +266,10 @@ void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) { overuse_detector_->SetObserver(observer); } -int ViECapturer::CpuOveruseMeasure() { - return overuse_detector_->last_capture_jitter_ms(); +void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms, + int* avg_encode_time_ms) const { + *capture_jitter_ms = overuse_detector_->last_capture_jitter_ms(); + *avg_encode_time_ms = overuse_detector_->avg_encode_time_ms(); } int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) { @@ -530,8 +532,10 @@ bool ViECapturer::ViECaptureThreadFunction(void* obj) { bool ViECapturer::ViECaptureProcess() { if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) { + int64_t encode_start_time = -1; deliver_cs_->Enter(); if (SwapCapturedAndDeliverFrameIfAvailable()) { + encode_start_time = 
Clock::GetRealTimeClock()->TimeInMilliseconds(); DeliverI420Frame(&deliver_frame_); } deliver_cs_->Leave(); @@ -542,6 +546,11 @@ bool ViECapturer::ViECaptureProcess() { reported_brightness_level_ = current_brightness_level_; } } + // Update the overuse detector with the duration. + if (encode_start_time != -1) { + overuse_detector_->FrameEncoded( + Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time); + } } // We're done! return true; diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h index 1b12c45da0..7900e55cf7 100644 --- a/webrtc/video_engine/vie_capturer.h +++ b/webrtc/video_engine/vie_capturer.h @@ -104,7 +104,8 @@ class ViECapturer void RegisterCpuOveruseObserver(CpuOveruseObserver* observer); - int CpuOveruseMeasure(); + void CpuOveruseMeasures(int* capture_jitter_ms, + int* avg_encode_time_ms) const; protected: ViECapturer(int capture_id,