diff --git a/webrtc/audio/audio_send_stream.cc b/webrtc/audio/audio_send_stream.cc index 417720cdb0..17979d5760 100644 --- a/webrtc/audio/audio_send_stream.cc +++ b/webrtc/audio/audio_send_stream.cc @@ -16,9 +16,7 @@ #include "webrtc/audio/conversion.h" #include "webrtc/audio/scoped_voe_interface.h" #include "webrtc/base/checks.h" -#include "webrtc/base/event.h" #include "webrtc/base/logging.h" -#include "webrtc/base/task_queue.h" #include "webrtc/modules/congestion_controller/include/congestion_controller.h" #include "webrtc/modules/pacing/paced_sender.h" #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -61,11 +59,9 @@ namespace internal { AudioSendStream::AudioSendStream( const webrtc::AudioSendStream::Config& config, const rtc::scoped_refptr& audio_state, - rtc::TaskQueue* worker_queue, CongestionController* congestion_controller, BitrateAllocator* bitrate_allocator) - : worker_queue_(worker_queue), - config_(config), + : config_(config), audio_state_(audio_state), bitrate_allocator_(bitrate_allocator) { LOG(LS_INFO) << "AudioSendStream: " << config_.ToString(); @@ -113,13 +109,8 @@ void AudioSendStream::Start() { RTC_DCHECK(thread_checker_.CalledOnValidThread()); if (config_.min_bitrate_kbps != -1 && config_.max_bitrate_kbps != -1) { RTC_DCHECK_GE(config_.max_bitrate_kbps, config_.min_bitrate_kbps); - rtc::Event thread_sync_event(false /* manual_reset */, false); - worker_queue_->PostTask([this, &thread_sync_event] { - bitrate_allocator_->AddObserver(this, config_.min_bitrate_kbps * 1000, - config_.max_bitrate_kbps * 1000, 0, true); - thread_sync_event.Set(); - }); - thread_sync_event.Wait(rtc::Event::kForever); + bitrate_allocator_->AddObserver(this, config_.min_bitrate_kbps * 1000, + config_.max_bitrate_kbps * 1000, 0, true); } ScopedVoEInterface base(voice_engine()); @@ -131,13 +122,7 @@ void AudioSendStream::Start() { void AudioSendStream::Stop() { RTC_DCHECK(thread_checker_.CalledOnValidThread()); - rtc::Event thread_sync_event(false /* manual_reset */, false); - worker_queue_->PostTask([this, &thread_sync_event] { - bitrate_allocator_->RemoveObserver(this); - thread_sync_event.Set(); - }); - thread_sync_event.Wait(rtc::Event::kForever); - + bitrate_allocator_->RemoveObserver(this); ScopedVoEInterface base(voice_engine()); int error = base->StopSend(config_.voe_channel_id); if (error != 0) { diff --git a/webrtc/audio/audio_send_stream.h b/webrtc/audio/audio_send_stream.h index 3629463162..a993d5f2f9 100644 --- a/webrtc/audio/audio_send_stream.h +++ b/webrtc/audio/audio_send_stream.h @@ -33,7 +33,6 @@ class AudioSendStream final : public webrtc::AudioSendStream, public: AudioSendStream(const webrtc::AudioSendStream::Config& config, const rtc::scoped_refptr& audio_state, - rtc::TaskQueue* worker_queue, CongestionController* congestion_controller, BitrateAllocator* bitrate_allocator); ~AudioSendStream() override; @@ -60,7 +59,6 @@ class AudioSendStream final : public webrtc::AudioSendStream, VoiceEngine* voice_engine() const; rtc::ThreadChecker thread_checker_; - rtc::TaskQueue* worker_queue_; const webrtc::AudioSendStream::Config config_; rtc::scoped_refptr audio_state_; std::unique_ptr channel_proxy_; diff --git a/webrtc/audio/audio_send_stream_unittest.cc b/webrtc/audio/audio_send_stream_unittest.cc index 9172064705..7f940fc767 100644 --- a/webrtc/audio/audio_send_stream_unittest.cc +++ b/webrtc/audio/audio_send_stream_unittest.cc @@ -16,7 +16,6 @@ #include "webrtc/audio/audio_send_stream.h" #include "webrtc/audio/audio_state.h" #include "webrtc/audio/conversion.h" 
-#include "webrtc/base/task_queue.h" #include "webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h" #include "webrtc/call/mock/mock_rtc_event_log.h" #include "webrtc/modules/congestion_controller/include/congestion_controller.h" @@ -66,8 +65,7 @@ struct ConfigHelper { &bitrate_observer_, &remote_bitrate_observer_, &event_log_), - bitrate_allocator_(&limit_observer_), - worker_queue_("ConfigHelper_worker_queue") { + bitrate_allocator_(&limit_observer_) { using testing::Invoke; using testing::StrEq; @@ -127,7 +125,6 @@ struct ConfigHelper { return &congestion_controller_; } BitrateAllocator* bitrate_allocator() { return &bitrate_allocator_; } - rtc::TaskQueue* worker_queue() { return &worker_queue_; } void SetupMockForSendTelephoneEvent() { EXPECT_TRUE(channel_proxy_); @@ -184,9 +181,6 @@ struct ConfigHelper { MockRtcEventLog event_log_; testing::NiceMock limit_observer_; BitrateAllocator bitrate_allocator_; - // |worker_queue| is defined last to ensure all pending tasks are cancelled - // and deleted before any other members. - rtc::TaskQueue worker_queue_; }; } // namespace @@ -208,16 +202,16 @@ TEST(AudioSendStreamTest, ConfigToString) { TEST(AudioSendStreamTest, ConstructDestruct) { ConfigHelper helper; - internal::AudioSendStream send_stream( - helper.config(), helper.audio_state(), helper.worker_queue(), - helper.congestion_controller(), helper.bitrate_allocator()); + internal::AudioSendStream send_stream(helper.config(), helper.audio_state(), + helper.congestion_controller(), + helper.bitrate_allocator()); } TEST(AudioSendStreamTest, SendTelephoneEvent) { ConfigHelper helper; - internal::AudioSendStream send_stream( - helper.config(), helper.audio_state(), helper.worker_queue(), - helper.congestion_controller(), helper.bitrate_allocator()); + internal::AudioSendStream send_stream(helper.config(), helper.audio_state(), + helper.congestion_controller(), + helper.bitrate_allocator()); helper.SetupMockForSendTelephoneEvent(); EXPECT_TRUE(send_stream.SendTelephoneEvent(kTelephoneEventPayloadType, kTelephoneEventCode, kTelephoneEventDuration)); @@ -225,18 +219,18 @@ TEST(AudioSendStreamTest, SendTelephoneEvent) { TEST(AudioSendStreamTest, SetMuted) { ConfigHelper helper; - internal::AudioSendStream send_stream( - helper.config(), helper.audio_state(), helper.worker_queue(), - helper.congestion_controller(), helper.bitrate_allocator()); + internal::AudioSendStream send_stream(helper.config(), helper.audio_state(), + helper.congestion_controller(), + helper.bitrate_allocator()); EXPECT_CALL(*helper.channel_proxy(), SetInputMute(true)); send_stream.SetMuted(true); } TEST(AudioSendStreamTest, GetStats) { ConfigHelper helper; - internal::AudioSendStream send_stream( - helper.config(), helper.audio_state(), helper.worker_queue(), - helper.congestion_controller(), helper.bitrate_allocator()); + internal::AudioSendStream send_stream(helper.config(), helper.audio_state(), + helper.congestion_controller(), + helper.bitrate_allocator()); helper.SetupMockForGetStats(); AudioSendStream::Stats stats = send_stream.GetStats(); EXPECT_EQ(kSsrc, stats.local_ssrc); @@ -263,9 +257,9 @@ TEST(AudioSendStreamTest, GetStats) { TEST(AudioSendStreamTest, GetStatsTypingNoiseDetected) { ConfigHelper helper; - internal::AudioSendStream send_stream( - helper.config(), helper.audio_state(), helper.worker_queue(), - helper.congestion_controller(), helper.bitrate_allocator()); + internal::AudioSendStream send_stream(helper.config(), helper.audio_state(), + helper.congestion_controller(), + 
helper.bitrate_allocator()); helper.SetupMockForGetStats(); EXPECT_FALSE(send_stream.GetStats().typing_noise_detected); diff --git a/webrtc/call.h b/webrtc/call.h index d3064d4849..ec43b18ad4 100644 --- a/webrtc/call.h +++ b/webrtc/call.h @@ -113,8 +113,8 @@ class Call { AudioReceiveStream* receive_stream) = 0; virtual VideoSendStream* CreateVideoSendStream( - VideoSendStream::Config config, - VideoEncoderConfig encoder_config) = 0; + const VideoSendStream::Config& config, + const VideoEncoderConfig& encoder_config) = 0; virtual void DestroyVideoSendStream(VideoSendStream* send_stream) = 0; virtual VideoReceiveStream* CreateVideoReceiveStream( diff --git a/webrtc/call/BUILD.gn b/webrtc/call/BUILD.gn index e7c86b72c8..5b428f72e9 100644 --- a/webrtc/call/BUILD.gn +++ b/webrtc/call/BUILD.gn @@ -29,7 +29,6 @@ source_set("call") { "..:rtc_event_log", "..:webrtc_common", "../audio", - "../base:rtc_task_queue", "../modules/congestion_controller", "../modules/rtp_rtcp", "../system_wrappers", @@ -52,7 +51,6 @@ if (rtc_include_tests) { "//testing/gmock", "//testing/gtest", ] - configs += [ "..:common_config" ] if (is_clang) { # Suppress warnings from the Chromium Clang plugin. # See http://code.google.com/p/webrtc/issues/detail?id=163 for details. diff --git a/webrtc/call/bitrate_allocator.cc b/webrtc/call/bitrate_allocator.cc index 085fdf98aa..34b06b1984 100644 --- a/webrtc/call/bitrate_allocator.cc +++ b/webrtc/call/bitrate_allocator.cc @@ -54,9 +54,7 @@ BitrateAllocator::BitrateAllocator(LimitObserver* limit_observer) last_rtt_(0), num_pause_events_(0), clock_(Clock::GetRealTimeClock()), - last_bwe_log_time_(0) { - sequenced_checker_.Detach(); -} + last_bwe_log_time_(0) {} BitrateAllocator::~BitrateAllocator() { RTC_LOGGED_HISTOGRAM_COUNTS_100("WebRTC.Call.NumberOfPauseEvents", @@ -66,7 +64,7 @@ BitrateAllocator::~BitrateAllocator() { void BitrateAllocator::OnNetworkChanged(uint32_t target_bitrate_bps, uint8_t fraction_loss, int64_t rtt) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); + rtc::CritScope lock(&crit_sect_); last_bitrate_bps_ = target_bitrate_bps; last_non_zero_bitrate_bps_ = target_bitrate_bps > 0 ? target_bitrate_bps : last_non_zero_bitrate_bps_; @@ -119,7 +117,7 @@ void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer, uint32_t max_bitrate_bps, uint32_t pad_up_bitrate_bps, bool enforce_min_bitrate) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); + rtc::CritScope lock(&crit_sect_); auto it = FindObserverConfig(observer); // Update settings if the observer already exists, create a new one otherwise. 
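The hunks above swap BitrateAllocator's single-sequence assumption (RTC_DCHECK_CALLED_SEQUENTIALLY on a SequencedTaskChecker) for a real lock, so the allocator no longer assumes every call lands on one task queue. A minimal sketch of the resulting pattern, using only the primitives this patch references (rtc::CriticalSection, rtc::CritScope, GUARDED_BY, EXCLUSIVE_LOCKS_REQUIRED); the class and member names here are illustrative, not taken from the patch:

#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"

// Public entry points take the lock; private helpers annotate that the
// caller must already hold it, mirroring the BitrateAllocator changes in
// this patch.
class LockedCounter {
 public:
  void Increment() {
    rtc::CritScope lock(&crit_sect_);
    IncrementLocked();
  }
  int value() {
    rtc::CritScope lock(&crit_sect_);
    return value_;
  }

 private:
  void IncrementLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_) { ++value_; }

  rtc::CriticalSection crit_sect_;
  int value_ GUARDED_BY(crit_sect_) = 0;
};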
@@ -157,15 +155,17 @@ void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer, } void BitrateAllocator::UpdateAllocationLimits() { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); uint32_t total_requested_padding_bitrate = 0; uint32_t total_requested_min_bitrate = 0; - for (const auto& config : bitrate_observer_configs_) { - if (config.enforce_min_bitrate) { - total_requested_min_bitrate += config.min_bitrate_bps; + { + rtc::CritScope lock(&crit_sect_); + for (const auto& config : bitrate_observer_configs_) { + if (config.enforce_min_bitrate) { + total_requested_min_bitrate += config.min_bitrate_bps; + } + total_requested_padding_bitrate += config.pad_up_bitrate_bps; } - total_requested_padding_bitrate += config.pad_up_bitrate_bps; } LOG(LS_INFO) << "UpdateAllocationLimits : total_requested_min_bitrate: " @@ -177,26 +177,27 @@ void BitrateAllocator::UpdateAllocationLimits() { } void BitrateAllocator::RemoveObserver(BitrateAllocatorObserver* observer) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); - auto it = FindObserverConfig(observer); - if (it != bitrate_observer_configs_.end()) { - bitrate_observer_configs_.erase(it); + { + rtc::CritScope lock(&crit_sect_); + auto it = FindObserverConfig(observer); + if (it != bitrate_observer_configs_.end()) { + bitrate_observer_configs_.erase(it); + } } - UpdateAllocationLimits(); } int BitrateAllocator::GetStartBitrate(BitrateAllocatorObserver* observer) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); + rtc::CritScope lock(&crit_sect_); const auto& it = FindObserverConfig(observer); if (it == bitrate_observer_configs_.end()) { // This observer hasn't been added yet, just give it its fair share. return last_non_zero_bitrate_bps_ / - static_cast((bitrate_observer_configs_.size() + 1)); + static_cast((bitrate_observer_configs_.size() + 1)); } else if (it->allocated_bitrate_bps == -1) { // This observer hasn't received an allocation yet, so do the same. return last_non_zero_bitrate_bps_ / - static_cast(bitrate_observer_configs_.size()); + static_cast(bitrate_observer_configs_.size()); } else { // This observer already has an allocation. 
return it->allocated_bitrate_bps; @@ -204,8 +205,8 @@ int BitrateAllocator::GetStartBitrate(BitrateAllocatorObserver* observer) { } BitrateAllocator::ObserverConfigs::iterator -BitrateAllocator::FindObserverConfig(const BitrateAllocatorObserver* observer) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); +BitrateAllocator::FindObserverConfig( + const BitrateAllocatorObserver* observer) { for (auto it = bitrate_observer_configs_.begin(); it != bitrate_observer_configs_.end(); ++it) { if (it->observer == observer) @@ -216,7 +217,6 @@ BitrateAllocator::FindObserverConfig(const BitrateAllocatorObserver* observer) { BitrateAllocator::ObserverAllocation BitrateAllocator::AllocateBitrates( uint32_t bitrate) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); if (bitrate_observer_configs_.empty()) return ObserverAllocation(); @@ -245,7 +245,6 @@ BitrateAllocator::ObserverAllocation BitrateAllocator::AllocateBitrates( } BitrateAllocator::ObserverAllocation BitrateAllocator::ZeroRateAllocation() { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); ObserverAllocation allocation; for (const auto& observer_config : bitrate_observer_configs_) allocation[observer_config.observer] = 0; @@ -254,8 +253,8 @@ BitrateAllocator::ObserverAllocation BitrateAllocator::ZeroRateAllocation() { BitrateAllocator::ObserverAllocation BitrateAllocator::LowRateAllocation( uint32_t bitrate) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); ObserverAllocation allocation; + // Start by allocating bitrate to observers enforcing a min bitrate, hence // remaining_bitrate might turn negative. int64_t remaining_bitrate = bitrate; @@ -309,7 +308,7 @@ BitrateAllocator::ObserverAllocation BitrateAllocator::LowRateAllocation( BitrateAllocator::ObserverAllocation BitrateAllocator::NormalRateAllocation( uint32_t bitrate, uint32_t sum_min_bitrates) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); + ObserverAllocation allocation; for (const auto& observer_config : bitrate_observer_configs_) allocation[observer_config.observer] = observer_config.min_bitrate_bps; @@ -322,9 +321,7 @@ BitrateAllocator::ObserverAllocation BitrateAllocator::NormalRateAllocation( } BitrateAllocator::ObserverAllocation BitrateAllocator::MaxRateAllocation( - uint32_t bitrate, - uint32_t sum_max_bitrates) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); + uint32_t bitrate, uint32_t sum_max_bitrates) { ObserverAllocation allocation; for (const auto& observer_config : bitrate_observer_configs_) { @@ -338,12 +335,12 @@ BitrateAllocator::ObserverAllocation BitrateAllocator::MaxRateAllocation( uint32_t BitrateAllocator::LastAllocatedBitrate( const ObserverConfig& observer_config) { + // Return the configured minimum bitrate for newly added observers, to avoid // requiring an extra high bitrate for the observer to get an allocated // bitrate. - return observer_config.allocated_bitrate_bps == -1 - ? observer_config.min_bitrate_bps - : observer_config.allocated_bitrate_bps; + return observer_config.allocated_bitrate_bps == -1 ? 
+ observer_config.min_bitrate_bps : observer_config.allocated_bitrate_bps; } uint32_t BitrateAllocator::MinBitrateWithHysteresis( @@ -369,7 +366,6 @@ void BitrateAllocator::DistributeBitrateEvenly(uint32_t bitrate, bool include_zero_allocations, int max_multiplier, ObserverAllocation* allocation) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); RTC_DCHECK_EQ(allocation->size(), bitrate_observer_configs_.size()); ObserverSortingMap list_max_bitrates; @@ -402,12 +398,10 @@ void BitrateAllocator::DistributeBitrateEvenly(uint32_t bitrate, bool BitrateAllocator::EnoughBitrateForAllObservers(uint32_t bitrate, uint32_t sum_min_bitrates) { - RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_); if (bitrate < sum_min_bitrates) return false; - uint32_t extra_bitrate_per_observer = - (bitrate - sum_min_bitrates) / + uint32_t extra_bitrate_per_observer = (bitrate - sum_min_bitrates) / static_cast(bitrate_observer_configs_.size()); for (const auto& observer_config : bitrate_observer_configs_) { if (observer_config.min_bitrate_bps + extra_bitrate_per_observer < diff --git a/webrtc/call/bitrate_allocator.h b/webrtc/call/bitrate_allocator.h index a5ed26c71b..37e15b4f7b 100644 --- a/webrtc/call/bitrate_allocator.h +++ b/webrtc/call/bitrate_allocator.h @@ -17,7 +17,8 @@ #include #include -#include "webrtc/base/sequenced_task_checker.h" +#include "webrtc/base/criticalsection.h" +#include "webrtc/base/thread_annotations.h" namespace webrtc { @@ -120,24 +121,31 @@ class BitrateAllocator { typedef std::vector ObserverConfigs; ObserverConfigs::iterator FindObserverConfig( - const BitrateAllocatorObserver* observer); + const BitrateAllocatorObserver* observer) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); typedef std::multimap ObserverSortingMap; typedef std::map ObserverAllocation; - ObserverAllocation AllocateBitrates(uint32_t bitrate); + ObserverAllocation AllocateBitrates(uint32_t bitrate) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); - ObserverAllocation ZeroRateAllocation(); - ObserverAllocation LowRateAllocation(uint32_t bitrate); + ObserverAllocation ZeroRateAllocation() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + ObserverAllocation LowRateAllocation(uint32_t bitrate) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); ObserverAllocation NormalRateAllocation(uint32_t bitrate, - uint32_t sum_min_bitrates); + uint32_t sum_min_bitrates) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); ObserverAllocation MaxRateAllocation(uint32_t bitrate, - uint32_t sum_max_bitrates); + uint32_t sum_max_bitrates) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); - uint32_t LastAllocatedBitrate(const ObserverConfig& observer_config); + uint32_t LastAllocatedBitrate(const ObserverConfig& observer_config) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); // The minimum bitrate required by this observer, including enable-hysteresis // if the observer is in a paused state. - uint32_t MinBitrateWithHysteresis(const ObserverConfig& observer_config); + uint32_t MinBitrateWithHysteresis(const ObserverConfig& observer_config) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); // Splits |bitrate| evenly to observers already in |allocation|. // |include_zero_allocations| decides if zero allocations should be part of // the distribution or not. 
The allowed max bitrate is |max_multiplier| x @@ -145,22 +153,24 @@ class BitrateAllocator { void DistributeBitrateEvenly(uint32_t bitrate, bool include_zero_allocations, int max_multiplier, - ObserverAllocation* allocation); - bool EnoughBitrateForAllObservers(uint32_t bitrate, - uint32_t sum_min_bitrates); + ObserverAllocation* allocation) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + bool EnoughBitrateForAllObservers(uint32_t bitrate, uint32_t sum_min_bitrates) + EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); - rtc::SequencedTaskChecker sequenced_checker_; - LimitObserver* const limit_observer_ GUARDED_BY(&sequenced_checker_); + LimitObserver* const limit_observer_; + + rtc::CriticalSection crit_sect_; // Stored in a list to keep track of the insertion order. - ObserverConfigs bitrate_observer_configs_ GUARDED_BY(&sequenced_checker_); - uint32_t last_bitrate_bps_ GUARDED_BY(&sequenced_checker_); - uint32_t last_non_zero_bitrate_bps_ GUARDED_BY(&sequenced_checker_); - uint8_t last_fraction_loss_ GUARDED_BY(&sequenced_checker_); - int64_t last_rtt_ GUARDED_BY(&sequenced_checker_); + ObserverConfigs bitrate_observer_configs_ GUARDED_BY(crit_sect_); + uint32_t last_bitrate_bps_ GUARDED_BY(crit_sect_); + uint32_t last_non_zero_bitrate_bps_ GUARDED_BY(crit_sect_); + uint8_t last_fraction_loss_ GUARDED_BY(crit_sect_); + int64_t last_rtt_ GUARDED_BY(crit_sect_); // Number of mute events based on too low BWE, not network up/down. - int num_pause_events_ GUARDED_BY(&sequenced_checker_); - Clock* const clock_ GUARDED_BY(&sequenced_checker_); - int64_t last_bwe_log_time_ GUARDED_BY(&sequenced_checker_); + int num_pause_events_ GUARDED_BY(crit_sect_); + Clock* const clock_; + int64_t last_bwe_log_time_; }; } // namespace webrtc #endif // WEBRTC_CALL_BITRATE_ALLOCATOR_H_ diff --git a/webrtc/call/bitrate_estimator_tests.cc b/webrtc/call/bitrate_estimator_tests.cc index a7e04aa695..10d435d7d4 100644 --- a/webrtc/call/bitrate_estimator_tests.cc +++ b/webrtc/call/bitrate_estimator_tests.cc @@ -173,8 +173,7 @@ class BitrateEstimatorTest : public test::CallTest { test_->video_send_config_.rtp.ssrcs[0]++; test_->video_send_config_.encoder_settings.encoder = &fake_encoder_; send_stream_ = test_->sender_call_->CreateVideoSendStream( - test_->video_send_config_.Copy(), - test_->video_encoder_config_.Copy()); + test_->video_send_config_, test_->video_encoder_config_); RTC_DCHECK_EQ(1u, test_->video_encoder_config_.streams.size()); frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create( send_stream_->Input(), test_->video_encoder_config_.streams[0].width, diff --git a/webrtc/call/call.cc b/webrtc/call/call.cc index 65b1d56113..f6354ade28 100644 --- a/webrtc/call/call.cc +++ b/webrtc/call/call.cc @@ -9,6 +9,7 @@ */ #include + #include #include #include @@ -21,7 +22,6 @@ #include "webrtc/base/checks.h" #include "webrtc/base/constructormagic.h" #include "webrtc/base/logging.h" -#include "webrtc/base/task_queue.h" #include "webrtc/base/thread_annotations.h" #include "webrtc/base/thread_checker.h" #include "webrtc/base/trace_event.h" @@ -74,8 +74,8 @@ class Call : public webrtc::Call, webrtc::AudioReceiveStream* receive_stream) override; webrtc::VideoSendStream* CreateVideoSendStream( - webrtc::VideoSendStream::Config config, - VideoEncoderConfig encoder_config) override; + const webrtc::VideoSendStream::Config& config, + const VideoEncoderConfig& encoder_config) override; void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override; webrtc::VideoReceiveStream* CreateVideoReceiveStream( @@ -198,11 
+198,6 @@ class Call : public webrtc::Call, const std::unique_ptr congestion_controller_; const std::unique_ptr video_send_delay_stats_; const int64_t start_ms_; - // TODO(perkj): |worker_queue_| is supposed to replace - // |module_process_thread_|. - // |worker_queue| is defined last to ensure all pending tasks are cancelled - // and deleted before any other members. - rtc::TaskQueue worker_queue_; RTC_DISALLOW_COPY_AND_ASSIGN(Call); }; @@ -254,8 +249,7 @@ Call::Call(const Call::Config& config) congestion_controller_( new CongestionController(clock_, this, &remb_, event_log_.get())), video_send_delay_stats_(new SendDelayStats(clock_)), - start_ms_(clock_->TimeInMilliseconds()), - worker_queue_("call_worker_queue") { + start_ms_(clock_->TimeInMilliseconds()) { RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread()); RTC_DCHECK_GE(config.bitrate_config.min_bitrate_bps, 0); RTC_DCHECK_GE(config.bitrate_config.start_bitrate_bps, @@ -285,7 +279,6 @@ Call::Call(const Call::Config& config) Call::~Call() { RTC_DCHECK(!remb_.InUse()); RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread()); - RTC_CHECK(audio_send_ssrcs_.empty()); RTC_CHECK(video_send_ssrcs_.empty()); RTC_CHECK(video_send_streams_.empty()); @@ -304,10 +297,7 @@ Call::~Call() { // Only update histograms after process threads have been shut down, so that // they won't try to concurrently update stats. - { - rtc::CritScope lock(&bitrate_crit_); - UpdateSendHistograms(); - } + UpdateSendHistograms(); UpdateReceiveHistograms(); UpdateHistograms(); @@ -379,7 +369,7 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream"); RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread()); AudioSendStream* send_stream = new AudioSendStream( - config, config_.audio_state, &worker_queue_, congestion_controller_.get(), + config, config_.audio_state, congestion_controller_.get(), bitrate_allocator_.get()); { WriteLockScoped write_lock(*send_crit_); @@ -455,28 +445,22 @@ void Call::DestroyAudioReceiveStream( } webrtc::VideoSendStream* Call::CreateVideoSendStream( - webrtc::VideoSendStream::Config config, - VideoEncoderConfig encoder_config) { + const webrtc::VideoSendStream::Config& config, + const VideoEncoderConfig& encoder_config) { TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream"); RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread()); video_send_delay_stats_->AddSsrcs(config); - event_log_->LogVideoSendStreamConfig(config); - // TODO(mflodman): Base the start bitrate on a current bandwidth estimate, if // the call has already started. - // Copy ssrcs from |config| since |config| is moved. 
- std::vector ssrcs = config.rtp.ssrcs; VideoSendStream* send_stream = new VideoSendStream( - num_cpu_cores_, module_process_thread_.get(), &worker_queue_, - call_stats_.get(), congestion_controller_.get(), bitrate_allocator_.get(), - video_send_delay_stats_.get(), &remb_, event_log_.get(), - std::move(config), std::move(encoder_config), - suspended_video_send_ssrcs_); - + num_cpu_cores_, module_process_thread_.get(), call_stats_.get(), + congestion_controller_.get(), bitrate_allocator_.get(), + video_send_delay_stats_.get(), &remb_, event_log_.get(), config, + encoder_config, suspended_video_send_ssrcs_); { WriteLockScoped write_lock(*send_crit_); - for (uint32_t ssrc : ssrcs) { + for (uint32_t ssrc : config.rtp.ssrcs) { RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end()); video_send_ssrcs_[ssrc] = send_stream; } @@ -484,7 +468,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( } send_stream->SignalNetworkState(video_network_state_); UpdateAggregateNetworkState(); - + event_log_->LogVideoSendStreamConfig(config); return send_stream; } @@ -511,11 +495,11 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { } RTC_CHECK(send_stream_impl != nullptr); - VideoSendStream::RtpStateMap rtp_state = - send_stream_impl->StopPermanentlyAndGetRtpStates(); + VideoSendStream::RtpStateMap rtp_state = send_stream_impl->GetRtpStates(); for (VideoSendStream::RtpStateMap::iterator it = rtp_state.begin(); - it != rtp_state.end(); ++it) { + it != rtp_state.end(); + ++it) { suspended_video_send_ssrcs_[it->first] = it->second; } @@ -745,15 +729,6 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) { void Call::OnNetworkChanged(uint32_t target_bitrate_bps, uint8_t fraction_loss, int64_t rtt_ms) { - // TODO(perkj): Consider making sure CongestionController operates on - // |worker_queue_|. - if (!worker_queue_.IsCurrent()) { - worker_queue_.PostTask([this, target_bitrate_bps, fraction_loss, rtt_ms] { - OnNetworkChanged(target_bitrate_bps, fraction_loss, rtt_ms); - }); - return; - } - RTC_DCHECK_RUN_ON(&worker_queue_); bitrate_allocator_->OnNetworkChanged(target_bitrate_bps, fraction_loss, rtt_ms); diff --git a/webrtc/call/call_perf_tests.cc b/webrtc/call/call_perf_tests.cc index 81fbdb7d49..12cafd9582 100644 --- a/webrtc/call/call_perf_tests.cc +++ b/webrtc/call/call_perf_tests.cc @@ -672,7 +672,7 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) { encoder_config->streams[0].target_bitrate_bps = encoder_config->streams[0].max_bitrate_bps = 2000000; - encoder_config_ = encoder_config->Copy(); + encoder_config_ = *encoder_config; } void OnVideoStreamsCreated( @@ -686,7 +686,7 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) { << "Timed out before receiving an initial high bitrate."; encoder_config_.streams[0].width *= 2; encoder_config_.streams[0].height *= 2; - send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + send_stream_->ReconfigureVideoEncoder(encoder_config_); EXPECT_TRUE(Wait()) << "Timed out while waiting for a couple of high bitrate estimates " "after reconfiguring the send stream."; diff --git a/webrtc/config.h b/webrtc/config.h index d932eda1df..1550a9f8a1 100644 --- a/webrtc/config.h +++ b/webrtc/config.h @@ -125,20 +125,12 @@ struct VideoStream { }; struct VideoEncoderConfig { - public: enum class ContentType { kRealtimeVideo, kScreen, }; - VideoEncoderConfig& operator=(VideoEncoderConfig&&) = default; - VideoEncoderConfig& operator=(const VideoEncoderConfig&) = delete; - - // Mostly used by tests. 
Avoid creating copies if you can. - VideoEncoderConfig Copy() const { return VideoEncoderConfig(*this); } - VideoEncoderConfig(); - VideoEncoderConfig(VideoEncoderConfig&&) = default; ~VideoEncoderConfig(); std::string ToString() const; @@ -153,11 +145,6 @@ struct VideoEncoderConfig { // unless the estimated bandwidth indicates that the link can handle it. int min_transmit_bitrate_bps; bool expect_encode_from_texture; - - private: - // Access to the copy constructor is private to force use of the Copy() - // method for those exceptional cases where we do use it. - VideoEncoderConfig(const VideoEncoderConfig&) = default; }; struct VideoDecoderH264Settings { diff --git a/webrtc/media/engine/fakewebrtccall.cc b/webrtc/media/engine/fakewebrtccall.cc index 193831598c..fdf7cf36fc 100644 --- a/webrtc/media/engine/fakewebrtccall.cc +++ b/webrtc/media/engine/fakewebrtccall.cc @@ -98,22 +98,21 @@ void FakeAudioReceiveStream::SetGain(float gain) { } FakeVideoSendStream::FakeVideoSendStream( - webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config) + const webrtc::VideoSendStream::Config& config, + const webrtc::VideoEncoderConfig& encoder_config) : sending_(false), - config_(std::move(config)), + config_(config), codec_settings_set_(false), num_swapped_frames_(0) { RTC_DCHECK(config.encoder_settings.encoder != NULL); - ReconfigureVideoEncoder(std::move(encoder_config)); + ReconfigureVideoEncoder(encoder_config); } -const webrtc::VideoSendStream::Config& FakeVideoSendStream::GetConfig() const { +webrtc::VideoSendStream::Config FakeVideoSendStream::GetConfig() const { return config_; } -const webrtc::VideoEncoderConfig& FakeVideoSendStream::GetEncoderConfig() - const { +webrtc::VideoEncoderConfig FakeVideoSendStream::GetEncoderConfig() const { return encoder_config_; } @@ -178,7 +177,8 @@ webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() { } void FakeVideoSendStream::ReconfigureVideoEncoder( - webrtc::VideoEncoderConfig config) { + const webrtc::VideoEncoderConfig& config) { + encoder_config_ = config; if (config.encoder_specific_settings != NULL) { if (config_.encoder_settings.payload_name == "VP8") { vpx_settings_.vp8 = *reinterpret_cast( @@ -199,7 +199,6 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( << config_.encoder_settings.payload_name; } } - encoder_config_ = std::move(config); codec_settings_set_ = config.encoder_specific_settings != NULL; ++num_encoder_reconfigurations_; } @@ -360,10 +359,10 @@ void FakeCall::DestroyAudioReceiveStream( } webrtc::VideoSendStream* FakeCall::CreateVideoSendStream( - webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config) { + const webrtc::VideoSendStream::Config& config, + const webrtc::VideoEncoderConfig& encoder_config) { FakeVideoSendStream* fake_stream = - new FakeVideoSendStream(std::move(config), std::move(encoder_config)); + new FakeVideoSendStream(config, encoder_config); video_send_streams_.push_back(fake_stream); ++num_created_send_streams_; return fake_stream; diff --git a/webrtc/media/engine/fakewebrtccall.h b/webrtc/media/engine/fakewebrtccall.h index 6c687ef6d2..8581d829d6 100644 --- a/webrtc/media/engine/fakewebrtccall.h +++ b/webrtc/media/engine/fakewebrtccall.h @@ -102,10 +102,10 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream { class FakeVideoSendStream final : public webrtc::VideoSendStream, public webrtc::VideoCaptureInput { public: - FakeVideoSendStream(webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config); 
- const webrtc::VideoSendStream::Config& GetConfig() const; - const webrtc::VideoEncoderConfig& GetEncoderConfig() const; + FakeVideoSendStream(const webrtc::VideoSendStream::Config& config, + const webrtc::VideoEncoderConfig& encoder_config); + webrtc::VideoSendStream::Config GetConfig() const; + webrtc::VideoEncoderConfig GetEncoderConfig() const; std::vector GetVideoStreams(); bool IsSending() const; @@ -128,7 +128,8 @@ class FakeVideoSendStream final : public webrtc::VideoSendStream, void Start() override; void Stop() override; webrtc::VideoSendStream::Stats GetStats() override; - void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override; + void ReconfigureVideoEncoder( + const webrtc::VideoEncoderConfig& config) override; webrtc::VideoCaptureInput* Input() override; bool sending_; @@ -207,8 +208,8 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { webrtc::AudioReceiveStream* receive_stream) override; webrtc::VideoSendStream* CreateVideoSendStream( - webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config) override; + const webrtc::VideoSendStream::Config& config, + const webrtc::VideoEncoderConfig& encoder_config) override; void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override; webrtc::VideoReceiveStream* CreateVideoReceiveStream( diff --git a/webrtc/media/engine/webrtcvideoengine2.cc b/webrtc/media/engine/webrtcvideoengine2.cc index 69fedf2aee..4a72ef6139 100644 --- a/webrtc/media/engine/webrtcvideoengine2.cc +++ b/webrtc/media/engine/webrtcvideoengine2.cc @@ -1143,8 +1143,8 @@ bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp) { webrtc::VideoSendStream::Config config(this); config.suspend_below_min_bitrate = video_config_.suspend_below_min_bitrate; WebRtcVideoSendStream* stream = new WebRtcVideoSendStream( - call_, sp, std::move(config), default_send_options_, - external_encoder_factory_, video_config_.enable_cpu_overuse_detection, + call_, sp, config, default_send_options_, external_encoder_factory_, + video_config_.enable_cpu_overuse_detection, bitrate_config_.max_bitrate_bps, send_codec_, send_rtp_extensions_, send_params_); @@ -1533,11 +1533,11 @@ bool WebRtcVideoChannel2::SendRtcp(const uint8_t* data, size_t len) { WebRtcVideoChannel2::WebRtcVideoSendStream::VideoSendStreamParameters:: VideoSendStreamParameters( - webrtc::VideoSendStream::Config config, + const webrtc::VideoSendStream::Config& config, const VideoOptions& options, int max_bitrate_bps, const rtc::Optional& codec_settings) - : config(std::move(config)), + : config(config), options(options), max_bitrate_bps(max_bitrate_bps), codec_settings(codec_settings) {} @@ -1560,7 +1560,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::AllocatedEncoder::AllocatedEncoder( WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream( webrtc::Call* call, const StreamParams& sp, - webrtc::VideoSendStream::Config config, + const webrtc::VideoSendStream::Config& config, const VideoOptions& options, WebRtcVideoEncoderFactory* external_encoder_factory, bool enable_cpu_overuse_detection, @@ -1579,7 +1579,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream( source_(nullptr), external_encoder_factory_(external_encoder_factory), stream_(nullptr), - parameters_(std::move(config), options, max_bitrate_bps, codec_settings), + parameters_(config, options, max_bitrate_bps, codec_settings), rtp_parameters_(CreateRtpParametersWithOneEncoding()), pending_encoder_reconfiguration_(false), allocated_encoder_(nullptr, 
webrtc::kVideoCodecUnknown, false), @@ -2035,11 +2035,11 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::ReconfigureEncoder() { encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings( codec_settings.codec); - stream_->ReconfigureVideoEncoder(encoder_config.Copy()); + stream_->ReconfigureVideoEncoder(encoder_config); encoder_config.encoder_specific_settings = NULL; - parameters_.encoder_config = std::move(encoder_config); + parameters_.encoder_config = encoder_config; } void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSend(bool send) { @@ -2232,14 +2232,13 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() { parameters_.encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings(parameters_.codec_settings->codec); - webrtc::VideoSendStream::Config config = parameters_.config.Copy(); + webrtc::VideoSendStream::Config config = parameters_.config; if (!config.rtp.rtx.ssrcs.empty() && config.rtp.rtx.payload_type == -1) { LOG(LS_WARNING) << "RTX SSRCs configured but there's no configured RTX " "payload type the set codec. Ignoring RTX."; config.rtp.rtx.ssrcs.clear(); } - stream_ = call_->CreateVideoSendStream(std::move(config), - parameters_.encoder_config.Copy()); + stream_ = call_->CreateVideoSendStream(config, parameters_.encoder_config); parameters_.encoder_config.encoder_specific_settings = NULL; pending_encoder_reconfiguration_ = false; diff --git a/webrtc/media/engine/webrtcvideoengine2.h b/webrtc/media/engine/webrtcvideoengine2.h index 794b313da5..9413047cb3 100644 --- a/webrtc/media/engine/webrtcvideoengine2.h +++ b/webrtc/media/engine/webrtcvideoengine2.h @@ -248,7 +248,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport { WebRtcVideoSendStream( webrtc::Call* call, const StreamParams& sp, - webrtc::VideoSendStream::Config config, + const webrtc::VideoSendStream::Config& config, const VideoOptions& options, WebRtcVideoEncoderFactory* external_encoder_factory, bool enable_cpu_overuse_detection, @@ -284,7 +284,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport { // similar parameters depending on which options changed etc. struct VideoSendStreamParameters { VideoSendStreamParameters( - webrtc::VideoSendStream::Config config, + const webrtc::VideoSendStream::Config& config, const VideoOptions& options, int max_bitrate_bps, const rtc::Optional& codec_settings); diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc index 3e041f7a41..9a37e9d7cf 100644 --- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc +++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc @@ -1153,8 +1153,7 @@ class WebRtcVideoChannel2Test : public WebRtcVideoEngine2Test { EXPECT_TRUE(streams.size() > 0); FakeVideoSendStream* stream = streams[streams.size() - 1]; - webrtc::VideoEncoderConfig encoder_config = - stream->GetEncoderConfig().Copy(); + webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig(); EXPECT_EQ(1, encoder_config.streams.size()); return encoder_config.streams[0].max_bitrate_bps; } @@ -1646,8 +1645,7 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) { EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames()); // Verify non-screencast settings. 
- webrtc::VideoEncoderConfig encoder_config = - send_stream->GetEncoderConfig().Copy(); + webrtc::VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig(); EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo, encoder_config.content_type); EXPECT_EQ(codec.width, encoder_config.streams.front().width); @@ -1668,7 +1666,7 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) { EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames()); // Verify screencast settings. - encoder_config = send_stream->GetEncoderConfig().Copy(); + encoder_config = send_stream->GetEncoderConfig(); EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen, encoder_config.content_type); EXPECT_EQ(kScreenshareMinBitrateKbps * 1000, @@ -1695,7 +1693,7 @@ TEST_F(WebRtcVideoChannel2Test, NoRecreateStreamForScreencast) { ASSERT_EQ(1, fake_call_->GetNumCreatedSendStreams()); FakeVideoSendStream* stream = fake_call_->GetVideoSendStreams().front(); - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig(); EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo, encoder_config.content_type); @@ -1712,7 +1710,7 @@ TEST_F(WebRtcVideoChannel2Test, NoRecreateStreamForScreencast) { ASSERT_EQ(stream, fake_call_->GetVideoSendStreams().front()); EXPECT_EQ(2, stream->GetNumberOfSwappedFrames()); - encoder_config = stream->GetEncoderConfig().Copy(); + encoder_config = stream->GetEncoderConfig(); EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen, encoder_config.content_type); @@ -1725,7 +1723,7 @@ TEST_F(WebRtcVideoChannel2Test, NoRecreateStreamForScreencast) { ASSERT_EQ(stream, fake_call_->GetVideoSendStreams().front()); EXPECT_EQ(3, stream->GetNumberOfSwappedFrames()); - encoder_config = stream->GetEncoderConfig().Copy(); + encoder_config = stream->GetEncoderConfig(); EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo, encoder_config.content_type); @@ -1754,11 +1752,10 @@ TEST_F(WebRtcVideoChannel2Test, ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size()); FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); - webrtc::VideoEncoderConfig encoder_config = - send_stream->GetEncoderConfig().Copy(); + webrtc::VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig(); // Verify screencast settings. - encoder_config = send_stream->GetEncoderConfig().Copy(); + encoder_config = send_stream->GetEncoderConfig(); EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen, encoder_config.content_type); ASSERT_EQ(1u, encoder_config.streams.size()); @@ -2313,7 +2310,7 @@ TEST_F(WebRtcVideoChannel2Test, SetDefaultSendCodecs) { const std::vector rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); FakeVideoSendStream* stream = AddSendStream( cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + webrtc::VideoSendStream::Config config = stream->GetConfig(); // Make sure NACK and FEC are enabled on the correct payload types. 
EXPECT_EQ(1000, config.rtp.nack.rtp_history_ms); @@ -2332,7 +2329,7 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithoutFec) { ASSERT_TRUE(channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + webrtc::VideoSendStream::Config config = stream->GetConfig(); EXPECT_EQ(-1, config.rtp.fec.ulpfec_payload_type); EXPECT_EQ(-1, config.rtp.fec.red_payload_type); @@ -2371,7 +2368,7 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithoutFecDisablesFec) { ASSERT_TRUE(channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + webrtc::VideoSendStream::Config config = stream->GetConfig(); EXPECT_EQ(kUlpfecCodec.id, config.rtp.fec.ulpfec_payload_type); @@ -2379,7 +2376,7 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithoutFecDisablesFec) { ASSERT_TRUE(channel_->SetSendParameters(parameters)); stream = fake_call_->GetVideoSendStreams()[0]; ASSERT_TRUE(stream != NULL); - config = stream->GetConfig().Copy(); + config = stream->GetConfig(); EXPECT_EQ(-1, config.rtp.fec.ulpfec_payload_type) << "SetSendCodec without FEC should disable current FEC."; } diff --git a/webrtc/test/call_test.cc b/webrtc/test/call_test.cc index a766f79f56..590479f4d4 100644 --- a/webrtc/test/call_test.cc +++ b/webrtc/test/call_test.cc @@ -267,7 +267,7 @@ void CallTest::CreateVideoStreams() { RTC_DCHECK(audio_receive_streams_.empty()); video_send_stream_ = sender_call_->CreateVideoSendStream( - video_send_config_.Copy(), video_encoder_config_.Copy()); + video_send_config_, video_encoder_config_); for (size_t i = 0; i < video_receive_configs_.size(); ++i) { video_receive_streams_.push_back(receiver_call_->CreateVideoReceiveStream( video_receive_configs_[i].Copy())); diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc index c1686b34ff..a3fb7686f1 100644 --- a/webrtc/test/fake_encoder.cc +++ b/webrtc/test/fake_encoder.cc @@ -12,7 +12,6 @@ #include "testing/gtest/include/gtest/gtest.h" -#include "webrtc/base/checks.h" #include "webrtc/modules/video_coding/include/video_codec_interface.h" #include "webrtc/system_wrappers/include/sleep.h" @@ -34,7 +33,7 @@ FakeEncoder::FakeEncoder(Clock* clock) FakeEncoder::~FakeEncoder() {} void FakeEncoder::SetMaxBitrate(int max_kbps) { - RTC_DCHECK_GE(max_kbps, -1); // max_kbps == -1 disables it. + assert(max_kbps >= -1); // max_kbps == -1 disables it. 
max_target_bitrate_kbps_ = max_kbps; } @@ -49,7 +48,7 @@ int32_t FakeEncoder::InitEncode(const VideoCodec* config, int32_t FakeEncoder::Encode(const VideoFrame& input_image, const CodecSpecificInfo* codec_specific_info, const std::vector* frame_types) { - RTC_DCHECK_GT(config_.maxFramerate, 0); + assert(config_.maxFramerate > 0); int64_t time_since_last_encode_ms = 1000 / config_.maxFramerate; int64_t time_now_ms = clock_->TimeInMilliseconds(); const bool first_encode = last_encode_time_ms_ == 0; @@ -76,7 +75,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, bits_available = max_bits; last_encode_time_ms_ = time_now_ms; - RTC_DCHECK_GT(config_.numberOfSimulcastStreams, 0); + assert(config_.numberOfSimulcastStreams > 0); for (unsigned char i = 0; i < config_.numberOfSimulcastStreams; ++i) { CodecSpecificInfo specifics; memset(&specifics, 0, sizeof(specifics)); @@ -98,9 +97,6 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, if (stream_bytes > sizeof(encoded_buffer_)) stream_bytes = sizeof(encoded_buffer_); - // Always encode something on the first frame. - if (min_stream_bits > bits_available && i > 0) - continue; EncodedImage encoded( encoded_buffer_, stream_bytes, sizeof(encoded_buffer_)); encoded._timeStamp = input_image.timestamp(); @@ -108,7 +104,10 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, encoded._frameType = (*frame_types)[i]; encoded._encodedWidth = config_.simulcastStream[i].width; encoded._encodedHeight = config_.simulcastStream[i].height; - RTC_DCHECK(callback_ != NULL); + // Always encode something on the first frame. + if (min_stream_bits > bits_available && i > 0) + continue; + assert(callback_ != NULL); if (callback_->Encoded(encoded, &specifics, NULL) != 0) return -1; bits_available -= std::min(encoded._length * 8, bits_available); diff --git a/webrtc/video/BUILD.gn b/webrtc/video/BUILD.gn index 76c2ea64e9..84cab2680a 100644 --- a/webrtc/video/BUILD.gn +++ b/webrtc/video/BUILD.gn @@ -35,6 +35,8 @@ source_set("video") { "stats_counter.h", "stream_synchronization.cc", "stream_synchronization.h", + "video_capture_input.cc", + "video_capture_input.h", "video_decoder.cc", "video_encoder.cc", "video_receive_stream.cc", @@ -62,7 +64,6 @@ source_set("video") { "..:rtc_event_log", "..:webrtc_common", "../base:rtc_base_approved", - "../base:rtc_task_queue", "../common_video", "../modules/bitrate_controller", "../modules/congestion_controller", @@ -93,10 +94,10 @@ if (rtc_include_tests) { "send_statistics_proxy_unittest.cc", "stats_counter_unittest.cc", "stream_synchronization_unittest.cc", + "video_capture_input_unittest.cc", "video_decoder_unittest.cc", "video_encoder_unittest.cc", "video_send_stream_tests.cc", - "vie_encoder_unittest.cc", "vie_remb_unittest.cc", ] configs += [ "..:common_config" ] diff --git a/webrtc/video/encoder_state_feedback_unittest.cc b/webrtc/video/encoder_state_feedback_unittest.cc index cf92813c48..5351e1516b 100644 --- a/webrtc/video/encoder_state_feedback_unittest.cc +++ b/webrtc/video/encoder_state_feedback_unittest.cc @@ -21,14 +21,9 @@ namespace webrtc { class MockVieEncoder : public ViEEncoder { public: - MockVieEncoder() - : ViEEncoder(1, - nullptr, - VideoSendStream::Config::EncoderSettings("fake", 0, nullptr), - nullptr, - nullptr, - nullptr) {} - ~MockVieEncoder() { Stop(); } + explicit MockVieEncoder(ProcessThread* process_thread) + : ViEEncoder(1, process_thread, nullptr, nullptr, nullptr) {} + ~MockVieEncoder() {} MOCK_METHOD1(OnReceivedIntraFrameRequest, void(size_t)); MOCK_METHOD1(OnReceivedSLI, 
void(uint8_t picture_id)); @@ -38,7 +33,8 @@ class MockVieEncoder : public ViEEncoder { class VieKeyRequestTest : public ::testing::Test { public: VieKeyRequestTest() - : simulated_clock_(123456789), + : encoder_(&process_thread_), + simulated_clock_(123456789), encoder_state_feedback_( &simulated_clock_, std::vector(1, VieKeyRequestTest::kSsrc), @@ -46,6 +42,7 @@ class VieKeyRequestTest : public ::testing::Test { protected: const uint32_t kSsrc = 1234; + NiceMock process_thread_; MockVieEncoder encoder_; SimulatedClock simulated_clock_; EncoderStateFeedback encoder_state_feedback_; diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc index 93b98cadef..528338defe 100644 --- a/webrtc/video/end_to_end_tests.cc +++ b/webrtc/video/end_to_end_tests.cc @@ -1281,8 +1281,8 @@ class MultiStreamTest { UpdateSendConfig(i, &send_config, &encoder_config, &frame_generators[i]); - send_streams[i] = sender_call->CreateVideoSendStream( - send_config.Copy(), encoder_config.Copy()); + send_streams[i] = + sender_call->CreateVideoSendStream(send_config, encoder_config); send_streams[i]->Start(); VideoReceiveStream::Config receive_config(receiver_transport.get()); @@ -2486,7 +2486,7 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs, } } - video_encoder_config_all_streams_ = encoder_config->Copy(); + video_encoder_config_all_streams_ = *encoder_config; if (send_single_ssrc_first_) encoder_config->streams.resize(1); } @@ -2505,7 +2505,7 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs, if (send_single_ssrc_first_) { // Set full simulcast and continue with the rest of the SSRCs. send_stream_->ReconfigureVideoEncoder( - std::move(video_encoder_config_all_streams_)); + video_encoder_config_all_streams_); EXPECT_TRUE(Wait()) << "Timed out while waiting on additional SSRCs."; } } @@ -3200,7 +3200,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx, // Use the same total bitrates when sending a single stream to avoid lowering // the bitrate estimate and requiring a subsequent rampup. - VideoEncoderConfig one_stream = video_encoder_config_.Copy(); + VideoEncoderConfig one_stream = video_encoder_config_; one_stream.streams.resize(1); for (size_t i = 1; i < video_encoder_config_.streams.size(); ++i) { one_stream.streams.front().min_bitrate_bps += @@ -3227,8 +3227,8 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx, sender_call_->DestroyVideoSendStream(video_send_stream_); // Re-create VideoSendStream with only one stream. - video_send_stream_ = sender_call_->CreateVideoSendStream( - video_send_config_.Copy(), one_stream.Copy()); + video_send_stream_ = + sender_call_->CreateVideoSendStream(video_send_config_, one_stream); video_send_stream_->Start(); if (provoke_rtcpsr_before_rtp) { // Rapid Resync Request forces sending RTCP Sender Report back. @@ -3246,18 +3246,18 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx, EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet."; // Reconfigure back to use all streams. - video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy()); + video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_); observer.ResetExpectedSsrcs(kNumSsrcs); EXPECT_TRUE(observer.Wait()) << "Timed out waiting for all SSRCs to send packets."; // Reconfigure down to one stream. 
- video_send_stream_->ReconfigureVideoEncoder(one_stream.Copy()); + video_send_stream_->ReconfigureVideoEncoder(one_stream); observer.ResetExpectedSsrcs(1); EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet."; // Reconfigure back to use all streams. - video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy()); + video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_); observer.ResetExpectedSsrcs(kNumSsrcs); EXPECT_TRUE(observer.Wait()) << "Timed out waiting for all SSRCs to send packets."; diff --git a/webrtc/video/overuse_frame_detector.cc b/webrtc/video/overuse_frame_detector.cc index cc4c000a73..8498008f97 100644 --- a/webrtc/video/overuse_frame_detector.cc +++ b/webrtc/video/overuse_frame_detector.cc @@ -193,6 +193,7 @@ OveruseFrameDetector::OveruseFrameDetector( in_quick_rampup_(false), current_rampup_delay_ms_(kStandardRampUpDelayMs), usage_(new SendProcessingUsage(options)) { + RTC_DCHECK(metrics_observer); processing_thread_.DetachFromThread(); } diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc index d7dd256d89..9325b2f7dd 100644 --- a/webrtc/video/send_statistics_proxy.cc +++ b/webrtc/video/send_statistics_proxy.cc @@ -75,8 +75,7 @@ SendStatisticsProxy::SendStatisticsProxy( const VideoSendStream::Config& config, VideoEncoderConfig::ContentType content_type) : clock_(clock), - payload_name_(config.encoder_settings.payload_name), - rtp_config_(config.rtp), + config_(config), content_type_(content_type), start_ms_(clock->TimeInMilliseconds()), last_sent_frame_timestamp_(0), @@ -87,14 +86,14 @@ SendStatisticsProxy::SendStatisticsProxy( SendStatisticsProxy::~SendStatisticsProxy() { rtc::CritScope lock(&crit_); - uma_container_->UpdateHistograms(rtp_config_, stats_); + uma_container_->UpdateHistograms(config_, stats_); int64_t elapsed_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; RTC_LOGGED_HISTOGRAM_COUNTS_100000("WebRTC.Video.SendStreamLifetimeInSeconds", elapsed_sec); if (elapsed_sec >= metrics::kMinRunTimeInSeconds) - UpdateCodecTypeHistogram(payload_name_); + UpdateCodecTypeHistogram(config_.encoder_settings.payload_name); } SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer( @@ -113,11 +112,12 @@ SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer( SendStatisticsProxy::UmaSamplesContainer::~UmaSamplesContainer() {} -void AccumulateRtxStats(const VideoSendStream::Stats& stats, - const std::vector& rtx_ssrcs, +void AccumulateRtpStats(const VideoSendStream::Stats& stats, + const VideoSendStream::Config& config, StreamDataCounters* total_rtp_stats, StreamDataCounters* rtx_stats) { for (auto it : stats.substreams) { + const std::vector rtx_ssrcs = config.rtp.rtx.ssrcs; if (std::find(rtx_ssrcs.begin(), rtx_ssrcs.end(), it.first) != rtx_ssrcs.end()) { rtx_stats->Add(it.second.rtp_stats); @@ -128,7 +128,7 @@ void AccumulateRtxStats(const VideoSendStream::Stats& stats, } void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( - const VideoSendStream::Config::Rtp& rtp_config, + const VideoSendStream::Config& config, const VideoSendStream::Stats& current_stats) { RTC_DCHECK(uma_prefix_ == kRealtimePrefix || uma_prefix_ == kScreenPrefix); const int kIndex = uma_prefix_ == kScreenPrefix ? 1 : 0; @@ -262,7 +262,7 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( // UmaSamplesContainer, we save the initial state of the counters, so that // we can calculate the delta here and aggregate over all ssrcs. 
RtcpPacketTypeCounter counters; - for (uint32_t ssrc : rtp_config.ssrcs) { + for (uint32_t ssrc : config.rtp.ssrcs) { auto kv = current_stats.substreams.find(ssrc); if (kv == current_stats.substreams.end()) continue; @@ -298,11 +298,10 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { StreamDataCounters rtp; StreamDataCounters rtx; - AccumulateRtxStats(current_stats, rtp_config.rtx.ssrcs, &rtp, &rtx); + AccumulateRtpStats(current_stats, config, &rtp, &rtx); StreamDataCounters start_rtp; StreamDataCounters start_rtx; - AccumulateRtxStats(start_stats_, rtp_config.rtx.ssrcs, &start_rtp, - &start_rtx); + AccumulateRtpStats(start_stats_, config, &start_rtp, &start_rtx); rtp.Subtract(start_rtp); rtx.Subtract(start_rtx); StreamDataCounters rtp_rtx = rtp; @@ -323,13 +322,13 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( kIndex, uma_prefix_ + "RetransmittedBitrateSentInKbps", static_cast(rtp_rtx.retransmitted.TotalBytes() * 8 / elapsed_sec / 1000)); - if (!rtp_config.rtx.ssrcs.empty()) { + if (!config.rtp.rtx.ssrcs.empty()) { RTC_LOGGED_HISTOGRAMS_COUNTS_10000( kIndex, uma_prefix_ + "RtxBitrateSentInKbps", static_cast(rtx.transmitted.TotalBytes() * 8 / elapsed_sec / 1000)); } - if (rtp_config.fec.red_payload_type != -1) { + if (config.rtp.fec.red_payload_type != -1) { RTC_LOGGED_HISTOGRAMS_COUNTS_10000( kIndex, uma_prefix_ + "FecBitrateSentInKbps", static_cast(rtp_rtx.fec.TotalBytes() * 8 / elapsed_sec / @@ -343,7 +342,7 @@ void SendStatisticsProxy::SetContentType( VideoEncoderConfig::ContentType content_type) { rtc::CritScope lock(&crit_); if (content_type_ != content_type) { - uma_container_->UpdateHistograms(rtp_config_, stats_); + uma_container_->UpdateHistograms(config_, stats_); uma_container_.reset( new UmaSamplesContainer(GetUmaPrefix(content_type), stats_, clock_)); content_type_ = content_type; @@ -404,10 +403,10 @@ VideoSendStream::StreamStats* SendStatisticsProxy::GetStatsEntry( return &it->second; bool is_rtx = false; - if (std::find(rtp_config_.ssrcs.begin(), rtp_config_.ssrcs.end(), ssrc) == - rtp_config_.ssrcs.end()) { - if (std::find(rtp_config_.rtx.ssrcs.begin(), rtp_config_.rtx.ssrcs.end(), - ssrc) == rtp_config_.rtx.ssrcs.end()) { + if (std::find(config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end(), ssrc) == + config_.rtp.ssrcs.end()) { + if (std::find(config_.rtp.rtx.ssrcs.begin(), config_.rtp.rtx.ssrcs.end(), + ssrc) == config_.rtp.rtx.ssrcs.end()) { return nullptr; } is_rtx = true; @@ -450,12 +449,12 @@ void SendStatisticsProxy::OnSendEncodedImage( } } - if (simulcast_idx >= rtp_config_.ssrcs.size()) { + if (simulcast_idx >= config_.rtp.ssrcs.size()) { LOG(LS_ERROR) << "Encoded image outside simulcast range (" << simulcast_idx - << " >= " << rtp_config_.ssrcs.size() << ")."; + << " >= " << config_.rtp.ssrcs.size() << ")."; return; } - uint32_t ssrc = rtp_config_.ssrcs[simulcast_idx]; + uint32_t ssrc = config_.rtp.ssrcs[simulcast_idx]; rtc::CritScope lock(&crit_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); @@ -493,7 +492,7 @@ void SendStatisticsProxy::OnSendEncodedImage( if (encoded_image.qp_ != -1 && codec_info) { if (codec_info->codecType == kVideoCodecVP8) { - int spatial_idx = (rtp_config_.ssrcs.size() == 1) + int spatial_idx = (config_.rtp.ssrcs.size() == 1) ? 
-1 : static_cast(simulcast_idx); uma_container_->qp_counters_[spatial_idx].vp8.Add(encoded_image.qp_); diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h index 3d2f8ef61e..b47691ab39 100644 --- a/webrtc/video/send_statistics_proxy.h +++ b/webrtc/video/send_statistics_proxy.h @@ -136,8 +136,7 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver, EXCLUSIVE_LOCKS_REQUIRED(crit_); Clock* const clock_; - const std::string payload_name_; - const VideoSendStream::Config::Rtp rtp_config_; + const VideoSendStream::Config config_; rtc::CriticalSection crit_; VideoEncoderConfig::ContentType content_type_ GUARDED_BY(crit_); const int64_t start_ms_; @@ -155,7 +154,7 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver, Clock* clock); ~UmaSamplesContainer(); - void UpdateHistograms(const VideoSendStream::Config::Rtp& rtp_config, + void UpdateHistograms(const VideoSendStream::Config& config, const VideoSendStream::Stats& current_stats); const std::string uma_prefix_; diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc new file mode 100644 index 0000000000..8f574e2115 --- /dev/null +++ b/webrtc/video/video_capture_input.cc @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/video/video_capture_input.h" + +#include "webrtc/base/checks.h" +#include "webrtc/base/logging.h" +#include "webrtc/base/trace_event.h" +#include "webrtc/modules/include/module_common_types.h" +#include "webrtc/modules/video_capture/video_capture_factory.h" +#include "webrtc/modules/video_processing/include/video_processing.h" +#include "webrtc/video/overuse_frame_detector.h" +#include "webrtc/video/send_statistics_proxy.h" +#include "webrtc/video/vie_encoder.h" + +namespace webrtc { + +namespace internal { +VideoCaptureInput::VideoCaptureInput( + rtc::Event* capture_event, + rtc::VideoSinkInterface* local_renderer, + SendStatisticsProxy* stats_proxy, + OveruseFrameDetector* overuse_detector) + : local_renderer_(local_renderer), + stats_proxy_(stats_proxy), + capture_event_(capture_event), + // TODO(danilchap): Pass clock from outside to ensure it is same clock + // rtcp module use to calculate offset since last frame captured + // to estimate rtp timestamp for SenderReport. + clock_(Clock::GetRealTimeClock()), + last_captured_timestamp_(0), + delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() - + clock_->TimeInMilliseconds()), + overuse_detector_(overuse_detector) {} + +VideoCaptureInput::~VideoCaptureInput() { +} + +void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) { + // TODO(pbos): Remove local rendering, it should be handled by the client code + // if required. + if (local_renderer_) + local_renderer_->OnFrame(video_frame); + + stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height()); + + VideoFrame incoming_frame = video_frame; + + // Local time in webrtc time base. + int64_t current_time = clock_->TimeInMilliseconds(); + incoming_frame.set_render_time_ms(current_time); + + // Capture time may come from clock with an offset and drift from clock_. 
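The code that follows derives the capture NTP time from, in order of preference, the frame's own NTP timestamp, its render time plus the NTP/internal clock delta, or the current internal time plus that delta, and then converts it to a 90 kHz RTP timestamp. A minimal sketch of that fallback chain and the conversion, with plain integers standing in for the WebRTC clock and frame types (DeriveCaptureNtpMs and NtpMsToRtpTimestamp are illustrative names, not part of the API):

#include <cstdint>

// All inputs in milliseconds. delta_ntp_internal_ms is the (NTP now - internal
// now) offset captured at construction time, as in the constructor above.
int64_t DeriveCaptureNtpMs(int64_t frame_ntp_ms,
                           int64_t frame_render_ms,
                           int64_t now_internal_ms,
                           int64_t delta_ntp_internal_ms) {
  if (frame_ntp_ms != 0)
    return frame_ntp_ms;                             // best: explicit NTP time
  if (frame_render_ms != 0)
    return frame_render_ms + delta_ntp_internal_ms;  // translate internal -> NTP
  return now_internal_ms + delta_ntp_internal_ms;    // fall back to "now"
}

// RTP video timestamps tick at 90 kHz, i.e. 90 ticks per millisecond, so a
// 20 ms frame interval advances the RTP timestamp by 1800. The product wraps
// modulo 2^32, which is fine for RTP.
uint32_t NtpMsToRtpTimestamp(int64_t ntp_ms) {
  const int kMsToRtpTimestamp = 90;
  return static_cast<uint32_t>(kMsToRtpTimestamp * ntp_ms);
}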
+ int64_t capture_ntp_time_ms; + if (video_frame.ntp_time_ms() != 0) { + capture_ntp_time_ms = video_frame.ntp_time_ms(); + } else if (video_frame.render_time_ms() != 0) { + capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_; + } else { + capture_ntp_time_ms = current_time + delta_ntp_internal_ms_; + } + incoming_frame.set_ntp_time_ms(capture_ntp_time_ms); + + // Convert NTP time, in ms, to RTP timestamp. + const int kMsToRtpTimestamp = 90; + incoming_frame.set_timestamp( + kMsToRtpTimestamp * static_cast(incoming_frame.ntp_time_ms())); + + rtc::CritScope lock(&crit_); + if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) { + // We don't allow the same capture time for two frames, drop this one. + LOG(LS_WARNING) << "Same/old NTP timestamp (" + << incoming_frame.ntp_time_ms() + << " <= " << last_captured_timestamp_ + << ") for incoming frame. Dropping."; + return; + } + + captured_frame_.reset(new VideoFrame); + captured_frame_->ShallowCopy(incoming_frame); + last_captured_timestamp_ = incoming_frame.ntp_time_ms(); + + overuse_detector_->FrameCaptured(*captured_frame_); + + TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(), + "render_time", video_frame.render_time_ms()); + + capture_event_->Set(); +} + +bool VideoCaptureInput::GetVideoFrame(VideoFrame* video_frame) { + rtc::CritScope lock(&crit_); + if (!captured_frame_) + return false; + + *video_frame = *captured_frame_; + captured_frame_.reset(); + return true; +} + +} // namespace internal +} // namespace webrtc diff --git a/webrtc/video/video_capture_input.h b/webrtc/video/video_capture_input.h new file mode 100644 index 0000000000..5877f6c94f --- /dev/null +++ b/webrtc/video/video_capture_input.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_ +#define WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_ + +#include +#include + +#include "webrtc/base/criticalsection.h" +#include "webrtc/base/event.h" +#include "webrtc/base/platform_thread.h" +#include "webrtc/base/thread_annotations.h" +#include "webrtc/common_types.h" +#include "webrtc/engine_configurations.h" +#include "webrtc/modules/video_capture/video_capture.h" +#include "webrtc/modules/video_coding/include/video_codec_interface.h" +#include "webrtc/modules/video_coding/include/video_coding.h" +#include "webrtc/modules/video_processing/include/video_processing.h" +#include "webrtc/system_wrappers/include/clock.h" +#include "webrtc/typedefs.h" +#include "webrtc/video_send_stream.h" + +namespace webrtc { + +class Config; +class OveruseFrameDetector; +class SendStatisticsProxy; + +namespace internal { +class VideoCaptureInput : public webrtc::VideoCaptureInput { + public: + VideoCaptureInput(rtc::Event* capture_event, + rtc::VideoSinkInterface* local_renderer, + SendStatisticsProxy* send_stats_proxy, + OveruseFrameDetector* overuse_detector); + ~VideoCaptureInput(); + + void IncomingCapturedFrame(const VideoFrame& video_frame) override; + + bool GetVideoFrame(VideoFrame* frame); + + private: + rtc::CriticalSection crit_; + + rtc::VideoSinkInterface* const local_renderer_; + SendStatisticsProxy* const stats_proxy_; + rtc::Event* const capture_event_; + + std::unique_ptr captured_frame_ GUARDED_BY(crit_); + Clock* const clock_; + // Used to make sure incoming time stamp is increasing for every frame. + int64_t last_captured_timestamp_; + // Delta used for translating between NTP and internal timestamps. + const int64_t delta_ntp_internal_ms_; + + OveruseFrameDetector* const overuse_detector_; +}; + +} // namespace internal +} // namespace webrtc + +#endif // WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_ diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc new file mode 100644 index 0000000000..2da722b47d --- /dev/null +++ b/webrtc/video/video_capture_input_unittest.cc @@ -0,0 +1,255 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "webrtc/video/video_capture_input.h" + +#include +#include + +#include "testing/gtest/include/gtest/gtest.h" +#include "webrtc/base/event.h" +#include "webrtc/base/refcount.h" +#include "webrtc/test/fake_texture_frame.h" +#include "webrtc/test/frame_utils.h" +#include "webrtc/video/send_statistics_proxy.h" + +// If an output frame does not arrive in 500ms, the test will fail. 
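The new VideoCaptureInput above, together with the test below, exercises a single-slot hand-off: the capture thread stores the newest frame under a lock and signals an event, the encoder thread swaps the slot out and encodes it, and any older pending frame is simply replaced. A compact, self-contained sketch of that pattern using std synchronization primitives in place of rtc::Event and rtc::CriticalSection (SingleSlotQueue is an illustrative name, not the WebRTC class):

#include <chrono>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <utility>

template <typename Frame>
class SingleSlotQueue {
 public:
  // Producer side, mirrors IncomingCapturedFrame: newest frame wins.
  void Put(Frame frame) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      slot_.reset(new Frame(std::move(frame)));
    }
    wakeup_.notify_one();  // plays the role of capture_event_->Set()
  }

  // Consumer side, mirrors GetVideoFrame plus the event wait in the tests.
  // Returns false on timeout, true and fills |out| when a frame was pending.
  bool Get(Frame* out, std::chrono::milliseconds timeout) {
    std::unique_lock<std::mutex> lock(mutex_);
    if (!wakeup_.wait_for(lock, timeout, [this] { return slot_ != nullptr; }))
      return false;
    *out = std::move(*slot_);
    slot_.reset();
    return true;
  }

 private:
  std::mutex mutex_;
  std::condition_variable wakeup_;
  std::unique_ptr<Frame> slot_;
};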
+#define FRAME_TIMEOUT_MS 500 + +namespace webrtc { + +bool EqualFramesVector(const std::vector>& frames1, + const std::vector>& frames2); +std::unique_ptr CreateVideoFrame(uint8_t length); + +class VideoCaptureInputTest : public ::testing::Test { + protected: + VideoCaptureInputTest() + : stats_proxy_(Clock::GetRealTimeClock(), + webrtc::VideoSendStream::Config(nullptr), + webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo), + capture_event_(false, false) {} + + virtual void SetUp() { + overuse_detector_.reset( + new OveruseFrameDetector(Clock::GetRealTimeClock(), CpuOveruseOptions(), + nullptr, nullptr, &stats_proxy_)); + input_.reset(new internal::VideoCaptureInput( + &capture_event_, nullptr, &stats_proxy_, overuse_detector_.get())); + } + + void AddInputFrame(VideoFrame* frame) { + input_->IncomingCapturedFrame(*frame); + } + + void WaitOutputFrame() { + EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); + VideoFrame frame; + EXPECT_TRUE(input_->GetVideoFrame(&frame)); + ASSERT_TRUE(frame.video_frame_buffer()); + if (!frame.video_frame_buffer()->native_handle()) { + output_frame_ybuffers_.push_back(frame.video_frame_buffer()->DataY()); + } + output_frames_.push_back( + std::unique_ptr(new VideoFrame(frame))); + } + + SendStatisticsProxy stats_proxy_; + + rtc::Event capture_event_; + + std::unique_ptr overuse_detector_; + + // Used to send input capture frames to VideoCaptureInput. + std::unique_ptr input_; + + // Input capture frames of VideoCaptureInput. + std::vector> input_frames_; + + // Output delivered frames of VideoCaptureInput. + std::vector> output_frames_; + + // The pointers of Y plane buffers of output frames. This is used to verify + // the frame are swapped and not copied. + std::vector output_frame_ybuffers_; +}; + +TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) { + // Indicate an output frame has arrived. 
+ rtc::Event frame_destroyed_event(false, false); + class TestBuffer : public webrtc::I420Buffer { + public: + explicit TestBuffer(rtc::Event* event) : I420Buffer(5, 5), event_(event) {} + + private: + friend class rtc::RefCountedObject; + ~TestBuffer() override { event_->Set(); } + rtc::Event* const event_; + }; + + { + VideoFrame frame( + new rtc::RefCountedObject(&frame_destroyed_event), 1, 1, + kVideoRotation_0); + + AddInputFrame(&frame); + WaitOutputFrame(); + + EXPECT_EQ(output_frames_[0]->video_frame_buffer().get(), + frame.video_frame_buffer().get()); + output_frames_.clear(); + } + EXPECT_TRUE(frame_destroyed_event.Wait(FRAME_TIMEOUT_MS)); +} + +TEST_F(VideoCaptureInputTest, TestNtpTimeStampSetIfRenderTimeSet) { + input_frames_.push_back(CreateVideoFrame(0)); + input_frames_[0]->set_render_time_ms(5); + input_frames_[0]->set_ntp_time_ms(0); + + AddInputFrame(input_frames_[0].get()); + WaitOutputFrame(); + EXPECT_GT(output_frames_[0]->ntp_time_ms(), + input_frames_[0]->render_time_ms()); +} + +TEST_F(VideoCaptureInputTest, TestRtpTimeStampSet) { + input_frames_.push_back(CreateVideoFrame(0)); + input_frames_[0]->set_render_time_ms(0); + input_frames_[0]->set_ntp_time_ms(1); + input_frames_[0]->set_timestamp(0); + + AddInputFrame(input_frames_[0].get()); + WaitOutputFrame(); + EXPECT_EQ(output_frames_[0]->timestamp(), + input_frames_[0]->ntp_time_ms() * 90); +} + +TEST_F(VideoCaptureInputTest, DropsFramesWithSameOrOldNtpTimestamp) { + input_frames_.push_back(CreateVideoFrame(0)); + + input_frames_[0]->set_ntp_time_ms(17); + AddInputFrame(input_frames_[0].get()); + WaitOutputFrame(); + EXPECT_EQ(output_frames_[0]->timestamp(), + input_frames_[0]->ntp_time_ms() * 90); + + // Repeat frame with the same NTP timestamp should drop. + AddInputFrame(input_frames_[0].get()); + EXPECT_FALSE(capture_event_.Wait(FRAME_TIMEOUT_MS)); + + // As should frames with a decreased NTP timestamp. + input_frames_[0]->set_ntp_time_ms(input_frames_[0]->ntp_time_ms() - 1); + AddInputFrame(input_frames_[0].get()); + EXPECT_FALSE(capture_event_.Wait(FRAME_TIMEOUT_MS)); + + // But delivering with an increased NTP timestamp should succeed. + input_frames_[0]->set_ntp_time_ms(4711); + AddInputFrame(input_frames_[0].get()); + WaitOutputFrame(); + EXPECT_EQ(output_frames_[1]->timestamp(), + input_frames_[0]->ntp_time_ms() * 90); +} + +TEST_F(VideoCaptureInputTest, TestTextureFrames) { + const int kNumFrame = 3; + for (int i = 0 ; i < kNumFrame; ++i) { + test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); + // Add one to |i| so that width/height > 0. + input_frames_.push_back(std::unique_ptr(new VideoFrame( + test::FakeNativeHandle::CreateFrame(dummy_handle, i + 1, i + 1, i + 1, + i + 1, webrtc::kVideoRotation_0)))); + AddInputFrame(input_frames_[i].get()); + WaitOutputFrame(); + ASSERT_TRUE(output_frames_[i]->video_frame_buffer()); + EXPECT_EQ(dummy_handle, + output_frames_[i]->video_frame_buffer()->native_handle()); + } + + EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); +} + +TEST_F(VideoCaptureInputTest, TestI420Frames) { + const int kNumFrame = 4; + std::vector ybuffer_pointers; + for (int i = 0; i < kNumFrame; ++i) { + input_frames_.push_back(CreateVideoFrame(static_cast(i + 1))); + ybuffer_pointers.push_back(input_frames_[i]->video_frame_buffer()->DataY()); + AddInputFrame(input_frames_[i].get()); + WaitOutputFrame(); + } + + EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); + // Make sure the buffer is not copied. 
+ for (int i = 0; i < kNumFrame; ++i) + EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); +} + +TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) { + test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); + input_frames_.push_back(std::unique_ptr( + new VideoFrame(test::FakeNativeHandle::CreateFrame( + dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)))); + AddInputFrame(input_frames_[0].get()); + WaitOutputFrame(); + ASSERT_TRUE(output_frames_[0]->video_frame_buffer()); + EXPECT_EQ(dummy_handle, + output_frames_[0]->video_frame_buffer()->native_handle()); + + input_frames_.push_back(CreateVideoFrame(2)); + AddInputFrame(input_frames_[1].get()); + WaitOutputFrame(); + + EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); +} + +TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) { + input_frames_.push_back(CreateVideoFrame(1)); + AddInputFrame(input_frames_[0].get()); + WaitOutputFrame(); + + test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); + input_frames_.push_back(std::unique_ptr( + new VideoFrame(test::FakeNativeHandle::CreateFrame( + dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)))); + AddInputFrame(input_frames_[1].get()); + WaitOutputFrame(); + + EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); +} + +bool EqualFramesVector( + const std::vector>& frames1, + const std::vector>& frames2) { + if (frames1.size() != frames2.size()) + return false; + for (size_t i = 0; i < frames1.size(); ++i) { + // Compare frame buffers, since we don't care about differing timestamps. + if (!test::FrameBufsEqual(frames1[i]->video_frame_buffer(), + frames2[i]->video_frame_buffer())) { + return false; + } + } + return true; +} + +std::unique_ptr CreateVideoFrame(uint8_t data) { + std::unique_ptr frame(new VideoFrame()); + const int width = 36; + const int height = 24; + const int kSizeY = width * height * 2; + uint8_t buffer[kSizeY]; + memset(buffer, data, kSizeY); + frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, + width / 2, kVideoRotation_0); + frame->set_render_time_ms(data); + return frame; +} + +} // namespace webrtc diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc index b111be774c..a401d6dd42 100644 --- a/webrtc/video/video_quality_test.cc +++ b/webrtc/video/video_quality_test.cc @@ -1135,7 +1135,7 @@ void VideoQualityTest::RunWithVideoRenderer(const Params& params) { SetupCommon(&transport, &transport); - video_send_config_.pre_encode_callback = local_preview.get(); + video_send_config_.local_renderer = local_preview.get(); video_receive_configs_[stream_id].renderer = loopback_video.get(); video_send_config_.suspend_below_min_bitrate = @@ -1153,8 +1153,8 @@ void VideoQualityTest::RunWithVideoRenderer(const Params& params) { if (params_.screenshare.enabled) SetupScreenshare(); - video_send_stream_ = call->CreateVideoSendStream( - video_send_config_.Copy(), video_encoder_config_.Copy()); + video_send_stream_ = + call->CreateVideoSendStream(video_send_config_, video_encoder_config_); VideoReceiveStream* receive_stream = call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy()); CreateCapturer(video_send_stream_->Input()); diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc index 2f8241e28e..a46bc859e7 100644 --- a/webrtc/video/video_send_stream.cc +++ b/webrtc/video/video_send_stream.cc @@ -7,6 +7,7 @@ * in the file PATENTS. 
All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ + #include "webrtc/video/video_send_stream.h" #include @@ -18,6 +19,7 @@ #include "webrtc/base/checks.h" #include "webrtc/base/logging.h" #include "webrtc/base/trace_event.h" +#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" #include "webrtc/modules/bitrate_controller/include/bitrate_controller.h" #include "webrtc/modules/congestion_controller/include/congestion_controller.h" #include "webrtc/modules/pacing/packet_router.h" @@ -25,12 +27,18 @@ #include "webrtc/modules/utility/include/process_thread.h" #include "webrtc/modules/video_coding/utility/ivf_file_writer.h" #include "webrtc/video/call_stats.h" +#include "webrtc/video/video_capture_input.h" #include "webrtc/video/vie_remb.h" #include "webrtc/video_send_stream.h" namespace webrtc { +class RtcpIntraFrameObserver; +class TransportFeedbackObserver; + static const int kMinSendSidePacketHistorySize = 600; +static const int kEncoderTimeOutMs = 2000; + namespace { std::vector CreateRtpRtcpModules( @@ -144,6 +152,8 @@ std::string VideoSendStream::Config::ToString() const { << (pre_encode_callback ? "(I420FrameCallback)" : "nullptr"); ss << ", post_encode_callback: " << (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr"); + ss << ", local_renderer: " + << (local_renderer ? "(VideoRenderer)" : "nullptr"); ss << ", render_delay_ms: " << render_delay_ms; ss << ", target_delay_ms: " << target_delay_ms; ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on" @@ -194,15 +204,192 @@ std::string VideoSendStream::StreamStats::ToString() const { namespace { +VideoCodecType PayloadNameToCodecType(const std::string& payload_name) { + if (payload_name == "VP8") + return kVideoCodecVP8; + if (payload_name == "VP9") + return kVideoCodecVP9; + if (payload_name == "H264") + return kVideoCodecH264; + return kVideoCodecGeneric; +} + bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) { - if (payload_name == "VP8" || payload_name == "VP9") - return true; - RTC_DCHECK(payload_name == "H264" || payload_name == "FAKE") - << "unknown payload_name " << payload_name; + switch (PayloadNameToCodecType(payload_name)) { + case kVideoCodecVP8: + case kVideoCodecVP9: + return true; + case kVideoCodecH264: + case kVideoCodecGeneric: + return false; + case kVideoCodecI420: + case kVideoCodecRED: + case kVideoCodecULPFEC: + case kVideoCodecUnknown: + RTC_NOTREACHED(); + return false; + } + RTC_NOTREACHED(); return false; } -int CalculateMaxPadBitrateBps(const VideoEncoderConfig& config, +// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle +// pipelining encoders better (multiple input frames before something comes +// out). This should effectively turn off CPU adaptations for systems that +// remotely cope with the load right now. 
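For scale, assuming the usage metric is average encode time expressed as a percentage of the capture interval (an interpretation, not spelled out here): at 30 fps the frame budget is 1000 / 30 ≈ 33 ms, so the relaxed thresholds set below mean overuse is only considered above about 1.5 × 33 ≈ 50 ms of average encode time and reported firmly above 2.0 × 33 ≈ 67 ms, which, as the TODO above says, effectively turns off CPU adaptation for any system that can remotely keep up with the load.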
+CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) { + CpuOveruseOptions options; + if (full_overuse_time) { + options.low_encode_usage_threshold_percent = 150; + options.high_encode_usage_threshold_percent = 200; + } + return options; +} + +VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config, + const std::string& payload_name, + int payload_type) { + const std::vector& streams = config.streams; + static const int kEncoderMinBitrateKbps = 30; + RTC_DCHECK(!streams.empty()); + RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0); + + VideoCodec video_codec; + memset(&video_codec, 0, sizeof(video_codec)); + video_codec.codecType = PayloadNameToCodecType(payload_name); + + switch (config.content_type) { + case VideoEncoderConfig::ContentType::kRealtimeVideo: + video_codec.mode = kRealtimeVideo; + break; + case VideoEncoderConfig::ContentType::kScreen: + video_codec.mode = kScreensharing; + if (config.streams.size() == 1 && + config.streams[0].temporal_layer_thresholds_bps.size() == 1) { + video_codec.targetBitrate = + config.streams[0].temporal_layer_thresholds_bps[0] / 1000; + } + break; + } + + switch (video_codec.codecType) { + case kVideoCodecVP8: { + if (config.encoder_specific_settings) { + video_codec.codecSpecific.VP8 = *reinterpret_cast( + config.encoder_specific_settings); + } else { + video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings(); + } + video_codec.codecSpecific.VP8.numberOfTemporalLayers = + static_cast( + streams.back().temporal_layer_thresholds_bps.size() + 1); + break; + } + case kVideoCodecVP9: { + if (config.encoder_specific_settings) { + video_codec.codecSpecific.VP9 = *reinterpret_cast( + config.encoder_specific_settings); + if (video_codec.mode == kScreensharing) { + video_codec.codecSpecific.VP9.flexibleMode = true; + // For now VP9 screensharing use 1 temporal and 2 spatial layers. + RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, + 1); + RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2); + } + } else { + video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings(); + } + video_codec.codecSpecific.VP9.numberOfTemporalLayers = + static_cast( + streams.back().temporal_layer_thresholds_bps.size() + 1); + break; + } + case kVideoCodecH264: { + if (config.encoder_specific_settings) { + video_codec.codecSpecific.H264 = + *reinterpret_cast( + config.encoder_specific_settings); + } else { + video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings(); + } + break; + } + default: + // TODO(pbos): Support encoder_settings codec-agnostically. + RTC_DCHECK(!config.encoder_specific_settings) + << "Encoder-specific settings for codec type not wired up."; + break; + } + + strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1); + video_codec.plName[kPayloadNameSize - 1] = '\0'; + video_codec.plType = payload_type; + video_codec.numberOfSimulcastStreams = + static_cast(streams.size()); + video_codec.minBitrate = streams[0].min_bitrate_bps / 1000; + if (video_codec.minBitrate < kEncoderMinBitrateKbps) + video_codec.minBitrate = kEncoderMinBitrateKbps; + RTC_DCHECK_LE(streams.size(), static_cast(kMaxSimulcastStreams)); + if (video_codec.codecType == kVideoCodecVP9) { + // If the vector is empty, bitrates will be configured automatically. 
+ RTC_DCHECK(config.spatial_layers.empty() || + config.spatial_layers.size() == + video_codec.codecSpecific.VP9.numberOfSpatialLayers); + RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers, + kMaxSimulcastStreams); + for (size_t i = 0; i < config.spatial_layers.size(); ++i) + video_codec.spatialLayers[i] = config.spatial_layers[i]; + } + for (size_t i = 0; i < streams.size(); ++i) { + SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; + RTC_DCHECK_GT(streams[i].width, 0u); + RTC_DCHECK_GT(streams[i].height, 0u); + RTC_DCHECK_GT(streams[i].max_framerate, 0); + // Different framerates not supported per stream at the moment. + RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate); + RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0); + RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps); + RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps); + RTC_DCHECK_GE(streams[i].max_qp, 0); + + sim_stream->width = static_cast(streams[i].width); + sim_stream->height = static_cast(streams[i].height); + sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000; + sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000; + sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000; + sim_stream->qpMax = streams[i].max_qp; + sim_stream->numberOfTemporalLayers = static_cast( + streams[i].temporal_layer_thresholds_bps.size() + 1); + + video_codec.width = std::max(video_codec.width, + static_cast(streams[i].width)); + video_codec.height = std::max( + video_codec.height, static_cast(streams[i].height)); + video_codec.minBitrate = + std::min(static_cast(video_codec.minBitrate), + static_cast(streams[i].min_bitrate_bps / 1000)); + video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000; + video_codec.qpMax = std::max(video_codec.qpMax, + static_cast(streams[i].max_qp)); + } + + if (video_codec.maxBitrate == 0) { + // Unset max bitrate -> cap to one bit per pixel. + video_codec.maxBitrate = + (video_codec.width * video_codec.height * video_codec.maxFramerate) / + 1000; + } + if (video_codec.maxBitrate < kEncoderMinBitrateKbps) + video_codec.maxBitrate = kEncoderMinBitrateKbps; + + RTC_DCHECK_GT(streams[0].max_framerate, 0); + video_codec.maxFramerate = streams[0].max_framerate; + video_codec.expect_encode_from_texture = config.expect_encode_from_texture; + + return video_codec; +} + +int CalulcateMaxPadBitrateBps(const VideoEncoderConfig& config, bool pad_to_min_bitrate) { int pad_up_to_bitrate_bps = 0; // Calculate max padding bitrate for a multi layer codec. @@ -226,443 +413,87 @@ int CalculateMaxPadBitrateBps(const VideoEncoderConfig& config, } // namespace namespace internal { - -// VideoSendStreamImpl implements internal::VideoSendStream. -// It is created and destroyed on |worker_queue|. The intent is to decrease the -// need for locking and to ensure methods are called in sequence. -// Public methods except |DeliverRtcp| must be called on |worker_queue|. -// DeliverRtcp is called on the libjingle worker thread or a network thread. -// An encoder may deliver frames through the EncodedImageCallback on an -// arbitrary thread. 
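For reference, the aggregation in VideoEncoderConfigToVideoCodec above folds the simulcast streams into one VideoCodec: width/height take the per-stream maxima, maxBitrate is the sum of the per-stream maxima, minBitrate is the smallest per-stream minimum floored at 30 kbps, maxFramerate comes from the first stream, and a max bitrate of zero is capped at roughly one bit per pixel per second. A worked example under those rules, using hypothetical stream values (not taken from any config in this change):

  stream 0: 640x360,  30 fps, min 150 kbps, max  800 kbps
  stream 1: 1280x720, 30 fps, min 300 kbps, max 2500 kbps

  resulting VideoCodec: width 1280, height 720, maxFramerate 30,
                        numberOfSimulcastStreams 2,
                        minBitrate 150 kbps, maxBitrate 800 + 2500 = 3300 kbps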
-class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, - public webrtc::VCMProtectionCallback, - public EncodedImageCallback { - public: - VideoSendStreamImpl(SendStatisticsProxy* stats_proxy, - rtc::TaskQueue* worker_queue, - CallStats* call_stats, - CongestionController* congestion_controller, - BitrateAllocator* bitrate_allocator, - SendDelayStats* send_delay_stats, - VieRemb* remb, - ViEEncoder* vie_encoder, - RtcEventLog* event_log, - const VideoSendStream::Config* config, - std::map suspended_ssrcs); - ~VideoSendStreamImpl() override; - - // RegisterProcessThread register |module_process_thread| with those objects - // that use it. Registration has to happen on the thread were - // |module_process_thread| was created (libjingle's worker thread). - // TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue, - // maybe |worker_queue|. - void RegisterProcessThread(ProcessThread* module_process_thread); - void DeRegisterProcessThread(); - - void SignalNetworkState(NetworkState state); - bool DeliverRtcp(const uint8_t* packet, size_t length); - void Start(); - void Stop(); - - void SignalEncoderConfigurationChanged(const VideoEncoderConfig& config); - VideoSendStream::RtpStateMap GetRtpStates() const; - - private: - class CheckEncoderActivityTask; - - // Implements BitrateAllocatorObserver. - uint32_t OnBitrateUpdated(uint32_t bitrate_bps, - uint8_t fraction_loss, - int64_t rtt) override; - - // Implements webrtc::VCMProtectionCallback. - int ProtectionRequest(const FecProtectionParams* delta_params, - const FecProtectionParams* key_params, - uint32_t* sent_video_rate_bps, - uint32_t* sent_nack_rate_bps, - uint32_t* sent_fec_rate_bps) override; - - // Implements EncodedImageCallback. The implementation routes encoded frames - // to the |payload_router_| and |config.pre_encode_callback| if set. - // Called on an arbitrary encoder callback thread. - EncodedImageCallback::Result OnEncodedImage( - const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; - - void ConfigureProtection(); - void ConfigureSsrcs(); - void SignalEncoderTimedOut(); - void SignalEncoderActive(); - - SendStatisticsProxy* const stats_proxy_; - const VideoSendStream::Config* const config_; - std::map suspended_ssrcs_; - - ProcessThread* module_process_thread_; - rtc::ThreadChecker module_process_thread_checker_; - rtc::TaskQueue* const worker_queue_; - - rtc::CriticalSection encoder_activity_crit_sect_; - CheckEncoderActivityTask* check_encoder_activity_task_ - GUARDED_BY(encoder_activity_crit_sect_); - CallStats* const call_stats_; - CongestionController* const congestion_controller_; - BitrateAllocator* const bitrate_allocator_; - VieRemb* const remb_; - - static const bool kEnableFrameRecording = false; - static const int kMaxLayers = 3; - std::unique_ptr file_writers_[kMaxLayers]; - - int max_padding_bitrate_; - int encoder_min_bitrate_bps_; - uint32_t encoder_max_bitrate_bps_; - uint32_t encoder_target_rate_bps_; - - ViEEncoder* const vie_encoder_; - EncoderStateFeedback encoder_feedback_; - ProtectionBitrateCalculator protection_bitrate_calculator_; - - const std::unique_ptr bandwidth_observer_; - // RtpRtcp modules, declared here as they use other members on construction. - const std::vector rtp_rtcp_modules_; - PayloadRouter payload_router_; -}; - -// TODO(tommi): See if there's a more elegant way to create a task that creates -// an object on the correct task queue. 
-class VideoSendStream::ConstructionTask : public rtc::QueuedTask { - public: - ConstructionTask(std::unique_ptr* send_stream, - rtc::Event* done_event, - SendStatisticsProxy* stats_proxy, - ViEEncoder* vie_encoder, - ProcessThread* module_process_thread, - CallStats* call_stats, - CongestionController* congestion_controller, - BitrateAllocator* bitrate_allocator, - SendDelayStats* send_delay_stats, - VieRemb* remb, - RtcEventLog* event_log, - const VideoSendStream::Config* config, - const std::map& suspended_ssrcs) - : send_stream_(send_stream), - done_event_(done_event), - stats_proxy_(stats_proxy), - vie_encoder_(vie_encoder), - call_stats_(call_stats), - congestion_controller_(congestion_controller), - bitrate_allocator_(bitrate_allocator), - send_delay_stats_(send_delay_stats), - remb_(remb), - event_log_(event_log), - config_(config), - suspended_ssrcs_(suspended_ssrcs) {} - - ~ConstructionTask() override { done_event_->Set(); } - - private: - bool Run() override { - send_stream_->reset(new VideoSendStreamImpl( - stats_proxy_, rtc::TaskQueue::Current(), call_stats_, - congestion_controller_, bitrate_allocator_, send_delay_stats_, remb_, - vie_encoder_, event_log_, config_, std::move(suspended_ssrcs_))); - return true; - } - - std::unique_ptr* const send_stream_; - rtc::Event* const done_event_; - SendStatisticsProxy* const stats_proxy_; - ViEEncoder* const vie_encoder_; - CallStats* const call_stats_; - CongestionController* const congestion_controller_; - BitrateAllocator* const bitrate_allocator_; - SendDelayStats* const send_delay_stats_; - VieRemb* const remb_; - RtcEventLog* const event_log_; - const VideoSendStream::Config* config_; - std::map suspended_ssrcs_; -}; - -class VideoSendStream::DestructAndGetRtpStateTask : public rtc::QueuedTask { - public: - DestructAndGetRtpStateTask(VideoSendStream::RtpStateMap* state_map, - std::unique_ptr send_stream, - rtc::Event* done_event) - : state_map_(state_map), - send_stream_(std::move(send_stream)), - done_event_(done_event) {} - - ~DestructAndGetRtpStateTask() override { RTC_CHECK(!send_stream_); } - - private: - bool Run() override { - send_stream_->Stop(); - *state_map_ = send_stream_->GetRtpStates(); - send_stream_.reset(); - done_event_->Set(); - return true; - } - - VideoSendStream::RtpStateMap* state_map_; - std::unique_ptr send_stream_; - rtc::Event* done_event_; -}; - -// CheckEncoderActivityTask is used for tracking when the encoder last produced -// and encoded video frame. If the encoder has not produced anything the last -// kEncoderTimeOutMs we also want to stop sending padding. -class VideoSendStreamImpl::CheckEncoderActivityTask : public rtc::QueuedTask { - public: - static const int kEncoderTimeOutMs = 2000; - explicit CheckEncoderActivityTask(VideoSendStreamImpl* send_stream) - : activity_(0), send_stream_(send_stream), timed_out_(false) {} - - void Stop() { - RTC_CHECK(task_checker_.CalledSequentially()); - send_stream_ = nullptr; - } - - void UpdateEncoderActivity() { - // UpdateEncoderActivity is called from VideoSendStreamImpl::Encoded on - // whatever thread the real encoder implementation run on. In the case of - // hardware encoders, there might be several encoders - // running in parallel on different threads. 
- rtc::AtomicOps::ReleaseStore(&activity_, 1); - } - - private: - bool Run() override { - RTC_CHECK(task_checker_.CalledSequentially()); - if (!send_stream_) - return true; - if (!rtc::AtomicOps::AcquireLoad(&activity_)) { - if (!timed_out_) { - send_stream_->SignalEncoderTimedOut(); - } - timed_out_ = true; - } else if (timed_out_) { - send_stream_->SignalEncoderActive(); - timed_out_ = false; - } - rtc::AtomicOps::ReleaseStore(&activity_, 0); - - rtc::TaskQueue::Current()->PostDelayedTask( - std::unique_ptr(this), kEncoderTimeOutMs); - // Return false to prevent this task from being deleted. Ownership has been - // transferred to the task queue when PostDelayedTask was called. - return false; - } - volatile int activity_; - - rtc::SequencedTaskChecker task_checker_; - VideoSendStreamImpl* send_stream_; - bool timed_out_; -}; - -class ReconfigureVideoEncoderTask : public rtc::QueuedTask { - public: - ReconfigureVideoEncoderTask(VideoSendStreamImpl* send_stream, - VideoEncoderConfig config) - : send_stream_(send_stream), config_(std::move(config)) {} - - private: - bool Run() override { - send_stream_->SignalEncoderConfigurationChanged(std::move(config_)); - return true; - } - - VideoSendStreamImpl* send_stream_; - VideoEncoderConfig config_; -}; - VideoSendStream::VideoSendStream( int num_cpu_cores, ProcessThread* module_process_thread, - rtc::TaskQueue* worker_queue, CallStats* call_stats, CongestionController* congestion_controller, BitrateAllocator* bitrate_allocator, SendDelayStats* send_delay_stats, VieRemb* remb, RtcEventLog* event_log, - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, + const VideoSendStream::Config& config, + const VideoEncoderConfig& encoder_config, const std::map& suspended_ssrcs) - : worker_queue_(worker_queue), - thread_sync_event_(false /* manual_reset */, false), - stats_proxy_(Clock::GetRealTimeClock(), + : stats_proxy_(Clock::GetRealTimeClock(), config, encoder_config.content_type), - config_(std::move(config)) { - vie_encoder_.reset( - new ViEEncoder(num_cpu_cores, &stats_proxy_, config_.encoder_settings, - config_.pre_encode_callback, config_.overuse_callback, - config_.post_encode_callback)); - - worker_queue_->PostTask(std::unique_ptr(new ConstructionTask( - &send_stream_, &thread_sync_event_, &stats_proxy_, vie_encoder_.get(), - module_process_thread, call_stats, congestion_controller, - bitrate_allocator, send_delay_stats, remb, event_log, &config_, - suspended_ssrcs))); - - // Wait for ConstructionTask to complete so that |send_stream_| can be used. - // |module_process_thread| must be registered and deregistered on the thread - // it was created on. - thread_sync_event_.Wait(rtc::Event::kForever); - send_stream_->RegisterProcessThread(module_process_thread); - - vie_encoder_->RegisterProcessThread(module_process_thread); - - ReconfigureVideoEncoder(std::move(encoder_config)); -} - -VideoSendStream::~VideoSendStream() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(!send_stream_); -} - -void VideoSendStream::Start() { - RTC_DCHECK_RUN_ON(&thread_checker_); - LOG(LS_INFO) << "VideoSendStream::Start"; - VideoSendStreamImpl* send_stream = send_stream_.get(); - worker_queue_->PostTask([this, send_stream] { - send_stream->Start(); - thread_sync_event_.Set(); - }); - - // It is expected that after VideoSendStream::Start has been called, incoming - // frames are not dropped in ViEEncoder. To ensure this, Start has to be - // synchronized. 
- thread_sync_event_.Wait(rtc::Event::kForever); -} - -void VideoSendStream::Stop() { - RTC_DCHECK_RUN_ON(&thread_checker_); - LOG(LS_INFO) << "VideoSendStream::Stop"; - VideoSendStreamImpl* send_stream = send_stream_.get(); - worker_queue_->PostTask([send_stream] { send_stream->Stop(); }); -} - -VideoCaptureInput* VideoSendStream::Input() { - // Input() will be called on the thread that deliverers video frames from - // libjingle. - // TODO(perkj): Refactor ViEEncoder to register directly as a VideoSink to the - // VideoSource. - return vie_encoder_.get(); -} - -void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { - // ReconfigureVideoEncoder will be called on the thread that deliverers video - // frames. We must change the encoder settings immediately so that - // the codec settings matches the next frame. - // TODO(perkj): Move logic for reconfiguration the encoder due to frame size - // change from WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame to - // be internally handled by ViEEncoder. - vie_encoder_->ConfigureEncoder(config, config_.rtp.max_packet_size); - - worker_queue_->PostTask(std::unique_ptr( - new ReconfigureVideoEncoderTask(send_stream_.get(), std::move(config)))); -} - -VideoSendStream::Stats VideoSendStream::GetStats() { - // TODO(perkj, solenberg): Some test cases in EndToEndTest call GetStats from - // a network thread. See comment in Call::GetStats(). - // RTC_DCHECK_RUN_ON(&thread_checker_); - return stats_proxy_.GetStats(); -} - -void VideoSendStream::SignalNetworkState(NetworkState state) { - RTC_DCHECK_RUN_ON(&thread_checker_); - VideoSendStreamImpl* send_stream = send_stream_.get(); - worker_queue_->PostTask( - [send_stream, state] { send_stream->SignalNetworkState(state); }); -} - -VideoSendStream::RtpStateMap VideoSendStream::StopPermanentlyAndGetRtpStates() { - RTC_DCHECK_RUN_ON(&thread_checker_); - vie_encoder_->Stop(); - vie_encoder_->DeRegisterProcessThread(); - VideoSendStream::RtpStateMap state_map; - send_stream_->DeRegisterProcessThread(); - worker_queue_->PostTask( - std::unique_ptr(new DestructAndGetRtpStateTask( - &state_map, std::move(send_stream_), &thread_sync_event_))); - thread_sync_event_.Wait(rtc::Event::kForever); - return state_map; -} - -bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { - // Called on a network thread. 
- return send_stream_->DeliverRtcp(packet, length); -} - -VideoSendStreamImpl::VideoSendStreamImpl( - SendStatisticsProxy* stats_proxy, - rtc::TaskQueue* worker_queue, - CallStats* call_stats, - CongestionController* congestion_controller, - BitrateAllocator* bitrate_allocator, - SendDelayStats* send_delay_stats, - VieRemb* remb, - ViEEncoder* vie_encoder, - RtcEventLog* event_log, - const VideoSendStream::Config* config, - std::map suspended_ssrcs) - : stats_proxy_(stats_proxy), config_(config), - suspended_ssrcs_(std::move(suspended_ssrcs)), - module_process_thread_(nullptr), - worker_queue_(worker_queue), - check_encoder_activity_task_(nullptr), + suspended_ssrcs_(suspended_ssrcs), + module_process_thread_(module_process_thread), call_stats_(call_stats), congestion_controller_(congestion_controller), bitrate_allocator_(bitrate_allocator), remb_(remb), - max_padding_bitrate_(0), - encoder_min_bitrate_bps_(0), + encoder_thread_(EncoderThreadFunction, this, "EncoderThread"), + encoder_wakeup_event_(false, false), + stop_encoder_thread_(0), encoder_max_bitrate_bps_(0), encoder_target_rate_bps_(0), - vie_encoder_(vie_encoder), + state_(State::kStopped), + overuse_detector_( + Clock::GetRealTimeClock(), + GetCpuOveruseOptions(config.encoder_settings.full_overuse_time), + this, + config.post_encode_callback, + &stats_proxy_), + vie_encoder_(num_cpu_cores, + module_process_thread_, + &stats_proxy_, + &overuse_detector_, + this), encoder_feedback_(Clock::GetRealTimeClock(), - config_->rtp.ssrcs, - vie_encoder), + config.rtp.ssrcs, + &vie_encoder_), protection_bitrate_calculator_(Clock::GetRealTimeClock(), this), + video_sender_(vie_encoder_.video_sender()), bandwidth_observer_(congestion_controller_->GetBitrateController() ->CreateRtcpBandwidthObserver()), rtp_rtcp_modules_(CreateRtpRtcpModules( - config_->send_transport, + config.send_transport, &encoder_feedback_, bandwidth_observer_.get(), congestion_controller_->GetTransportFeedbackObserver(), call_stats_->rtcp_rtt_stats(), congestion_controller_->pacer(), congestion_controller_->packet_router(), - stats_proxy_, + &stats_proxy_, send_delay_stats, event_log, congestion_controller_->GetRetransmissionRateLimiter(), - config_->rtp.ssrcs.size())), - payload_router_(rtp_rtcp_modules_, - config_->encoder_settings.payload_type) { - RTC_DCHECK_RUN_ON(worker_queue_); - LOG(LS_INFO) << "VideoSendStreamInternal: " << config_->ToString(); - module_process_thread_checker_.DetachFromThread(); + config_.rtp.ssrcs.size())), + payload_router_(rtp_rtcp_modules_, config.encoder_settings.payload_type), + input_(&encoder_wakeup_event_, + config_.local_renderer, + &stats_proxy_, + &overuse_detector_) { + LOG(LS_INFO) << "VideoSendStream: " << config_.ToString(); - RTC_DCHECK(!config_->rtp.ssrcs.empty()); + RTC_DCHECK(!config_.rtp.ssrcs.empty()); + RTC_DCHECK(module_process_thread_); RTC_DCHECK(call_stats_); RTC_DCHECK(congestion_controller_); RTC_DCHECK(remb_); // RTP/RTCP initialization. for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { + module_process_thread_->RegisterModule(rtp_rtcp); congestion_controller_->packet_router()->AddRtpModule(rtp_rtcp); } - for (size_t i = 0; i < config_->rtp.extensions.size(); ++i) { - const std::string& extension = config_->rtp.extensions[i].uri; - int id = config_->rtp.extensions[i].id; + for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) { + const std::string& extension = config_.rtp.extensions[i].uri; + int id = config_.rtp.extensions[i].id; // One-byte-extension local identifiers are in the range 1-14 inclusive. 
RTC_DCHECK_GE(id, 1); RTC_DCHECK_LE(id, 14); @@ -680,185 +511,264 @@ VideoSendStreamImpl::VideoSendStreamImpl( ConfigureSsrcs(); // TODO(pbos): Should we set CNAME on all RTP modules? - rtp_rtcp_modules_.front()->SetCNAME(config_->rtp.c_name.c_str()); + rtp_rtcp_modules_.front()->SetCNAME(config_.rtp.c_name.c_str()); // 28 to match packet overhead in ModuleRtpRtcpImpl. static const size_t kRtpPacketSizeOverhead = 28; - RTC_DCHECK_LE(config_->rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead); - const uint16_t mtu = static_cast(config_->rtp.max_packet_size + + RTC_DCHECK_LE(config_.rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead); + const uint16_t mtu = static_cast(config_.rtp.max_packet_size + kRtpPacketSizeOverhead); for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { - rtp_rtcp->RegisterRtcpStatisticsCallback(stats_proxy_); - rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(stats_proxy_); + rtp_rtcp->RegisterRtcpStatisticsCallback(&stats_proxy_); + rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(&stats_proxy_); rtp_rtcp->SetMaxTransferUnit(mtu); rtp_rtcp->RegisterVideoSendPayload( - config_->encoder_settings.payload_type, - config_->encoder_settings.payload_name.c_str()); + config_.encoder_settings.payload_type, + config_.encoder_settings.payload_name.c_str()); } - RTC_DCHECK(config_->encoder_settings.encoder); - RTC_DCHECK_GE(config_->encoder_settings.payload_type, 0); - RTC_DCHECK_LE(config_->encoder_settings.payload_type, 127); + RTC_DCHECK(config.encoder_settings.encoder); + RTC_DCHECK_GE(config.encoder_settings.payload_type, 0); + RTC_DCHECK_LE(config.encoder_settings.payload_type, 127); + ReconfigureVideoEncoder(encoder_config); - vie_encoder_->SetStartBitrate(bitrate_allocator_->GetStartBitrate(this)); - vie_encoder_->SetSink(this); + module_process_thread_->RegisterModule(&overuse_detector_); + + encoder_thread_checker_.DetachFromThread(); + encoder_thread_.Start(); + encoder_thread_.SetPriority(rtc::kHighPriority); } -void VideoSendStreamImpl::RegisterProcessThread( - ProcessThread* module_process_thread) { - RTC_DCHECK_RUN_ON(&module_process_thread_checker_); - RTC_DCHECK(!module_process_thread_); - module_process_thread_ = module_process_thread; +VideoSendStream::~VideoSendStream() { + LOG(LS_INFO) << "~VideoSendStream: " << config_.ToString(); - for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) - module_process_thread_->RegisterModule(rtp_rtcp); -} + Stop(); -void VideoSendStreamImpl::DeRegisterProcessThread() { - RTC_DCHECK_RUN_ON(&module_process_thread_checker_); - for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) - module_process_thread_->DeRegisterModule(rtp_rtcp); -} + // Stop the encoder thread permanently. + rtc::AtomicOps::ReleaseStore(&stop_encoder_thread_, 1); + encoder_wakeup_event_.Set(); + encoder_thread_.Stop(); -VideoSendStreamImpl::~VideoSendStreamImpl() { - RTC_DCHECK_RUN_ON(worker_queue_); - RTC_DCHECK(!payload_router_.active()) - << "VideoSendStreamImpl::Stop not called"; - LOG(LS_INFO) << "~VideoSendStreamInternal: " << config_->ToString(); + // This needs to happen after stopping the encoder thread, + // since the encoder thread calls AddObserver. 
+ bitrate_allocator_->RemoveObserver(this); + + module_process_thread_->DeRegisterModule(&overuse_detector_); rtp_rtcp_modules_[0]->SetREMBStatus(false); remb_->RemoveRembSender(rtp_rtcp_modules_[0]); for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { congestion_controller_->packet_router()->RemoveRtpModule(rtp_rtcp); + module_process_thread_->DeRegisterModule(rtp_rtcp); delete rtp_rtcp; } } -bool VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { - // Runs on a network thread. - RTC_DCHECK(!worker_queue_->IsCurrent()); +bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) rtp_rtcp->IncomingRtcpPacket(packet, length); return true; } -void VideoSendStreamImpl::Start() { - RTC_DCHECK_RUN_ON(worker_queue_); +void VideoSendStream::Start() { LOG(LS_INFO) << "VideoSendStream::Start"; if (payload_router_.active()) return; TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start"); payload_router_.set_active(true); - - bitrate_allocator_->AddObserver( - this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, - max_padding_bitrate_, !config_->suspend_below_min_bitrate); - - // Start monitoring encoder activity. { - rtc::CritScope lock(&encoder_activity_crit_sect_); - RTC_DCHECK(!check_encoder_activity_task_); - check_encoder_activity_task_ = new CheckEncoderActivityTask(this); - worker_queue_->PostDelayedTask( - std::unique_ptr(check_encoder_activity_task_), - CheckEncoderActivityTask::kEncoderTimeOutMs); + rtc::CritScope lock(&encoder_settings_crit_); + pending_state_change_ = rtc::Optional(State::kStarted); } - - vie_encoder_->SendKeyFrame(); + encoder_wakeup_event_.Set(); } -void VideoSendStreamImpl::Stop() { - RTC_DCHECK_RUN_ON(worker_queue_); +void VideoSendStream::Stop() { LOG(LS_INFO) << "VideoSendStream::Stop"; if (!payload_router_.active()) return; TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); payload_router_.set_active(false); - bitrate_allocator_->RemoveObserver(this); { - rtc::CritScope lock(&encoder_activity_crit_sect_); - check_encoder_activity_task_->Stop(); - check_encoder_activity_task_ = nullptr; + rtc::CritScope lock(&encoder_settings_crit_); + pending_state_change_ = rtc::Optional(State::kStopped); } - vie_encoder_->OnBitrateUpdated(0, 0, 0); - stats_proxy_->OnSetEncoderTargetRate(0); + encoder_wakeup_event_.Set(); } -void VideoSendStreamImpl::SignalEncoderTimedOut() { - RTC_DCHECK_RUN_ON(worker_queue_); - // If the encoder has not produced anything the last kEncoderTimeOutMs and it - // is supposed to, deregister as BitrateAllocatorObserver. This can happen - // if a camera stops producing frames. - if (encoder_target_rate_bps_ > 0) { - LOG(LS_INFO) << "SignalEncoderTimedOut, Encoder timed out."; - bitrate_allocator_->RemoveObserver(this); +VideoCaptureInput* VideoSendStream::Input() { + return &input_; +} + +bool VideoSendStream::EncoderThreadFunction(void* obj) { + static_cast(obj)->EncoderProcess(); + // We're done, return false to abort. + return false; +} + +void VideoSendStream::EncoderProcess() { + RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder( + config_.encoder_settings.encoder, + config_.encoder_settings.payload_type, + config_.encoder_settings.internal_source)); + RTC_DCHECK_RUN_ON(&encoder_thread_checker_); + while (true) { + // Wake up every kEncodeCheckForActivityPeriodMs to check if the encoder is + // active. If not, deregister as BitrateAllocatorObserver. 
+ const int kEncodeCheckForActivityPeriodMs = 1000; + encoder_wakeup_event_.Wait(kEncodeCheckForActivityPeriodMs); + if (rtc::AtomicOps::AcquireLoad(&stop_encoder_thread_)) + break; + bool change_settings = false; + rtc::Optional pending_state_change; + { + rtc::CritScope lock(&encoder_settings_crit_); + if (pending_encoder_settings_) { + std::swap(current_encoder_settings_, pending_encoder_settings_); + pending_encoder_settings_.reset(); + change_settings = true; + } else if (pending_state_change_) { + swap(pending_state_change, pending_state_change_); + } + } + if (change_settings) { + current_encoder_settings_->video_codec.startBitrate = std::max( + bitrate_allocator_->GetStartBitrate(this) / 1000, + static_cast(current_encoder_settings_->video_codec.minBitrate)); + + if (state_ == State::kStarted) { + bitrate_allocator_->AddObserver( + this, current_encoder_settings_->video_codec.minBitrate * 1000, + current_encoder_settings_->video_codec.maxBitrate * 1000, + CalulcateMaxPadBitrateBps(current_encoder_settings_->config, + config_.suspend_below_min_bitrate), + !config_.suspend_below_min_bitrate); + } + + payload_router_.SetSendStreams(current_encoder_settings_->config.streams); + vie_encoder_.SetEncoder(current_encoder_settings_->video_codec, + payload_router_.MaxPayloadLength()); + + // Clear stats for disabled layers. + for (size_t i = current_encoder_settings_->config.streams.size(); + i < config_.rtp.ssrcs.size(); ++i) { + stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]); + } + + size_t number_of_temporal_layers = + current_encoder_settings_->config.streams.back() + .temporal_layer_thresholds_bps.size() + + 1; + protection_bitrate_calculator_.SetEncodingData( + current_encoder_settings_->video_codec.width, + current_encoder_settings_->video_codec.height, + number_of_temporal_layers, payload_router_.MaxPayloadLength()); + + // We might've gotten new settings while configuring the encoder settings, + // restart from the top to see if that's the case before trying to encode + // a frame (which might correspond to the last frame size). + encoder_wakeup_event_.Set(); + continue; + } + + if (pending_state_change) { + if (*pending_state_change == State::kStarted && + state_ == State::kStopped) { + bitrate_allocator_->AddObserver( + this, current_encoder_settings_->video_codec.minBitrate * 1000, + current_encoder_settings_->video_codec.maxBitrate * 1000, + CalulcateMaxPadBitrateBps(current_encoder_settings_->config, + config_.suspend_below_min_bitrate), + !config_.suspend_below_min_bitrate); + vie_encoder_.SendKeyFrame(); + state_ = State::kStarted; + LOG_F(LS_INFO) << "Encoder started."; + } else if (*pending_state_change == State::kStopped) { + bitrate_allocator_->RemoveObserver(this); + vie_encoder_.OnBitrateUpdated(0, 0, 0); + stats_proxy_.OnSetEncoderTargetRate(0); + state_ = State::kStopped; + LOG_F(LS_INFO) << "Encoder stopped."; + } + encoder_wakeup_event_.Set(); + continue; + } + + // Check if the encoder has produced anything the last kEncoderTimeOutMs. + // If not, deregister as BitrateAllocatorObserver. + if (state_ == State::kStarted && + vie_encoder_.time_of_last_frame_activity_ms() < + rtc::TimeMillis() - kEncoderTimeOutMs) { + // The encoder has timed out. 
+ LOG_F(LS_INFO) << "Encoder timed out."; + bitrate_allocator_->RemoveObserver(this); + state_ = State::kEncoderTimedOut; + } + if (state_ == State::kEncoderTimedOut && + vie_encoder_.time_of_last_frame_activity_ms() > + rtc::TimeMillis() - kEncoderTimeOutMs) { + LOG_F(LS_INFO) << "Encoder is active."; + bitrate_allocator_->AddObserver( + this, current_encoder_settings_->video_codec.minBitrate * 1000, + current_encoder_settings_->video_codec.maxBitrate * 1000, + CalulcateMaxPadBitrateBps(current_encoder_settings_->config, + config_.suspend_below_min_bitrate), + !config_.suspend_below_min_bitrate); + state_ = State::kStarted; + } + + VideoFrame frame; + if (input_.GetVideoFrame(&frame)) { + // TODO(perkj): |pre_encode_callback| is only used by tests. Tests should + // register as a sink to the VideoSource instead. + if (config_.pre_encode_callback) { + config_.pre_encode_callback->OnFrame(frame); + } + vie_encoder_.EncodeVideoFrame(frame); + } } + vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type); } -void VideoSendStreamImpl::SignalEncoderActive() { - RTC_DCHECK_RUN_ON(worker_queue_); - LOG(LS_INFO) << "SignalEncoderActive, Encoder is active."; - bitrate_allocator_->AddObserver( - this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, - max_padding_bitrate_, !config_->suspend_below_min_bitrate); -} - -void VideoSendStreamImpl::SignalEncoderConfigurationChanged( +void VideoSendStream::ReconfigureVideoEncoder( const VideoEncoderConfig& config) { - RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size()); - TRACE_EVENT0("webrtc", "VideoSendStream::SignalEncoderConfigurationChanged"); - LOG(LS_INFO) << "SignalEncoderConfigurationChanged: " << config.ToString(); - RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size()); - RTC_DCHECK_RUN_ON(worker_queue_); - - const int kEncoderMinBitrateBps = 30000; - encoder_min_bitrate_bps_ = - std::max(config.streams[0].min_bitrate_bps, kEncoderMinBitrateBps); - encoder_max_bitrate_bps_ = 0; - for (const auto& stream : config.streams) - encoder_max_bitrate_bps_ += stream.max_bitrate_bps; - max_padding_bitrate_ = - CalculateMaxPadBitrateBps(config, config_->suspend_below_min_bitrate); - - payload_router_.SetSendStreams(config.streams); - - // Clear stats for disabled layers. - for (size_t i = config.streams.size(); i < config_->rtp.ssrcs.size(); ++i) { - stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]); - } - - size_t number_of_temporal_layers = - config.streams.back().temporal_layer_thresholds_bps.size() + 1; - protection_bitrate_calculator_.SetEncodingData( - config.streams[0].width, config.streams[0].height, - number_of_temporal_layers, config_->rtp.max_packet_size); - - if (payload_router_.active()) { - // The send stream is started already. Update the allocator with new bitrate - // limits. 
- bitrate_allocator_->AddObserver( - this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, - max_padding_bitrate_, !config_->suspend_below_min_bitrate); + TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder"); + LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString(); + RTC_DCHECK_GE(config_.rtp.ssrcs.size(), config.streams.size()); + VideoCodec video_codec = VideoEncoderConfigToVideoCodec( + config, config_.encoder_settings.payload_name, + config_.encoder_settings.payload_type); + { + rtc::CritScope lock(&encoder_settings_crit_); + encoder_max_bitrate_bps_ = video_codec.maxBitrate * 1000; + pending_encoder_settings_.reset(new EncoderSettings({video_codec, config})); } + encoder_wakeup_event_.Set(); } -EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( +VideoSendStream::Stats VideoSendStream::GetStats() { + return stats_proxy_.GetStats(); +} + +void VideoSendStream::OveruseDetected() { + if (config_.overuse_callback) + config_.overuse_callback->OnLoadUpdate(LoadObserver::kOveruse); +} + +void VideoSendStream::NormalUsage() { + if (config_.overuse_callback) + config_.overuse_callback->OnLoadUpdate(LoadObserver::kUnderuse); +} + +EncodedImageCallback::Result VideoSendStream::OnEncodedImage( const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info, const RTPFragmentationHeader* fragmentation) { - // Encoded is called on whatever thread the real encoder implementation run - // on. In the case of hardware encoders, there might be several encoders - // running in parallel on different threads. - if (config_->post_encode_callback) { - config_->post_encode_callback->EncodedFrameCallback( + if (config_.post_encode_callback) { + config_.post_encode_callback->EncodedFrameCallback( EncodedFrame(encoded_image._buffer, encoded_image._length, encoded_image._frameType)); } - { - rtc::CritScope lock(&encoder_activity_crit_sect_); - if (check_encoder_activity_task_) - check_encoder_activity_task_->UpdateEncoderActivity(); - } protection_bitrate_calculator_.UpdateWithEncodedData(encoded_image); EncodedImageCallback::Result result = payload_router_.OnEncodedImage( @@ -873,7 +783,7 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( if (file_writers_[layer] == nullptr) { std::ostringstream oss; oss << "send_bitstream_ssrc"; - for (uint32_t ssrc : config_->rtp.ssrcs) + for (uint32_t ssrc : config_.rtp.ssrcs) oss << "_" << ssrc; oss << "_layer" << layer << ".ivf"; file_writers_[layer] = @@ -890,18 +800,17 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( return result; } -void VideoSendStreamImpl::ConfigureProtection() { - RTC_DCHECK_RUN_ON(worker_queue_); +void VideoSendStream::ConfigureProtection() { // Enable NACK, FEC or both. - const bool enable_protection_nack = config_->rtp.nack.rtp_history_ms > 0; - bool enable_protection_fec = config_->rtp.fec.ulpfec_payload_type != -1; + const bool enable_protection_nack = config_.rtp.nack.rtp_history_ms > 0; + bool enable_protection_fec = config_.rtp.fec.ulpfec_payload_type != -1; // Payload types without picture ID cannot determine that a stream is complete // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is // a waste of bandwidth since FEC packets still have to be transmitted. Note // that this is not the case with FLEXFEC. 
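Concretely, with the two helpers introduced earlier in this file: PayloadTypeSupportsSkippingFecPackets() returns true for "VP8" and "VP9" and false for "H264" or anything mapped to kVideoCodecGeneric, so the check below only strips FEC when NACK is enabled and the codec carries no picture ID. Assuming a ULPFEC payload type is configured, the outcomes are:

// payload   NACK enabled   FEC kept?
// "VP8"     yes            yes  (picture ID lets the receiver skip FEC packets)
// "VP9"     yes            yes
// "H264"    yes            no   (disabled below, with a warning)
// any       no             yes  (NACK off, FEC left as configured)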
if (enable_protection_nack && !PayloadTypeSupportsSkippingFecPackets( - config_->encoder_settings.payload_name)) { + config_.encoder_settings.payload_name)) { LOG(LS_WARNING) << "Transmitting payload type without picture ID using" "NACK+FEC is a waste of bandwidth since FEC packets " "also have to be retransmitted. Disabling FEC."; @@ -915,21 +824,21 @@ void VideoSendStreamImpl::ConfigureProtection() { // TODO(changbin): Should set RTX for RED mapping in RTP sender in future. // Validate payload types. If either RED or FEC payload types are set then // both should be. If FEC is enabled then they both have to be set. - if (config_->rtp.fec.red_payload_type != -1) { - RTC_DCHECK_GE(config_->rtp.fec.red_payload_type, 0); - RTC_DCHECK_LE(config_->rtp.fec.red_payload_type, 127); + if (config_.rtp.fec.red_payload_type != -1) { + RTC_DCHECK_GE(config_.rtp.fec.red_payload_type, 0); + RTC_DCHECK_LE(config_.rtp.fec.red_payload_type, 127); // TODO(holmer): We should only enable red if ulpfec is also enabled, but // but due to an incompatibility issue with previous versions the receiver // assumes rtx packets are containing red if it has been configured to // receive red. Remove this in a few versions once the incompatibility // issue is resolved (M53 timeframe). - payload_type_red = static_cast(config_->rtp.fec.red_payload_type); + payload_type_red = static_cast(config_.rtp.fec.red_payload_type); } - if (config_->rtp.fec.ulpfec_payload_type != -1) { - RTC_DCHECK_GE(config_->rtp.fec.ulpfec_payload_type, 0); - RTC_DCHECK_LE(config_->rtp.fec.ulpfec_payload_type, 127); + if (config_.rtp.fec.ulpfec_payload_type != -1) { + RTC_DCHECK_GE(config_.rtp.fec.ulpfec_payload_type, 0); + RTC_DCHECK_LE(config_.rtp.fec.ulpfec_payload_type, 127); payload_type_fec = - static_cast(config_->rtp.fec.ulpfec_payload_type); + static_cast(config_.rtp.fec.ulpfec_payload_type); } for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { @@ -948,102 +857,107 @@ void VideoSendStreamImpl::ConfigureProtection() { enable_protection_nack); } -void VideoSendStreamImpl::ConfigureSsrcs() { - RTC_DCHECK_RUN_ON(worker_queue_); +void VideoSendStream::ConfigureSsrcs() { // Configure regular SSRCs. - for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) { - uint32_t ssrc = config_->rtp.ssrcs[i]; + for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { + uint32_t ssrc = config_.rtp.ssrcs[i]; RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; rtp_rtcp->SetSSRC(ssrc); // Restore RTP state if previous existed. - VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); + RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); if (it != suspended_ssrcs_.end()) rtp_rtcp->SetRtpState(it->second); } // Set up RTX if available. - if (config_->rtp.rtx.ssrcs.empty()) + if (config_.rtp.rtx.ssrcs.empty()) return; // Configure RTX SSRCs. - RTC_DCHECK_EQ(config_->rtp.rtx.ssrcs.size(), config_->rtp.ssrcs.size()); - for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) { - uint32_t ssrc = config_->rtp.rtx.ssrcs[i]; + RTC_DCHECK_EQ(config_.rtp.rtx.ssrcs.size(), config_.rtp.ssrcs.size()); + for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) { + uint32_t ssrc = config_.rtp.rtx.ssrcs[i]; RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; rtp_rtcp->SetRtxSsrc(ssrc); - VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); + RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); if (it != suspended_ssrcs_.end()) rtp_rtcp->SetRtxState(it->second); } // Configure RTX payload types. 
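The protection setup above derives NACK from the RTP history length and FEC from the ULPFEC payload type, then drops FEC when the payload type cannot skip FEC packets. A condensed sketch of that decision under those assumptions (the struct names and the 0..127 range check are illustrative, not the real config types):

    #include <cstdint>

    struct FecConfig { int red_payload_type = -1; int ulpfec_payload_type = -1; };
    struct Protection { bool nack = false; bool fec = false; uint8_t red = 0; uint8_t ulpfec = 0; };

    Protection ChooseProtection(int nack_history_ms, const FecConfig& fec,
                                bool payload_can_skip_fec) {
      Protection p;
      p.nack = nack_history_ms > 0;
      p.fec = fec.ulpfec_payload_type != -1;
      // NACK+FEC wastes bandwidth if FEC packets must be retransmitted anyway.
      if (p.nack && p.fec && !payload_can_skip_fec)
        p.fec = false;
      if (fec.red_payload_type >= 0 && fec.red_payload_type <= 127)
        p.red = static_cast<uint8_t>(fec.red_payload_type);
      if (fec.ulpfec_payload_type >= 0 && fec.ulpfec_payload_type <= 127)
        p.ulpfec = static_cast<uint8_t>(fec.ulpfec_payload_type);
      return p;
    }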
- RTC_DCHECK_GE(config_->rtp.rtx.payload_type, 0); + RTC_DCHECK_GE(config_.rtp.rtx.payload_type, 0); for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { - rtp_rtcp->SetRtxSendPayloadType(config_->rtp.rtx.payload_type, - config_->encoder_settings.payload_type); + rtp_rtcp->SetRtxSendPayloadType(config_.rtp.rtx.payload_type, + config_.encoder_settings.payload_type); rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads); } - if (config_->rtp.fec.red_payload_type != -1 && - config_->rtp.fec.red_rtx_payload_type != -1) { + if (config_.rtp.fec.red_payload_type != -1 && + config_.rtp.fec.red_rtx_payload_type != -1) { for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { - rtp_rtcp->SetRtxSendPayloadType(config_->rtp.fec.red_rtx_payload_type, - config_->rtp.fec.red_payload_type); + rtp_rtcp->SetRtxSendPayloadType(config_.rtp.fec.red_rtx_payload_type, + config_.rtp.fec.red_payload_type); } } } -std::map VideoSendStreamImpl::GetRtpStates() const { - RTC_DCHECK_RUN_ON(worker_queue_); +std::map VideoSendStream::GetRtpStates() const { std::map rtp_states; - for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) { - uint32_t ssrc = config_->rtp.ssrcs[i]; + for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { + uint32_t ssrc = config_.rtp.ssrcs[i]; RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC()); rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState(); } - for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) { - uint32_t ssrc = config_->rtp.rtx.ssrcs[i]; + for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) { + uint32_t ssrc = config_.rtp.rtx.ssrcs[i]; rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState(); } return rtp_states; } -void VideoSendStreamImpl::SignalNetworkState(NetworkState state) { - RTC_DCHECK_RUN_ON(worker_queue_); +void VideoSendStream::SignalNetworkState(NetworkState state) { for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { - rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_->rtp.rtcp_mode + rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode : RtcpMode::kOff); } } -uint32_t VideoSendStreamImpl::OnBitrateUpdated(uint32_t bitrate_bps, - uint8_t fraction_loss, - int64_t rtt) { - RTC_DCHECK_RUN_ON(worker_queue_); - RTC_DCHECK(payload_router_.active()) - << "VideoSendStream::Start has not been called."; +uint32_t VideoSendStream::OnBitrateUpdated(uint32_t bitrate_bps, + uint8_t fraction_loss, + int64_t rtt) { // Get the encoder target rate. It is the estimated network rate - // protection overhead. - encoder_target_rate_bps_ = protection_bitrate_calculator_.SetTargetRates( - bitrate_bps, stats_proxy_->GetSendFrameRate(), fraction_loss, rtt); - uint32_t protection_bitrate = bitrate_bps - encoder_target_rate_bps_; + uint32_t encoder_target_rate_bps = + protection_bitrate_calculator_.SetTargetRates( + bitrate_bps, stats_proxy_.GetSendFrameRate(), fraction_loss, rtt); + + uint32_t protection_bitrate = bitrate_bps - encoder_target_rate_bps; + { + // Limit the target bitrate to the configured max bitrate. 
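ConfigureSsrcs above assigns one SSRC per RTP module and restores any per-SSRC state saved when the stream was previously suspended, so sequence numbers and timestamps continue instead of restarting. A standalone sketch of that pattern (RtpState and RtpModuleStub are simplified stand-ins):

    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <vector>

    struct RtpState { uint16_t sequence_number = 0; uint32_t timestamp = 0; };
    struct RtpModuleStub { uint32_t ssrc = 0; RtpState state; };

    void ConfigureSsrcs(const std::vector<uint32_t>& ssrcs,
                        const std::map<uint32_t, RtpState>& suspended,
                        std::vector<RtpModuleStub>* modules) {
      for (size_t i = 0; i < ssrcs.size() && i < modules->size(); ++i) {
        (*modules)[i].ssrc = ssrcs[i];
        auto it = suspended.find(ssrcs[i]);
        if (it != suspended.end())
          (*modules)[i].state = it->second;  // Resume where the old stream left off.
      }
    }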
+ rtc::CritScope lock(&encoder_settings_crit_); + encoder_target_rate_bps = + std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps); + if ((encoder_target_rate_bps_ == 0 && encoder_target_rate_bps > 0) || + (encoder_target_rate_bps_ > 0 && encoder_target_rate_bps == 0)) { + LOG(LS_INFO) + << "OnBitrateUpdated: Encoder state changed, target bitrate " + << encoder_target_rate_bps << " bps."; + } + encoder_target_rate_bps_ = encoder_target_rate_bps; + } + vie_encoder_.OnBitrateUpdated(encoder_target_rate_bps, fraction_loss, rtt); + stats_proxy_.OnSetEncoderTargetRate(encoder_target_rate_bps); - encoder_target_rate_bps_ = - std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps_); - vie_encoder_->OnBitrateUpdated(encoder_target_rate_bps_, fraction_loss, rtt); - stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_); return protection_bitrate; } -int VideoSendStreamImpl::ProtectionRequest( - const FecProtectionParams* delta_params, - const FecProtectionParams* key_params, - uint32_t* sent_video_rate_bps, - uint32_t* sent_nack_rate_bps, - uint32_t* sent_fec_rate_bps) { - RTC_DCHECK_RUN_ON(worker_queue_); +int VideoSendStream::ProtectionRequest(const FecProtectionParams* delta_params, + const FecProtectionParams* key_params, + uint32_t* sent_video_rate_bps, + uint32_t* sent_nack_rate_bps, + uint32_t* sent_fec_rate_bps) { *sent_video_rate_bps = 0; *sent_nack_rate_bps = 0; *sent_fec_rate_bps = 0; diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h index 932264265d..c67dc70199 100644 --- a/webrtc/video/video_send_stream.h +++ b/webrtc/video/video_send_stream.h @@ -17,8 +17,6 @@ #include "webrtc/call/bitrate_allocator.h" #include "webrtc/base/criticalsection.h" -#include "webrtc/base/event.h" -#include "webrtc/base/task_queue.h" #include "webrtc/call.h" #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" #include "webrtc/modules/video_coding/protection_bitrate_calculator.h" @@ -26,6 +24,7 @@ #include "webrtc/video/payload_router.h" #include "webrtc/video/send_delay_stats.h" #include "webrtc/video/send_statistics_proxy.h" +#include "webrtc/video/video_capture_input.h" #include "webrtc/video/vie_encoder.h" #include "webrtc/video_receive_stream.h" #include "webrtc/video_send_stream.h" @@ -38,29 +37,32 @@ class CongestionController; class IvfFileWriter; class ProcessThread; class RtpRtcp; +class ViEEncoder; class VieRemb; class RtcEventLog; +namespace vcm { +class VideoSender; +} // namespace vcm + namespace internal { -class VideoSendStreamImpl; - -// VideoSendStream implements webrtc::VideoSendStream. -// Internally, it delegates all public methods to VideoSendStreamImpl and / or -// VieEncoder. VideoSendStreamInternal is created and deleted on |worker_queue|. 
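The bitrate handling above splits the allocator's estimate into a protection share and an encoder target, with the target capped at the configured maximum. A small arithmetic sketch of that split (assumes the overhead never exceeds the estimate, which the protection calculator guarantees; names are illustrative):

    #include <algorithm>
    #include <cstdint>

    struct BitrateSplit { uint32_t encoder_bps; uint32_t protection_bps; };

    // Encoder target = estimate minus protection overhead, capped at the
    // configured max; the overhead is what FEC/NACK may consume.
    BitrateSplit SplitBitrate(uint32_t estimate_bps,
                              uint32_t protection_overhead_bps,
                              uint32_t encoder_max_bps) {
      uint32_t encoder_bps =
          std::min(estimate_bps - protection_overhead_bps, encoder_max_bps);
      return {encoder_bps, protection_overhead_bps};
    }
    // Example: SplitBitrate(1000000, 150000, 800000) yields {800000, 150000}.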
-class VideoSendStream : public webrtc::VideoSendStream { +class VideoSendStream : public webrtc::VideoSendStream, + public webrtc::CpuOveruseObserver, + public webrtc::BitrateAllocatorObserver, + public webrtc::VCMProtectionCallback, + public EncodedImageCallback { public: VideoSendStream(int num_cpu_cores, ProcessThread* module_process_thread, - rtc::TaskQueue* worker_queue, CallStats* call_stats, CongestionController* congestion_controller, BitrateAllocator* bitrate_allocator, SendDelayStats* send_delay_stats, VieRemb* remb, RtcEventLog* event_log, - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, + const VideoSendStream::Config& config, + const VideoEncoderConfig& encoder_config, const std::map& suspended_ssrcs); ~VideoSendStream() override; @@ -72,26 +74,101 @@ class VideoSendStream : public webrtc::VideoSendStream { void Start() override; void Stop() override; VideoCaptureInput* Input() override; - void ReconfigureVideoEncoder(VideoEncoderConfig) override; + void ReconfigureVideoEncoder(const VideoEncoderConfig& config) override; Stats GetStats() override; + // webrtc::CpuOveruseObserver implementation. + void OveruseDetected() override; + void NormalUsage() override; + typedef std::map RtpStateMap; - RtpStateMap StopPermanentlyAndGetRtpStates(); + RtpStateMap GetRtpStates() const; + + int GetPaddingNeededBps() const; + + // Implements BitrateAllocatorObserver. + uint32_t OnBitrateUpdated(uint32_t bitrate_bps, + uint8_t fraction_loss, + int64_t rtt) override; + + protected: + // Implements webrtc::VCMProtectionCallback. + int ProtectionRequest(const FecProtectionParams* delta_params, + const FecProtectionParams* key_params, + uint32_t* sent_video_rate_bps, + uint32_t* sent_nack_rate_bps, + uint32_t* sent_fec_rate_bps) override; private: - class ConstructionTask; - class DestructAndGetRtpStateTask; + struct EncoderSettings { + VideoCodec video_codec; + VideoEncoderConfig config; + }; - rtc::ThreadChecker thread_checker_; - rtc::TaskQueue* const worker_queue_; - rtc::Event thread_sync_event_; + // Implements EncodedImageCallback. The implementation routes encoded frames + // to the |payload_router_| and |config.pre_encode_callback| if set. + // Called on an arbitrary encoder callback thread. 
+ EncodedImageCallback::Result OnEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info, + const RTPFragmentationHeader* fragmentation) override; + + static bool EncoderThreadFunction(void* obj); + void EncoderProcess(); + + void ConfigureProtection(); + void ConfigureSsrcs(); SendStatisticsProxy stats_proxy_; const VideoSendStream::Config config_; - std::unique_ptr send_stream_; - std::unique_ptr vie_encoder_; -}; + std::map suspended_ssrcs_; + ProcessThread* const module_process_thread_; + CallStats* const call_stats_; + CongestionController* const congestion_controller_; + BitrateAllocator* const bitrate_allocator_; + VieRemb* const remb_; + + static const bool kEnableFrameRecording = false; + static const int kMaxLayers = 3; + std::unique_ptr file_writers_[kMaxLayers]; + + rtc::PlatformThread encoder_thread_; + rtc::Event encoder_wakeup_event_; + volatile int stop_encoder_thread_; + rtc::CriticalSection encoder_settings_crit_; + std::unique_ptr pending_encoder_settings_ + GUARDED_BY(encoder_settings_crit_); + uint32_t encoder_max_bitrate_bps_ GUARDED_BY(encoder_settings_crit_); + uint32_t encoder_target_rate_bps_ GUARDED_BY(encoder_settings_crit_); + + enum class State { + kStopped, // VideoSendStream::Start has not yet been called. + kStarted, // VideoSendStream::Start has been called. + // VideoSendStream::Start has been called but the encoder have timed out. + kEncoderTimedOut, + }; + rtc::Optional pending_state_change_ GUARDED_BY(encoder_settings_crit_); + + // Only used on the encoder thread. + rtc::ThreadChecker encoder_thread_checker_; + State state_ ACCESS_ON(&encoder_thread_checker_); + std::unique_ptr current_encoder_settings_ + ACCESS_ON(&encoder_thread_checker_); + + OveruseFrameDetector overuse_detector_; + ViEEncoder vie_encoder_; + EncoderStateFeedback encoder_feedback_; + ProtectionBitrateCalculator protection_bitrate_calculator_; + + vcm::VideoSender* const video_sender_; + + const std::unique_ptr bandwidth_observer_; + // RtpRtcp modules, declared here as they use other members on construction. + const std::vector rtp_rtcp_modules_; + PayloadRouter payload_router_; + VideoCaptureInput input_; +}; } // namespace internal } // namespace webrtc diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc index 654784cc5f..623cd34ac5 100644 --- a/webrtc/video/video_send_stream_tests.cc +++ b/webrtc/video/video_send_stream_tests.cc @@ -866,8 +866,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) { return SEND_PACKET; } - // This method implements the rtc::VideoSinkInterface. This is called when - // a frame is provided to the VideoSendStream. 
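The header above reintroduces a three-state lifecycle for the encoder thread (kStopped, kStarted, kEncoderTimedOut). A compact sketch of the intended transitions, as I read them from the enum comments; the transition function and timeout parameterization are assumptions, not code from the CL:

    #include <cstdint>

    enum class State { kStopped, kStarted, kEncoderTimedOut };

    // Start() moves kStopped -> kStarted, Stop() moves anything -> kStopped,
    // and a started stream drops to kEncoderTimedOut when the encoder has
    // been idle longer than the timeout, returning to kStarted on activity.
    State NextState(State current, bool start_requested, bool stop_requested,
                    int64_t idle_ms, int64_t timeout_ms) {
      if (stop_requested) return State::kStopped;
      if (current == State::kStopped)
        return start_requested ? State::kStarted : State::kStopped;
      return idle_ms > timeout_ms ? State::kEncoderTimedOut : State::kStarted;
    }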
+ // This method implements the rtc::VideoSinkInterface void OnFrame(const VideoFrame& video_frame) override { rtc::CritScope lock(&crit_); if (test_state_ == kDuringSuspend && @@ -1206,7 +1205,7 @@ class MaxPaddingSetTest : public test::SendTest { encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps; encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen; } - encoder_config_ = encoder_config->Copy(); + encoder_config_ = *encoder_config; } void OnCallsCreated(Call* sender_call, Call* receiver_call) override { @@ -1230,7 +1229,7 @@ class MaxPaddingSetTest : public test::SendTest { packets_sent_ = 0; encoder_config_.min_transmit_bitrate_bps = kMinTransmitBitrateBps; encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; - send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + send_stream_->ReconfigureVideoEncoder(encoder_config_); running_without_padding_ = false; return SEND_PACKET; } @@ -1325,7 +1324,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { video_encoder_config_.streams[0].max_bitrate_bps = 2 * bitrate_config.start_bitrate_bps; - video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy()); + video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_); // New bitrate should be reconfigured above the previous max. As there's no // network connection this shouldn't be flaky, as no bitrate should've been @@ -1590,13 +1589,13 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder = this; - encoder_config_ = encoder_config->Copy(); + encoder_config_ = *encoder_config; } void PerformTest() override { EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode."; EXPECT_EQ(0u, num_releases()); - stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); + stream_->ReconfigureVideoEncoder(encoder_config_); EXPECT_EQ(0u, num_releases()); stream_->Stop(); // Encoder should not be released before destroying the VideoSendStream. 
@@ -1639,7 +1638,7 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) { std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder = this; - encoder_config_ = encoder_config->Copy(); + encoder_config_ = *encoder_config; } void OnVideoStreamsCreated( @@ -1668,7 +1667,7 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) { EXPECT_EQ(1u, num_initializations_) << "VideoEncoder not initialized."; encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; - stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); + stream_->ReconfigureVideoEncoder(encoder_config_); EXPECT_TRUE(init_encode_event_.Wait(kDefaultTimeoutMs)); EXPECT_EQ(2u, num_initializations_) << "ReconfigureVideoEncoder did not reinitialize the encoder with " @@ -1715,7 +1714,7 @@ class VideoCodecConfigObserver : public test::SendTest, } encoder_config->encoder_specific_settings = &encoder_settings_; - encoder_config_ = encoder_config->Copy(); + encoder_config_ = *encoder_config; } void OnVideoStreamsCreated( @@ -1742,7 +1741,7 @@ class VideoCodecConfigObserver : public test::SendTest, ASSERT_EQ(1u, num_initializations_) << "VideoEncoder not initialized."; encoder_settings_.frameDroppingOn = true; - stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); + stream_->ReconfigureVideoEncoder(encoder_config_); ASSERT_TRUE( init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); EXPECT_EQ(2u, num_initializations_) @@ -1939,8 +1938,6 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { : SendTest(kDefaultTimeoutMs), FakeEncoder(Clock::GetRealTimeClock()), init_encode_event_(false, false), - bitrate_changed_event_(false, false), - target_bitrate_(0), num_initializations_(0), call_(nullptr), send_stream_(nullptr) {} @@ -1949,8 +1946,6 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { int32_t InitEncode(const VideoCodec* codecSettings, int32_t numberOfCores, size_t maxPayloadSize) override { - EXPECT_GE(codecSettings->startBitrate, codecSettings->minBitrate); - EXPECT_LE(codecSettings->startBitrate, codecSettings->maxBitrate); if (num_initializations_ == 0) { EXPECT_EQ(static_cast(kMinBitrateKbps), codecSettings->minBitrate); @@ -1969,9 +1964,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { } else if (num_initializations_ == 2) { EXPECT_EQ(static_cast(kIncreasedMaxBitrateKbps), codecSettings->maxBitrate); - // The start bitrate will be whatever the rate BitRateController - // has currently configured but in the span of the set max and min - // bitrate. 
+ EXPECT_EQ(static_cast(kIncreasedStartBitrateKbps), + codecSettings->startBitrate); } ++num_initializations_; init_encode_event_.Set(); @@ -1979,23 +1973,6 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { maxPayloadSize); } - int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override { - { - rtc::CritScope lock(&crit_); - target_bitrate_ = newBitRate; - } - bitrate_changed_event_.Set(); - return FakeEncoder::SetRates(newBitRate, frameRate); - } - - void WaitForSetRates(uint32_t expected_bitrate) { - EXPECT_TRUE( - bitrate_changed_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)) - << "Timed out while waiting encoder rate to be set."; - rtc::CritScope lock(&crit_); - EXPECT_EQ(expected_bitrate, target_bitrate_); - } - Call::Config GetSenderCallConfig() override { Call::Config config; config.bitrate_config.min_bitrate_bps = kMinBitrateKbps * 1000; @@ -2013,7 +1990,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // capped. encoder_config->streams.front().min_bitrate_bps = kMinBitrateKbps * 1000; encoder_config->streams.front().max_bitrate_bps = kMaxBitrateKbps * 1000; - encoder_config_ = encoder_config->Copy(); + encoder_config_ = *encoder_config; } void OnCallsCreated(Call* sender_call, Call* receiver_call) override { @@ -2029,42 +2006,32 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { void PerformTest() override { ASSERT_TRUE( init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)) - << "Timed out while waiting for encoder to be configured."; - WaitForSetRates(kStartBitrateKbps); + << "Timed out while waiting encoder to be configured."; Call::Config::BitrateConfig bitrate_config; bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000; bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; call_->SetBitrateConfig(bitrate_config); - // Encoder rate is capped by EncoderConfig max_bitrate_bps. - WaitForSetRates(kMaxBitrateKbps); - + EXPECT_TRUE(Wait()) + << "Timed out while waiting encoder to be configured."; encoder_config_.streams[0].min_bitrate_bps = 0; encoder_config_.streams[0].max_bitrate_bps = kLowerMaxBitrateKbps * 1000; - send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + send_stream_->ReconfigureVideoEncoder(encoder_config_); ASSERT_TRUE( init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); EXPECT_EQ(2, num_initializations_) << "Encoder should have been reconfigured with the new value."; - WaitForSetRates(kLowerMaxBitrateKbps); - encoder_config_.streams[0].target_bitrate_bps = encoder_config_.streams[0].min_bitrate_bps; encoder_config_.streams[0].max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; - send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + send_stream_->ReconfigureVideoEncoder(encoder_config_); ASSERT_TRUE( init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); EXPECT_EQ(3, num_initializations_) << "Encoder should have been reconfigured with the new value."; - // Expected target bitrate is the start bitrate set in the call to - // call_->SetBitrateConfig. 
- WaitForSetRates(kIncreasedStartBitrateKbps); } rtc::Event init_encode_event_; - rtc::Event bitrate_changed_event_; - rtc::CriticalSection crit_; - uint32_t target_bitrate_ GUARDED_BY(&crit_); int num_initializations_; webrtc::Call* call_; webrtc::VideoSendStream* send_stream_; @@ -2186,7 +2153,7 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_EQ(1u, encoder_config->streams.size()); encoder_config->streams[0].temporal_layer_thresholds_bps.resize( vp9_settings_.numberOfTemporalLayers - 1); - encoder_config_ = encoder_config->Copy(); + encoder_config_ = *encoder_config; } void PerformTest() override { diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc index ff86e07f98..956fd776d4 100644 --- a/webrtc/video/vie_encoder.cc +++ b/webrtc/video/vie_encoder.cc @@ -27,315 +27,64 @@ namespace webrtc { -namespace { - -VideoCodecType PayloadNameToCodecType(const std::string& payload_name) { - if (payload_name == "VP8") - return kVideoCodecVP8; - if (payload_name == "VP9") - return kVideoCodecVP9; - if (payload_name == "H264") - return kVideoCodecH264; - return kVideoCodecGeneric; -} - -VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config, - const std::string& payload_name, - int payload_type) { - const std::vector& streams = config.streams; - static const int kEncoderMinBitrateKbps = 30; - RTC_DCHECK(!streams.empty()); - RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0); - - VideoCodec video_codec; - memset(&video_codec, 0, sizeof(video_codec)); - video_codec.codecType = PayloadNameToCodecType(payload_name); - - switch (config.content_type) { - case VideoEncoderConfig::ContentType::kRealtimeVideo: - video_codec.mode = kRealtimeVideo; - break; - case VideoEncoderConfig::ContentType::kScreen: - video_codec.mode = kScreensharing; - if (config.streams.size() == 1 && - config.streams[0].temporal_layer_thresholds_bps.size() == 1) { - video_codec.targetBitrate = - config.streams[0].temporal_layer_thresholds_bps[0] / 1000; - } - break; - } - - switch (video_codec.codecType) { - case kVideoCodecVP8: { - if (config.encoder_specific_settings) { - video_codec.codecSpecific.VP8 = *reinterpret_cast( - config.encoder_specific_settings); - } else { - video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings(); - } - video_codec.codecSpecific.VP8.numberOfTemporalLayers = - static_cast( - streams.back().temporal_layer_thresholds_bps.size() + 1); - break; - } - case kVideoCodecVP9: { - if (config.encoder_specific_settings) { - video_codec.codecSpecific.VP9 = *reinterpret_cast( - config.encoder_specific_settings); - if (video_codec.mode == kScreensharing) { - video_codec.codecSpecific.VP9.flexibleMode = true; - // For now VP9 screensharing use 1 temporal and 2 spatial layers. - RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, - 1); - RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2); - } - } else { - video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings(); - } - video_codec.codecSpecific.VP9.numberOfTemporalLayers = - static_cast( - streams.back().temporal_layer_thresholds_bps.size() + 1); - break; - } - case kVideoCodecH264: { - if (config.encoder_specific_settings) { - video_codec.codecSpecific.H264 = - *reinterpret_cast( - config.encoder_specific_settings); - } else { - video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings(); - } - break; - } - default: - // TODO(pbos): Support encoder_settings codec-agnostically. 
- RTC_DCHECK(!config.encoder_specific_settings) - << "Encoder-specific settings for codec type not wired up."; - break; - } - - strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1); - video_codec.plName[kPayloadNameSize - 1] = '\0'; - video_codec.plType = payload_type; - video_codec.numberOfSimulcastStreams = - static_cast(streams.size()); - video_codec.minBitrate = streams[0].min_bitrate_bps / 1000; - if (video_codec.minBitrate < kEncoderMinBitrateKbps) - video_codec.minBitrate = kEncoderMinBitrateKbps; - RTC_DCHECK_LE(streams.size(), static_cast(kMaxSimulcastStreams)); - if (video_codec.codecType == kVideoCodecVP9) { - // If the vector is empty, bitrates will be configured automatically. - RTC_DCHECK(config.spatial_layers.empty() || - config.spatial_layers.size() == - video_codec.codecSpecific.VP9.numberOfSpatialLayers); - RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers, - kMaxSimulcastStreams); - for (size_t i = 0; i < config.spatial_layers.size(); ++i) - video_codec.spatialLayers[i] = config.spatial_layers[i]; - } - for (size_t i = 0; i < streams.size(); ++i) { - SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; - RTC_DCHECK_GT(streams[i].width, 0u); - RTC_DCHECK_GT(streams[i].height, 0u); - RTC_DCHECK_GT(streams[i].max_framerate, 0); - // Different framerates not supported per stream at the moment. - RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate); - RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0); - RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps); - RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps); - RTC_DCHECK_GE(streams[i].max_qp, 0); - - sim_stream->width = static_cast(streams[i].width); - sim_stream->height = static_cast(streams[i].height); - sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000; - sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000; - sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000; - sim_stream->qpMax = streams[i].max_qp; - sim_stream->numberOfTemporalLayers = static_cast( - streams[i].temporal_layer_thresholds_bps.size() + 1); - - video_codec.width = - std::max(video_codec.width, static_cast(streams[i].width)); - video_codec.height = - std::max(video_codec.height, static_cast(streams[i].height)); - video_codec.minBitrate = - std::min(static_cast(video_codec.minBitrate), - static_cast(streams[i].min_bitrate_bps / 1000)); - video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000; - video_codec.qpMax = std::max(video_codec.qpMax, - static_cast(streams[i].max_qp)); - } - - if (video_codec.maxBitrate == 0) { - // Unset max bitrate -> cap to one bit per pixel. - video_codec.maxBitrate = - (video_codec.width * video_codec.height * video_codec.maxFramerate) / - 1000; - } - if (video_codec.maxBitrate < kEncoderMinBitrateKbps) - video_codec.maxBitrate = kEncoderMinBitrateKbps; - - RTC_DCHECK_GT(streams[0].max_framerate, 0); - video_codec.maxFramerate = streams[0].max_framerate; - video_codec.expect_encode_from_texture = config.expect_encode_from_texture; - - return video_codec; -} - -// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle -// pipelining encoders better (multiple input frames before something comes -// out). This should effectively turn off CPU adaptations for systems that -// remotely cope with the load right now. 
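The helper being removed above derives aggregate VideoCodec limits from the per-stream settings: the codec max bitrate is the sum across simulcast streams, the min is the smallest per-stream min, and an unset max falls back to roughly one bit per pixel. A standalone sketch of that arithmetic (struct names are illustrative and the kbps rounding is simplified):

    #include <algorithm>
    #include <vector>

    struct StreamLimits { int width = 0; int height = 0; int min_kbps = 0; int max_kbps = 0; };
    struct CodecLimits { int min_kbps = 0; int max_kbps = 0; };

    CodecLimits AggregateLimits(const std::vector<StreamLimits>& streams, int max_fps) {
      CodecLimits out{streams.empty() ? 0 : streams[0].min_kbps, 0};
      int width = 0, height = 0;
      for (const StreamLimits& s : streams) {
        out.min_kbps = std::min(out.min_kbps, s.min_kbps);
        out.max_kbps += s.max_kbps;
        width = std::max(width, s.width);
        height = std::max(height, s.height);
      }
      if (out.max_kbps == 0)  // Unset max: cap to roughly one bit per pixel.
        out.max_kbps = width * height * max_fps / 1000;
      return out;
    }
    // Two simulcast streams at 300 and 900 kbps give max_kbps == 1200.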
-CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) { - CpuOveruseOptions options; - if (full_overuse_time) { - options.low_encode_usage_threshold_percent = 150; - options.high_encode_usage_threshold_percent = 200; - } - return options; -} - -} // namespace - -class ViEEncoder::EncodeTask : public rtc::QueuedTask { - public: - EncodeTask(const VideoFrame& frame, ViEEncoder* vie_encoder) - : vie_encoder_(vie_encoder) { - frame_.ShallowCopy(frame); - ++vie_encoder_->posted_frames_waiting_for_encode_; - } - - private: - bool Run() override { - RTC_DCHECK_GT(vie_encoder_->posted_frames_waiting_for_encode_.Value(), 0); - if (--vie_encoder_->posted_frames_waiting_for_encode_ == 0) { - vie_encoder_->EncodeVideoFrame(frame_); - } else { - // There is a newer frame in flight. Do not encode this frame. - LOG(LS_VERBOSE) - << "Incoming frame dropped due to that the encoder is blocked."; - } - return true; - } - VideoFrame frame_; - ViEEncoder* vie_encoder_; -}; - ViEEncoder::ViEEncoder(uint32_t number_of_cores, + ProcessThread* module_process_thread, SendStatisticsProxy* stats_proxy, - const VideoSendStream::Config::EncoderSettings& settings, - rtc::VideoSinkInterface* pre_encode_callback, - LoadObserver* overuse_callback, - EncodedFrameObserver* encoder_timing) - : shutdown_event_(true /* manual_reset */, false), - number_of_cores_(number_of_cores), - settings_(settings), + OveruseFrameDetector* overuse_detector, + EncodedImageCallback* sink) + : number_of_cores_(number_of_cores), + sink_(sink), vp_(VideoProcessing::Create()), video_sender_(Clock::GetRealTimeClock(), this, this), - overuse_detector_(Clock::GetRealTimeClock(), - GetCpuOveruseOptions(settings.full_overuse_time), - this, - encoder_timing, - stats_proxy), - load_observer_(overuse_callback), stats_proxy_(stats_proxy), - pre_encode_callback_(pre_encode_callback), - module_process_thread_(nullptr), + overuse_detector_(overuse_detector), + time_of_last_frame_activity_ms_(std::numeric_limits::max()), encoder_config_(), - encoder_start_bitrate_bps_(0), last_observed_bitrate_bps_(0), encoder_paused_and_dropped_frame_(false), + module_process_thread_(module_process_thread), has_received_sli_(false), picture_id_sli_(0), has_received_rpsi_(false), picture_id_rpsi_(0), - clock_(Clock::GetRealTimeClock()), - last_captured_timestamp_(0), - delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() - - clock_->TimeInMilliseconds()), - encoder_queue_("EncoderQueue") { - vp_->EnableTemporalDecimation(false); + video_suspended_(false) { + module_process_thread_->RegisterModule(&video_sender_); + vp_->EnableTemporalDecimation(true); +} - encoder_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - video_sender_.RegisterExternalEncoder( - settings_.encoder, settings_.payload_type, settings_.internal_source); - }); +vcm::VideoSender* ViEEncoder::video_sender() { + return &video_sender_; } ViEEncoder::~ViEEncoder() { - RTC_DCHECK(shutdown_event_.Wait(0)) - << "Must call ::Stop() before destruction."; -} - -void ViEEncoder::Stop() { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this] { Stop(); }); - shutdown_event_.Wait(rtc::Event::kForever); - return; - } - RTC_DCHECK_RUN_ON(&encoder_queue_); - video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type, false); - shutdown_event_.Set(); -} - -void ViEEncoder::RegisterProcessThread(ProcessThread* module_process_thread) { - RTC_DCHECK(!module_process_thread_); - module_process_thread_ = module_process_thread; - 
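The EncodeTask being removed above implements a keep-only-the-newest-frame policy: each captured frame increments a counter when its task is posted, and a task only encodes if it still holds the newest frame when it runs. A minimal sketch of that gating (FrameGate is an illustrative stand-in for the Atomic32 counter):

    #include <atomic>

    class FrameGate {
     public:
      void OnFramePosted() { ++pending_; }
      // Returns true only for the task holding the newest posted frame;
      // older frames are dropped because a fresher one is already queued.
      bool ShouldEncodeOnRun() { return --pending_ == 0; }

     private:
      std::atomic<int> pending_{0};
    };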
module_process_thread_->RegisterModule(&overuse_detector_); - module_process_thread_->RegisterModule(&video_sender_); - module_process_thread_checker_.DetachFromThread(); -} - -void ViEEncoder::DeRegisterProcessThread() { - module_process_thread_->DeRegisterModule(&overuse_detector_); module_process_thread_->DeRegisterModule(&video_sender_); } -void ViEEncoder::SetSink(EncodedImageCallback* sink) { - encoder_queue_.PostTask([this, sink] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - sink_ = sink; - }); +int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder, + uint8_t pl_type, + bool internal_source) { + video_sender_.RegisterExternalEncoder(encoder, pl_type, internal_source); + return 0; } -void ViEEncoder::SetStartBitrate(int start_bitrate_bps) { - encoder_queue_.PostTask([this, start_bitrate_bps] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - encoder_start_bitrate_bps_ = start_bitrate_bps; - }); +int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) { + video_sender_.RegisterExternalEncoder(nullptr, pl_type, false); + return 0; } -void ViEEncoder::ConfigureEncoder(const VideoEncoderConfig& config, - size_t max_data_payload_length) { - VideoCodec video_codec = VideoEncoderConfigToVideoCodec( - config, settings_.payload_name, settings_.payload_type); - encoder_queue_.PostTask([this, video_codec, max_data_payload_length] { - ConfigureEncoderInternal(video_codec, max_data_payload_length); - }); - return; -} - -void ViEEncoder::ConfigureEncoderInternal(const VideoCodec& video_codec, - size_t max_data_payload_length) { - RTC_DCHECK_RUN_ON(&encoder_queue_); - RTC_DCHECK_GE(encoder_start_bitrate_bps_, 0); - RTC_DCHECK(sink_); - +void ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec, + size_t max_data_payload_length) { // Setting target width and height for VPM. RTC_CHECK_EQ(VPM_OK, vp_->SetTargetResolution(video_codec.width, video_codec.height, video_codec.maxFramerate)); - - encoder_config_ = video_codec; - encoder_config_.startBitrate = encoder_start_bitrate_bps_ / 1000; - encoder_config_.startBitrate = - std::max(encoder_config_.startBitrate, video_codec.minBitrate); - encoder_config_.startBitrate = - std::min(encoder_config_.startBitrate, video_codec.maxBitrate); + { + rtc::CritScope lock(&data_cs_); + encoder_config_ = video_codec; + } bool success = video_sender_.RegisterSendCodec( - &encoder_config_, number_of_cores_, + &video_codec, number_of_cores_, static_cast(max_data_payload_length)) == VCM_OK; if (!success) { @@ -361,58 +110,15 @@ void ViEEncoder::ConfigureEncoderInternal(const VideoCodec& video_codec, } } -void ViEEncoder::IncomingCapturedFrame(const VideoFrame& video_frame) { - RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_); - stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height()); - - VideoFrame incoming_frame = video_frame; - - // Local time in webrtc time base. - int64_t current_time = clock_->TimeInMilliseconds(); - incoming_frame.set_render_time_ms(current_time); - - // Capture time may come from clock with an offset and drift from clock_. - int64_t capture_ntp_time_ms; - if (video_frame.ntp_time_ms() != 0) { - capture_ntp_time_ms = video_frame.ntp_time_ms(); - } else if (video_frame.render_time_ms() != 0) { - capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_; - } else { - capture_ntp_time_ms = current_time + delta_ntp_internal_ms_; - } - incoming_frame.set_ntp_time_ms(capture_ntp_time_ms); - - // Convert NTP time, in ms, to RTP timestamp. 
- const int kMsToRtpTimestamp = 90; - incoming_frame.set_timestamp( - kMsToRtpTimestamp * static_cast(incoming_frame.ntp_time_ms())); - - if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) { - // We don't allow the same capture time for two frames, drop this one. - LOG(LS_WARNING) << "Same/old NTP timestamp (" - << incoming_frame.ntp_time_ms() - << " <= " << last_captured_timestamp_ - << ") for incoming frame. Dropping."; - return; - } - - last_captured_timestamp_ = incoming_frame.ntp_time_ms(); - overuse_detector_.FrameCaptured(incoming_frame); - encoder_queue_.PostTask( - std::unique_ptr(new EncodeTask(incoming_frame, this))); -} - bool ViEEncoder::EncoderPaused() const { - RTC_DCHECK_RUN_ON(&encoder_queue_); // Pause video if paused by caller or as long as the network is down or the // pacer queue has grown too large in buffered mode. // If the pacer queue has grown too large or the network is down, // last_observed_bitrate_bps_ will be 0. - return last_observed_bitrate_bps_ == 0; + return video_suspended_ || last_observed_bitrate_bps_ == 0; } void ViEEncoder::TraceFrameDropStart() { - RTC_DCHECK_RUN_ON(&encoder_queue_); // Start trace event only on the first frame after encoder is paused. if (!encoder_paused_and_dropped_frame_) { TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this); @@ -422,7 +128,6 @@ void ViEEncoder::TraceFrameDropStart() { } void ViEEncoder::TraceFrameDropEnd() { - RTC_DCHECK_RUN_ON(&encoder_queue_); // End trace event on first frame after encoder resumes, if frame was dropped. if (encoder_paused_and_dropped_frame_) { TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this); @@ -431,15 +136,17 @@ void ViEEncoder::TraceFrameDropEnd() { } void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { - RTC_DCHECK_RUN_ON(&encoder_queue_); - if (pre_encode_callback_) - pre_encode_callback_->OnFrame(video_frame); - - if (EncoderPaused()) { - TraceFrameDropStart(); - return; + VideoCodecType codec_type; + { + rtc::CritScope lock(&data_cs_); + time_of_last_frame_activity_ms_ = rtc::TimeMillis(); + if (EncoderPaused()) { + TraceFrameDropStart(); + return; + } + TraceFrameDropEnd(); + codec_type = encoder_config_.codecType; } - TraceFrameDropEnd(); TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(), "Encode"); @@ -454,10 +161,11 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { } } - if (encoder_config_.codecType == webrtc::kVideoCodecVP8) { + if (codec_type == webrtc::kVideoCodecVP8) { webrtc::CodecSpecificInfo codec_specific_info; codec_specific_info.codecType = webrtc::kVideoCodecVP8; - + { + rtc::CritScope lock(&data_cs_); codec_specific_info.codecSpecific.VP8.hasReceivedRPSI = has_received_rpsi_; codec_specific_info.codecSpecific.VP8.hasReceivedSLI = @@ -468,6 +176,7 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { picture_id_sli_; has_received_sli_ = false; has_received_rpsi_ = false; + } video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info); return; @@ -476,21 +185,22 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { } void ViEEncoder::SendKeyFrame() { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this] { SendKeyFrame(); }); - return; - } - RTC_DCHECK_RUN_ON(&encoder_queue_); video_sender_.IntraFrameRequest(0); } +int64_t ViEEncoder::time_of_last_frame_activity_ms() { + rtc::CritScope lock(&data_cs_); + return time_of_last_frame_activity_ms_; +} + EncodedImageCallback::Result ViEEncoder::OnEncodedImage( const EncodedImage& encoded_image, 
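The capture-path code being removed above normalizes capture time to NTP milliseconds and scales it by the 90 kHz video RTP clock (90 ticks per millisecond). A tiny worked sketch of that conversion:

    #include <cstdint>

    constexpr int kMsToRtpTimestamp = 90;  // Video RTP clock: 90 kHz = 90 ticks/ms.

    uint32_t NtpMsToRtpTimestamp(int64_t ntp_time_ms) {
      return static_cast<uint32_t>(kMsToRtpTimestamp * ntp_time_ms);
    }
    // Example: a frame captured at NTP time 1000 ms maps to RTP timestamp 90000.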
const CodecSpecificInfo* codec_specific_info, const RTPFragmentationHeader* fragmentation) { - // Encoded is called on whatever thread the real encoder implementation run - // on. In the case of hardware encoders, there might be several encoders - // running in parallel on different threads. + { + rtc::CritScope lock(&data_cs_); + time_of_last_frame_activity_ms_ = rtc::TimeMillis(); + } if (stats_proxy_) { stats_proxy_->OnSendEncodedImage(encoded_image, codec_specific_info); } @@ -498,45 +208,30 @@ EncodedImageCallback::Result ViEEncoder::OnEncodedImage( EncodedImageCallback::Result result = sink_->OnEncodedImage(encoded_image, codec_specific_info, fragmentation); - overuse_detector_.FrameSent(encoded_image._timeStamp); + overuse_detector_->FrameSent(encoded_image._timeStamp); return result; } void ViEEncoder::SendStatistics(uint32_t bit_rate, uint32_t frame_rate, const std::string& encoder_name) { - RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread()); if (stats_proxy_) stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate, encoder_name); } void ViEEncoder::OnReceivedSLI(uint8_t picture_id) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, picture_id] { OnReceivedSLI(picture_id); }); - return; - } - RTC_DCHECK_RUN_ON(&encoder_queue_); + rtc::CritScope lock(&data_cs_); picture_id_sli_ = picture_id; has_received_sli_ = true; } void ViEEncoder::OnReceivedRPSI(uint64_t picture_id) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, picture_id] { OnReceivedRPSI(picture_id); }); - return; - } - RTC_DCHECK_RUN_ON(&encoder_queue_); + rtc::CritScope lock(&data_cs_); picture_id_rpsi_ = picture_id; has_received_rpsi_ = true; } void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask( - [this, stream_index] { OnReceivedIntraFrameRequest(stream_index); }); - return; - } - RTC_DCHECK_RUN_ON(&encoder_queue_); // Key frame request from remote side, signal to VCM. TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); video_sender_.IntraFrameRequest(stream_index); @@ -545,29 +240,29 @@ void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) { void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps, uint8_t fraction_lost, int64_t round_trip_time_ms) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask( - [this, bitrate_bps, fraction_lost, round_trip_time_ms] { - OnBitrateUpdated(bitrate_bps, fraction_lost, round_trip_time_ms); - }); - return; - } - RTC_DCHECK_RUN_ON(&encoder_queue_); - RTC_DCHECK(sink_) << "sink_ must be set before the encoder is active."; - LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << bitrate_bps << " packet loss " << static_cast(fraction_lost) << " rtt " << round_trip_time_ms; - video_sender_.SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms); - - encoder_start_bitrate_bps_ = - bitrate_bps != 0 ? bitrate_bps : encoder_start_bitrate_bps_; + bool video_suspension_changed; bool video_is_suspended = bitrate_bps == 0; - bool video_suspension_changed = - video_is_suspended != (last_observed_bitrate_bps_ == 0); - last_observed_bitrate_bps_ = bitrate_bps; + { + rtc::CritScope lock(&data_cs_); + last_observed_bitrate_bps_ = bitrate_bps; + video_suspension_changed = video_suspended_ != video_is_suspended; + video_suspended_ = video_is_suspended; + // Set |time_of_last_frame_activity_ms_| to now if this is the first time + // the encoder is supposed to produce encoded frames. + // TODO(perkj): Remove this hack. 
It is here to avoid a race that the + // encoder report that it has timed out before it has processed the first + // frame. + if (last_observed_bitrate_bps_ != 0 && + time_of_last_frame_activity_ms_ == + std::numeric_limits::max()) { + time_of_last_frame_activity_ms_ = rtc::TimeMillis(); + } + } if (stats_proxy_ && video_suspension_changed) { LOG(LS_INFO) << "Video suspend state changed to: " @@ -576,19 +271,4 @@ void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps, } } -void ViEEncoder::OveruseDetected() { - RTC_DCHECK_RUN_ON(&module_process_thread_checker_); - // TODO(perkj): When ViEEncoder inherit rtc::VideoSink instead of - // VideoCaptureInput |load_observer_| should be removed and overuse be - // expressed as rtc::VideoSinkWants instead. - if (load_observer_) - load_observer_->OnLoadUpdate(LoadObserver::kOveruse); -} - -void ViEEncoder::NormalUsage() { - RTC_DCHECK_RUN_ON(&module_process_thread_checker_); - if (load_observer_) - load_observer_->OnLoadUpdate(LoadObserver::kUnderuse); -} - } // namespace webrtc diff --git a/webrtc/video/vie_encoder.h b/webrtc/video/vie_encoder.h index 42fe03b188..f3f8340c28 100644 --- a/webrtc/video/vie_encoder.h +++ b/webrtc/video/vie_encoder.h @@ -16,72 +16,82 @@ #include #include "webrtc/base/criticalsection.h" -#include "webrtc/base/event.h" -#include "webrtc/base/sequenced_task_checker.h" -#include "webrtc/base/task_queue.h" -#include "webrtc/call.h" +#include "webrtc/base/scoped_ref_ptr.h" +#include "webrtc/base/thread_annotations.h" #include "webrtc/common_types.h" +#include "webrtc/video_encoder.h" #include "webrtc/media/base/videosinkinterface.h" +#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "webrtc/modules/video_coding/include/video_coding_defines.h" #include "webrtc/modules/video_coding/video_coding_impl.h" #include "webrtc/modules/video_processing/include/video_processing.h" -#include "webrtc/system_wrappers/include/atomic32.h" -#include "webrtc/video/overuse_frame_detector.h" -#include "webrtc/video_encoder.h" -#include "webrtc/video_send_stream.h" #include "webrtc/typedefs.h" namespace webrtc { +class Config; +class EncodedImageCallback; +class OveruseFrameDetector; +class PacedSender; class ProcessThread; class SendStatisticsProxy; +class ViEBitrateObserver; +class ViEEffectFilter; +class VideoEncoder; // VieEncoder represent a video encoder that accepts raw video frames as input // and produces an encoded bit stream. // Usage: -// Instantiate. -// Call SetStartRate and SetSink. -// Call ConfigureEncoder with the codec settings. -// Provide frames to encode by calling IncomingCapturedFrame. -// Call Stop() when done. -class ViEEncoder : public VideoCaptureInput, - public EncodedImageCallback, - public VCMSendStatisticsCallback, - public CpuOveruseObserver { +// 1. Instantiate +// 2. Call Init +// 3. Call RegisterExternalEncoder if available. +// 4. Call SetEncoder with the codec settings and the object that shall receive +// the encoded bit stream. +// 5. For each available raw video frame call EncodeVideoFrame. 
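In the bitrate update above, a rate of zero marks the video as suspended and only transitions of that flag are logged and reported to stats. A simplified sketch of the transition tracking (SuspendTracker is an illustrative stand-in, not the WebRTC class):

    #include <cstdint>

    class SuspendTracker {
     public:
      // Returns true when the suspended state flipped with this update.
      bool OnBitrateUpdated(uint32_t bitrate_bps) {
        const bool suspended = (bitrate_bps == 0);
        const bool changed = (suspended != suspended_);
        suspended_ = suspended;
        return changed;
      }
      bool suspended() const { return suspended_; }

     private:
      bool suspended_ = false;
    };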
+class ViEEncoder : public EncodedImageCallback, + public VCMSendStatisticsCallback { public: + friend class ViEBitrateObserver; + ViEEncoder(uint32_t number_of_cores, + ProcessThread* module_process_thread, SendStatisticsProxy* stats_proxy, - const webrtc::VideoSendStream::Config::EncoderSettings& settings, - rtc::VideoSinkInterface* pre_encode_callback, - LoadObserver* overuse_callback, - EncodedFrameObserver* encoder_timing); + OveruseFrameDetector* overuse_detector, + EncodedImageCallback* sink); ~ViEEncoder(); - // RegisterProcessThread register |module_process_thread| with those objects - // that use it. Registration has to happen on the thread where - // |module_process_thread| was created (libjingle's worker thread). - // TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue. - void RegisterProcessThread(ProcessThread* module_process_thread); - void DeRegisterProcessThread(); - void SetSink(EncodedImageCallback* sink); + vcm::VideoSender* video_sender(); - // TODO(perkj): Can we remove VideoCodec.startBitrate ? - void SetStartBitrate(int start_bitrate_bps); + // Returns the id of the owning channel. + int Owner() const; - void ConfigureEncoder(const VideoEncoderConfig& config, - size_t max_data_payload_length); - - // Permanently stop encoding. After this method has returned, it is - // guaranteed that no encoded frames will be delivered to the sink. - void Stop(); - - // Implements VideoCaptureInput. - // TODO(perkj): Refactor ViEEncoder to inherit rtc::VideoSink instead of - // VideoCaptureInput. - void IncomingCapturedFrame(const VideoFrame& video_frame) override; + // Codec settings. + int32_t RegisterExternalEncoder(VideoEncoder* encoder, + uint8_t pl_type, + bool internal_source); + int32_t DeRegisterExternalEncoder(uint8_t pl_type); + void SetEncoder(const VideoCodec& video_codec, + size_t max_data_payload_length); + void EncodeVideoFrame(const VideoFrame& video_frame); void SendKeyFrame(); + // Returns the time when the encoder last received an input frame or produced + // an encoded frame. + int64_t time_of_last_frame_activity_ms(); + + + // Implements EncodedImageCallback. + EncodedImageCallback::Result OnEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info, + const RTPFragmentationHeader* fragmentation) override; + + // Implements VideoSendStatisticsCallback. + void SendStatistics(uint32_t bit_rate, + uint32_t frame_rate, + const std::string& encoder_name) override; + // virtual to test EncoderStateFeedback with mocks. virtual void OnReceivedIntraFrameRequest(size_t stream_index); virtual void OnReceivedSLI(uint8_t picture_id); @@ -92,69 +102,37 @@ class ViEEncoder : public VideoCaptureInput, int64_t round_trip_time_ms); private: - class EncodeTask; - - void ConfigureEncoderInternal(const VideoCodec& video_codec, - size_t max_data_payload_length); - - // Implements VideoSendStatisticsCallback. - void SendStatistics(uint32_t bit_rate, - uint32_t frame_rate, - const std::string& encoder_name) override; - - void EncodeVideoFrame(const VideoFrame& frame); - - // Implements EncodedImageCallback. - EncodedImageCallback::Result OnEncodedImage( - const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; - - // webrtc::CpuOveruseObserver implementation. 
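The class comment above spells out the intended call order (instantiate, register an external encoder, set the codec, then feed raw frames). A minimal usage sketch of that order using stub types; the real constructor dependencies (process thread, stats proxy, overuse detector, sink) and argument values are omitted, and 96 is an arbitrary payload type:

    struct CodecStub { int payload_type = 0; };
    struct FrameStub {};

    class EncoderStub {
     public:
      void RegisterExternalEncoder(int /*payload_type*/) {}  // step 3
      void SetEncoder(const CodecStub& /*codec*/) {}         // step 4
      void EncodeVideoFrame(const FrameStub& /*frame*/) {}   // step 5
    };

    void UseEncoder() {
      EncoderStub encoder;                  // steps 1-2: construct / initialize
      encoder.RegisterExternalEncoder(96);
      encoder.SetEncoder(CodecStub{96});
      for (int i = 0; i < 3; ++i)
        encoder.EncodeVideoFrame(FrameStub{});
    }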
- void OveruseDetected() override; - void NormalUsage() override; - - bool EncoderPaused() const; - void TraceFrameDropStart(); - void TraceFrameDropEnd(); - - rtc::Event shutdown_event_; + bool EncoderPaused() const EXCLUSIVE_LOCKS_REQUIRED(data_cs_); + void TraceFrameDropStart() EXCLUSIVE_LOCKS_REQUIRED(data_cs_); + void TraceFrameDropEnd() EXCLUSIVE_LOCKS_REQUIRED(data_cs_); const uint32_t number_of_cores_; - EncodedImageCallback* sink_; - const VideoSendStream::Config::EncoderSettings settings_; + EncodedImageCallback* const sink_; const std::unique_ptr vp_; - vcm::VideoSender video_sender_ ACCESS_ON(&encoder_queue_); - OveruseFrameDetector overuse_detector_; - LoadObserver* const load_observer_ ACCESS_ON(&module_process_thread_checker_); + vcm::VideoSender video_sender_; + + rtc::CriticalSection data_cs_; SendStatisticsProxy* const stats_proxy_; - rtc::VideoSinkInterface* const pre_encode_callback_; + OveruseFrameDetector* const overuse_detector_; + + // The time we last received an input frame or encoded frame. This is used to + // track when video is stopped long enough that we also want to stop sending + // padding. + int64_t time_of_last_frame_activity_ms_ GUARDED_BY(data_cs_); + VideoCodec encoder_config_ GUARDED_BY(data_cs_); + uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_); + bool encoder_paused_and_dropped_frame_ GUARDED_BY(data_cs_); + ProcessThread* module_process_thread_; - rtc::ThreadChecker module_process_thread_checker_; - VideoCodec encoder_config_ ACCESS_ON(&encoder_queue_); + bool has_received_sli_ GUARDED_BY(data_cs_); + uint8_t picture_id_sli_ GUARDED_BY(data_cs_); + bool has_received_rpsi_ GUARDED_BY(data_cs_); + uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_); - int encoder_start_bitrate_bps_ ACCESS_ON(&encoder_queue_); - uint32_t last_observed_bitrate_bps_ ACCESS_ON(&encoder_queue_); - bool encoder_paused_and_dropped_frame_ ACCESS_ON(&encoder_queue_); - bool has_received_sli_ ACCESS_ON(&encoder_queue_); - uint8_t picture_id_sli_ ACCESS_ON(&encoder_queue_); - bool has_received_rpsi_ ACCESS_ON(&encoder_queue_); - uint64_t picture_id_rpsi_ ACCESS_ON(&encoder_queue_); - Clock* const clock_; - - rtc::RaceChecker incoming_frame_race_checker_; - Atomic32 posted_frames_waiting_for_encode_; - // Used to make sure incoming time stamp is increasing for every frame. - int64_t last_captured_timestamp_ GUARDED_BY(incoming_frame_race_checker_); - // Delta used for translating between NTP and internal timestamps. - const int64_t delta_ntp_internal_ms_; - - // All public methods are proxied to |encoder_queue_|. It must must be - // destroyed first to make sure no tasks are run that use other members. - rtc::TaskQueue encoder_queue_; + bool video_suspended_ GUARDED_BY(data_cs_); }; } // namespace webrtc diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc deleted file mode 100644 index 698dd3a540..0000000000 --- a/webrtc/video/vie_encoder_unittest.cc +++ /dev/null @@ -1,255 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "testing/gtest/include/gtest/gtest.h" -#include "webrtc/base/logging.h" -#include "webrtc/test/encoder_settings.h" -#include "webrtc/test/fake_encoder.h" -#include "webrtc/video/send_statistics_proxy.h" -#include "webrtc/video/vie_encoder.h" - -namespace webrtc { - -class ViEEncoderTest : public ::testing::Test { - public: - static const int kDefaultTimeoutMs = 30 * 1000; - - ViEEncoderTest() - : video_send_config_(VideoSendStream::Config(nullptr)), - fake_encoder_(), - stats_proxy_(Clock::GetRealTimeClock(), - video_send_config_, - webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo), - sink_(&fake_encoder_) {} - - void SetUp() override { - video_send_config_ = VideoSendStream::Config(nullptr); - video_send_config_.encoder_settings.encoder = &fake_encoder_; - video_send_config_.encoder_settings.payload_name = "FAKE"; - video_send_config_.encoder_settings.payload_type = 125; - - video_encoder_config_.streams = test::CreateVideoStreams(1); - - vie_encoder_.reset(new ViEEncoder( - 1 /* number_of_cores */, &stats_proxy_, - video_send_config_.encoder_settings, nullptr /* pre_encode_callback */, - nullptr /* overuse_callback */, nullptr /* encoder_timing */)); - vie_encoder_->SetSink(&sink_); - vie_encoder_->SetStartBitrate(10000); - vie_encoder_->ConfigureEncoder(video_encoder_config_, 1440); - } - - VideoFrame CreateFrame(int64_t ntp_ts, rtc::Event* destruction_event) const { - class TestBuffer : public webrtc::I420Buffer { - public: - TestBuffer(rtc::Event* event, int width, int height) - : I420Buffer(width, height), event_(event) {} - - private: - friend class rtc::RefCountedObject; - ~TestBuffer() override { - if (event_) - event_->Set(); - } - rtc::Event* const event_; - }; - - VideoFrame frame( - new rtc::RefCountedObject( - destruction_event, - static_cast(video_encoder_config_.streams[0].width), - static_cast(video_encoder_config_.streams[0].height)), - 99, 99, kVideoRotation_0); - frame.set_ntp_time_ms(ntp_ts); - return frame; - } - - class TestEncoder : public test::FakeEncoder { - public: - TestEncoder() - : FakeEncoder(Clock::GetRealTimeClock()), - continue_encode_event_(false, false) {} - - int32_t Encode(const VideoFrame& input_image, - const CodecSpecificInfo* codec_specific_info, - const std::vector* frame_types) override { - bool block_encode; - { - rtc::CritScope lock(&crit_); - EXPECT_GT(input_image.timestamp(), timestamp_); - EXPECT_GT(input_image.ntp_time_ms(), ntp_time_ms_); - EXPECT_EQ(input_image.timestamp(), input_image.ntp_time_ms() * 90); - - timestamp_ = input_image.timestamp(); - ntp_time_ms_ = input_image.ntp_time_ms(); - block_encode = block_next_encode_; - block_next_encode_ = false; - } - int32_t result = - FakeEncoder::Encode(input_image, codec_specific_info, frame_types); - if (block_encode) - continue_encode_event_.Wait(kDefaultTimeoutMs); - return result; - } - - void BlockNextEncode() { - rtc::CritScope lock(&crit_); - block_next_encode_ = true; - } - - void ContinueEncode() { continue_encode_event_.Set(); } - - void CheckLastTimeStampsMatch(int64_t ntp_time_ms, - uint32_t timestamp) const { - rtc::CritScope lock(&crit_); - EXPECT_EQ(timestamp_, timestamp); - EXPECT_EQ(ntp_time_ms_, ntp_time_ms); - } - - private: - rtc::CriticalSection crit_; - bool block_next_encode_ = false; - rtc::Event continue_encode_event_; - uint32_t timestamp_ = 0; - int64_t ntp_time_ms_ = 0; - }; - - class TestSink : public EncodedImageCallback { - public: - explicit TestSink(TestEncoder* test_encoder) - : test_encoder_(test_encoder), 
encoded_frame_event_(false, false) {} - - int32_t Encoded(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { - rtc::CritScope lock(&crit_); - EXPECT_TRUE(expect_frames_); - timestamp_ = encoded_image._timeStamp; - encoded_frame_event_.Set(); - return 0; - } - - void WaitForEncodedFrame(int64_t expected_ntp_time) { - uint32_t timestamp = 0; - encoded_frame_event_.Wait(kDefaultTimeoutMs); - { - rtc::CritScope lock(&crit_); - timestamp = timestamp_; - } - test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp); - } - - void SetExpectNoFrames() { - rtc::CritScope lock(&crit_); - expect_frames_ = false; - } - - private: - rtc::CriticalSection crit_; - TestEncoder* test_encoder_; - rtc::Event encoded_frame_event_; - uint32_t timestamp_ = 0; - bool expect_frames_ = true; - }; - - VideoSendStream::Config video_send_config_; - VideoEncoderConfig video_encoder_config_; - TestEncoder fake_encoder_; - SendStatisticsProxy stats_proxy_; - TestSink sink_; - std::unique_ptr vie_encoder_; -}; - -TEST_F(ViEEncoderTest, EncodeOneFrame) { - const int kTargetBitrateBps = 100000; - vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); - rtc::Event frame_destroyed_event(false, false); - vie_encoder_->IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); - sink_.WaitForEncodedFrame(1); - frame_destroyed_event.Wait(kDefaultTimeoutMs); - vie_encoder_->Stop(); -} - -TEST_F(ViEEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) { - // Dropped since no target bitrate has been set. - rtc::Event frame_destroyed_event(false, false); - vie_encoder_->IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); - frame_destroyed_event.Wait(kDefaultTimeoutMs); - - const int kTargetBitrateBps = 100000; - vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); - - vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr)); - sink_.WaitForEncodedFrame(2); - vie_encoder_->Stop(); -} - -TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) { - const int kTargetBitrateBps = 100000; - vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); - vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr)); - sink_.WaitForEncodedFrame(1); - - vie_encoder_->OnBitrateUpdated(0, 0, 0); - // Dropped since bitrate is zero. - vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr)); - - vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); - vie_encoder_->IncomingCapturedFrame(CreateFrame(3, nullptr)); - sink_.WaitForEncodedFrame(3); - vie_encoder_->Stop(); -} - -TEST_F(ViEEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) { - const int kTargetBitrateBps = 100000; - vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); - vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr)); - sink_.WaitForEncodedFrame(1); - - // This frame will be dropped since it has the same ntp timestamp. 
-  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
-
-  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
-  sink_.WaitForEncodedFrame(2);
-  vie_encoder_->Stop();
-}
-
-TEST_F(ViEEncoderTest, DropsFrameAfterStop) {
-  const int kTargetBitrateBps = 100000;
-  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
-
-  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
-
-  vie_encoder_->Stop();
-  sink_.SetExpectNoFrames();
-  rtc::Event frame_destroyed_event(false, false);
-  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, &frame_destroyed_event));
-  frame_destroyed_event.Wait(kDefaultTimeoutMs);
-}
-
-TEST_F(ViEEncoderTest, DropsPendingFramesOnSlowEncode) {
-  const int kTargetBitrateBps = 100000;
-  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
-
-  fake_encoder_.BlockNextEncode();
-  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
-  sink_.WaitForEncodedFrame(1);
-  // Here, the encoder thread will be blocked in the TestEncoder waiting for a
-  // call to ContinueEncode.
-  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
-  vie_encoder_->IncomingCapturedFrame(CreateFrame(3, nullptr));
-  fake_encoder_.ContinueEncode();
-  sink_.WaitForEncodedFrame(3);
-
-  vie_encoder_->Stop();
-}
-
-}  // namespace webrtc
diff --git a/webrtc/video/webrtc_video.gypi b/webrtc/video/webrtc_video.gypi
index af162dead7..6b00e69d15 100644
--- a/webrtc/video/webrtc_video.gypi
+++ b/webrtc/video/webrtc_video.gypi
@@ -48,6 +48,8 @@
         'video/stats_counter.h',
         'video/stream_synchronization.cc',
         'video/stream_synchronization.h',
+        'video/video_capture_input.cc',
+        'video/video_capture_input.h',
         'video/video_decoder.cc',
         'video/video_encoder.cc',
         'video/video_receive_stream.cc',
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index afdec43c74..b79f6dd30e 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -13,13 +13,13 @@
 #include <map>
 #include <string>
-#include <utility>
 
 #include "webrtc/common_types.h"
 #include "webrtc/common_video/include/frame_callback.h"
 #include "webrtc/config.h"
 #include "webrtc/media/base/videosinkinterface.h"
 #include "webrtc/transport.h"
+#include "webrtc/media/base/videosinkinterface.h"
 
 namespace webrtc {
 
@@ -72,28 +72,13 @@ class VideoSendStream {
   };
 
   struct Config {
-   public:
     Config() = delete;
-    Config(Config&&) = default;
     explicit Config(Transport* send_transport)
         : send_transport(send_transport) {}
-    Config& operator=(Config&&) = default;
-    Config& operator=(const Config&) = delete;
-
-    // Mostly used by tests. Avoid creating copies if you can.
-    Config Copy() const { return Config(*this); }
-
     std::string ToString() const;
 
     struct EncoderSettings {
-      EncoderSettings() = default;
-      EncoderSettings(std::string payload_name,
-                      int payload_type,
-                      VideoEncoder* encoder)
-          : payload_name(std::move(payload_name)),
-            payload_type(payload_type),
-            encoder(encoder) {}
       std::string ToString() const;
 
       std::string payload_name;
@@ -166,6 +151,10 @@ class VideoSendStream {
     // than the measuring window, since the sample data will have been dropped.
     EncodedFrameObserver* post_encode_callback = nullptr;
 
+    // Renderer for local preview. The local renderer will be called even if
+    // sending hasn't started. 'nullptr' disables local rendering.
+    rtc::VideoSinkInterface<VideoFrame>* local_renderer = nullptr;
+
     // Expected delay needed by the renderer, i.e. the frame will be delivered
     // this many milliseconds, if possible, earlier than expected render time.
     // Only valid if |local_renderer| is set.
@@ -179,11 +168,6 @@ class VideoSendStream {
     // below the minimum configured bitrate. If this variable is false, the
     // stream may send at a rate higher than the estimated available bitrate.
     bool suspend_below_min_bitrate = false;
-
-   private:
-    // Access to the copy constructor is private to force use of the Copy()
-    // method for those exceptional cases where we do use it.
-    Config(const Config&) = default;
   };
 
   // Starts stream activity.
@@ -200,7 +184,7 @@ class VideoSendStream {
   // Set which streams to send. Must have at least as many SSRCs as configured
   // in the config. Encoder settings are passed on to the encoder instance along
   // with the VideoStream settings.
-  virtual void ReconfigureVideoEncoder(VideoEncoderConfig config) = 0;
+  virtual void ReconfigureVideoEncoder(const VideoEncoderConfig& config) = 0;
 
   virtual Stats GetStats() = 0;
diff --git a/webrtc/webrtc_tests.gypi b/webrtc/webrtc_tests.gypi
index cc837a5312..6a7936cc5f 100644
--- a/webrtc/webrtc_tests.gypi
+++ b/webrtc/webrtc_tests.gypi
@@ -378,10 +378,10 @@
         'video/send_statistics_proxy_unittest.cc',
         'video/stats_counter_unittest.cc',
         'video/stream_synchronization_unittest.cc',
+        'video/video_capture_input_unittest.cc',
         'video/video_decoder_unittest.cc',
         'video/video_encoder_unittest.cc',
         'video/video_send_stream_tests.cc',
-        'video/vie_encoder_unittest.cc',
         'video/vie_remb_unittest.cc',
       ],
       'dependencies': [