VideoStreamEncoder: simplify threading.

VideoStreamEncoder receives frames on an undefined threading
context, with the only requirement that frames arrive serially.
This CL changes the FrameCadenceAdapter to post all incoming
frames to the worker thread before any further processing, which
in turn means frames enter the VideoStreamEncoder on the worker
thread.

Bug: chromium:1255737
Change-Id: I04d69cb4a5048d671d2dcd3bd6d669fbcda52b3f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/237142
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Henrik Boström <hbos@webrtc.org>
Reviewed-by: Niels Möller <nisse@webrtc.org>
Commit-Queue: Markus Handell <handellm@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35320}
Markus Handell 2021-11-08 10:11:55 +01:00 committed by WebRTC LUCI CQ
parent 7aa89cedba
commit 28c7180999
9 changed files with 126 additions and 75 deletions
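For orientation, here is a minimal sketch (not part of the CL itself) of the hand-off pattern the change introduces: an object that accepts frames on an arbitrary but serialized context and re-posts them to a worker TaskQueueBase before touching any worker-guarded state. The FrameForwarder and ProcessOnWorkerQueue names are illustrative; the helpers (PostTask, ToQueuedTask, ScopedTaskSafety, RTC_DCHECK_RUN_ON) are the WebRTC utilities used in the diffs below.

// Illustrative sketch; only the WebRTC helpers mirror the real code below.
#include "api/sequence_checker.h"
#include "api/task_queue/task_queue_base.h"
#include "api/video/video_frame.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread_annotations.h"

namespace webrtc {

class FrameForwarder {
 public:
  explicit FrameForwarder(TaskQueueBase* worker_queue)
      : worker_queue_(worker_queue) {}

  // May be called on any thread, as long as calls arrive serially.
  void OnFrame(const VideoFrame& frame) {
    // Re-post to the worker queue. |safety_| cancels the task if |this| is
    // destroyed (on the worker queue) before the task runs.
    worker_queue_->PostTask(ToQueuedTask(safety_, [this, frame] {
      RTC_DCHECK_RUN_ON(worker_queue_);
      ProcessOnWorkerQueue(frame);
    }));
  }

 private:
  void ProcessOnWorkerQueue(const VideoFrame& frame)
      RTC_RUN_ON(worker_queue_) {
    // State annotated RTC_GUARDED_BY(worker_queue_) can be read and written
    // here without a mutex.
  }

  TaskQueueBase* const worker_queue_;
  ScopedTaskSafety safety_;
};

}  // namespace webrtc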


@ -267,11 +267,14 @@ rtc_library("frame_cadence_adapter") {
deps = [
"../api:sequence_checker",
"../api/task_queue",
"../api/video:video_frame",
"../rtc_base:logging",
"../rtc_base:macromagic",
"../rtc_base:rtc_base_approved",
"../rtc_base/synchronization:mutex",
"../rtc_base/task_utils:pending_task_safety_flag",
"../rtc_base/task_utils:to_queued_task",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
]


@ -11,11 +11,15 @@
#include "video/frame_cadence_adapter.h"
#include <memory>
#include <utility>
#include "api/sequence_checker.h"
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/logging.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
@ -24,7 +28,7 @@ namespace {
class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface {
public:
FrameCadenceAdapterImpl();
explicit FrameCadenceAdapterImpl(TaskQueueBase* worker_queue);
// FrameCadenceAdapterInterface overrides.
void Initialize(Callback* callback) override;
@ -37,9 +41,13 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface {
const VideoTrackSourceConstraints& constraints) override;
private:
// Called from OnFrame in zero-hertz mode.
void OnFrameOnMainQueue(const VideoFrame& frame) RTC_RUN_ON(worker_queue_);
// Called to report on constraint UMAs.
void MaybeReportFrameRateConstraintUmas()
RTC_RUN_ON(&incoming_frame_race_checker_) RTC_LOCKS_EXCLUDED(mutex_);
void MaybeReportFrameRateConstraintUmas() RTC_RUN_ON(&worker_queue_);
TaskQueueBase* const worker_queue_;
// True if we support frame entry for screenshare with a minimum frequency of
// 0 Hz.
@ -48,35 +56,36 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface {
// Set up during Initialize.
Callback* callback_ = nullptr;
// Lock protecting zero-hertz activation state. This is needed because the
// threading contexts of OnFrame, OnConstraintsChanged, and ConfigureEncoder
// are mutating it.
Mutex mutex_;
// The source's constraints.
absl::optional<VideoTrackSourceConstraints> source_constraints_
RTC_GUARDED_BY(mutex_);
RTC_GUARDED_BY(worker_queue_);
// Whether zero-hertz and UMA reporting is enabled.
bool zero_hertz_and_uma_reporting_enabled_ RTC_GUARDED_BY(mutex_) = false;
bool zero_hertz_and_uma_reporting_enabled_ RTC_GUARDED_BY(worker_queue_) =
false;
// Race checker for incoming frames. This is the network thread in chromium,
// but may vary from test contexts.
rtc::RaceChecker incoming_frame_race_checker_;
bool has_reported_screenshare_frame_rate_umas_ RTC_GUARDED_BY(mutex_) = false;
bool has_reported_screenshare_frame_rate_umas_ RTC_GUARDED_BY(worker_queue_) =
false;
ScopedTaskSafety safety_;
};
FrameCadenceAdapterImpl::FrameCadenceAdapterImpl()
: zero_hertz_screenshare_enabled_(
field_trial::IsEnabled("WebRTC-ZeroHertzScreenshare")) {}
FrameCadenceAdapterImpl::FrameCadenceAdapterImpl(TaskQueueBase* worker_queue)
: worker_queue_(worker_queue),
zero_hertz_screenshare_enabled_(
field_trial::IsEnabled("WebRTC-ZeroHertzScreenshare")) {
RTC_DCHECK_RUN_ON(worker_queue_);
}
void FrameCadenceAdapterImpl::Initialize(Callback* callback) {
callback_ = callback;
}
void FrameCadenceAdapterImpl::SetZeroHertzModeEnabled(bool enabled) {
// This method is called on the worker thread.
MutexLock lock(&mutex_);
RTC_DCHECK_RUN_ON(worker_queue_);
if (enabled && !zero_hertz_and_uma_reporting_enabled_)
has_reported_screenshare_frame_rate_umas_ = false;
zero_hertz_and_uma_reporting_enabled_ = enabled;
@ -86,8 +95,11 @@ void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) {
// This method is called on the network thread under Chromium, or other
// various contexts in test.
RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_);
callback_->OnFrame(frame);
MaybeReportFrameRateConstraintUmas();
worker_queue_->PostTask(ToQueuedTask(safety_, [this, frame] {
RTC_DCHECK_RUN_ON(worker_queue_);
OnFrameOnMainQueue(std::move(frame));
MaybeReportFrameRateConstraintUmas();
}));
}
void FrameCadenceAdapterImpl::OnConstraintsChanged(
@ -95,13 +107,19 @@ void FrameCadenceAdapterImpl::OnConstraintsChanged(
RTC_LOG(LS_INFO) << __func__ << " min_fps "
<< constraints.min_fps.value_or(-1) << " max_fps "
<< constraints.max_fps.value_or(-1);
MutexLock lock(&mutex_);
source_constraints_ = constraints;
worker_queue_->PostTask(ToQueuedTask(safety_, [this, constraints] {
RTC_DCHECK_RUN_ON(worker_queue_);
source_constraints_ = constraints;
}));
}
// RTC_RUN_ON(&incoming_frame_race_checker_)
// RTC_RUN_ON(worker_queue_)
void FrameCadenceAdapterImpl::OnFrameOnMainQueue(const VideoFrame& frame) {
callback_->OnFrame(frame);
}
// RTC_RUN_ON(worker_queue_)
void FrameCadenceAdapterImpl::MaybeReportFrameRateConstraintUmas() {
MutexLock lock(&mutex_);
if (has_reported_screenshare_frame_rate_umas_)
return;
has_reported_screenshare_frame_rate_umas_ = true;
@ -157,8 +175,8 @@ void FrameCadenceAdapterImpl::MaybeReportFrameRateConstraintUmas() {
} // namespace
std::unique_ptr<FrameCadenceAdapterInterface>
FrameCadenceAdapterInterface::Create() {
return std::make_unique<FrameCadenceAdapterImpl>();
FrameCadenceAdapterInterface::Create(TaskQueueBase* worker_queue) {
return std::make_unique<FrameCadenceAdapterImpl>(worker_queue);
}
} // namespace webrtc


@ -13,6 +13,7 @@
#include <memory>
#include "api/task_queue/task_queue_base.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "rtc_base/synchronization/mutex.h"
@ -41,7 +42,8 @@ class FrameCadenceAdapterInterface
// Factory function creating a production instance. Deletion of the returned
// instance needs to happen on the same sequence that Create() was called on.
static std::unique_ptr<FrameCadenceAdapterInterface> Create();
static std::unique_ptr<FrameCadenceAdapterInterface> Create(
TaskQueueBase* worker_queue);
// Call before using the rest of the API.
virtual void Initialize(Callback* callback) = 0;
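As a hedged usage sketch of the updated factory (the call-site function below is hypothetical; it assumes the caller already runs on the worker thread, as VideoSendStream does further down):

#include <memory>

#include "api/task_queue/task_queue_base.h"
#include "video/frame_cadence_adapter.h"

namespace webrtc {

// Hypothetical call site, assumed to execute on the worker thread.
std::unique_ptr<FrameCadenceAdapterInterface> CreateCadenceAdapter(
    FrameCadenceAdapterInterface::Callback* callback) {
  auto adapter =
      FrameCadenceAdapterInterface::Create(TaskQueueBase::Current());
  adapter->Initialize(callback);
  // Per the comment above, the adapter must also be destroyed on this same
  // sequence.
  return adapter;
}

}  // namespace webrtc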


@ -13,6 +13,7 @@
#include <utility>
#include <vector>
#include "api/task_queue/task_queue_base.h"
#include "api/video/nv12_buffer.h"
#include "api/video/video_frame.h"
#include "rtc_base/ref_counted_object.h"
@ -20,6 +21,7 @@
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
namespace {
@ -37,6 +39,10 @@ VideoFrame CreateFrame() {
.build();
}
std::unique_ptr<FrameCadenceAdapterInterface> CreateAdapter() {
return FrameCadenceAdapterInterface::Create(TaskQueueBase::Current());
}
class MockCallback : public FrameCadenceAdapterInterface::Callback {
public:
MOCK_METHOD(void, OnFrame, (const VideoFrame&), (override));
@ -51,14 +57,16 @@ class ZeroHertzFieldTrialDisabler : public test::ScopedFieldTrials {
TEST(FrameCadenceAdapterTest,
ForwardsFramesOnConstructionAndUnderDisabledFieldTrial) {
GlobalSimulatedTimeController time_controller(Timestamp::Millis(1));
auto disabler = std::make_unique<ZeroHertzFieldTrialDisabler>();
for (int i = 0; i != 2; i++) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
VideoFrame frame = CreateFrame();
EXPECT_CALL(callback, OnFrame(Ref(frame))).Times(1);
EXPECT_CALL(callback, OnFrame).Times(1);
adapter->OnFrame(frame);
time_controller.AdvanceTime(TimeDelta::Zero());
Mock::VerifyAndClearExpectations(&callback);
EXPECT_CALL(callback, OnDiscardedFrame).Times(1);
adapter->OnDiscardedFrame();
@ -70,12 +78,18 @@ TEST(FrameCadenceAdapterTest,
class FrameCadenceAdapterMetricsTest : public ::testing::Test {
public:
FrameCadenceAdapterMetricsTest() { metrics::Reset(); }
FrameCadenceAdapterMetricsTest() : time_controller_(Timestamp::Millis(1)) {
metrics::Reset();
}
void DepleteTaskQueues() { time_controller_.AdvanceTime(TimeDelta::Zero()); }
private:
GlobalSimulatedTimeController time_controller_;
};
TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoUmasWithNoFrameTransfer) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->OnConstraintsChanged(
VideoTrackSourceConstraints{absl::nullopt, absl::nullopt});
@ -83,6 +97,7 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoUmasWithNoFrameTransfer) {
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{2, 3});
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{4, 4});
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{5, absl::nullopt});
DepleteTaskQueues();
EXPECT_TRUE(metrics::Samples("WebRTC.Screenshare.FrameRateConstraints.Exists")
.empty());
EXPECT_TRUE(
@ -114,7 +129,7 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoUmasWithNoFrameTransfer) {
TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoUmasWithoutEnabledContentType) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->OnFrame(CreateFrame());
adapter->OnConstraintsChanged(
@ -123,6 +138,7 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoUmasWithoutEnabledContentType) {
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{2, 3});
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{4, 4});
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{5, absl::nullopt});
DepleteTaskQueues();
EXPECT_TRUE(metrics::Samples("WebRTC.Screenshare.FrameRateConstraints.Exists")
.empty());
EXPECT_TRUE(
@ -154,10 +170,11 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoUmasWithoutEnabledContentType) {
TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoConstraintsIfUnsetOnFrame) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->SetZeroHertzModeEnabled(true);
adapter->OnFrame(CreateFrame());
DepleteTaskQueues();
EXPECT_THAT(
metrics::Samples("WebRTC.Screenshare.FrameRateConstraints.Exists"),
ElementsAre(Pair(false, 1)));
@ -165,12 +182,13 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsNoConstraintsIfUnsetOnFrame) {
TEST_F(FrameCadenceAdapterMetricsTest, RecordsEmptyConstraintsIfSetOnFrame) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->SetZeroHertzModeEnabled(true);
adapter->OnConstraintsChanged(
VideoTrackSourceConstraints{absl::nullopt, absl::nullopt});
adapter->OnFrame(CreateFrame());
DepleteTaskQueues();
EXPECT_THAT(
metrics::Samples("WebRTC.Screenshare.FrameRateConstraints.Exists"),
ElementsAre(Pair(true, 1)));
@ -203,12 +221,13 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsEmptyConstraintsIfSetOnFrame) {
TEST_F(FrameCadenceAdapterMetricsTest, RecordsMaxConstraintIfSetOnFrame) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->SetZeroHertzModeEnabled(true);
adapter->OnConstraintsChanged(
VideoTrackSourceConstraints{absl::nullopt, 2.0});
adapter->OnFrame(CreateFrame());
DepleteTaskQueues();
EXPECT_THAT(
metrics::Samples("WebRTC.Screenshare.FrameRateConstraints.Min.Exists"),
ElementsAre(Pair(false, 1)));
@ -238,12 +257,13 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsMaxConstraintIfSetOnFrame) {
TEST_F(FrameCadenceAdapterMetricsTest, RecordsMinConstraintIfSetOnFrame) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->SetZeroHertzModeEnabled(true);
adapter->OnConstraintsChanged(
VideoTrackSourceConstraints{3.0, absl::nullopt});
adapter->OnFrame(CreateFrame());
DepleteTaskQueues();
EXPECT_THAT(
metrics::Samples("WebRTC.Screenshare.FrameRateConstraints.Min.Exists"),
ElementsAre(Pair(true, 1)));
@ -273,11 +293,12 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsMinConstraintIfSetOnFrame) {
TEST_F(FrameCadenceAdapterMetricsTest, RecordsMinGtMaxConstraintIfSetOnFrame) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->SetZeroHertzModeEnabled(true);
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{5.0, 4.0});
adapter->OnFrame(CreateFrame());
DepleteTaskQueues();
EXPECT_THAT(
metrics::Samples("WebRTC.Screenshare.FrameRateConstraints.Min.Exists"),
ElementsAre(Pair(true, 1)));
@ -307,11 +328,12 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsMinGtMaxConstraintIfSetOnFrame) {
TEST_F(FrameCadenceAdapterMetricsTest, RecordsMinLtMaxConstraintIfSetOnFrame) {
MockCallback callback;
auto adapter = FrameCadenceAdapterInterface::Create();
auto adapter = CreateAdapter();
adapter->Initialize(&callback);
adapter->SetZeroHertzModeEnabled(true);
adapter->OnConstraintsChanged(VideoTrackSourceConstraints{4.0, 5.0});
adapter->OnFrame(CreateFrame());
DepleteTaskQueues();
EXPECT_THAT(metrics::Samples(
"WebRTC.Screenshare.FrameRateConstraints.MinLessThanMax.Min"),
ElementsAre(Pair(4.0, 1)));
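Condensed, the recurring pattern in the tests above (deliver input, then advance simulated time by zero so the task the adapter posted actually runs) looks roughly like this. It is a sketch reusing MockCallback, CreateFrame and CreateAdapter from earlier in this file; the test name is illustrative.

TEST(FrameCadenceAdapterTest, SketchDeliversFrameViaWorkerQueue) {
  GlobalSimulatedTimeController time_controller(Timestamp::Millis(1));
  MockCallback callback;
  auto adapter = CreateAdapter();  // Create(TaskQueueBase::Current()).
  adapter->Initialize(&callback);
  EXPECT_CALL(callback, OnFrame).Times(1);
  adapter->OnFrame(CreateFrame());
  // OnFrame() only posted a task; the callback has not fired yet. Advancing
  // simulated time by zero drains the pending tasks on the registered queues,
  // delivering the frame on the worker (here: test main) thread.
  time_controller.AdvanceTime(TimeDelta::Zero());
}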


@ -136,9 +136,10 @@ VideoSendStream::VideoSendStream(
&stats_proxy_,
config_.encoder_settings,
std::make_unique<OveruseFrameDetector>(&stats_proxy_),
FrameCadenceAdapterInterface::Create(),
FrameCadenceAdapterInterface::Create(
/*worker_queue=*/TaskQueueBase::Current()),
task_queue_factory,
network_queue,
/*worker_queue=*/TaskQueueBase::Current(),
GetBitrateAllocationCallbackType(config_))),
encoder_feedback_(
clock,


@ -2130,6 +2130,11 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
frame_generator_capturer_->Start();
// TODO(crbug/1255737): Added manual current thread message processing because
// the test code context is interpreted as the worker thread and we assume
// progress on it. The test should probably be ported to use simulated time
// instead (ported to a scenario test perhaps?).
rtc::Thread::Current()->ProcessMessages(5000);
EXPECT_TRUE(encoder.WaitForStartBitrate());
EXPECT_EQ(GetVideoEncoderConfig()->max_bitrate_bps / 1000,
@ -2139,6 +2144,11 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
2 * bitrate_config.start_bitrate_bps;
GetVideoSendStream()->ReconfigureVideoEncoder(
GetVideoEncoderConfig()->Copy());
// TODO(crbug/1255737): Added manual current thread message processing because
// the test code context is interpreted as the worker thread and we assume
// progress on it. The test should probably be ported to use simulated time
// instead (ported to a scenario test perhaps?).
rtc::Thread::Current()->ProcessMessages(5000);
// New bitrate should be reconfigured above the previous max. As there's no
// network connection this shouldn't be flaky, as no bitrate should've been


@ -594,10 +594,9 @@ VideoStreamEncoder::VideoStreamEncoder(
std::unique_ptr<OveruseFrameDetector> overuse_detector,
std::unique_ptr<FrameCadenceAdapterInterface> frame_cadence_adapter,
TaskQueueFactory* task_queue_factory,
TaskQueueBase* network_queue,
TaskQueueBase* worker_queue,
BitrateAllocationCallbackType allocation_cb_type)
: worker_queue_(TaskQueueBase::Current()),
network_queue_(network_queue),
number_of_cores_(number_of_cores),
sink_(nullptr),
settings_(settings),
@ -670,7 +669,7 @@ VideoStreamEncoder::VideoStreamEncoder(
"EncoderQueue",
TaskQueueFactory::Priority::NORMAL)) {
TRACE_EVENT0("webrtc", "VideoStreamEncoder::VideoStreamEncoder");
RTC_DCHECK(worker_queue_);
RTC_DCHECK_RUN_ON(worker_queue_);
RTC_DCHECK(encoder_stats_observer);
RTC_DCHECK_GE(number_of_cores, 1);
@ -1276,9 +1275,7 @@ void VideoStreamEncoder::OnEncoderSettingsChanged() {
}
void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) {
// Threading context here under Chromium is the network thread. Test
// environments may currently call in from other alien contexts.
RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_);
RTC_DCHECK_RUN_ON(worker_queue_);
VideoFrame incoming_frame = video_frame;
// Local time in webrtc time base.


@ -78,7 +78,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
std::unique_ptr<OveruseFrameDetector> overuse_detector,
std::unique_ptr<FrameCadenceAdapterInterface> frame_cadence_adapter,
TaskQueueFactory* task_queue_factory,
TaskQueueBase* network_queue,
TaskQueueBase* worker_queue,
BitrateAllocationCallbackType allocation_cb_type);
~VideoStreamEncoder() override;
@ -245,7 +245,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
RTC_RUN_ON(&encoder_queue_);
TaskQueueBase* const worker_queue_;
TaskQueueBase* const network_queue_;
const uint32_t number_of_cores_;
@ -298,16 +297,13 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
bool encoder_failed_ RTC_GUARDED_BY(&encoder_queue_);
Clock* const clock_;
rtc::RaceChecker incoming_frame_race_checker_
RTC_GUARDED_BY(incoming_frame_race_checker_);
std::atomic<int> posted_frames_waiting_for_encode_;
// Used to make sure incoming time stamp is increasing for every frame.
int64_t last_captured_timestamp_ RTC_GUARDED_BY(incoming_frame_race_checker_);
int64_t last_captured_timestamp_ RTC_GUARDED_BY(worker_queue_);
// Delta used for translating between NTP and internal timestamps.
const int64_t delta_ntp_internal_ms_
RTC_GUARDED_BY(incoming_frame_race_checker_);
const int64_t delta_ntp_internal_ms_ RTC_GUARDED_BY(worker_queue_);
int64_t last_frame_log_ms_ RTC_GUARDED_BY(incoming_frame_race_checker_);
int64_t last_frame_log_ms_ RTC_GUARDED_BY(worker_queue_);
int captured_frame_count_ RTC_GUARDED_BY(&encoder_queue_);
int dropped_frame_cwnd_pushback_count_ RTC_GUARDED_BY(&encoder_queue_);
int dropped_frame_encoder_block_count_ RTC_GUARDED_BY(&encoder_queue_);


@ -352,17 +352,18 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
const VideoStreamEncoderSettings& settings,
VideoStreamEncoder::BitrateAllocationCallbackType
allocation_callback_type)
: VideoStreamEncoder(time_controller->GetClock(),
1 /* number_of_cores */,
stats_proxy,
settings,
std::unique_ptr<OveruseFrameDetector>(
overuse_detector_proxy_ =
new CpuOveruseDetectorProxy(stats_proxy)),
FrameCadenceAdapterInterface::Create(),
task_queue_factory,
TaskQueueBase::Current(),
allocation_callback_type),
: VideoStreamEncoder(
time_controller->GetClock(),
1 /* number_of_cores */,
stats_proxy,
settings,
std::unique_ptr<OveruseFrameDetector>(
overuse_detector_proxy_ =
new CpuOveruseDetectorProxy(stats_proxy)),
FrameCadenceAdapterInterface::Create(TaskQueueBase::Current()),
task_queue_factory,
TaskQueueBase::Current(),
allocation_callback_type),
time_controller_(time_controller),
fake_cpu_resource_(FakeResource::Create("FakeResource[CPU]")),
fake_quality_resource_(FakeResource::Create("FakeResource[QP]")),
@ -407,9 +408,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
// This is used as a synchronisation mechanism, to make sure that the
// encoder queue is not blocked before we start sending it frames.
void WaitUntilTaskQueueIsIdle() {
rtc::Event event;
encoder_queue()->PostTask([&event] { event.Set(); });
ASSERT_TRUE(event.Wait(5000));
time_controller_->AdvanceTime(TimeDelta::Zero());
}
// Triggers resource usage measurements on the fake CPU resource.
@ -420,7 +419,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
time_controller_->AdvanceTime(TimeDelta::Millis(0));
time_controller_->AdvanceTime(TimeDelta::Zero());
}
void TriggerCpuUnderuse() {
@ -430,7 +429,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
time_controller_->AdvanceTime(TimeDelta::Millis(0));
time_controller_->AdvanceTime(TimeDelta::Zero());
}
// Triggers resource usage measurements on the fake quality resource.
@ -441,7 +440,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
time_controller_->AdvanceTime(TimeDelta::Millis(0));
time_controller_->AdvanceTime(TimeDelta::Zero());
}
void TriggerQualityHigh() {
rtc::Event event;
@ -450,7 +449,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
time_controller_->AdvanceTime(TimeDelta::Millis(0));
time_controller_->AdvanceTime(TimeDelta::Zero());
}
TimeController* const time_controller_;
@ -508,7 +507,7 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
void IncomingCapturedFrame(const VideoFrame& video_frame) override {
RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent());
time_controller_->AdvanceTime(TimeDelta::Millis(0));
time_controller_->AdvanceTime(TimeDelta::Zero());
int cropped_width = 0;
int cropped_height = 0;
@ -1310,8 +1309,9 @@ class VideoStreamEncoderTest : public ::testing::Test {
bool WaitForFrame(int64_t timeout_ms) {
RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent());
time_controller_->AdvanceTime(TimeDelta::Zero());
bool ret = encoded_frame_event_.Wait(timeout_ms);
time_controller_->AdvanceTime(TimeDelta::Millis(0));
time_controller_->AdvanceTime(TimeDelta::Zero());
return ret;
}
@ -1527,6 +1527,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) {
video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
AdvanceTime(TimeDelta::Millis(10));
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
AdvanceTime(TimeDelta::Zero());
EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs));
video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
@ -6143,7 +6144,7 @@ TEST_F(VideoStreamEncoderTest,
video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(),
kMaxPayloadLength);
video_stream_encoder_->WaitUntilTaskQueueIsIdle();
AdvanceTime(TimeDelta::Millis(0));
AdvanceTime(TimeDelta::Zero());
// Since we turned off the quality scaler, the adaptations made by it are
// removed.
EXPECT_THAT(source.sink_wants(), ResolutionMax());
@ -7495,6 +7496,7 @@ TEST_F(VideoStreamEncoderTest, EncoderSelectorCurrentEncoderIsSignaled) {
video_source_.IncomingCapturedFrame(
CreateFrame(kDontCare, kDontCare, kDontCare));
AdvanceTime(TimeDelta::Zero());
video_stream_encoder_->Stop();
// The encoders produced by the VideoEncoderProxyFactory have a pointer back
@ -7531,7 +7533,7 @@ TEST_F(VideoStreamEncoderTest, EncoderSelectorBitrateSwitch) {
/*fraction_lost=*/0,
/*round_trip_time_ms=*/0,
/*cwnd_reduce_ratio=*/0);
AdvanceTime(TimeDelta::Millis(0));
AdvanceTime(TimeDelta::Zero());
video_stream_encoder_->Stop();
}
@ -7580,7 +7582,7 @@ TEST_F(VideoStreamEncoderTest, EncoderSelectorBrokenEncoderSwitch) {
video_source_.IncomingCapturedFrame(CreateFrame(1, kDontCare, kDontCare));
encode_attempted.Wait(3000);
AdvanceTime(TimeDelta::Millis(0));
AdvanceTime(TimeDelta::Zero());
video_stream_encoder_->Stop();