Remove unused NextFrame function from FrameBuffer.

Also update the FrameBuffer unit tests to use the GlobalSimulatedTimeController.

Bug: webrtc:7408, webrtc:9378
Change-Id: I8ade27492f66cdd8950b38f5f4a268714dbc35fc
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/164536
Reviewed-by: Sam Zackrisson <saza@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Commit-Queue: Philip Eliasson <philipel@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#30422}
Author: philipel, 2020-01-29 17:36:11 +01:00 (committed by Commit Bot)
parent d69935c114
commit 190539717b
6 changed files with 73 additions and 143 deletions
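For context, here is a minimal sketch (not part of the diff) of the asynchronous flow that remains after this change: the blocking NextFrame() overload is removed, so callers post work to a task queue and receive the frame through a callback, while tests drive the wait with GlobalSimulatedTimeController instead of a real clock. The calls below mirror ones that appear in the updated unit test and fuzzer; the function name RunFrameBufferSketch and the nullptr stats callback are illustrative assumptions.

#include <memory>

#include "api/task_queue/task_queue_factory.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/video_coding/frame_buffer2.h"
#include "modules/video_coding/timing.h"
#include "rtc_base/task_queue.h"
#include "test/time_controller/simulated_time_controller.h"

// Illustrative driver (hypothetical name), following the pattern the updated
// unit test and fuzzer adopt in this change.
void RunFrameBufferSketch() {
  webrtc::GlobalSimulatedTimeController time_controller(
      webrtc::Timestamp::seconds(0));
  rtc::TaskQueue queue(time_controller.GetTaskQueueFactory()->CreateTaskQueue(
      "frame_queue", webrtc::TaskQueueFactory::Priority::NORMAL));

  webrtc::VCMTiming timing(time_controller.GetClock());
  webrtc::video_coding::FrameBuffer buffer(time_controller.GetClock(), &timing,
                                           /*stats_callback=*/nullptr);

  // The removed blocking NextFrame() is replaced by this overload, which
  // delivers the result on |queue| via the callback.
  queue.PostTask([&buffer, &queue] {
    buffer.NextFrame(
        /*max_wait_time_ms=*/50, /*keyframe_required=*/false, &queue,
        [](std::unique_ptr<webrtc::video_coding::EncodedFrame> frame,
           webrtc::video_coding::FrameBuffer::ReturnReason reason) {
          // Handle kFrameFound / kTimeout / kStopped here.
        });
  });

  // With simulated time, the wait only elapses when time is advanced
  // explicitly, which is how the updated tests trigger delivery.
  time_controller.AdvanceTime(webrtc::TimeDelta::ms(50));
}

The key difference from the removed overload is that no thread blocks inside the buffer; the timeout only elapses when simulated (or real) time advances and the posted task runs.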

modules/video_coding/BUILD.gn

@@ -248,7 +248,9 @@ rtc_source_set("codec_globals_headers") {
"codecs/vp9/include/vp9_globals.h",
]
deps = [ "../../rtc_base:checks" ]
deps = [
"../../rtc_base:checks",
]
}
rtc_library("video_coding_utility") {
@@ -669,7 +671,9 @@ if (rtc_include_tests) {
bundle_data("video_coding_modules_tests_resources_bundle_data") {
testonly = true
sources = video_coding_modules_tests_resources
outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
outputs = [
"{{bundle_resources_dir}}/{{source_file_part}}",
]
}
}
}
@@ -923,6 +927,7 @@ if (rtc_include_tests) {
"../../test:test_support",
"../../test:video_test_common",
"../../test:video_test_support",
"../../test/time_controller:time_controller",
"../rtp_rtcp:rtp_rtcp_format",
"../rtp_rtcp:rtp_video_header",
"//third_party/abseil-cpp/absl/memory",

modules/video_coding/frame_buffer2.cc

@@ -116,50 +116,6 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() {
});
}
FrameBuffer::ReturnReason FrameBuffer::NextFrame(
int64_t max_wait_time_ms,
std::unique_ptr<EncodedFrame>* frame_out,
bool keyframe_required) {
TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
int64_t latest_return_time_ms =
clock_->TimeInMilliseconds() + max_wait_time_ms;
int64_t wait_ms = max_wait_time_ms;
int64_t now_ms = 0;
do {
now_ms = clock_->TimeInMilliseconds();
{
rtc::CritScope lock(&crit_);
new_continuous_frame_event_.Reset();
if (stopped_)
return kStopped;
keyframe_required_ = keyframe_required;
latest_return_time_ms_ = latest_return_time_ms;
wait_ms = FindNextFrame(now_ms);
}
} while (new_continuous_frame_event_.Wait(wait_ms));
{
rtc::CritScope lock(&crit_);
if (!frames_to_decode_.empty()) {
frame_out->reset(GetNextFrame());
return kFrameFound;
}
}
if (latest_return_time_ms - clock_->TimeInMilliseconds() > 0) {
// If |next_frame_it_ == frames_.end()| and there is still time left, it
// means that the frame buffer was cleared as the thread in this function
// was waiting to acquire |crit_| in order to return. Wait for the
// remaining time and then return.
return NextFrame(latest_return_time_ms - now_ms, frame_out,
keyframe_required);
}
return kTimeout;
}
int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
int64_t wait_ms = latest_return_time_ms_ - now_ms;
frames_to_decode_.clear();
@@ -379,7 +335,6 @@ void FrameBuffer::Stop() {
TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
rtc::CritScope lock(&crit_);
stopped_ = true;
new_continuous_frame_event_.Set();
CancelCallback();
}
@@ -563,8 +518,6 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
// Since we now have new continuous frames there might be a better frame
// to return from NextFrame.
new_continuous_frame_event_.Set();
if (callback_queue_) {
callback_queue_->PostTask([this] {
rtc::CritScope lock(&crit_);

modules/video_coding/frame_buffer2.h

@@ -58,14 +58,6 @@ class FrameBuffer {
// Get the next frame for decoding. Will return at latest after
// |max_wait_time_ms|.
// - If a frame is available within |max_wait_time_ms| it will return
// kFrameFound and set |frame_out| to the resulting frame.
// - If no frame is available after |max_wait_time_ms| it will return
// kTimeout.
// - If the FrameBuffer is stopped then it will return kStopped.
ReturnReason NextFrame(int64_t max_wait_time_ms,
std::unique_ptr<EncodedFrame>* frame_out,
bool keyframe_required);
void NextFrame(
int64_t max_wait_time_ms,
bool keyframe_required,
@@ -181,7 +173,6 @@ class FrameBuffer {
int64_t latest_return_time_ms_ RTC_GUARDED_BY(crit_);
bool keyframe_required_ RTC_GUARDED_BY(crit_);
rtc::Event new_continuous_frame_event_;
VCMJitterEstimator jitter_estimator_ RTC_GUARDED_BY(crit_);
VCMTiming* const timing_ RTC_GUARDED_BY(crit_);
VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(crit_);

modules/video_coding/frame_buffer2_unittest.cc

@@ -26,6 +26,7 @@
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/time_controller/simulated_time_controller.h"
using ::testing::_;
using ::testing::Return;
@@ -134,20 +135,16 @@ class TestFrameBuffer2 : public ::testing::Test {
TestFrameBuffer2()
: trial_("WebRTC-AddRttToPlayoutDelay/Enabled/"),
clock_(0),
timing_(&clock_),
buffer_(new FrameBuffer(&clock_, &timing_, &stats_callback_)),
rand_(0x34678213),
tear_down_(false),
extract_thread_(&ExtractLoop, this, "Extract Thread") {}
void SetUp() override { extract_thread_.Start(); }
void TearDown() override {
tear_down_ = true;
trigger_extract_event_.Set();
extract_thread_.Stop();
}
time_controller_(Timestamp::seconds(0)),
time_task_queue_(
time_controller_.GetTaskQueueFactory()->CreateTaskQueue(
"extract queue",
TaskQueueFactory::Priority::NORMAL)),
timing_(time_controller_.GetClock()),
buffer_(new FrameBuffer(time_controller_.GetClock(),
&timing_,
&stats_callback_)),
rand_(0x34678213) {}
template <typename... T>
std::unique_ptr<FrameObjectFake> CreateFrame(uint16_t picture_id,
@@ -198,25 +195,22 @@ class TestFrameBuffer2 : public ::testing::Test {
}
void ExtractFrame(int64_t max_wait_time = 0, bool keyframe_required = false) {
crit_.Enter();
if (max_wait_time == 0) {
std::unique_ptr<EncodedFrame> frame;
FrameBuffer::ReturnReason res =
buffer_->NextFrame(0, &frame, keyframe_required);
if (res != FrameBuffer::ReturnReason::kStopped)
time_task_queue_.PostTask([this, max_wait_time, keyframe_required]() {
buffer_->NextFrame(
max_wait_time, keyframe_required, &time_task_queue_,
[this](std::unique_ptr<video_coding::EncodedFrame> frame,
video_coding::FrameBuffer::ReturnReason reason) {
if (reason != FrameBuffer::ReturnReason::kStopped) {
frames_.emplace_back(std::move(frame));
crit_.Leave();
} else {
max_wait_time_ = max_wait_time;
trigger_extract_event_.Set();
crit_.Leave();
// Make sure |crit_| is acquired by |extract_thread_| before returning.
crit_acquired_event_.Wait(rtc::Event::kForever);
}
});
});
if (max_wait_time == 0) {
time_controller_.AdvanceTime(TimeDelta::ms(0));
}
}
void CheckFrame(size_t index, int picture_id, int spatial_layer) {
rtc::CritScope lock(&crit_);
ASSERT_LT(index, frames_.size());
ASSERT_TRUE(frames_[index]);
ASSERT_EQ(picture_id, frames_[index]->id.picture_id);
@@ -224,54 +218,27 @@ class TestFrameBuffer2 : public ::testing::Test {
}
void CheckFrameSize(size_t index, size_t size) {
rtc::CritScope lock(&crit_);
ASSERT_LT(index, frames_.size());
ASSERT_TRUE(frames_[index]);
ASSERT_EQ(frames_[index]->size(), size);
}
void CheckNoFrame(size_t index) {
rtc::CritScope lock(&crit_);
ASSERT_LT(index, frames_.size());
ASSERT_FALSE(frames_[index]);
}
static void ExtractLoop(void* obj) {
TestFrameBuffer2* tfb = static_cast<TestFrameBuffer2*>(obj);
while (true) {
tfb->trigger_extract_event_.Wait(rtc::Event::kForever);
{
rtc::CritScope lock(&tfb->crit_);
tfb->crit_acquired_event_.Set();
if (tfb->tear_down_)
return;
std::unique_ptr<EncodedFrame> frame;
FrameBuffer::ReturnReason res =
tfb->buffer_->NextFrame(tfb->max_wait_time_, &frame, false);
if (res != FrameBuffer::ReturnReason::kStopped)
tfb->frames_.emplace_back(std::move(frame));
}
}
}
uint32_t Rand() { return rand_.Rand<uint32_t>(); }
// The ProtectionMode tests depend on the rtt-multiplier experiment.
test::ScopedFieldTrials trial_;
SimulatedClock clock_;
webrtc::GlobalSimulatedTimeController time_controller_;
rtc::TaskQueue time_task_queue_;
VCMTimingFake timing_;
std::unique_ptr<FrameBuffer> buffer_;
std::vector<std::unique_ptr<EncodedFrame>> frames_;
Random rand_;
::testing::NiceMock<VCMReceiveStatisticsCallbackMock> stats_callback_;
int64_t max_wait_time_;
bool tear_down_;
rtc::PlatformThread extract_thread_;
rtc::Event trigger_extract_event_;
rtc::Event crit_acquired_event_;
rtc::CriticalSection crit_;
};
// From https://en.cppreference.com/w/cpp/language/static: "If ... a constexpr
@@ -283,16 +250,13 @@ class TestFrameBuffer2 : public ::testing::Test {
constexpr size_t TestFrameBuffer2::kFrameSize;
#endif
// Following tests are timing dependent. Either the timeouts have to
// be increased by a large margin, which would slow down all trybots,
// or we disable them for the very slow ones, like we do here.
#if !defined(ADDRESS_SANITIZER) && !defined(MEMORY_SANITIZER)
TEST_F(TestFrameBuffer2, WaitForFrame) {
uint16_t pid = Rand();
uint32_t ts = Rand();
ExtractFrame(50);
InsertFrame(pid, 0, ts, false, true, kFrameSize);
time_controller_.AdvanceTime(TimeDelta::ms(50));
CheckFrame(0, pid, 0);
}
@@ -308,8 +272,9 @@ TEST_F(TestFrameBuffer2, OneSuperFrame) {
}
TEST_F(TestFrameBuffer2, ZeroPlayoutDelay) {
VCMTiming timing(&clock_);
buffer_.reset(new FrameBuffer(&clock_, &timing, &stats_callback_));
VCMTiming timing(time_controller_.GetClock());
buffer_.reset(
new FrameBuffer(time_controller_.GetClock(), &timing, &stats_callback_));
const PlayoutDelay kPlayoutDelayMs = {0, 0};
std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
test_frame->id.picture_id = 0;
@@ -328,7 +293,7 @@ TEST_F(TestFrameBuffer2, DISABLED_OneUnorderedSuperFrame) {
ExtractFrame(50);
InsertFrame(pid, 1, ts, true, true, kFrameSize);
InsertFrame(pid, 0, ts, false, false, kFrameSize);
ExtractFrame();
time_controller_.AdvanceTime(TimeDelta::ms(0));
CheckFrame(0, pid, 0);
CheckFrame(1, pid, 1);
@@ -345,16 +310,15 @@ TEST_F(TestFrameBuffer2, DISABLED_OneLayerStreamReordered) {
ExtractFrame(50);
InsertFrame(pid + i + 1, 0, ts + (i + 1) * kFps10, false, true, kFrameSize,
pid + i);
clock_.AdvanceTimeMilliseconds(kFps10);
time_controller_.AdvanceTime(TimeDelta::ms(kFps10));
InsertFrame(pid + i, 0, ts + i * kFps10, false, true, kFrameSize,
pid + i - 1);
clock_.AdvanceTimeMilliseconds(kFps10);
time_controller_.AdvanceTime(TimeDelta::ms(kFps10));
ExtractFrame();
CheckFrame(i, pid + i, 0);
CheckFrame(i + 1, pid + i + 1, 0);
}
}
#endif // Timing dependent tests.
TEST_F(TestFrameBuffer2, ExtractFromEmptyBuffer) {
ExtractFrame();
@@ -388,7 +352,7 @@ TEST_F(TestFrameBuffer2, OneLayerStream) {
InsertFrame(pid + i, 0, ts + i * kFps10, false, true, kFrameSize,
pid + i - 1);
ExtractFrame();
clock_.AdvanceTimeMilliseconds(kFps10);
time_controller_.AdvanceTime(TimeDelta::ms(kFps10));
CheckFrame(i, pid + i, 0);
}
}
@@ -410,7 +374,7 @@ TEST_F(TestFrameBuffer2, DropTemporalLayerSlowDecoder) {
for (int i = 0; i < 10; ++i) {
ExtractFrame();
clock_.AdvanceTimeMilliseconds(70);
time_controller_.AdvanceTime(TimeDelta::ms(70));
}
CheckFrame(0, pid, 0);
@@ -436,7 +400,7 @@ TEST_F(TestFrameBuffer2, DropFramesIfSystemIsStalled) {
ExtractFrame();
// Jump forward in time, simulating the system being stalled for some reason.
clock_.AdvanceTimeMilliseconds(3 * kFps10);
time_controller_.AdvanceTime(TimeDelta::ms(3) * kFps10);
// Extract one more frame, expect second and third frame to be dropped.
EXPECT_CALL(stats_callback_, OnDroppedFrames(2)).Times(1);
ExtractFrame();
@@ -719,7 +683,7 @@ TEST_F(TestFrameBuffer2, HigherSpatialLayerNonDecodable) {
InsertFrame(pid + 2, 0, ts + kFps10, false, false, kFrameSize, pid);
InsertFrame(pid + 2, 1, ts + kFps10, true, true, kFrameSize, pid + 1);
clock_.AdvanceTimeMilliseconds(1000);
time_controller_.AdvanceTime(TimeDelta::ms(1000));
// Frame pid+1 is decodable but too late.
// In superframe pid+2 frame sid=0 is decodable, but frame sid=1 is not.
// Incorrect implementation might skip pid+1 frame and output undecodable

test/fuzzers/BUILD.gn

@@ -513,7 +513,7 @@ webrtc_fuzzer_test("frame_buffer2_fuzzer") {
sources = [ "frame_buffer2_fuzzer.cc" ]
deps = [
"../../modules/video_coding/",
"../../system_wrappers",
"../time_controller:time_controller",
]
}

test/fuzzers/frame_buffer2_fuzzer.cc

@@ -10,7 +10,7 @@
#include "modules/video_coding/frame_buffer2.h"
#include "modules/video_coding/timing.h"
#include "system_wrappers/include/clock.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
@@ -64,15 +64,21 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
return;
}
DataReader reader(data, size);
Clock* clock = Clock::GetRealTimeClock();
VCMTiming timing(clock);
video_coding::FrameBuffer frame_buffer(clock, &timing, nullptr);
GlobalSimulatedTimeController time_controller(Timestamp::seconds(0));
rtc::TaskQueue task_queue(
time_controller.GetTaskQueueFactory()->CreateTaskQueue(
"time_tq", TaskQueueFactory::Priority::NORMAL));
VCMTiming timing(time_controller.GetClock());
video_coding::FrameBuffer frame_buffer(time_controller.GetClock(), &timing,
nullptr);
bool next_frame_task_running = false;
while (reader.MoreToRead()) {
if (reader.GetNum<uint8_t>() & 1) {
if (reader.GetNum<uint8_t>() % 2) {
std::unique_ptr<FuzzyFrameObject> frame(new FuzzyFrameObject());
frame->id.picture_id = reader.GetNum<int64_t>();
frame->id.spatial_layer = reader.GetNum<uint8_t>();
frame->id.spatial_layer = reader.GetNum<uint8_t>() % 5;
frame->SetTimestamp(reader.GetNum<uint32_t>());
frame->num_references = reader.GetNum<uint8_t>() %
video_coding::EncodedFrame::kMaxFrameReferences;
@@ -82,15 +88,26 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
frame_buffer.InsertFrame(std::move(frame));
} else {
// Since we are not trying to trigger race conditions it does not make
// sense to have a wait time > 0.
const int kWaitTimeMs = 0;
std::unique_ptr<video_coding::EncodedFrame> frame(new FuzzyFrameObject());
if (!next_frame_task_running) {
next_frame_task_running = true;
bool keyframe_required = reader.GetNum<uint8_t>() % 2;
frame_buffer.NextFrame(kWaitTimeMs, &frame, keyframe_required);
int max_wait_time_ms = reader.GetNum<uint8_t>();
task_queue.PostTask([&task_queue, &frame_buffer,
&next_frame_task_running, keyframe_required,
max_wait_time_ms] {
frame_buffer.NextFrame(
max_wait_time_ms, keyframe_required, &task_queue,
[&next_frame_task_running](
std::unique_ptr<video_coding::EncodedFrame> frame,
video_coding::FrameBuffer::ReturnReason res) {
next_frame_task_running = false;
});
});
}
}
time_controller.AdvanceTime(TimeDelta::ms(reader.GetNum<uint8_t>()));
}
}
} // namespace webrtc