Refactor FakeVideoCapturer.

Extract the code that produces a stream of frames into its own class,
FakeFrameSource. Use it in the VideoAdapter unit tests to make the code
simpler and to drop the dependency on the deprecated cricket::VideoCapturer.

Bug: webrtc:6353
Change-Id: Ib5c34c6a0bd7f4338650459873ddc94b12d0c569
Reviewed-on: https://webrtc-review.googlesource.com/49740
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Taylor Brandstetter <deadbeef@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21995}
Niels Möller 2018-02-12 17:14:55 +01:00 committed by Commit Bot
parent 00cecb9804
commit a6cc0f94bf
8 changed files with 256 additions and 207 deletions
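For orientation, a minimal sketch of the simplified capturer API after this change; the overloads taking a FOURCC or a timestamp interval are gone, as the diffs below show. The test name and the is_screencast value are illustrative, not part of the patch.

#include "media/base/fakevideocapturer.h"
#include "rtc_base/gunit.h"

namespace cricket {

TEST(FakeVideoCapturerSketch, CaptureCustomFrameTakesOnlyAResolution) {
  FakeVideoCapturer capturer(/*is_screencast=*/false);
  // Start() now also creates the internal FakeFrameSource.
  EXPECT_EQ(CS_RUNNING, capturer.Start(capturer.GetSupportedFormats()->at(0)));
  // Frames are always I420; only the resolution can be overridden.
  EXPECT_TRUE(capturer.CaptureFrame());
  EXPECT_TRUE(capturer.CaptureCustomFrame(640, 360));
}

}  // namespace cricket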

media/BUILD.gn

@@ -452,6 +452,8 @@ if (rtc_include_tests) {
"../rtc_base:stringutils",
]
sources = [
"base/fakeframesource.cc",
"base/fakeframesource.h",
"base/fakemediaengine.h",
"base/fakenetworkinterface.h",
"base/fakertp.cc",

media/base/fakeframesource.cc (new file)

@@ -0,0 +1,54 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "media/base/fakeframesource.h"
#include "api/video/i420_buffer.h"
namespace cricket {
FakeFrameSource::FakeFrameSource(int width, int height, int interval_us)
: width_(width), height_(height), interval_us_(interval_us) {
RTC_CHECK_GT(width_, 0);
RTC_CHECK_GT(height_, 0);
RTC_CHECK_GT(interval_us_, 0);
}
webrtc::VideoRotation FakeFrameSource::GetRotation() {
return rotation_;
}
void FakeFrameSource::SetRotation(webrtc::VideoRotation rotation) {
rotation_ = rotation;
}
webrtc::VideoFrame FakeFrameSource::GetFrame() {
return GetFrame(width_, height_, interval_us_);
}
webrtc::VideoFrame FakeFrameSource::GetFrame(int width,
int height,
int interval_us) {
RTC_CHECK_GT(width, 0);
RTC_CHECK_GT(height, 0);
RTC_CHECK_GT(interval_us, 0);
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
webrtc::I420Buffer::Create(width, height));
buffer->InitializeData();
webrtc::VideoFrame frame =
webrtc::VideoFrame(buffer, rotation_, next_timestamp_us_);
next_timestamp_us_ += interval_us;
return frame;
}
} // namespace cricket
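Not part of the patch, but a quick sketch of the behavior above: timestamps start at one millisecond and advance by the configured interval on every GetFrame() call. The test name is made up.

#include "media/base/fakeframesource.h"
#include "rtc_base/gunit.h"
#include "rtc_base/timeutils.h"

namespace cricket {

TEST(FakeFrameSourceSketch, TimestampsAdvanceByOneInterval) {
  const int kIntervalUs = rtc::kNumMicrosecsPerSec / 30;  // 30 fps.
  FakeFrameSource source(1280, 720, kIntervalUs);

  webrtc::VideoFrame first = source.GetFrame();
  webrtc::VideoFrame second = source.GetFrame();
  EXPECT_EQ(1280, first.width());
  EXPECT_EQ(720, first.height());
  EXPECT_EQ(kIntervalUs, second.timestamp_us() - first.timestamp_us());
}

}  // namespace cricket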

media/base/fakeframesource.h (new file)

@@ -0,0 +1,42 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MEDIA_BASE_FAKEFRAMESOURCE_H_
#define MEDIA_BASE_FAKEFRAMESOURCE_H_
#include "api/video/video_frame.h"
#include "rtc_base/timeutils.h"
namespace cricket {
class FakeFrameSource {
public:
FakeFrameSource(int width, int height, int interval_us);
webrtc::VideoRotation GetRotation();
void SetRotation(webrtc::VideoRotation rotation);
webrtc::VideoFrame GetFrame();
// Override default size and interval.
webrtc::VideoFrame GetFrame(int width, int height, int interval_us);
private:
const int width_;
const int height_;
const int interval_us_;
webrtc::VideoRotation rotation_ = webrtc::kVideoRotation_0;
int64_t next_timestamp_us_ = rtc::kNumMicrosecsPerMillisec;
};
} // namespace cricket
#endif // MEDIA_BASE_FAKEFRAMESOURCE_H_

media/base/fakevideocapturer.cc

@@ -16,8 +16,6 @@ namespace cricket {
FakeVideoCapturer::FakeVideoCapturer(bool is_screencast)
: running_(false),
initial_timestamp_(rtc::TimeNanos()),
next_timestamp_(rtc::kNumNanosecsPerMillisec),
is_screencast_(is_screencast),
rotation_(webrtc::kVideoRotation_0) {
// Default supported formats. Use ResetSupportedFormats to over write.
@@ -47,29 +45,22 @@ bool FakeVideoCapturer::CaptureFrame() {
if (!GetCaptureFormat()) {
return false;
}
return CaptureCustomFrame(
GetCaptureFormat()->width, GetCaptureFormat()->height,
GetCaptureFormat()->interval, GetCaptureFormat()->fourcc);
RTC_CHECK_EQ(GetCaptureFormat()->fourcc, FOURCC_I420);
return CaptureFrame(frame_source_->GetFrame());
}
bool FakeVideoCapturer::CaptureCustomFrame(int width,
int height,
uint32_t fourcc) {
bool FakeVideoCapturer::CaptureCustomFrame(int width, int height) {
// Default to 30fps.
return CaptureCustomFrame(width, height, rtc::kNumNanosecsPerSec / 30,
fourcc);
// TODO(nisse): Would anything break if we always stick to
// the configure frame interval?
return CaptureFrame(
frame_source_->GetFrame(width, height, rtc::kNumMicrosecsPerSec / 30));
}
bool FakeVideoCapturer::CaptureCustomFrame(int width,
int height,
int64_t timestamp_interval,
uint32_t fourcc) {
bool FakeVideoCapturer::CaptureFrame(const webrtc::VideoFrame& frame) {
if (!running_) {
return false;
}
RTC_CHECK(fourcc == FOURCC_I420);
RTC_CHECK(width > 0);
RTC_CHECK(height > 0);
int adapted_width;
int adapted_height;
@@ -83,19 +74,16 @@ bool FakeVideoCapturer::CaptureCustomFrame(int width,
// AdaptFrame, and the test case
// VideoCapturerTest.SinkWantsMaxPixelAndMaxPixelCountStepUp
// depends on this.
if (AdaptFrame(width, height, next_timestamp_ / rtc::kNumNanosecsPerMicrosec,
next_timestamp_ / rtc::kNumNanosecsPerMicrosec, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x, &crop_y,
nullptr)) {
if (AdaptFrame(frame.width(), frame.height(), frame.timestamp_us(),
frame.timestamp_us(), &adapted_width, &adapted_height,
&crop_width, &crop_height, &crop_x, &crop_y, nullptr)) {
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
webrtc::I420Buffer::Create(adapted_width, adapted_height));
buffer->InitializeData();
OnFrame(webrtc::VideoFrame(buffer, rotation_,
next_timestamp_ / rtc::kNumNanosecsPerMicrosec),
width, height);
OnFrame(webrtc::VideoFrame(buffer, frame.rotation(), frame.timestamp_us()),
frame.width(), frame.height());
}
next_timestamp_ += timestamp_interval;
return true;
}
@@ -105,6 +93,10 @@ cricket::CaptureState FakeVideoCapturer::Start(
SetCaptureFormat(&format);
running_ = true;
SetCaptureState(cricket::CS_RUNNING);
frame_source_ = rtc::MakeUnique<FakeFrameSource>(
format.width, format.height,
format.interval / rtc::kNumNanosecsPerMicrosec);
frame_source_->SetRotation(rotation_);
return cricket::CS_RUNNING;
}
@@ -130,6 +122,8 @@ bool FakeVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
void FakeVideoCapturer::SetRotation(webrtc::VideoRotation rotation) {
rotation_ = rotation;
if (frame_source_)
frame_source_->SetRotation(rotation_);
}
webrtc::VideoRotation FakeVideoCapturer::GetRotation() {
@@ -149,28 +143,12 @@ bool FakeVideoCapturerWithTaskQueue::CaptureFrame() {
return ret;
}
bool FakeVideoCapturerWithTaskQueue::CaptureCustomFrame(int width,
int height,
uint32_t fourcc) {
bool FakeVideoCapturerWithTaskQueue::CaptureCustomFrame(int width, int height) {
bool ret = false;
RunSynchronouslyOnTaskQueue([this, &ret, width, height, fourcc]() {
ret = FakeVideoCapturer::CaptureCustomFrame(width, height, fourcc);
RunSynchronouslyOnTaskQueue([this, &ret, width, height]() {
ret = FakeVideoCapturer::CaptureCustomFrame(width, height);
});
return ret;
}
bool FakeVideoCapturerWithTaskQueue::CaptureCustomFrame(
int width,
int height,
int64_t timestamp_interval,
uint32_t fourcc) {
bool ret = false;
RunSynchronouslyOnTaskQueue(
[this, &ret, width, height, timestamp_interval, fourcc]() {
ret = FakeVideoCapturer::CaptureCustomFrame(width, height,
timestamp_interval, fourcc);
});
return ret;
}
} // namespace cricket
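One detail worth calling out in Start() above: cricket::VideoFormat stores its frame interval in nanoseconds, while FakeFrameSource takes microseconds, hence the division by rtc::kNumNanosecsPerMicrosec. A small sketch of that conversion (the helper name is hypothetical):

#include <memory>

#include "media/base/fakeframesource.h"
#include "media/base/videocommon.h"
#include "rtc_base/ptr_util.h"
#include "rtc_base/timeutils.h"

std::unique_ptr<cricket::FakeFrameSource> MakeSourceFor(
    const cricket::VideoFormat& format) {
  // E.g. VideoFormat::FpsToInterval(30) == 33333333 ns -> 33333 us.
  return rtc::MakeUnique<cricket::FakeFrameSource>(
      format.width, format.height,
      static_cast<int>(format.interval / rtc::kNumNanosecsPerMicrosec));
}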

media/base/fakevideocapturer.h

@@ -18,6 +18,7 @@
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "media/base/fakeframesource.h"
#include "media/base/videocapturer.h"
#include "media/base/videocommon.h"
#include "rtc_base/event.h"
@@ -36,11 +37,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
void ResetSupportedFormats(const std::vector<cricket::VideoFormat>& formats);
virtual bool CaptureFrame();
virtual bool CaptureCustomFrame(int width, int height, uint32_t fourcc);
virtual bool CaptureCustomFrame(int width,
int height,
int64_t timestamp_interval,
uint32_t fourcc);
virtual bool CaptureCustomFrame(int width, int height);
sigslot::signal1<FakeVideoCapturer*> SignalDestroyed;
@@ -55,11 +52,14 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
webrtc::VideoRotation GetRotation();
private:
bool CaptureFrame(const webrtc::VideoFrame& frame);
bool running_;
int64_t initial_timestamp_;
int64_t next_timestamp_;
const bool is_screencast_;
// Duplicates FakeFrameSource::rotation_, but needed to support
// SetRotation before Start.
webrtc::VideoRotation rotation_;
std::unique_ptr<FakeFrameSource> frame_source_;
};
// Inherits from FakeVideoCapturer but adds a TaskQueue so that frames can be
@@ -70,11 +70,7 @@ class FakeVideoCapturerWithTaskQueue : public FakeVideoCapturer {
FakeVideoCapturerWithTaskQueue();
bool CaptureFrame() override;
bool CaptureCustomFrame(int width, int height, uint32_t fourcc) override;
bool CaptureCustomFrame(int width,
int height,
int64_t timestamp_interval,
uint32_t fourcc) override;
bool CaptureCustomFrame(int width, int height) override;
protected:
template <class Closure>

media/base/videoadapter_unittest.cc

@@ -15,38 +15,36 @@
#include <string>
#include <vector>
#include "media/base/fakevideocapturer.h"
#include "media/base/fakeframesource.h"
#include "media/base/mediachannel.h"
#include "media/base/testutils.h"
#include "media/base/videoadapter.h"
#include "rtc_base/gunit.h"
#include "rtc_base/logging.h"
#include "rtc_base/ptr_util.h"
namespace cricket {
namespace {
const int kWidth = 1280;
const int kHeight = 720;
const int kDefaultFps = 30;
} // namespace
class VideoAdapterTest : public testing::Test {
public:
void SetUp() override {
capturer_.reset(new FakeVideoCapturerWithTaskQueue());
capture_format_ = capturer_->GetSupportedFormats()->at(0);
capture_format_.interval = VideoFormat::FpsToInterval(kDefaultFps);
capture_format_ = {kWidth, kHeight, VideoFormat::FpsToInterval(kDefaultFps),
cricket::FOURCC_I420};
frame_source_ = rtc::MakeUnique<FakeFrameSource>(
kWidth, kHeight,
VideoFormat::FpsToInterval(kDefaultFps) / rtc::kNumNanosecsPerMicrosec);
listener_.reset(new VideoCapturerListener(&adapter_));
capturer_->AddOrUpdateSink(listener_.get(), rtc::VideoSinkWants());
}
void TearDown() override {
// Explicitly disconnect the VideoCapturer before to avoid data races
// (frames delivered to VideoCapturerListener while it's being destructed).
capturer_->RemoveSink(listener_.get());
adapter_wrapper_ = rtc::MakeUnique<VideoAdapterWrapper>(&adapter_);
}
protected:
class VideoCapturerListener
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
// Wrap a VideoAdapter and collect stats.
class VideoAdapterWrapper {
public:
struct Stats {
int captured_frames;
@@ -59,7 +57,7 @@ class VideoAdapterTest : public testing::Test {
int out_height;
};
explicit VideoCapturerListener(VideoAdapter* adapter)
explicit VideoAdapterWrapper(VideoAdapter* adapter)
: video_adapter_(adapter),
cropped_width_(0),
cropped_height_(0),
@@ -69,8 +67,7 @@ class VideoAdapterTest : public testing::Test {
dropped_frames_(0),
last_adapt_was_no_op_(false) {}
void OnFrame(const webrtc::VideoFrame& frame) override {
rtc::CritScope lock(&crit_);
void AdaptFrame(const webrtc::VideoFrame& frame) {
const int in_width = frame.width();
const int in_height = frame.height();
int cropped_width;
@@ -95,7 +92,6 @@ class VideoAdapterTest : public testing::Test {
}
Stats GetStats() {
rtc::CritScope lock(&crit_);
Stats stats;
stats.captured_frames = captured_frames_;
stats.dropped_frames = dropped_frames_;
@@ -108,7 +104,6 @@ class VideoAdapterTest : public testing::Test {
}
private:
rtc::CriticalSection crit_;
VideoAdapter* video_adapter_;
int cropped_width_;
int cropped_height_;
@@ -119,8 +114,7 @@ class VideoAdapterTest : public testing::Test {
bool last_adapt_was_no_op_;
};
void VerifyAdaptedResolution(const VideoCapturerListener::Stats& stats,
void VerifyAdaptedResolution(const VideoAdapterWrapper::Stats& stats,
int cropped_width,
int cropped_height,
int out_width,
@@ -131,25 +125,24 @@ class VideoAdapterTest : public testing::Test {
EXPECT_EQ(out_height, stats.out_height);
}
std::unique_ptr<FakeVideoCapturerWithTaskQueue> capturer_;
std::unique_ptr<FakeFrameSource> frame_source_;
VideoAdapter adapter_;
int cropped_width_;
int cropped_height_;
int out_width_;
int out_height_;
std::unique_ptr<VideoCapturerListener> listener_;
std::unique_ptr<VideoAdapterWrapper> adapter_wrapper_;
VideoFormat capture_format_;
};
// Do not adapt the frame rate or the resolution. Expect no frame drop, no
// cropping, and no resolution change.
TEST_F(VideoAdapterTest, AdaptNothing) {
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no frame drop and no resolution change.
VideoCapturerListener::Stats stats = listener_->GetStats();
VideoAdapterWrapper::Stats stats = adapter_wrapper_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(0, stats.dropped_frames);
VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
@@ -158,15 +151,14 @@ TEST_F(VideoAdapterTest, AdaptNothing) {
}
TEST_F(VideoAdapterTest, AdaptZeroInterval) {
VideoFormat format = capturer_->GetSupportedFormats()->at(0);
VideoFormat format = capture_format_;
format.interval = 0;
adapter_.OnOutputFormatRequest(format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no crash and that frames aren't dropped.
VideoCapturerListener::Stats stats = listener_->GetStats();
VideoAdapterWrapper::Stats stats = adapter_wrapper_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(0, stats.dropped_frames);
VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
@@ -179,49 +171,48 @@ TEST_F(VideoAdapterTest, AdaptFramerateToHalf) {
VideoFormat request_format = capture_format_;
request_format.interval *= 2;
adapter_.OnOutputFormatRequest(request_format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
// Capture 10 frames and verify that every other frame is dropped. The first
// frame should not be dropped.
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 1);
EXPECT_EQ(0, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 1);
EXPECT_EQ(0, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 2);
EXPECT_EQ(1, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 2);
EXPECT_EQ(1, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 3);
EXPECT_EQ(1, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 3);
EXPECT_EQ(1, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 4);
EXPECT_EQ(2, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 4);
EXPECT_EQ(2, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 5);
EXPECT_EQ(2, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 5);
EXPECT_EQ(2, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 6);
EXPECT_EQ(3, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 6);
EXPECT_EQ(3, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 7);
EXPECT_EQ(3, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 7);
EXPECT_EQ(3, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 8);
EXPECT_EQ(4, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 8);
EXPECT_EQ(4, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 9);
EXPECT_EQ(4, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 9);
EXPECT_EQ(4, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 10);
EXPECT_EQ(5, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 10);
EXPECT_EQ(5, adapter_wrapper_->GetStats().dropped_frames);
}
// Adapt the frame rate to be two thirds of the capture rate at the beginning.
@@ -231,49 +222,48 @@ TEST_F(VideoAdapterTest, AdaptFramerateToTwoThirds) {
VideoFormat request_format = capture_format_;
request_format.interval = request_format.interval * 3 / 2;
adapter_.OnOutputFormatRequest(request_format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
// Capture 10 frames and verify that every third frame is dropped. The first
// frame should not be dropped.
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 1);
EXPECT_EQ(0, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 1);
EXPECT_EQ(0, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 2);
EXPECT_EQ(0, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 2);
EXPECT_EQ(0, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 3);
EXPECT_EQ(1, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 3);
EXPECT_EQ(1, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 4);
EXPECT_EQ(1, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 4);
EXPECT_EQ(1, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 5);
EXPECT_EQ(1, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 5);
EXPECT_EQ(1, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 6);
EXPECT_EQ(2, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 6);
EXPECT_EQ(2, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 7);
EXPECT_EQ(2, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 7);
EXPECT_EQ(2, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 8);
EXPECT_EQ(2, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 8);
EXPECT_EQ(2, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 9);
EXPECT_EQ(3, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 9);
EXPECT_EQ(3, adapter_wrapper_->GetStats().dropped_frames);
capturer_->CaptureFrame();
EXPECT_GE(listener_->GetStats().captured_frames, 10);
EXPECT_EQ(3, listener_->GetStats().dropped_frames);
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
EXPECT_GE(adapter_wrapper_->GetStats().captured_frames, 10);
EXPECT_EQ(3, adapter_wrapper_->GetStats().dropped_frames);
}
// Request frame rate twice as high as captured frame rate. Expect no frame
@@ -282,12 +272,11 @@ TEST_F(VideoAdapterTest, AdaptFramerateHighLimit) {
VideoFormat request_format = capture_format_;
request_format.interval /= 2;
adapter_.OnOutputFormatRequest(request_format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no frame drop.
EXPECT_EQ(0, listener_->GetStats().dropped_frames);
EXPECT_EQ(0, adapter_wrapper_->GetStats().dropped_frames);
}
// After the first timestamp, add a big offset to the timestamps. Expect that
@@ -370,37 +359,35 @@ TEST_F(VideoAdapterTest, AdaptFramerateTimestampJitter) {
TEST_F(VideoAdapterTest, AdaptFramerateOntheFly) {
VideoFormat request_format = capture_format_;
adapter_.OnOutputFormatRequest(request_format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no frame drop before adaptation.
EXPECT_EQ(0, listener_->GetStats().dropped_frames);
EXPECT_EQ(0, adapter_wrapper_->GetStats().dropped_frames);
// Adapat the frame rate.
request_format.interval *= 2;
adapter_.OnOutputFormatRequest(request_format);
for (int i = 0; i < 20; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify frame drop after adaptation.
EXPECT_GT(listener_->GetStats().dropped_frames, 0);
EXPECT_GT(adapter_wrapper_->GetStats().dropped_frames, 0);
}
// Do not adapt the frame rate or the resolution. Expect no frame drop, no
// cropping, and no resolution change.
TEST_F(VideoAdapterTest, OnFramerateRequestMax) {
TEST_F(VideoAdapterTest, AdaptFramerateRequestMax) {
adapter_.OnResolutionFramerateRequest(rtc::nullopt,
std::numeric_limits<int>::max(),
std::numeric_limits<int>::max());
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no frame drop and no resolution change.
VideoCapturerListener::Stats stats = listener_->GetStats();
VideoAdapterWrapper::Stats stats = adapter_wrapper_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(0, stats.dropped_frames);
VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
@@ -408,30 +395,28 @@ TEST_F(VideoAdapterTest, OnFramerateRequestMax) {
EXPECT_TRUE(stats.last_adapt_was_no_op);
}
TEST_F(VideoAdapterTest, OnFramerateRequestZero) {
TEST_F(VideoAdapterTest, AdaptFramerateRequestZero) {
adapter_.OnResolutionFramerateRequest(rtc::nullopt,
std::numeric_limits<int>::max(), 0);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no crash and that frames aren't dropped.
VideoCapturerListener::Stats stats = listener_->GetStats();
VideoAdapterWrapper::Stats stats = adapter_wrapper_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(10, stats.dropped_frames);
}
// Adapt the frame rate to be half of the capture rate at the beginning. Expect
// the number of dropped frames to be half of the number the captured frames.
TEST_F(VideoAdapterTest, OnFramerateRequestHalf) {
TEST_F(VideoAdapterTest, AdaptFramerateRequestHalf) {
adapter_.OnResolutionFramerateRequest(
rtc::nullopt, std::numeric_limits<int>::max(), kDefaultFps / 2);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no crash and that frames aren't dropped.
VideoCapturerListener::Stats stats = listener_->GetStats();
VideoAdapterWrapper::Stats stats = adapter_wrapper_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(5, stats.dropped_frames);
VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
@@ -505,12 +490,11 @@ TEST_F(VideoAdapterTest, AdaptResolution) {
request_format.width /= 2;
request_format.height /= 2;
adapter_.OnOutputFormatRequest(request_format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no frame drop, no cropping, and resolution change.
VideoCapturerListener::Stats stats = listener_->GetStats();
VideoAdapterWrapper::Stats stats = adapter_wrapper_->GetStats();
EXPECT_EQ(0, stats.dropped_frames);
VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
request_format.width, request_format.height);
@@ -522,38 +506,36 @@ TEST_F(VideoAdapterTest, AdaptResolution) {
TEST_F(VideoAdapterTest, AdaptResolutionOnTheFly) {
VideoFormat request_format = capture_format_;
adapter_.OnOutputFormatRequest(request_format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify no resolution change before adaptation.
VerifyAdaptedResolution(listener_->GetStats(),
capture_format_.width, capture_format_.height,
request_format.width, request_format.height);
VerifyAdaptedResolution(adapter_wrapper_->GetStats(), capture_format_.width,
capture_format_.height, request_format.width,
request_format.height);
// Adapt the frame resolution.
request_format.width /= 2;
request_format.height /= 2;
adapter_.OnOutputFormatRequest(request_format);
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify resolution change after adaptation.
VerifyAdaptedResolution(listener_->GetStats(),
capture_format_.width, capture_format_.height,
request_format.width, request_format.height);
VerifyAdaptedResolution(adapter_wrapper_->GetStats(), capture_format_.width,
capture_format_.height, request_format.width,
request_format.height);
}
// Drop all frames.
TEST_F(VideoAdapterTest, DropAllFrames) {
VideoFormat format; // with resolution 0x0.
adapter_.OnOutputFormatRequest(format);
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
adapter_wrapper_->AdaptFrame(frame_source_->GetFrame());
// Verify all frames are dropped.
VideoCapturerListener::Stats stats = listener_->GetStats();
VideoAdapterWrapper::Stats stats = adapter_wrapper_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(stats.captured_frames, stats.dropped_frames);
}
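For readers skimming the test diff: the renamed VideoAdapterWrapper essentially feeds each frame's geometry and timestamp to the adapter and counts drops. Below is a hedged reconstruction of that core step, written as a free function; the AdaptFrameResolution() signature is quoted from the VideoAdapter of this era and should be treated as an assumption.

#include "api/video/video_frame.h"
#include "media/base/videoadapter.h"
#include "rtc_base/timeutils.h"

struct AdaptStats {
  int captured_frames = 0;
  int dropped_frames = 0;
  int out_width = 0;
  int out_height = 0;
};

void AdaptFrameForTest(cricket::VideoAdapter* adapter,
                       const webrtc::VideoFrame& frame,
                       AdaptStats* stats) {
  int cropped_width;
  int cropped_height;
  int out_width;
  int out_height;
  ++stats->captured_frames;
  // AdaptFrameResolution() returns false when the frame should be dropped;
  // its timestamp argument is in nanoseconds.
  if (adapter->AdaptFrameResolution(
          frame.width(), frame.height(),
          frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
          &cropped_width, &cropped_height, &out_width, &out_height)) {
    stats->out_width = out_width;
    stats->out_height = out_height;
  } else {
    ++stats->dropped_frames;
  }
}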

media/base/videoengine_unittest.h

@@ -473,8 +473,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(5678)));
EXPECT_TRUE(channel_->SetSink(5678, &renderer2));
EXPECT_TRUE(capturer->CaptureCustomFrame(kTestWidth, kTestHeight,
cricket::FOURCC_I420));
EXPECT_TRUE(capturer->CaptureCustomFrame(kTestWidth, kTestHeight));
EXPECT_FRAME_ON_RENDERER_WAIT(
renderer2, 1, kTestWidth, kTestHeight, kTimeout);
@@ -689,15 +688,13 @@ class VideoMediaChannelTest : public testing::Test,
// All capturers start generating frames with the same timestamp. ViE does
// not allow the same timestamp to be used. Capture one frame before
// associating the capturer with the channel.
EXPECT_TRUE(
capturer->CaptureCustomFrame(format.width, format.height, FOURCC_I420));
EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height));
int captured_frames = 1;
for (int iterations = 0; iterations < 2; ++iterations) {
EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, capturer.get()));
rtc::Thread::Current()->ProcessMessages(time_between_send_ms);
EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
FOURCC_I420));
EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height));
++captured_frames;
// Wait until frame of right size is captured.
EXPECT_TRUE_WAIT(renderer_.num_rendered_frames() >= captured_frames &&
@@ -726,8 +723,7 @@ class VideoMediaChannelTest : public testing::Test,
// timestamp is set to the last frame's timestamp + interval. WebRTC will
// not render a frame with the same timestamp so capture another frame
// with the frame capturer to increment the next frame's timestamp.
EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
FOURCC_I420));
EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height));
}
}
@@ -764,8 +760,9 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(kSsrc)));
EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_));
cricket::VideoFormat capture_format; // default format
capture_format.interval = cricket::VideoFormat::FpsToInterval(kFramerate);
cricket::VideoFormat capture_format(
kVideoWidth, kVideoHeight,
cricket::VideoFormat::FpsToInterval(kFramerate), cricket::FOURCC_I420);
// Set up additional stream 1.
cricket::FakeVideoRenderer renderer1;
EXPECT_FALSE(channel_->SetSink(1, &renderer1));
@@ -799,13 +796,11 @@ class VideoMediaChannelTest : public testing::Test,
// Test capturer associated with engine.
const int kTestWidth = 160;
const int kTestHeight = 120;
EXPECT_TRUE(capturer1->CaptureCustomFrame(kTestWidth, kTestHeight,
cricket::FOURCC_I420));
EXPECT_TRUE(capturer1->CaptureCustomFrame(kTestWidth, kTestHeight));
EXPECT_FRAME_ON_RENDERER_WAIT(
renderer1, 1, kTestWidth, kTestHeight, kTimeout);
// Capture a frame with additional capturer2, frames should be received
EXPECT_TRUE(capturer2->CaptureCustomFrame(kTestWidth, kTestHeight,
cricket::FOURCC_I420));
EXPECT_TRUE(capturer2->CaptureCustomFrame(kTestWidth, kTestHeight));
EXPECT_FRAME_ON_RENDERER_WAIT(
renderer2, 1, kTestWidth, kTestHeight, kTimeout);
// Successfully remove the capturer.

media/engine/webrtcvideoengine_unittest.cc

@@ -2341,7 +2341,7 @@ TEST_F(WebRtcVideoChannelTest, AdaptsOnOveruseAndChangeResolution) {
ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720));
EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1280, send_stream->GetLastWidth());
EXPECT_EQ(720, send_stream->GetLastHeight());
@@ -2351,7 +2351,7 @@ TEST_F(WebRtcVideoChannelTest, AdaptsOnOveruseAndChangeResolution) {
wants.max_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720));
EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
@@ -2360,13 +2360,13 @@ TEST_F(WebRtcVideoChannelTest, AdaptsOnOveruseAndChangeResolution) {
wants.max_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720));
EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1280 * 2 / 4, send_stream->GetLastWidth());
EXPECT_EQ(720 * 2 / 4, send_stream->GetLastHeight());
// Change input resolution.
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724));
EXPECT_EQ(4, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1284 / 2, send_stream->GetLastWidth());
EXPECT_EQ(724 / 2, send_stream->GetLastHeight());
@@ -2380,7 +2380,7 @@ TEST_F(WebRtcVideoChannelTest, AdaptsOnOveruseAndChangeResolution) {
// Default step down is 3/5 pixel count, so go up by 5/3.
wants.target_pixel_count = (current_pixel_count * 5 / 3);
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724));
EXPECT_EQ(5, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1284 * 3 / 4, send_stream->GetLastWidth());
EXPECT_EQ(724 * 3 / 4, send_stream->GetLastHeight());
@@ -2391,7 +2391,7 @@ TEST_F(WebRtcVideoChannelTest, AdaptsOnOveruseAndChangeResolution) {
wants.max_pixel_count = current_pixel_count * 4;
wants.target_pixel_count = (current_pixel_count * 5 / 3);
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724));
EXPECT_EQ(6, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1284, send_stream->GetLastWidth());
EXPECT_EQ(724, send_stream->GetLastHeight());
@@ -2425,7 +2425,7 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) {
ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720));
EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1280, send_stream->GetLastWidth());
EXPECT_EQ(720, send_stream->GetLastHeight());
@@ -2435,7 +2435,7 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) {
wants.max_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1;
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720));
EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
@@ -2448,7 +2448,7 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) {
screenshare_options.is_screencast = true;
channel_->SetVideoSend(last_ssrc_, true /* enable */, &screenshare_options,
&screen_share);
EXPECT_TRUE(screen_share.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
EXPECT_TRUE(screen_share.CaptureCustomFrame(1284, 724));
ASSERT_EQ(2, fake_call_->GetNumCreatedSendStreams());
send_stream = fake_call_->GetVideoSendStreams().front();
EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
@@ -2463,7 +2463,7 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) {
// In practice, it will be populated from
// VideoStreamEncoder::VideoSourceProxy::SetSource(), so simulate that here.
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720));
ASSERT_EQ(3, fake_call_->GetNumCreatedSendStreams());
send_stream = fake_call_->GetVideoSendStreams().front();
EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
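The resolutions these expectations check follow from the cumulative downscale the injected sink wants trigger: 3/4 per dimension first, then 1/2 of the original, and a new 1284x724 input inherits the 1/2 scale. A pure-arithmetic illustration of those numbers, not a quote of the adapter's implementation:

#include <cstdio>

int main() {
  // Step 1: 3/4 in each dimension.
  std::printf("%dx%d\n", 1280 * 3 / 4, 720 * 3 / 4);  // 960x540
  // Step 2: a further step down to a cumulative 1/2.
  std::printf("%dx%d\n", 1280 * 2 / 4, 720 * 2 / 4);  // 640x360
  // A new 1284x724 input keeps the cumulative 1/2 scale.
  std::printf("%dx%d\n", 1284 / 2, 724 / 2);  // 642x362
  return 0;
}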