Add multiplex case to webrtc_perf_tests

This CL adds two new tests to perf, covering I420 and I420A input to multiplex
codec. In order to have the correct input, it adds I420A case to
SquareGenerator and corresponding PSNR and SSIM calculations.

Bug: webrtc:7671
Change-Id: I9735d725bbfba457e804e29907cee55406ae5c8d
Reviewed-on: https://webrtc-review.googlesource.com/52180
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Reviewed-by: Niklas Enbom <niklas.enbom@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22330}
This commit is contained in:
Emircan Uysaler 2018-03-01 12:19:54 -08:00 committed by Commit Bot
parent 98bb968f7b
commit d90a7e8424
15 changed files with 263 additions and 75 deletions

View File

@ -172,8 +172,8 @@ class BitrateEstimatorTest : public test::CallTest {
test_->video_encoder_config_.Copy());
RTC_DCHECK_EQ(1, test_->video_encoder_config_.number_of_streams);
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
kDefaultWidth, kDefaultHeight, kDefaultFramerate,
Clock::GetRealTimeClock()));
kDefaultWidth, kDefaultHeight, rtc::nullopt, rtc::nullopt,
kDefaultFramerate, Clock::GetRealTimeClock()));
send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kMaintainFramerate);

View File

@ -13,6 +13,8 @@
#include <string.h>
#include "api/video/i420_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
#include "third_party/libyuv/include/libyuv.h"
@ -203,6 +205,78 @@ int ConvertFromI420(const VideoFrame& src_frame,
ConvertVideoType(dst_video_type));
}
// Helper functions for keeping references alive.
// No-op function bound into WrapI420ABuffer's "no longer used" callback:
// capturing the two scoped_refptr arguments by value keeps the backing
// buffers alive for as long as the wrapping frame buffer exists.
void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
// Scales an I420A buffer to |target_width| x |target_height|.
// The Y/U/V planes are scaled via I420Buffer::ScaleFrom; the alpha plane is
// scaled separately with libyuv::ScalePlane (box filter), reusing the Y plane
// of a second I420Buffer as alpha storage. The result wraps the two
// intermediate buffers without copying; KeepBufferRefs keeps them alive.
rtc::scoped_refptr<I420ABufferInterface> ScaleI420ABuffer(
const I420ABufferInterface& buffer,
int target_width,
int target_height) {
rtc::scoped_refptr<I420Buffer> yuv_buffer =
I420Buffer::Create(target_width, target_height);
yuv_buffer->ScaleFrom(buffer);
// |axx_buffer| is an I420 buffer whose Y plane holds the scaled alpha; its
// U/V planes are unused.
rtc::scoped_refptr<I420Buffer> axx_buffer =
I420Buffer::Create(target_width, target_height);
libyuv::ScalePlane(buffer.DataA(), buffer.StrideA(), buffer.width(),
buffer.height(), axx_buffer->MutableDataY(),
axx_buffer->StrideY(), target_width, target_height,
libyuv::kFilterBox);
rtc::scoped_refptr<I420ABufferInterface> merged_buffer = WrapI420ABuffer(
yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(),
axx_buffer->StrideY(),
rtc::Bind(&KeepBufferRefs, yuv_buffer, axx_buffer));
return merged_buffer;
}
// Compute PSNR for an I420A frame (all planes). Can upscale test frame.
// Requires ref_buffer dimensions >= test_buffer dimensions; when they differ
// the test buffer is upscaled to the reference size and the function recurses
// once. The result aggregates SSE over Y, U, V and A before converting to
// PSNR, and is capped at kPerfectPSNR.
double I420APSNR(const I420ABufferInterface& ref_buffer,
const I420ABufferInterface& test_buffer) {
RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width());
RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height());
if ((ref_buffer.width() != test_buffer.width()) ||
(ref_buffer.height() != test_buffer.height())) {
rtc::scoped_refptr<I420ABufferInterface> scaled_buffer =
ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height());
return I420APSNR(ref_buffer, *scaled_buffer);
}
const int width = test_buffer.width();
const int height = test_buffer.height();
const uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane(
ref_buffer.DataY(), ref_buffer.StrideY(), test_buffer.DataY(),
test_buffer.StrideY(), width, height);
// Chroma planes are subsampled 2x2; round dimensions up for odd sizes.
const int width_uv = (width + 1) >> 1;
const int height_uv = (height + 1) >> 1;
const uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane(
ref_buffer.DataU(), ref_buffer.StrideU(), test_buffer.DataU(),
test_buffer.StrideU(), width_uv, height_uv);
const uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane(
ref_buffer.DataV(), ref_buffer.StrideV(), test_buffer.DataV(),
test_buffer.StrideV(), width_uv, height_uv);
// The alpha plane has full (luma) resolution.
const uint64_t sse_a = libyuv::ComputeSumSquareErrorPlane(
ref_buffer.DataA(), ref_buffer.StrideA(), test_buffer.DataA(),
test_buffer.StrideA(), width, height);
// Total samples: Y + A at full resolution plus U + V at chroma resolution.
const uint64_t samples = 2 * (uint64_t)width * (uint64_t)height +
2 * ((uint64_t)width_uv * (uint64_t)height_uv);
const uint64_t sse = sse_y + sse_u + sse_v + sse_a;
const double psnr = libyuv::SumSquareErrorToPsnr(sse, samples);
return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr;
}
// Compute PSNR for an I420A frame (all planes).
// Convenience overload taking VideoFrame pointers. Returns -1 if either frame
// is null. Both frames must already carry kI420A buffers (DCHECKed) — unlike
// the I420 overload, no conversion is performed here.
double I420APSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
RTC_DCHECK(ref_frame->video_frame_buffer()->type() ==
VideoFrameBuffer::Type::kI420A);
RTC_DCHECK(test_frame->video_frame_buffer()->type() ==
VideoFrameBuffer::Type::kI420A);
return I420APSNR(*ref_frame->video_frame_buffer()->GetI420A(),
*test_frame->video_frame_buffer()->GetI420A());
}
// Compute PSNR for an I420 frame (all planes). Can upscale test frame.
double I420PSNR(const I420BufferInterface& ref_buffer,
const I420BufferInterface& test_buffer) {
@ -234,6 +308,41 @@ double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
*test_frame->video_frame_buffer()->ToI420());
}
// Compute SSIM for an I420A frame (all planes). Can upscale test frame.
// Requires ref_buffer dimensions >= test_buffer dimensions; when they differ
// the test buffer is upscaled to the reference size and the function recurses
// once. Combines the Y/U/V SSIM with the alpha-plane SSIM using a fixed
// weighting (alpha weighted 0.8 relative to YUV, normalized by 1.8).
double I420ASSIM(const I420ABufferInterface& ref_buffer,
const I420ABufferInterface& test_buffer) {
RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width());
RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height());
if ((ref_buffer.width() != test_buffer.width()) ||
(ref_buffer.height() != test_buffer.height())) {
rtc::scoped_refptr<I420ABufferInterface> scaled_buffer =
ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height());
return I420ASSIM(ref_buffer, *scaled_buffer);
}
const double yuv_ssim = libyuv::I420Ssim(
ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(),
ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(),
test_buffer.DataY(), test_buffer.StrideY(), test_buffer.DataU(),
test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(),
test_buffer.width(), test_buffer.height());
// Alpha is a single full-resolution plane; compare it like a luma plane.
const double a_ssim = libyuv::CalcFrameSsim(
ref_buffer.DataA(), ref_buffer.StrideA(), test_buffer.DataA(),
test_buffer.StrideA(), test_buffer.width(), test_buffer.height());
// NOTE(review): the 0.8 alpha weight appears to be a tuning choice — TODO
// confirm against the perf-test expectations before changing it.
return (yuv_ssim + (a_ssim * 0.8)) / 1.8;
}
// Compute SSIM for an I420A frame (all planes).
// Convenience overload taking VideoFrame pointers. Returns -1 if either frame
// is null. Both frames must already carry kI420A buffers (DCHECKed); no
// conversion is performed.
double I420ASSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
RTC_DCHECK(ref_frame->video_frame_buffer()->type() ==
VideoFrameBuffer::Type::kI420A);
RTC_DCHECK(test_frame->video_frame_buffer()->type() ==
VideoFrameBuffer::Type::kI420A);
return I420ASSIM(*ref_frame->video_frame_buffer()->GetI420A(),
*test_frame->video_frame_buffer()->GetI420A());
}
// Compute SSIM for an I420 frame (all planes). Can upscale test_buffer.
double I420SSIM(const I420BufferInterface& ref_buffer,
const I420BufferInterface& test_buffer) {
@ -253,6 +362,7 @@ double I420SSIM(const I420BufferInterface& ref_buffer,
test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(),
test_buffer.width(), test_buffer.height());
}
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;

View File

@ -613,6 +613,7 @@ rtc_source_set("test_common") {
"../common_video",
"../logging:rtc_event_log_api",
"../logging:rtc_event_log_impl_base",
"../media:rtc_internal_video_codecs",
"../media:rtc_media_base",
"../modules/audio_device:mock_audio_device",
"../modules/audio_mixer:audio_mixer_impl",
@ -623,6 +624,7 @@ rtc_source_set("test_common") {
"../modules/video_coding:video_codec_interface",
"../modules/video_coding:video_coding_utility",
"../modules/video_coding:webrtc_h264",
"../modules/video_coding:webrtc_multiplex",
"../modules/video_coding:webrtc_vp8",
"../modules/video_coding:webrtc_vp9",
"../rtc_base:checks",

View File

@ -6,6 +6,7 @@ include_rules = [
"+common_video",
"+logging/rtc_event_log",
"+media/base",
"+media/engine",
"+modules/audio_coding",
"+modules/audio_device",
"+modules/audio_mixer",

View File

@ -316,7 +316,7 @@ void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock,
int width,
int height) {
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
width, height, framerate * speed, clock));
width, height, rtc::nullopt, rtc::nullopt, framerate * speed, clock));
video_send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kMaintainFramerate);
@ -325,8 +325,8 @@ void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock,
void CallTest::CreateFrameGeneratorCapturer(int framerate,
int width,
int height) {
frame_generator_capturer_.reset(
test::FrameGeneratorCapturer::Create(width, height, framerate, clock_));
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
width, height, rtc::nullopt, rtc::nullopt, framerate, clock_));
video_send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kMaintainFramerate);

View File

@ -12,7 +12,9 @@
#include <algorithm>
#include <string>
#include "media/engine/internaldecoderfactory.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/refcountedobject.h"
@ -102,6 +104,9 @@ VideoReceiveStream::Decoder CreateMatchingDecoder(
decoder.decoder = VP8Decoder::Create().release();
} else if (encoder_settings.payload_name == "VP9") {
decoder.decoder = VP9Decoder::Create().release();
} else if (encoder_settings.payload_name == "multiplex") {
decoder.decoder = new MultiplexDecoderAdapter(
new InternalDecoderFactory(), SdpVideoFormat(cricket::kVp9CodecName));
} else {
decoder.decoder = new FakeDecoder();
}

View File

@ -28,12 +28,17 @@ namespace webrtc {
namespace test {
namespace {
// Helper method for keeping a reference to passed pointers.
// No-op function bound into WrapI420ABuffer's release callback; capturing the
// two scoped_refptr arguments by value keeps the backing planes alive while
// the wrapped I420A buffer is in use.
void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
// SquareGenerator is a FrameGenerator that draws a given amount of randomly
// sized and colored squares. Between each new generated frame, the squares
// are moved slightly towards the lower right corner.
class SquareGenerator : public FrameGenerator {
public:
SquareGenerator(int width, int height, int num_squares) {
SquareGenerator(int width, int height, OutputType type, int num_squares)
: type_(type) {
ChangeResolution(width, height);
for (int i = 0; i < num_squares; ++i) {
squares_.emplace_back(new Square(width, height, i + 1));
@ -48,16 +53,39 @@ class SquareGenerator : public FrameGenerator {
RTC_CHECK(height_ > 0);
}
VideoFrame* NextFrame() override {
rtc::CritScope lock(&crit_);
rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width_, height_));
memset(buffer->MutableDataY(), 127, height_ * buffer->StrideY());
// Allocates a width x height I420 buffer with every plane filled with 127
// (mid-gray), giving the squares a neutral background to draw over.
rtc::scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height) {
rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
memset(buffer->MutableDataY(), 127, height * buffer->StrideY());
memset(buffer->MutableDataU(), 127,
buffer->ChromaHeight() * buffer->StrideU());
memset(buffer->MutableDataV(), 127,
buffer->ChromaHeight() * buffer->StrideV());
return buffer;
}
VideoFrame* NextFrame() override {
rtc::CritScope lock(&crit_);
rtc::scoped_refptr<VideoFrameBuffer> buffer = nullptr;
switch (type_) {
case OutputType::I420: {
buffer = CreateI420Buffer(width_, height_);
break;
}
case OutputType::I420A: {
rtc::scoped_refptr<I420Buffer> yuv_buffer =
CreateI420Buffer(width_, height_);
rtc::scoped_refptr<I420Buffer> axx_buffer =
CreateI420Buffer(width_, height_);
buffer = WrapI420ABuffer(
yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(),
axx_buffer->StrideY(),
rtc::Bind(&KeepBufferRefs, yuv_buffer, axx_buffer));
break;
}
}
for (const auto& square : squares_)
square->Draw(buffer);
@ -76,25 +104,41 @@ class SquareGenerator : public FrameGenerator {
length_(random_generator_.Rand(1, width > 4 ? width / 4 : 1)),
yuv_y_(random_generator_.Rand(0, 255)),
yuv_u_(random_generator_.Rand(0, 255)),
yuv_v_(random_generator_.Rand(0, 255)) {}
yuv_v_(random_generator_.Rand(0, 255)),
yuv_a_(random_generator_.Rand(0, 255)) {}
void Draw(const rtc::scoped_refptr<I420Buffer>& buffer) {
void Draw(const rtc::scoped_refptr<VideoFrameBuffer>& frame_buffer) {
RTC_DCHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 ||
frame_buffer->type() == VideoFrameBuffer::Type::kI420A);
rtc::scoped_refptr<I420BufferInterface> buffer = frame_buffer->ToI420();
x_ = (x_ + random_generator_.Rand(0, 4)) % (buffer->width() - length_);
y_ = (y_ + random_generator_.Rand(0, 4)) % (buffer->height() - length_);
for (int y = y_; y < y_ + length_; ++y) {
uint8_t* pos_y =
(buffer->MutableDataY() + x_ + y * buffer->StrideY());
memset(pos_y, yuv_y_, length_);
}
for (int y = y_; y < y_ + length_; ++y) {
uint8_t* pos_y = (const_cast<uint8_t*>(buffer->DataY()) + x_ +
y * buffer->StrideY());
memset(pos_y, yuv_y_, length_);
}
for (int y = y_; y < y_ + length_; y = y + 2) {
uint8_t* pos_u =
(buffer->MutableDataU() + x_ / 2 + y / 2 * buffer->StrideU());
memset(pos_u, yuv_u_, length_ / 2);
uint8_t* pos_v =
(buffer->MutableDataV() + x_ / 2 + y / 2 * buffer->StrideV());
memset(pos_v, yuv_v_, length_ / 2);
}
for (int y = y_; y < y_ + length_; y = y + 2) {
uint8_t* pos_u = (const_cast<uint8_t*>(buffer->DataU()) + x_ / 2 +
y / 2 * buffer->StrideU());
memset(pos_u, yuv_u_, length_ / 2);
uint8_t* pos_v = (const_cast<uint8_t*>(buffer->DataV()) + x_ / 2 +
y / 2 * buffer->StrideV());
memset(pos_v, yuv_v_, length_ / 2);
}
if (frame_buffer->type() == VideoFrameBuffer::Type::kI420)
return;
// Optionally draw on alpha plane if given.
const webrtc::I420ABufferInterface* yuva_buffer =
frame_buffer->GetI420A();
for (int y = y_; y < y_ + length_; ++y) {
uint8_t* pos_y = (const_cast<uint8_t*>(yuva_buffer->DataA()) + x_ +
y * yuva_buffer->StrideA());
memset(pos_y, yuv_a_, length_);
}
}
private:
@ -105,9 +149,11 @@ class SquareGenerator : public FrameGenerator {
const uint8_t yuv_y_;
const uint8_t yuv_u_;
const uint8_t yuv_v_;
const uint8_t yuv_a_;
};
rtc::CriticalSection crit_;
const OutputType type_;
int width_ RTC_GUARDED_BY(&crit_);
int height_ RTC_GUARDED_BY(&crit_);
std::vector<std::unique_ptr<Square>> squares_ RTC_GUARDED_BY(&crit_);
@ -396,15 +442,12 @@ bool FrameForwarder::has_sinks() const {
std::unique_ptr<FrameGenerator> FrameGenerator::CreateSquareGenerator(
int width,
int height) {
int height,
rtc::Optional<OutputType> type,
rtc::Optional<int> num_squares) {
return std::unique_ptr<FrameGenerator>(
new SquareGenerator(width, height, 10));
}
std::unique_ptr<FrameGenerator>
FrameGenerator::CreateSquareGenerator(int width, int height, int num_squares) {
return std::unique_ptr<FrameGenerator>(
new SquareGenerator(width, height, num_squares));
new SquareGenerator(width, height, type.value_or(OutputType::I420),
num_squares.value_or(10)));
}
std::unique_ptr<FrameGenerator> FrameGenerator::CreateSlideGenerator(

View File

@ -58,13 +58,14 @@ class FrameGenerator {
RTC_NOTREACHED();
}
enum class OutputType { I420, I420A };
// Creates a frame generator that produces frames with small squares that
// move randomly towards the lower right corner.
static std::unique_ptr<FrameGenerator> CreateSquareGenerator(int width,
int height);
static std::unique_ptr<FrameGenerator> CreateSquareGenerator(int width,
int height,
int num_squares);
static std::unique_ptr<FrameGenerator> CreateSquareGenerator(
int width,
int height,
rtc::Optional<OutputType> type,
rtc::Optional<int> num_squares);
// Creates a frame generator that repeatedly plays a set of yuv files.
// The frame_repeat_count determines how many times each frame is shown,

View File

@ -19,7 +19,6 @@
#include "rtc_base/task_queue.h"
#include "rtc_base/timeutils.h"
#include "system_wrappers/include/clock.h"
#include "test/frame_generator.h"
#include "call/video_send_stream.h"
namespace webrtc {
@ -85,25 +84,16 @@ class FrameGeneratorCapturer::InsertFrameTask : public rtc::QueuedTask {
int64_t intended_run_time_ms_;
};
FrameGeneratorCapturer* FrameGeneratorCapturer::Create(int width,
int height,
int target_fps,
Clock* clock) {
FrameGeneratorCapturer* FrameGeneratorCapturer::Create(
int width,
int height,
rtc::Optional<FrameGenerator::OutputType> type,
rtc::Optional<int> num_squares,
int target_fps,
Clock* clock) {
std::unique_ptr<FrameGeneratorCapturer> capturer(new FrameGeneratorCapturer(
clock, FrameGenerator::CreateSquareGenerator(width, height), target_fps));
if (!capturer->Init())
return nullptr;
return capturer.release();
}
FrameGeneratorCapturer* FrameGeneratorCapturer::Create(int width,
int height,
int num_squares,
int target_fps,
Clock* clock) {
std::unique_ptr<FrameGeneratorCapturer> capturer(new FrameGeneratorCapturer(
clock, FrameGenerator::CreateSquareGenerator(width, height, num_squares),
clock,
FrameGenerator::CreateSquareGenerator(width, height, type, num_squares),
target_fps));
if (!capturer->Init())
return nullptr;

View File

@ -16,6 +16,7 @@
#include "api/video/video_frame.h"
#include "rtc_base/criticalsection.h"
#include "rtc_base/task_queue.h"
#include "test/frame_generator.h"
#include "test/video_capturer.h"
#include "typedefs.h" // NOLINT(build/include)
@ -40,16 +41,13 @@ class FrameGeneratorCapturer : public VideoCapturer {
virtual ~SinkWantsObserver() {}
};
static FrameGeneratorCapturer* Create(int width,
int height,
int target_fps,
Clock* clock);
static FrameGeneratorCapturer* Create(int width,
int height,
int num_squares,
int target_fps,
Clock* clock);
static FrameGeneratorCapturer* Create(
int width,
int height,
rtc::Optional<FrameGenerator::OutputType> type,
rtc::Optional<int> num_squares,
int target_fps,
Clock* clock);
static FrameGeneratorCapturer* CreateFromYuvFile(const std::string& file_name,
size_t width,

View File

@ -118,6 +118,7 @@ if (rtc_include_tests) {
"../modules/audio_mixer:audio_mixer_impl",
"../modules/rtp_rtcp",
"../modules/video_coding:webrtc_h264",
"../modules/video_coding:webrtc_multiplex",
"../modules/video_coding:webrtc_vp8",
"../modules/video_coding:webrtc_vp9",
"../rtc_base:rtc_base_approved",

View File

@ -131,7 +131,8 @@ TEST_P(CallOperationEndToEndTest, RendersSingleDelayedFrame) {
// Create frames that are smaller than the send width/height, this is done
// to check that the callbacks are done after processing video.
std::unique_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateSquareGenerator(kWidth, kHeight));
test::FrameGenerator::CreateSquareGenerator(
kWidth, kHeight, rtc::nullopt, rtc::nullopt));
video_send_stream_->SetSource(
&frame_forwarder,
VideoSendStream::DegradationPreference::kMaintainFramerate);
@ -188,7 +189,7 @@ TEST_P(CallOperationEndToEndTest, TransmitsFirstFrame) {
Start();
frame_generator = test::FrameGenerator::CreateSquareGenerator(
kDefaultWidth, kDefaultHeight);
kDefaultWidth, kDefaultHeight, rtc::nullopt, rtc::nullopt);
video_send_stream_->SetSource(
&frame_forwarder,
VideoSendStream::DegradationPreference::kMaintainFramerate);
@ -267,7 +268,7 @@ TEST_P(CallOperationEndToEndTest, ObserversEncodedFrames) {
Start();
frame_generator = test::FrameGenerator::CreateSquareGenerator(
kDefaultWidth, kDefaultHeight);
kDefaultWidth, kDefaultHeight, rtc::nullopt, rtc::nullopt);
video_send_stream_->SetSource(
&forwarder, VideoSendStream::DegradationPreference::kMaintainFramerate);
forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());

View File

@ -100,7 +100,8 @@ void MultiStreamTester::RunTest() {
receive_streams[i]->Start();
frame_generators[i] = test::FrameGeneratorCapturer::Create(
width, height, 30, Clock::GetRealTimeClock());
width, height, rtc::nullopt, rtc::nullopt, 30,
Clock::GetRealTimeClock());
send_streams[i]->SetSource(
frame_generators[i],
VideoSendStream::DegradationPreference::kMaintainFramerate);

View File

@ -73,6 +73,29 @@ TEST_F(FullStackTest, ForemanCifPlr5Vp9) {
foreman_cif.pipe.queue_delay_ms = 50;
RunTest(foreman_cif);
}
// Full-stack perf test: multiplex codec fed with plain I420 frames from the
// foreman_cif clip (no packet loss, fixed 700 kbps).
TEST_F(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video[0] = {true, 352, 288, 30, 700000,
700000, 700000, false, "multiplex", 1,
0, 0, false, false, "foreman_cif"};
foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_Multiplex", 0.0, 0.0,
kFullStackTestDurationSecs};
RunTest(foreman_cif);
}
// Full-stack perf test: multiplex codec fed with synthetic I420A (alpha)
// frames via the "GeneratorI420A" clip name (no packet loss, fixed 700 kbps).
TEST_F(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420AFrame) {
VideoQualityTest::Params foreman_cif;
foreman_cif.call.send_side_bwe = true;
foreman_cif.video[0] = {true, 352, 288, 30, 700000,
700000, 700000, false, "multiplex", 1,
0, 0, false, false, "GeneratorI420A"};
// Use a distinct analyzer label: the I420 variant of this test already
// reports under "..._Multiplex", and two tests sharing one graph name would
// make their perf metrics collide on the dashboard.
foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_Multiplex_I420A",
0.0, 0.0, kFullStackTestDurationSecs};
RunTest(foreman_cif);
}
#endif // !defined(RTC_DISABLE_VP9)
TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {

View File

@ -24,6 +24,7 @@
#include "modules/rtp_rtcp/source/rtp_format.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/cpu_time.h"
@ -1386,6 +1387,10 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
} else if (params_.video[video_idx].codec == "VP9") {
video_encoders_[video_idx] = VP9Encoder::Create();
payload_type = kPayloadTypeVP9;
} else if (params_.video[video_idx].codec == "multiplex") {
video_encoders_[video_idx] = rtc::MakeUnique<MultiplexEncoderAdapter>(
new InternalEncoderFactory(), SdpVideoFormat(cricket::kVp9CodecName));
payload_type = kPayloadTypeVP9;
} else {
RTC_NOTREACHED() << "Codec not supported!";
return;
@ -1701,7 +1706,7 @@ void VideoQualityTest::SetupThumbnailCapturers(size_t num_thumbnail_streams) {
for (size_t i = 0; i < num_thumbnail_streams; ++i) {
thumbnail_capturers_.emplace_back(test::FrameGeneratorCapturer::Create(
static_cast<int>(thumbnail.width), static_cast<int>(thumbnail.height),
thumbnail.max_framerate, clock_));
rtc::nullopt, rtc::nullopt, thumbnail.max_framerate, clock_));
RTC_DCHECK(thumbnail_capturers_.back());
}
}
@ -1765,9 +1770,15 @@ void VideoQualityTest::CreateCapturers() {
video_capturers_[video_idx].reset(frame_generator_capturer);
} else {
if (params_.video[video_idx].clip_name == "Generator") {
video_capturers_[video_idx].reset(test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video[video_idx].width),
static_cast<int>(params_.video[video_idx].height), rtc::nullopt,
rtc::nullopt, params_.video[video_idx].fps, clock_));
} else if (params_.video[video_idx].clip_name == "GeneratorI420A") {
video_capturers_[video_idx].reset(test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video[video_idx].width),
static_cast<int>(params_.video[video_idx].height),
test::FrameGenerator::OutputType::I420A, rtc::nullopt,
params_.video[video_idx].fps, clock_));
} else if (params_.video[video_idx].clip_name.empty()) {
video_capturers_[video_idx].reset(test::VcmCapturer::Create(
@ -1780,7 +1791,8 @@ void VideoQualityTest::CreateCapturers() {
test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video[video_idx].width),
static_cast<int>(params_.video[video_idx].height),
params_.video[video_idx].fps, clock_));
rtc::nullopt, rtc::nullopt, params_.video[video_idx].fps,
clock_));
}
} else {
video_capturers_[video_idx].reset(