Added VP9 simulcast tests.

For them, implemented upscaling of the test frame in the libyuv metrics calculation.
Updated maximum number of SL in vp9 encoder to 3.
Renamed some fields in the video quality test analyzer.

BUG=webrtc:7095

Review-Url: https://codereview.webrtc.org/2681683003
Cr-Commit-Position: refs/heads/master@{#16625}
This commit is contained in:
ilnik 2017-02-15 02:23:28 -08:00 committed by Commit bot
parent 27260ced9f
commit 2a8c2f589a
5 changed files with 149 additions and 108 deletions

View File

@ -288,22 +288,24 @@ int ConvertFromI420(const VideoFrame& src_frame,
ConvertVideoType(dst_video_type));
}
// Compute PSNR for an I420 frame (all planes)
// Compute PSNR for an I420 frame (all planes). Can upscale test frame.
double I420PSNR(const VideoFrameBuffer& ref_buffer,
const VideoFrameBuffer& test_buffer) {
RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width());
RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height());
if ((ref_buffer.width() != test_buffer.width()) ||
(ref_buffer.height() != test_buffer.height()))
return -1;
else if (ref_buffer.width() < 0 || ref_buffer.height() < 0)
return -1;
double psnr = libyuv::I420Psnr(ref_buffer.DataY(), ref_buffer.StrideY(),
ref_buffer.DataU(), ref_buffer.StrideU(),
ref_buffer.DataV(), ref_buffer.StrideV(),
test_buffer.DataY(), test_buffer.StrideY(),
test_buffer.DataU(), test_buffer.StrideU(),
test_buffer.DataV(), test_buffer.StrideV(),
test_buffer.width(), test_buffer.height());
(ref_buffer.height() != test_buffer.height())) {
rtc::scoped_refptr<I420Buffer> scaled_buffer =
I420Buffer::Create(ref_buffer.width(), ref_buffer.height());
scaled_buffer->ScaleFrom(test_buffer);
return I420PSNR(ref_buffer, *scaled_buffer);
}
double psnr = libyuv::I420Psnr(
ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(),
ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(),
test_buffer.DataY(), test_buffer.StrideY(), test_buffer.DataU(),
test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(),
test_buffer.width(), test_buffer.height());
// LibYuv sets the max psnr value to 128, we restrict it here.
// In case of 0 mse in one frame, 128 can skew the results significantly.
return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr;
@ -317,22 +319,24 @@ double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
*test_frame->video_frame_buffer());
}
// Compute SSIM for an I420 frame (all planes)
// Compute SSIM for an I420 frame (all planes). Can upscale test_buffer.
double I420SSIM(const VideoFrameBuffer& ref_buffer,
const VideoFrameBuffer& test_buffer) {
RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width());
RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height());
if ((ref_buffer.width() != test_buffer.width()) ||
(ref_buffer.height() != test_buffer.height()))
return -1;
else if (ref_buffer.width() < 0 || ref_buffer.height() < 0)
return -1;
return libyuv::I420Ssim(ref_buffer.DataY(), ref_buffer.StrideY(),
ref_buffer.DataU(), ref_buffer.StrideU(),
ref_buffer.DataV(), ref_buffer.StrideV(),
test_buffer.DataY(), test_buffer.StrideY(),
test_buffer.DataU(), test_buffer.StrideU(),
test_buffer.DataV(), test_buffer.StrideV(),
test_buffer.width(), test_buffer.height());
(ref_buffer.height() != test_buffer.height())) {
rtc::scoped_refptr<I420Buffer> scaled_buffer =
I420Buffer::Create(ref_buffer.width(), ref_buffer.height());
scaled_buffer->ScaleFrom(test_buffer);
return I420SSIM(ref_buffer, *scaled_buffer);
}
return libyuv::I420Ssim(
ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(),
ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(),
test_buffer.DataY(), test_buffer.StrideY(), test_buffer.DataU(),
test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(),
test_buffer.width(), test_buffer.height());
}
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
if (!ref_frame || !test_frame)

View File

@ -251,8 +251,8 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
if (inst->VP9().numberOfTemporalLayers > 3) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
// libvpx currently supports only one or two spatial layers.
if (inst->VP9().numberOfSpatialLayers > 2) {
// libvpx probably does not support more than 3 spatial layers.
if (inst->VP9().numberOfSpatialLayers > 3) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}

View File

@ -360,6 +360,47 @@ TEST_F(FullStackTest, ScreenshareSlidesVP9_2SL) {
screenshare.ss = {std::vector<VideoStream>(), 0, 2, 1};
RunTest(screenshare);
}
// Runs a full-stack quality test with VP9 SVC: 3 spatial layers, analyzing
// the highest spatial layer (index 2).
TEST_F(FullStackTest, VP9SVC_3SL_High) {
  // Named `svc` (not `simulcast`) because this configures spatial layers of a
  // single VP9 stream via params.ss, not independent simulcast streams.
  VideoQualityTest::Params svc;
  svc.call.send_side_bwe = true;
  // Positional init; presumably {enabled, width, height, fps, min/target/max
  // bitrate (bps), suspend_below_min, codec, num temporal layers, selected TL,
  // min transmit bitrate, fec flags, encoded frame dump, clip name} — confirm
  // against VideoQualityTest::Params::Video.
  svc.video = {true,   1280,    720,     50,
               800000, 2500000, 2500000, false,
               "VP9",  1,       0,       400000,
               false,  false,   "",      "ConferenceMotion_1280_720_50"};
  svc.analyzer = {"vp9svc_3sl_high", 0.0, 0.0, kFullStackTestDurationSecs};
  svc.logs = false;
  // {streams, selected_stream, num_spatial_layers, selected_sl}: 3 spatial
  // layers, analyze layer 2 (the highest).
  svc.ss = {std::vector<VideoStream>(), 0, 3, 2};
  RunTest(svc);
}
// Runs a full-stack quality test with VP9 SVC: 3 spatial layers, analyzing
// the middle spatial layer (index 1).
TEST_F(FullStackTest, VP9SVC_3SL_Medium) {
  // Named `svc` (not `simulcast`) because this configures spatial layers of a
  // single VP9 stream via params.ss, not independent simulcast streams.
  VideoQualityTest::Params svc;
  svc.call.send_side_bwe = true;
  // Positional init; presumably {enabled, width, height, fps, min/target/max
  // bitrate (bps), suspend_below_min, codec, num temporal layers, selected TL,
  // min transmit bitrate, fec flags, encoded frame dump, clip name} — confirm
  // against VideoQualityTest::Params::Video.
  svc.video = {true,   1280,    720,     50,
               800000, 2500000, 2500000, false,
               "VP9",  1,       0,       400000,
               false,  false,   "",      "ConferenceMotion_1280_720_50"};
  svc.analyzer = {"vp9svc_3sl_medium", 0.0, 0.0, kFullStackTestDurationSecs};
  svc.logs = false;
  // {streams, selected_stream, num_spatial_layers, selected_sl}: 3 spatial
  // layers, analyze layer 1 (the middle one).
  svc.ss = {std::vector<VideoStream>(), 0, 3, 1};
  RunTest(svc);
}
// Runs a full-stack quality test with VP9 SVC: 3 spatial layers, analyzing
// the lowest spatial layer (index 0).
TEST_F(FullStackTest, VP9SVC_3SL_Low) {
  // Named `svc` (not `simulcast`) because this configures spatial layers of a
  // single VP9 stream via params.ss, not independent simulcast streams.
  VideoQualityTest::Params svc;
  svc.call.send_side_bwe = true;
  // Positional init; presumably {enabled, width, height, fps, min/target/max
  // bitrate (bps), suspend_below_min, codec, num temporal layers, selected TL,
  // min transmit bitrate, fec flags, encoded frame dump, clip name} — confirm
  // against VideoQualityTest::Params::Video.
  svc.video = {true,   1280,    720,     50,
               800000, 2500000, 2500000, false,
               "VP9",  1,       0,       400000,
               false,  false,   "",      "ConferenceMotion_1280_720_50"};
  svc.analyzer = {"vp9svc_3sl_low", 0.0, 0.0, kFullStackTestDurationSecs};
  svc.logs = false;
  // {streams, selected_stream, num_spatial_layers, selected_sl}: 3 spatial
  // layers, analyze layer 0 (the lowest).
  svc.ss = {std::vector<VideoStream>(), 0, 3, 0};
  RunTest(svc);
}
#endif // !defined(RTC_DISABLE_VP9)
TEST_F(FullStackTest, SimulcastVP8_3SL_High) {
@ -369,7 +410,7 @@ TEST_F(FullStackTest, SimulcastVP8_3SL_High) {
800000, 2500000, 2500000, false,
"VP8", 1, 0, 400000,
false, false, "", "ConferenceMotion_1280_720_50"};
simulcast.analyzer = {"simulcast_vp8_3sl_demo", 0.0, 0.0,
simulcast.analyzer = {"simulcast_vp8_3sl_high", 0.0, 0.0,
kFullStackTestDurationSecs};
simulcast.pipe.loss_percent = 0;
simulcast.pipe.queue_delay_ms = 100;
@ -406,7 +447,7 @@ TEST_F(FullStackTest, SimulcastVP8_3SL_Medium) {
800000, 2500000, 2500000, false,
"VP8", 1, 0, 400000,
false, false, "", "ConferenceMotion_1280_720_50"};
simulcast.analyzer = {"simulcast_vp8_3sl_demo", 0.0, 0.0,
simulcast.analyzer = {"simulcast_vp8_3sl_medium", 0.0, 0.0,
kFullStackTestDurationSecs};
simulcast.pipe.loss_percent = 0;
simulcast.pipe.queue_delay_ms = 100;
@ -443,7 +484,7 @@ TEST_F(FullStackTest, SimulcastVP8_3SL_Low) {
800000, 2500000, 2500000, false,
"VP8", 1, 0, 400000,
false, false, "", "ConferenceMotion_1280_720_50"};
simulcast.analyzer = {"simulcast_vp8_3sl_demo", 0.0, 0.0,
simulcast.analyzer = {"simulcast_vp8_3sl_low", 0.0, 0.0,
kFullStackTestDurationSecs};
simulcast.pipe.loss_percent = 0;
simulcast.pipe.queue_delay_ms = 100;

View File

@ -137,8 +137,8 @@ class VideoAnalyzer : public PacketReceiver,
const std::string& graph_title,
uint32_t ssrc_to_analyze,
uint32_t rtx_ssrc_to_analyze,
uint32_t selected_width,
uint32_t selected_height,
uint32_t selected_stream_width,
uint32_t selected_stream_height,
bool is_quick_test_enabled)
: transport_(transport),
receiver_(nullptr),
@ -150,8 +150,8 @@ class VideoAnalyzer : public PacketReceiver,
graph_title_(graph_title),
ssrc_to_analyze_(ssrc_to_analyze),
rtx_ssrc_to_analyze_(rtx_ssrc_to_analyze),
selected_width_(selected_width),
selected_height_(selected_height),
selected_stream_width_(selected_stream_width),
selected_stream_height_(selected_stream_height),
pre_encode_proxy_(this),
encode_timing_proxy_(this),
frames_to_process_(duration_frames),
@ -232,7 +232,6 @@ class VideoAnalyzer : public PacketReceiver,
if (RtpHeaderParser::IsRtcp(packet, length)) {
return receiver_->DeliverPacket(media_type, packet, length, packet_time);
}
RtpUtility::RtpHeaderParser parser(packet, length);
RTPHeader header;
parser.Parse(&header);
@ -274,8 +273,8 @@ class VideoAnalyzer : public PacketReceiver,
void PostEncodeFrameCallback(const EncodedFrame& encoded_frame) {
rtc::CritScope lock(&crit_);
if (!first_sent_timestamp_ &&
encoded_frame.encoded_width_ == selected_width_ &&
encoded_frame.encoded_height_ == selected_height_) {
encoded_frame.encoded_width_ == selected_stream_width_ &&
encoded_frame.encoded_height_ == selected_stream_height_) {
first_sent_timestamp_ = rtc::Optional<uint32_t>(encoded_frame.timestamp_);
}
}
@ -289,6 +288,7 @@ class VideoAnalyzer : public PacketReceiver,
int64_t current_time =
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
bool result = transport_->SendRtp(packet, length, options);
{
rtc::CritScope lock(&crit_);
@ -857,8 +857,8 @@ class VideoAnalyzer : public PacketReceiver,
const std::string graph_title_;
const uint32_t ssrc_to_analyze_;
const uint32_t rtx_ssrc_to_analyze_;
const uint32_t selected_width_;
const uint32_t selected_height_;
const uint32_t selected_stream_width_;
const uint32_t selected_stream_height_;
PreEncodeProxy pre_encode_proxy_;
OnEncodeTimingProxy encode_timing_proxy_;
std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
@ -1206,25 +1206,67 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
}
}
void VideoQualityTest::SetupScreenshare() {
RTC_CHECK(params_.screenshare.enabled);
void VideoQualityTest::SetupScreenshareOrSVC() {
if (params_.screenshare.enabled) {
// Fill out codec settings.
video_encoder_config_.content_type =
VideoEncoderConfig::ContentType::kScreen;
degradation_preference_ =
VideoSendStream::DegradationPreference::kMaintainResolution;
if (params_.video.codec == "VP8") {
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.denoisingOn = false;
vp8_settings.frameDroppingOn = false;
vp8_settings.numberOfTemporalLayers =
static_cast<unsigned char>(params_.video.num_temporal_layers);
video_encoder_config_.encoder_specific_settings =
new rtc::RefCountedObject<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
} else if (params_.video.codec == "VP9") {
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
vp9_settings.denoisingOn = false;
vp9_settings.frameDroppingOn = false;
vp9_settings.numberOfTemporalLayers =
static_cast<unsigned char>(params_.video.num_temporal_layers);
vp9_settings.numberOfSpatialLayers =
static_cast<unsigned char>(params_.ss.num_spatial_layers);
video_encoder_config_.encoder_specific_settings =
new rtc::RefCountedObject<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
}
// Setup frame generator.
const size_t kWidth = 1850;
const size_t kHeight = 1110;
std::vector<std::string> slides;
slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
// Fill out codec settings.
video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
degradation_preference_ =
VideoSendStream::DegradationPreference::kMaintainResolution;
if (params_.video.codec == "VP8") {
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.denoisingOn = false;
vp8_settings.frameDroppingOn = false;
vp8_settings.numberOfTemporalLayers =
static_cast<unsigned char>(params_.video.num_temporal_layers);
video_encoder_config_.encoder_specific_settings = new rtc::RefCountedObject<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
} else if (params_.video.codec == "VP9") {
if (params_.screenshare.scroll_duration == 0) {
// Cycle image every slide_change_interval seconds.
frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
slides, kWidth, kHeight,
params_.screenshare.slide_change_interval * params_.video.fps));
} else {
RTC_CHECK_LE(params_.video.width, kWidth);
RTC_CHECK_LE(params_.video.height, kHeight);
RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0);
const int kPauseDurationMs = (params_.screenshare.slide_change_interval -
params_.screenshare.scroll_duration) *
1000;
RTC_CHECK_LE(params_.screenshare.scroll_duration,
params_.screenshare.slide_change_interval);
frame_generator_.reset(
test::FrameGenerator::CreateScrollingInputFromYuvFiles(
clock_, slides, kWidth, kHeight, params_.video.width,
params_.video.height, params_.screenshare.scroll_duration * 1000,
kPauseDurationMs));
}
} else if (params_.ss.num_spatial_layers > 1) { // For non-screenshare case.
RTC_CHECK(params_.video.codec == "VP9");
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
vp9_settings.denoisingOn = false;
vp9_settings.frameDroppingOn = false;
vp9_settings.numberOfTemporalLayers =
static_cast<unsigned char>(params_.video.num_temporal_layers);
vp9_settings.numberOfSpatialLayers =
@ -1232,37 +1274,6 @@ void VideoQualityTest::SetupScreenshare() {
video_encoder_config_.encoder_specific_settings = new rtc::RefCountedObject<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
}
// Setup frame generator.
const size_t kWidth = 1850;
const size_t kHeight = 1110;
std::vector<std::string> slides;
slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
if (params_.screenshare.scroll_duration == 0) {
// Cycle image every slide_change_interval seconds.
frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
slides, kWidth, kHeight,
params_.screenshare.slide_change_interval * params_.video.fps));
} else {
RTC_CHECK_LE(params_.video.width, kWidth);
RTC_CHECK_LE(params_.video.height, kHeight);
RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0);
const int kPauseDurationMs = (params_.screenshare.slide_change_interval -
params_.screenshare.scroll_duration) *
1000;
RTC_CHECK_LE(params_.screenshare.scroll_duration,
params_.screenshare.slide_change_interval);
frame_generator_.reset(
test::FrameGenerator::CreateScrollingInputFromYuvFiles(
clock_, slides, kWidth, kHeight, params_.video.width,
params_.video.height, params_.screenshare.scroll_duration * 1000,
kPauseDurationMs));
}
}
void VideoQualityTest::CreateCapturer() {
@ -1332,27 +1343,13 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
// 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary
// abort.
VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream];
int selected_sl = params_.ss.selected_sl != -1
? params_.ss.selected_sl
: params_.ss.num_spatial_layers - 1;
bool disable_quality_check =
selected_stream.width != params_.video.width ||
selected_stream.height != params_.video.height ||
(!params_.ss.spatial_layers.empty() &&
params_.ss.spatial_layers[selected_sl].scaling_factor_num !=
params_.ss.spatial_layers[selected_sl].scaling_factor_den);
if (disable_quality_check) {
fprintf(stderr,
"Warning: Calculating PSNR and SSIM for downsized resolution "
"not implemented yet! Skipping PSNR and SSIM calculations!\n");
}
bool is_quick_test_enabled =
field_trial::FindFullName("WebRTC-QuickPerfTest") == "Enabled";
VideoAnalyzer analyzer(
&send_transport, params_.analyzer.test_label,
disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold,
disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold,
params_.analyzer.avg_psnr_threshold, params_.analyzer.avg_ssim_threshold,
is_quick_test_enabled
? kFramesSentInQuickTest
: params_.analyzer.test_durations_secs * params_.video.fps,
@ -1373,8 +1370,7 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
RTC_DCHECK(!video_send_config_.post_encode_callback);
video_send_config_.post_encode_callback = analyzer.encode_timing_proxy();
if (params_.screenshare.enabled)
SetupScreenshare();
SetupScreenshareOrSVC();
CreateFlexfecStreams();
CreateVideoStreams();
@ -1513,7 +1509,7 @@ void VideoQualityTest::RunWithRenderers(const Params& params) {
video_receive_configs_[stream_id].sync_group = kSyncGroup;
if (params_.screenshare.enabled)
SetupScreenshare();
SetupScreenshareOrSVC();
video_send_stream_ = call->CreateVideoSendStream(
video_send_config_.Copy(), video_encoder_config_.Copy());

View File

@ -111,7 +111,7 @@ class VideoQualityTest : public test::CallTest {
// Helper methods for setting up the call.
void CreateCapturer();
void SetupVideo(Transport* send_transport, Transport* recv_transport);
void SetupScreenshare();
void SetupScreenshareOrSVC();
void SetupAudio(int send_channel_id,
int receive_channel_id,
Call* call,