Make VideoProcessor::Init/Release methods private and call from constructor/destructor.

TestConfig: Replace Print method with ToString and add test.

Bug: none
Change-Id: I9853cb16875199a51c5731d1cec326159751d001
Reviewed-on: https://webrtc-review.googlesource.com/14320
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20420}

Parent: c22a3a6a7d
Commit: f0c44672df
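Before the diff, a brief note on the two patterns this change moves to. The sketch below is illustrative only (the class and members are made up, not the real WebRTC types): resources are acquired in the constructor and released in the destructor instead of through separate Init()/Release() calls, and configuration is rendered by a ToString() that returns a string instead of a Print() that writes straight to stdout.

#include <cstdio>
#include <sstream>
#include <string>

// Hypothetical stand-in for a codec wrapper; not the real VideoProcessor.
class Processor {
 public:
  Processor() { Init(); }      // Resources are acquired on construction ...
  ~Processor() { Release(); }  // ... and released on destruction.

  // Returning a string lets the caller decide where the text goes
  // (stdout, logs, test expectations), unlike a Print() that always
  // writes to stdout.
  std::string ToString() const {
    std::stringstream ss;
    ss << "Processor config:";
    ss << "\n initialized : " << initialized_;
    return ss.str();
  }

 private:
  void Init() { initialized_ = true; }
  void Release() { initialized_ = false; }
  bool initialized_ = false;
};

int main() {
  Processor p;                           // Init() runs here.
  printf("%s\n", p.ToString().c_str());  // Caller chooses the sink.
  return 0;
}                                        // Release() runs when p goes out of scope.

With this shape a test fixture can simply construct the object (or reset a std::unique_ptr holding it) and setup/teardown happens exactly once, which is what the VideoProcessor and test changes below rely on.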
File: test_config.cc

@@ -10,7 +10,7 @@
 
 #include "modules/video_coding/codecs/test/test_config.h"
 
-#include <string.h>
+#include <sstream>
 
 #include "modules/video_coding/include/video_codec_interface.h"
 #include "rtc_base/checks.h"
@@ -22,6 +22,46 @@ namespace test {
 
 namespace {
 const int kBaseKeyFrameInterval = 3000;
 
+std::string CodecSpecificToString(const webrtc::VideoCodec& codec) {
+  std::stringstream ss;
+  switch (codec.codecType) {
+    case kVideoCodecVP8:
+      ss << "\n Complexity : " << codec.VP8().complexity;
+      ss << "\n Resilience : " << codec.VP8().resilience;
+      ss << "\n # temporal layers : "
+         << static_cast<int>(codec.VP8().numberOfTemporalLayers);
+      ss << "\n Denoising : " << codec.VP8().denoisingOn;
+      ss << "\n Error concealment : " << codec.VP8().errorConcealmentOn;
+      ss << "\n Automatic resize : " << codec.VP8().automaticResizeOn;
+      ss << "\n Frame dropping : " << codec.VP8().frameDroppingOn;
+      ss << "\n Key frame interval: " << codec.VP8().keyFrameInterval;
+      break;
+    case kVideoCodecVP9:
+      ss << "\n Complexity : " << codec.VP9().complexity;
+      ss << "\n Resilience : " << codec.VP9().resilienceOn;
+      ss << "\n # temporal layers : "
+         << static_cast<int>(codec.VP9().numberOfTemporalLayers);
+      ss << "\n Denoising : " << codec.VP9().denoisingOn;
+      ss << "\n Frame dropping : " << codec.VP9().frameDroppingOn;
+      ss << "\n Key frame interval: " << codec.VP9().keyFrameInterval;
+      ss << "\n Adaptive QP mode : " << codec.VP9().adaptiveQpMode;
+      ss << "\n Automatic resize : " << codec.VP9().automaticResizeOn;
+      ss << "\n # spatial layers : "
+         << static_cast<int>(codec.VP9().numberOfSpatialLayers);
+      ss << "\n Flexible mode : " << codec.VP9().flexibleMode;
+      break;
+    case kVideoCodecH264:
+      ss << "\n Frame dropping : " << codec.H264().frameDroppingOn;
+      ss << "\n Key frame interval: " << codec.H264().keyFrameInterval;
+      ss << "\n Profile : " << codec.H264().profile;
+      break;
+    default:
+      break;
+  }
+  ss << "\n";
+  return ss.str();
+}
+
 } // namespace
 
 void TestConfig::SetCodecSettings(VideoCodecType codec_type,
@@ -83,57 +123,53 @@ int TestConfig::NumberOfTemporalLayers() const {
   }
 }
 
-void TestConfig::Print() const {
-  printf("Video config:\n");
-  printf(" Filename : %s\n", filename.c_str());
-  printf(" # CPU cores used : %u\n", NumberOfCores());
-  PrintCodecSettings();
-  printf("\n");
+int TestConfig::TemporalLayerForFrame(int frame_idx) const {
+  int tl = -1;
+  switch (NumberOfTemporalLayers()) {
+    case 1:
+      tl = 0;
+      break;
+    case 2:
+      // temporal layer 1:    1     3
+      // temporal layer 0: 0     2     4 ...
+      tl = (frame_idx % 2 == 0) ? 0 : 1;
+      break;
+    case 3:
+      // temporal layer 2:    1     3     5     7
+      // temporal layer 1:       2           6
+      // temporal layer 0: 0           4           8 ...
+      if (frame_idx % 4 == 0) {
+        tl = 0;
+      } else if ((frame_idx + 2) % 4 == 0) {
+        tl = 1;
+      } else if ((frame_idx + 1) % 2 == 0) {
+        tl = 2;
+      }
+      break;
+    default:
+      RTC_NOTREACHED();
+      break;
+  }
+  return tl;
 }
 
-void TestConfig::PrintCodecSettings() const {
-  printf(" Codec settings:\n");
-  printf(" Codec type : %s\n",
-         CodecTypeToPayloadString(codec_settings.codecType));
-  printf(" Start bitrate : %d kbps\n", codec_settings.startBitrate);
-  printf(" Max bitrate : %d kbps\n", codec_settings.maxBitrate);
-  printf(" Min bitrate : %d kbps\n", codec_settings.minBitrate);
-  printf(" Width : %d\n", codec_settings.width);
-  printf(" Height : %d\n", codec_settings.height);
-  printf(" Max frame rate : %d\n", codec_settings.maxFramerate);
-  printf(" QPmax : %d\n", codec_settings.qpMax);
-  if (codec_settings.codecType == kVideoCodecVP8) {
-    printf(" Complexity : %d\n", codec_settings.VP8().complexity);
-    printf(" Resilience : %d\n", codec_settings.VP8().resilience);
-    printf(" # temporal layers : %d\n",
-           codec_settings.VP8().numberOfTemporalLayers);
-    printf(" Denoising : %d\n", codec_settings.VP8().denoisingOn);
-    printf(" Error concealment : %d\n",
-           codec_settings.VP8().errorConcealmentOn);
-    printf(" Automatic resize : %d\n",
-           codec_settings.VP8().automaticResizeOn);
-    printf(" Frame dropping : %d\n", codec_settings.VP8().frameDroppingOn);
-    printf(" Key frame interval: %d\n", codec_settings.VP8().keyFrameInterval);
-  } else if (codec_settings.codecType == kVideoCodecVP9) {
-    printf(" Complexity : %d\n", codec_settings.VP9().complexity);
-    printf(" Resilience : %d\n", codec_settings.VP9().resilienceOn);
-    printf(" # temporal layers : %d\n",
-           codec_settings.VP9().numberOfTemporalLayers);
-    printf(" Denoising : %d\n", codec_settings.VP9().denoisingOn);
-    printf(" Frame dropping : %d\n", codec_settings.VP9().frameDroppingOn);
-    printf(" Key frame interval: %d\n", codec_settings.VP9().keyFrameInterval);
-    printf(" Adaptive QP mode : %d\n", codec_settings.VP9().adaptiveQpMode);
-    printf(" Automatic resize : %d\n",
-           codec_settings.VP9().automaticResizeOn);
-    printf(" # spatial layers : %d\n",
-           codec_settings.VP9().numberOfSpatialLayers);
-    printf(" Flexible mode : %d\n", codec_settings.VP9().flexibleMode);
-  } else if (codec_settings.codecType == kVideoCodecH264) {
-    printf(" Frame dropping : %d\n", codec_settings.H264().frameDroppingOn);
-    printf(" Key frame interval: %d\n",
-           codec_settings.H264().keyFrameInterval);
-    printf(" Profile : %d\n", codec_settings.H264().profile);
-  }
+std::string TestConfig::ToString() const {
+  std::stringstream ss;
+  ss << "Video config:";
+  ss << "\n Filename : " << filename;
+  ss << "\n # CPU cores used : " << NumberOfCores();
+  ss << "\n Codec settings:";
+  ss << "\n Codec type : "
+     << CodecTypeToPayloadString(codec_settings.codecType);
+  ss << "\n Start bitrate : " << codec_settings.startBitrate << " kbps";
+  ss << "\n Max bitrate : " << codec_settings.maxBitrate << " kbps";
+  ss << "\n Min bitrate : " << codec_settings.minBitrate << " kbps";
+  ss << "\n Width : " << codec_settings.width;
+  ss << "\n Height : " << codec_settings.height;
+  ss << "\n Max frame rate : " << codec_settings.maxFramerate;
+  ss << "\n QPmax : " << codec_settings.qpMax;
+  ss << CodecSpecificToString(codec_settings);
+  return ss.str();
 }
 
 } // namespace test
File: test_config.h

@@ -52,8 +52,8 @@ struct TestConfig {
 
   int NumberOfCores() const;
   int NumberOfTemporalLayers() const;
-  void Print() const;
-  void PrintCodecSettings() const;
+  int TemporalLayerForFrame(int frame_idx) const;
+  std::string ToString() const;
 
   // Plain name of YUV file to process without file extension.
   std::string filename;
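As a quick illustration of how this new TestConfig surface might be used (a sketch only, not code from the change): the CodecSettings() helper mirrors the unit tests below, its include path is assumed, and the file name is made up.

#include <cstdio>

#include "modules/video_coding/codecs/test/test_config.h"
#include "test/video_codec_settings.h"  // Assumed path; provides webrtc::test::CodecSettings().

// Sketch: configure VP8 with two temporal layers, dump the config as text,
// and map the first few frame indices to their temporal layer.
void DumpExampleConfig() {
  webrtc::test::TestConfig config;
  config.filename = "example_input";  // Hypothetical YUV file name (no extension).
  webrtc::test::CodecSettings(webrtc::kVideoCodecVP8, &config.codec_settings);
  config.codec_settings.VP8()->numberOfTemporalLayers = 2;

  printf("%s\n", config.ToString().c_str());
  for (int frame_idx = 0; frame_idx < 4; ++frame_idx) {
    // With two temporal layers, even frames map to TL0 and odd frames to TL1.
    printf("frame %d -> TL%d\n", frame_idx,
           config.TemporalLayerForFrame(frame_idx));
  }
}

The integration test changes further down use exactly this pair of calls: MaybePrintSettings() prints config_.ToString(), and config_.TemporalLayerForFrame() replaces the test's own TemporalLayerIndexForFrame() helper.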
File: test_config_unittest.cc

@@ -51,5 +51,81 @@ TEST(TestConfig, NumberOfTemporalLayers_Vp9) {
   EXPECT_EQ(kNumTemporalLayers, config.NumberOfTemporalLayers());
 }
 
+TEST(TestConfig, TemporalLayersForFrame_OneLayer) {
+  TestConfig config;
+  webrtc::test::CodecSettings(kVideoCodecVP8, &config.codec_settings);
+  config.codec_settings.VP8()->numberOfTemporalLayers = 1;
+  EXPECT_EQ(0, config.TemporalLayerForFrame(0));
+  EXPECT_EQ(0, config.TemporalLayerForFrame(1));
+  EXPECT_EQ(0, config.TemporalLayerForFrame(2));
+}
+
+TEST(TestConfig, TemporalLayersForFrame_TwoLayers) {
+  TestConfig config;
+  webrtc::test::CodecSettings(kVideoCodecVP8, &config.codec_settings);
+  config.codec_settings.VP8()->numberOfTemporalLayers = 2;
+  EXPECT_EQ(0, config.TemporalLayerForFrame(0));
+  EXPECT_EQ(1, config.TemporalLayerForFrame(1));
+  EXPECT_EQ(0, config.TemporalLayerForFrame(2));
+  EXPECT_EQ(1, config.TemporalLayerForFrame(3));
+}
+
+TEST(TestConfig, TemporalLayersForFrame_ThreeLayers) {
+  TestConfig config;
+  webrtc::test::CodecSettings(kVideoCodecVP8, &config.codec_settings);
+  config.codec_settings.VP8()->numberOfTemporalLayers = 3;
+  EXPECT_EQ(0, config.TemporalLayerForFrame(0));
+  EXPECT_EQ(2, config.TemporalLayerForFrame(1));
+  EXPECT_EQ(1, config.TemporalLayerForFrame(2));
+  EXPECT_EQ(2, config.TemporalLayerForFrame(3));
+  EXPECT_EQ(0, config.TemporalLayerForFrame(4));
+  EXPECT_EQ(2, config.TemporalLayerForFrame(5));
+  EXPECT_EQ(1, config.TemporalLayerForFrame(6));
+  EXPECT_EQ(2, config.TemporalLayerForFrame(7));
+}
+
+TEST(TestConfig, ToString_Vp8) {
+  TestConfig config;
+  config.filename = "yuvfile";
+  config.use_single_core = true;
+
+  config.SetCodecSettings(kVideoCodecVP8, 2, true,  // error_concealment_on,
+                          false,                    // denoising_on,
+                          false,                    // frame_dropper_on,
+                          true,                     // spatial_resize_on,
+                          false,                    // resilience_on,
+                          320, 180);
+  config.codec_settings.startBitrate = 400;
+  config.codec_settings.maxBitrate = 500;
+  config.codec_settings.minBitrate = 70;
+  config.codec_settings.maxFramerate = 35;
+  config.codec_settings.qpMax = 66;
+  config.codec_settings.VP8()->complexity = kComplexityNormal;
+  config.codec_settings.VP8()->keyFrameInterval = 999;
+
+  EXPECT_EQ(
+      "Video config:"
+      "\n Filename : yuvfile"
+      "\n # CPU cores used : 1"
+      "\n Codec settings:"
+      "\n Codec type : VP8"
+      "\n Start bitrate : 400 kbps"
+      "\n Max bitrate : 500 kbps"
+      "\n Min bitrate : 70 kbps"
+      "\n Width : 320"
+      "\n Height : 180"
+      "\n Max frame rate : 35"
+      "\n QPmax : 66"
+      "\n Complexity : 0"
+      "\n Resilience : 0"
+      "\n # temporal layers : 2"
+      "\n Denoising : 0"
+      "\n Error concealment : 1"
+      "\n Automatic resize : 1"
+      "\n Frame dropping : 0"
+      "\n Key frame interval: 999\n",
+      config.ToString());
+}
+
 } // namespace test
 } // namespace webrtc
File: videoprocessor.cc

@@ -68,6 +68,31 @@ int GetElapsedTimeMicroseconds(int64_t start_ns, int64_t stop_ns) {
   return static_cast<int>(diff_us);
 }
 
+void ExtractBufferWithSize(const VideoFrame& image,
+                           int width,
+                           int height,
+                           rtc::Buffer* buffer) {
+  if (image.width() != width || image.height() != height) {
+    EXPECT_DOUBLE_EQ(static_cast<double>(width) / height,
+                     static_cast<double>(image.width()) / image.height());
+    // Same aspect ratio, no cropping needed.
+    rtc::scoped_refptr<I420Buffer> scaled(I420Buffer::Create(width, height));
+    scaled->ScaleFrom(*image.video_frame_buffer()->ToI420());
+
+    size_t length =
+        CalcBufferSize(VideoType::kI420, scaled->width(), scaled->height());
+    buffer->SetSize(length);
+    RTC_CHECK_NE(ExtractBuffer(scaled, length, buffer->data()), -1);
+    return;
+  }
+
+  // No resize.
+  size_t length =
+      CalcBufferSize(VideoType::kI420, image.width(), image.height());
+  buffer->SetSize(length);
+  RTC_CHECK_NE(ExtractBuffer(image, length, buffer->data()), -1);
+}
+
 } // namespace
 
 VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
@@ -79,8 +104,7 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
                                Stats* stats,
                                IvfFileWriter* encoded_frame_writer,
                                FrameWriter* decoded_frame_writer)
-    : initialized_(false),
-      config_(config),
+    : config_(config),
       encoder_(encoder),
       decoder_(decoder),
       bitrate_allocator_(CreateBitrateAllocator(&config_)),
@@ -104,37 +128,24 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
   RTC_DCHECK(analysis_frame_reader);
   RTC_DCHECK(analysis_frame_writer);
   RTC_DCHECK(stats);
-}
-
-VideoProcessor::~VideoProcessor() = default;
-
-void VideoProcessor::Init() {
-  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
-  RTC_DCHECK(!initialized_) << "VideoProcessor already initialized.";
-  initialized_ = true;
 
   // Setup required callbacks for the encoder and decoder.
   RTC_CHECK_EQ(encoder_->RegisterEncodeCompleteCallback(&encode_callback_),
-               WEBRTC_VIDEO_CODEC_OK)
-      << "Failed to register encode complete callback";
+               WEBRTC_VIDEO_CODEC_OK);
   RTC_CHECK_EQ(decoder_->RegisterDecodeCompleteCallback(&decode_callback_),
-               WEBRTC_VIDEO_CODEC_OK)
-      << "Failed to register decode complete callback";
+               WEBRTC_VIDEO_CODEC_OK);
 
   // Initialize the encoder and decoder.
   RTC_CHECK_EQ(
      encoder_->InitEncode(&config_.codec_settings, config_.NumberOfCores(),
                           config_.networking_config.max_payload_size_in_bytes),
-      WEBRTC_VIDEO_CODEC_OK)
-      << "Failed to initialize VideoEncoder";
+      WEBRTC_VIDEO_CODEC_OK);
 
   RTC_CHECK_EQ(
       decoder_->InitDecode(&config_.codec_settings, config_.NumberOfCores()),
-      WEBRTC_VIDEO_CODEC_OK)
-      << "Failed to initialize VideoDecoder";
+      WEBRTC_VIDEO_CODEC_OK);
 }
 
-void VideoProcessor::Release() {
+VideoProcessor::~VideoProcessor() {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
 
   RTC_CHECK_EQ(encoder_->Release(), WEBRTC_VIDEO_CODEC_OK);
@@ -142,13 +153,10 @@ void VideoProcessor::Release() {
 
   encoder_->RegisterEncodeCompleteCallback(nullptr);
   decoder_->RegisterDecodeCompleteCallback(nullptr);
-
-  initialized_ = false;
 }
 
 void VideoProcessor::ProcessFrame() {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
-  RTC_DCHECK(initialized_) << "VideoProcessor not initialized.";
   ++last_inputed_frame_num_;
 
   // Get frame from file.
@@ -233,16 +241,7 @@ void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec,
     // For dropped frames, we write out the last decoded frame to avoid
     // getting out of sync for the computation of PSNR and SSIM.
     for (int i = 0; i < num_dropped_from_last_encode; i++) {
-      RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
-                    analysis_frame_writer_->FrameLength());
-      RTC_CHECK(analysis_frame_writer_->WriteFrame(
-          last_decoded_frame_buffer_.data()));
-      if (decoded_frame_writer_) {
-        RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
-                      decoded_frame_writer_->FrameLength());
-        RTC_CHECK(decoded_frame_writer_->WriteFrame(
-            last_decoded_frame_buffer_.data()));
-      }
+      WriteDecodedFrameToFile(&last_decoded_frame_buffer_);
     }
   }
   const FrameStatistic* last_encoded_frame_stat =
@@ -266,40 +265,19 @@ void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec,
   frame_stat->total_packets =
       encoded_image._length / config_.networking_config.packet_size_in_bytes +
      1;
 
   frame_stat->max_nalu_length = GetMaxNaluLength(encoded_image, config_);
 
-  // Simulate packet loss.
-  bool exclude_this_frame = false;
-  if (encoded_image._frameType == kVideoFrameKey) {
-    // Only keyframes can be excluded.
-    switch (config_.exclude_frame_types) {
-      case kExcludeOnlyFirstKeyFrame:
-        if (!first_key_frame_has_been_excluded_) {
-          first_key_frame_has_been_excluded_ = true;
-          exclude_this_frame = true;
-        }
-        break;
-      case kExcludeAllKeyFrames:
-        exclude_this_frame = true;
-        break;
-      default:
-        RTC_NOTREACHED();
-    }
-  }
-
-  // Make a raw copy of the |encoded_image| buffer.
+  // Make a raw copy of |encoded_image| to feed to the decoder.
   size_t copied_buffer_size = encoded_image._length +
                               EncodedImage::GetBufferPaddingBytes(codec);
   std::unique_ptr<uint8_t[]> copied_buffer(new uint8_t[copied_buffer_size]);
   memcpy(copied_buffer.get(), encoded_image._buffer, encoded_image._length);
-  // The image to feed to the decoder.
-  EncodedImage copied_image;
-  memcpy(&copied_image, &encoded_image, sizeof(copied_image));
+  EncodedImage copied_image = encoded_image;
   copied_image._size = copied_buffer_size;
   copied_image._buffer = copied_buffer.get();
 
-  if (!exclude_this_frame) {
+  // Simulate packet loss.
+  if (!ExcludeFrame(copied_image)) {
     frame_stat->packets_dropped =
         packet_manipulator_->ManipulatePackets(&copied_image);
   }
@@ -314,16 +292,7 @@ void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec,
   if (frame_stat->decode_return_code != WEBRTC_VIDEO_CODEC_OK) {
     // Write the last successful frame the output file to avoid getting it out
     // of sync with the source file for SSIM and PSNR comparisons.
-    RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
-                  analysis_frame_writer_->FrameLength());
-    RTC_CHECK(
-        analysis_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data()));
-    if (decoded_frame_writer_) {
-      RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
-                    decoded_frame_writer_->FrameLength());
-      RTC_CHECK(
-          decoded_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data()));
-    }
+    WriteDecodedFrameToFile(&last_decoded_frame_buffer_);
   }
 
   if (encoded_frame_writer_) {
@@ -364,39 +333,45 @@ void VideoProcessor::FrameDecoded(const VideoFrame& image) {
   RTC_CHECK_GT(frame_number, last_decoded_frame_num_);
   last_decoded_frame_num_ = frame_number;
 
-  // Check if frame size is different from the original size, and if so, scale
-  // back to original size. This is needed for the PSNR and SSIM calculations.
-  size_t extracted_length;
-  rtc::Buffer extracted_buffer;
-  if (image.width() != config_.codec_settings.width ||
-      image.height() != config_.codec_settings.height) {
-    rtc::scoped_refptr<I420Buffer> scaled_buffer(I420Buffer::Create(
-        config_.codec_settings.width, config_.codec_settings.height));
-    // Should be the same aspect ratio, no cropping needed.
-    scaled_buffer->ScaleFrom(*image.video_frame_buffer()->ToI420());
-
-    size_t length = CalcBufferSize(VideoType::kI420, scaled_buffer->width(),
-                                   scaled_buffer->height());
-    extracted_buffer.SetSize(length);
-    extracted_length =
-        ExtractBuffer(scaled_buffer, length, extracted_buffer.data());
-  } else {
-    // No resize.
-    size_t length =
-        CalcBufferSize(VideoType::kI420, image.width(), image.height());
-    extracted_buffer.SetSize(length);
-    extracted_length = ExtractBuffer(image.video_frame_buffer()->ToI420(),
-                                     length, extracted_buffer.data());
-  }
-
-  RTC_DCHECK_EQ(extracted_length, analysis_frame_writer_->FrameLength());
-  RTC_CHECK(analysis_frame_writer_->WriteFrame(extracted_buffer.data()));
+  // If the frame size is different from the original size, scale back to the
+  // original size. This is needed for the PSNR and SSIM calculations.
+  rtc::Buffer buffer;
+  ExtractBufferWithSize(image, config_.codec_settings.width,
+                        config_.codec_settings.height, &buffer);
+  WriteDecodedFrameToFile(&buffer);
+
+  last_decoded_frame_buffer_ = std::move(buffer);
+}
+
+void VideoProcessor::WriteDecodedFrameToFile(rtc::Buffer* buffer) {
+  RTC_DCHECK_EQ(buffer->size(), analysis_frame_writer_->FrameLength());
+  RTC_CHECK(analysis_frame_writer_->WriteFrame(buffer->data()));
   if (decoded_frame_writer_) {
-    RTC_DCHECK_EQ(extracted_length, decoded_frame_writer_->FrameLength());
-    RTC_CHECK(decoded_frame_writer_->WriteFrame(extracted_buffer.data()));
+    RTC_DCHECK_EQ(buffer->size(), decoded_frame_writer_->FrameLength());
+    RTC_CHECK(decoded_frame_writer_->WriteFrame(buffer->data()));
   }
+}
 
-  last_decoded_frame_buffer_ = std::move(extracted_buffer);
+bool VideoProcessor::ExcludeFrame(const EncodedImage& encoded_image) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
+  if (encoded_image._frameType != kVideoFrameKey) {
+    return false;
+  }
+  bool exclude_frame = false;
+  switch (config_.exclude_frame_types) {
+    case kExcludeOnlyFirstKeyFrame:
+      if (!first_key_frame_has_been_excluded_) {
+        first_key_frame_has_been_excluded_ = true;
+        exclude_frame = true;
+      }
+      break;
+    case kExcludeAllKeyFrames:
+      exclude_frame = true;
+      break;
+    default:
+      RTC_NOTREACHED();
+  }
+  return exclude_frame;
 }
 
 } // namespace test
File: videoprocessor.h

@@ -70,12 +70,6 @@ class VideoProcessor {
                  FrameWriter* decoded_frame_writer);
   ~VideoProcessor();
 
-  // Sets up callbacks and initializes the encoder and decoder.
-  void Init();
-
-  // Tears down callbacks and releases the encoder and decoder.
-  void Release();
-
   // Reads a frame from the analysis frame reader and sends it to the encoder.
   // When the encode callback is received, the encoded frame is sent to the
   // decoder. The decoded frame is written to disk by the analysis frame writer.
@@ -189,7 +183,9 @@ class VideoProcessor {
   // Invoked by the callback adapter when a frame has completed decoding.
   void FrameDecoded(const webrtc::VideoFrame& image);
 
-  bool initialized_ RTC_GUARDED_BY(sequence_checker_);
+  void WriteDecodedFrameToFile(rtc::Buffer* buffer);
+  bool ExcludeFrame(const EncodedImage& encoded_image);
+
   TestConfig config_ RTC_GUARDED_BY(sequence_checker_);
 
   webrtc::VideoEncoder* const encoder_;
File: videoprocessor_integrationtest.cc

@@ -212,9 +212,7 @@ void VideoProcessorIntegrationTest::ProcessFramesAndMaybeVerify(
     SleepMs(1 * rtc::kNumMillisecsPerSec);
   }
   cpu_process_time_->Stop();
-  ReleaseAndCloseObjects(&task_queue);
 
-  // Calculate and print rate control statistics.
   std::vector<int> num_dropped_frames;
   std::vector<int> num_spatial_resizes;
   sync_event.Reset();
@@ -226,6 +224,9 @@ void VideoProcessorIntegrationTest::ProcessFramesAndMaybeVerify(
   });
   sync_event.Wait(rtc::Event::kForever);
 
+  ReleaseAndCloseObjects(&task_queue);
+
+  // Calculate and print rate control statistics.
   rate_update_index = 0;
   frame_number = 0;
   ResetRateControlMetrics(rate_update_index, rate_profiles);
@@ -416,7 +417,6 @@ void VideoProcessorIntegrationTest::SetUpAndInitObjects(
         encoder_.get(), decoder_.get(), analysis_frame_reader_.get(),
         analysis_frame_writer_.get(), packet_manipulator_.get(), config_,
        &stats_, encoded_frame_writer_.get(), decoded_frame_writer_.get());
-    processor_->Init();
     sync_event.Set();
   });
   sync_event.Wait(rtc::Event::kForever);
@@ -426,12 +426,12 @@ void VideoProcessorIntegrationTest::ReleaseAndCloseObjects(
     rtc::TaskQueue* task_queue) {
   rtc::Event sync_event(false, false);
   task_queue->PostTask([this, &sync_event]() {
-    processor_->Release();
+    processor_.reset();
    sync_event.Set();
  });
   sync_event.Wait(rtc::Event::kForever);
 
-  // The VideoProcessor must be ::Release()'d before we destroy the codecs.
+  // The VideoProcessor must be destroyed before the codecs.
   DestroyEncoderAndDecoder();
 
   // Close the analysis files before we use them for SSIM/PSNR calculations.
@@ -451,7 +451,7 @@ void VideoProcessorIntegrationTest::ReleaseAndCloseObjects(
 void VideoProcessorIntegrationTest::UpdateRateControlMetrics(int frame_number) {
   RTC_CHECK_GE(frame_number, 0);
 
-  const int tl_idx = TemporalLayerIndexForFrame(frame_number);
+  const int tl_idx = config_.TemporalLayerForFrame(frame_number);
   ++actual_.num_frames_layer[tl_idx];
   ++actual_.num_frames;
 
@@ -563,7 +563,7 @@ void VideoProcessorIntegrationTest::MaybePrintSettings() const {
   if (!config_.verbose)
     return;
 
-  config_.Print();
+  printf("%s\n", config_.ToString().c_str());
   printf(" Total # of frames: %d\n", analysis_frame_reader_->NumberOfFrames());
   const char* encoder_name = encoder_->ImplementationName();
   const char* decoder_name = decoder_->ImplementationName();
@@ -585,38 +585,6 @@ void VideoProcessorIntegrationTest::VerifyBitstream(
   EXPECT_LE(*(frame_stat->max_nalu_length), bs_thresholds.max_nalu_length);
 }
 
-// Temporal layer index corresponding to frame number, for up to 3 layers.
-int VideoProcessorIntegrationTest::TemporalLayerIndexForFrame(
-    int frame_number) const {
-  int tl_idx = -1;
-  switch (config_.NumberOfTemporalLayers()) {
-    case 1:
-      tl_idx = 0;
-      break;
-    case 2:
-      // temporal layer 0: 0     2     4 ...
-      // temporal layer 1:    1     3
-      tl_idx = (frame_number % 2 == 0) ? 0 : 1;
-      break;
-    case 3:
-      // temporal layer 0: 0           4           8 ...
-      // temporal layer 1:       2           6
-      // temporal layer 2:    1     3     5     7
-      if (frame_number % 4 == 0) {
-        tl_idx = 0;
-      } else if ((frame_number + 2) % 4 == 0) {
-        tl_idx = 1;
-      } else if ((frame_number + 1) % 2 == 0) {
-        tl_idx = 2;
-      }
-      break;
-    default:
-      RTC_NOTREACHED();
-      break;
-  }
-  return tl_idx;
-}
-
 // Reset quantities before each encoder rate update.
 void VideoProcessorIntegrationTest::ResetRateControlMetrics(
     int rate_update_index,
File: videoprocessor_integrationtest.h

@@ -164,7 +164,6 @@ class VideoProcessorIntegrationTest : public testing::Test {
       const int initial_framerate_fps,
       const VisualizationParams* visualization_params);
   void ReleaseAndCloseObjects(rtc::TaskQueue* task_queue);
-  int TemporalLayerIndexForFrame(int frame_number) const;
 
   // Rate control metrics.
   void ResetRateControlMetrics(int rate_update_index,
File: videoprocessor_unittest.cc

@@ -27,7 +27,6 @@
 #include "typedefs.h" // NOLINT(build/include)
 
 using ::testing::_;
-using ::testing::AtLeast;
 using ::testing::ElementsAre;
 using ::testing::Property;
 using ::testing::Return;
@@ -40,7 +39,6 @@ namespace {
 const int kWidth = 352;
 const int kHeight = 288;
 const int kFrameSize = kWidth * kHeight * 3 / 2; // I420.
-const int kNumFrames = 2;
 
 } // namespace
 
@@ -52,8 +50,7 @@ class VideoProcessorTest : public testing::Test {
     config_.codec_settings.width = kWidth;
     config_.codec_settings.height = kHeight;
 
-    EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
-        .WillRepeatedly(Return(kNumFrames));
+    ExpectInit();
     EXPECT_CALL(frame_reader_mock_, FrameLength())
        .WillRepeatedly(Return(kFrameSize));
     video_processor_ = rtc::MakeUnique<VideoProcessor>(
@@ -88,19 +85,15 @@ class VideoProcessorTest : public testing::Test {
 };
 
 TEST_F(VideoProcessorTest, InitRelease) {
-  ExpectInit();
-  video_processor_->Init();
-
   ExpectRelease();
-  video_processor_->Release();
 }
 
 TEST_F(VideoProcessorTest, ProcessFrames_FixedFramerate) {
-  ExpectInit();
-  video_processor_->Init();
-
   const int kBitrateKbps = 456;
   const int kFramerateFps = 31;
+  EXPECT_CALL(encoder_mock_, SetRateAllocation(_, kFramerateFps))
+      .Times(1)
+      .WillOnce(Return(0));
   video_processor_->SetRates(kBitrateKbps, kFramerateFps);
 
   EXPECT_CALL(frame_reader_mock_, ReadFrame())
@@ -118,15 +111,14 @@ TEST_F(VideoProcessorTest, ProcessFrames_FixedFramerate) {
   video_processor_->ProcessFrame();
 
   ExpectRelease();
-  video_processor_->Release();
 }
 
 TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) {
-  ExpectInit();
-  video_processor_->Init();
-
   const int kBitrateKbps = 456;
   const int kStartFramerateFps = 27;
+  EXPECT_CALL(encoder_mock_, SetRateAllocation(_, kStartFramerateFps))
+      .Times(1)
+      .WillOnce(Return(0));
   video_processor_->SetRates(kBitrateKbps, kStartFramerateFps);
 
   EXPECT_CALL(frame_reader_mock_, ReadFrame())
@@ -138,6 +130,9 @@ TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) {
   video_processor_->ProcessFrame();
 
   const int kNewFramerateFps = 13;
+  EXPECT_CALL(encoder_mock_, SetRateAllocation(_, kNewFramerateFps))
+      .Times(1)
+      .WillOnce(Return(0));
   video_processor_->SetRates(kBitrateKbps, kNewFramerateFps);
 
   EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::timestamp,
@@ -147,13 +142,9 @@ TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) {
   video_processor_->ProcessFrame();
 
   ExpectRelease();
-  video_processor_->Release();
 }
 
 TEST_F(VideoProcessorTest, SetRates) {
-  ExpectInit();
-  video_processor_->Init();
-
   const int kBitrateKbps = 123;
   const int kFramerateFps = 17;
   EXPECT_CALL(encoder_mock_,
@@ -181,7 +172,6 @@ TEST_F(VideoProcessorTest, SetRates) {
               ElementsAre(0, 0));
 
   ExpectRelease();
-  video_processor_->Release();
 }
 
 } // namespace test