Lint fix for webrtc/modules/video_coding PART 3!
Trying to submit all changes at once proved impossible since there were too
many changes in too many files. The changes to PRESUBMIT.py will be uploaded
in the last CL. (Original CL: https://codereview.webrtc.org/1528503003/)

BUG=webrtc:5309
TBR=mflodman@webrtc.org

Review URL: https://codereview.webrtc.org/1540243002
Cr-Commit-Position: refs/heads/master@{#11105}
Parent: f5b1abf5b0
Commit: 5908c71128
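The PRESUBMIT.py hunk below simply adds webrtc/modules/video_coding to the
CPPLINT_DIRS allowlist, so cpplint is now run over that directory. As a rough
standalone sketch of what that coverage amounts to (not code from this CL; the
cpplint.py location and the file extensions checked are assumptions):

    # lint_video_coding.py -- hypothetical helper, for illustration only.
    import os
    import subprocess
    import sys

    LINTED_DIR = 'webrtc/modules/video_coding'  # directory added by this CL

    def lint(root):
        # Collect the C++ sources and headers under the directory.
        targets = []
        for dirpath, _, filenames in os.walk(root):
            targets.extend(os.path.join(dirpath, f)
                           for f in filenames if f.endswith(('.cc', '.h')))
        # cpplint.py is assumed to be available next to this script or on PATH.
        return subprocess.call([sys.executable, 'cpplint.py'] + targets)

    if __name__ == '__main__':
        sys.exit(lint(LINTED_DIR))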
@@ -22,6 +22,7 @@ CPPLINT_DIRS = [
   'webrtc/examples',
   'webrtc/modules/remote_bitrate_estimator',
   'webrtc/modules/rtp_rtcp',
+  'webrtc/modules/video_coding',
   'webrtc/modules/video_processing',
   'webrtc/sound',
   'webrtc/tools',
@@ -11,6 +11,9 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
 #define WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_

+#include <stdio.h>
+#include <string>
+
 #include "webrtc/common_types.h"
 #include "webrtc/modules/include/module_common_types.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
@@ -19,12 +22,9 @@
 #include "webrtc/modules/video_coding/test/video_source.h"
 #include "webrtc/typedefs.h"

-#include <stdio.h>
-#include <string>
-
 class RtpDataCallback : public webrtc::NullRtpData {
  public:
-  RtpDataCallback(webrtc::VideoCodingModule* vcm) : vcm_(vcm) {}
+  explicit RtpDataCallback(webrtc::VideoCodingModule* vcm) : vcm_(vcm) {}
   virtual ~RtpDataCallback() {}

   int32_t OnReceivedPayloadData(
@@ -26,9 +26,9 @@
 #include "webrtc/test/rtp_file_reader.h"

 #if 1
-# define DEBUG_LOG1(text, arg)
+#define DEBUG_LOG1(text, arg)
 #else
-# define DEBUG_LOG1(text, arg) (printf(text "\n", arg))
+#define DEBUG_LOG1(text, arg) (printf(text "\n", arg))
 #endif

 namespace webrtc {
@@ -41,7 +41,9 @@ enum {

 class RawRtpPacket {
  public:
-  RawRtpPacket(const uint8_t* data, size_t length, uint32_t ssrc,
+  RawRtpPacket(const uint8_t* data,
+               size_t length,
+               uint32_t ssrc,
                uint16_t seq_num)
       : data_(new uint8_t[length]),
         length_(length),
@@ -234,14 +236,11 @@ class SsrcHandlers {
          it != payload_types_.end(); ++it) {
       VideoCodec codec;
       memset(&codec, 0, sizeof(codec));
-      strncpy(codec.plName, it->name().c_str(), sizeof(codec.plName)-1);
+      strncpy(codec.plName, it->name().c_str(), sizeof(codec.plName) - 1);
       codec.plType = it->payload_type();
       codec.codecType = it->codec_type();
-      if (handler->rtp_module_->RegisterReceivePayload(codec.plName,
-                                                       codec.plType,
-                                                       90000,
-                                                       0,
-                                                       codec.maxBitrate) < 0) {
+      if (handler->rtp_module_->RegisterReceivePayload(
+              codec.plName, codec.plType, 90000, 0, codec.maxBitrate) < 0) {
         return -1;
       }
     }
@@ -267,7 +266,8 @@ class SsrcHandlers {
  private:
   class Handler : public RtpStreamInterface {
    public:
-    Handler(uint32_t ssrc, const PayloadTypes& payload_types,
+    Handler(uint32_t ssrc,
+            const PayloadTypes& payload_types,
             LostPackets* lost_packets)
         : rtp_header_parser_(RtpHeaderParser::Create()),
           rtp_payload_registry_(new RTPPayloadRegistry(
@@ -290,9 +290,7 @@ class SsrcHandlers {
     }

     virtual uint32_t ssrc() const { return ssrc_; }
-    virtual const PayloadTypes& payload_types() const {
-      return payload_types_;
-    }
+    virtual const PayloadTypes& payload_types() const { return payload_types_; }

     rtc::scoped_ptr<RtpHeaderParser> rtp_header_parser_;
     rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
@@ -351,8 +349,7 @@ class RtpPlayerImpl : public RtpPlayerInterface {
   virtual int NextPacket(int64_t time_now) {
     // Send any packets ready to be resent.
     for (RawRtpPacket* packet = lost_packets_.NextPacketToResend(time_now);
-         packet != NULL;
-         packet = lost_packets_.NextPacketToResend(time_now)) {
+         packet != NULL; packet = lost_packets_.NextPacketToResend(time_now)) {
       int ret = SendPacket(packet->data(), packet->length());
       if (ret > 0) {
         printf("Resend: %08x:%u\n", packet->ssrc(), packet->seq_num());
@@ -392,8 +389,7 @@ class RtpPlayerImpl : public RtpPlayerInterface {
       if (!packet_source_->NextPacket(&next_packet_)) {
         end_of_file_ = true;
         return 0;
-      }
-      else if (next_packet_.length == 0) {
+      } else if (next_packet_.length == 0) {
         return 0;
       }
     }
@@ -438,7 +434,7 @@ class RtpPlayerImpl : public RtpPlayerInterface {

     if (no_loss_startup_ > 0) {
       no_loss_startup_--;
-    } else if ((rand() + 1.0)/(RAND_MAX + 1.0) < loss_rate_) {
+    } else if ((rand() + 1.0) / (RAND_MAX + 1.0) < loss_rate_) {  // NOLINT
       uint16_t seq_num = header.sequenceNumber;
       lost_packets_.AddPacket(new RawRtpPacket(data, length, ssrc, seq_num));
       DEBUG_LOG1("Dropped packet: %d!", header.header.sequenceNumber);
@@ -470,8 +466,11 @@ class RtpPlayerImpl : public RtpPlayerInterface {
 };

 RtpPlayerInterface* Create(const std::string& input_filename,
-    PayloadSinkFactoryInterface* payload_sink_factory, Clock* clock,
-    const PayloadTypes& payload_types, float loss_rate, int64_t rtt_ms,
+                           PayloadSinkFactoryInterface* payload_sink_factory,
+                           Clock* clock,
+                           const PayloadTypes& payload_types,
+                           float loss_rate,
+                           int64_t rtt_ms,
                            bool reordering) {
   rtc::scoped_ptr<test::RtpFileReader> packet_source(
       test::RtpFileReader::Create(test::RtpFileReader::kRtpDump,
@@ -24,12 +24,12 @@ namespace rtpplayer {

 class PayloadCodecTuple {
  public:
-  PayloadCodecTuple(uint8_t payload_type, const std::string& codec_name,
+  PayloadCodecTuple(uint8_t payload_type,
+                    const std::string& codec_name,
                     VideoCodecType codec_type)
       : name_(codec_name),
         payload_type_(payload_type),
-        codec_type_(codec_type) {
-  }
+        codec_type_(codec_type) {}

   const std::string& name() const { return name_; }
   uint8_t payload_type() const { return payload_type_; }
@@ -87,8 +87,11 @@ class RtpPlayerInterface {
 };

 RtpPlayerInterface* Create(const std::string& inputFilename,
-    PayloadSinkFactoryInterface* payloadSinkFactory, Clock* clock,
-    const PayloadTypes& payload_types, float lossRate, int64_t rttMs,
+                           PayloadSinkFactoryInterface* payloadSinkFactory,
+                           Clock* clock,
+                           const PayloadTypes& payload_types,
+                           float lossRate,
+                           int64_t rttMs,
                            bool reordering);

 }  // namespace rtpplayer
@@ -22,8 +22,7 @@
 namespace webrtc {

 StreamGenerator::StreamGenerator(uint16_t start_seq_num, int64_t current_time)
-    : packets_(), sequence_number_(start_seq_num), start_time_(current_time) {
-}
+    : packets_(), sequence_number_(start_seq_num), start_time_(current_time) {}

 void StreamGenerator::Init(uint16_t start_seq_num, int64_t current_time) {
   packets_.clear();
@@ -41,8 +40,8 @@ void StreamGenerator::GenerateFrame(FrameType type,
     const int packet_size =
         (kFrameSize + num_media_packets / 2) / num_media_packets;
     bool marker_bit = (i == num_media_packets - 1);
-    packets_.push_back(GeneratePacket(
-        sequence_number_, timestamp, packet_size, (i == 0), marker_bit, type));
+    packets_.push_back(GeneratePacket(sequence_number_, timestamp, packet_size,
+                                      (i == 0), marker_bit, type));
     ++sequence_number_;
   }
   for (int i = 0; i < num_empty_packets; ++i) {
@@ -104,7 +103,9 @@ bool StreamGenerator::NextPacket(VCMPacket* packet) {
   return true;
 }

-void StreamGenerator::DropLastPacket() { packets_.pop_back(); }
+void StreamGenerator::DropLastPacket() {
+  packets_.pop_back();
+}

 uint16_t StreamGenerator::NextSequenceNumber() const {
   if (packets_.empty())
@@ -112,7 +113,9 @@ uint16_t StreamGenerator::NextSequenceNumber() const {
   return packets_.front().seqNum;
 }

-int StreamGenerator::PacketsRemaining() const { return packets_.size(); }
+int StreamGenerator::PacketsRemaining() const {
+  return packets_.size();
+}

 std::list<VCMPacket>::iterator StreamGenerator::GetPacketIterator(int index) {
   std::list<VCMPacket>::iterator it = packets_.begin();
@@ -28,12 +28,12 @@ CmdArgs::CmdArgs()
       rtt(0),
       inputFile(webrtc::test::ProjectRootPath() + "/resources/foreman_cif.yuv"),
       outputFile(webrtc::test::OutputPath() +
-                 "video_coding_test_output_352x288.yuv") {
-}
+                 "video_coding_test_output_352x288.yuv") {}

 namespace {

-void SplitFilename(const std::string& filename, std::string* basename,
+void SplitFilename(const std::string& filename,
+                   std::string* basename,
                    std::string* extension) {
   assert(basename);
   assert(extension);
@@ -41,7 +41,7 @@ void SplitFilename(const std::string& filename, std::string* basename,
   std::string::size_type idx;
   idx = filename.rfind('.');

-  if(idx != std::string::npos) {
+  if (idx != std::string::npos) {
     *basename = filename.substr(0, idx);
     *extension = filename.substr(idx + 1);
   } else {
@@ -50,21 +50,24 @@ void SplitFilename(const std::string& filename, std::string* basename,
   }
 }

-std::string AppendWidthHeightCount(const std::string& filename, int width,
-                                   int height, int count) {
+std::string AppendWidthHeightCount(const std::string& filename,
+                                   int width,
+                                   int height,
+                                   int count) {
   std::string basename;
   std::string extension;
   SplitFilename(filename, &basename, &extension);
   std::stringstream ss;
-  ss << basename << "_" << count << "." << width << "_" << height << "." <<
-      extension;
+  ss << basename << "_" << count << "." << width << "_" << height << "."
+     << extension;
   return ss.str();
 }

 }  // namespace

 FileOutputFrameReceiver::FileOutputFrameReceiver(
-    const std::string& base_out_filename, uint32_t ssrc)
+    const std::string& base_out_filename,
+    uint32_t ssrc)
     : out_filename_(),
       out_file_(NULL),
       timing_file_(NULL),
@@ -80,8 +83,8 @@ FileOutputFrameReceiver::FileOutputFrameReceiver(
     SplitFilename(base_out_filename, &basename, &extension);
   }
   std::stringstream ss;
-  ss << basename << "_" << std::hex << std::setw(8) << std::setfill('0') <<
-      ssrc << "." << extension;
+  ss << basename << "_" << std::hex << std::setw(8) << std::setfill('0') << ssrc
+     << "." << extension;
   out_filename_ = ss.str();
 }

@@ -113,8 +116,8 @@ int32_t FileOutputFrameReceiver::FrameToRender(
   printf("New size: %dx%d\n", video_frame.width(), video_frame.height());
   width_ = video_frame.width();
   height_ = video_frame.height();
-  std::string filename_with_width_height = AppendWidthHeightCount(
-      out_filename_, width_, height_, count_);
+  std::string filename_with_width_height =
+      AppendWidthHeightCount(out_filename_, width_, height_, count_);
   ++count_;
   out_file_ = fopen(filename_with_width_height.c_str(), "wb");
   if (out_file_ == NULL) {
@@ -130,7 +133,7 @@ int32_t FileOutputFrameReceiver::FrameToRender(
 }

 webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname) {
-  if (strncmp(plname,"VP8" , 3) == 0) {
+  if (strncmp(plname, "VP8", 3) == 0) {
     return webrtc::kRtpVideoVp8;
   } else {
     // Default value.
@@ -33,11 +33,13 @@ class NullEvent : public webrtc::EventWrapper {

   virtual bool Reset() { return true; }

-  virtual webrtc::EventTypeWrapper Wait(unsigned long max_time) {
+  virtual webrtc::EventTypeWrapper Wait(unsigned long max_time) {  // NOLINT
     return webrtc::kEventTimeout;
   }

-  virtual bool StartTimer(bool periodic, unsigned long time) { return true; }
+  virtual bool StartTimer(bool periodic, unsigned long time) {  // NOLINT
+    return true;
+  }

   virtual bool StopTimer() { return true; }
 };
@@ -46,9 +48,7 @@ class NullEventFactory : public webrtc::EventFactory {
  public:
   virtual ~NullEventFactory() {}

-  virtual webrtc::EventWrapper* CreateEvent() {
-    return new NullEvent;
-  }
+  virtual webrtc::EventWrapper* CreateEvent() { return new NullEvent; }
 };

 class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
@@ -57,7 +57,7 @@ class FileOutputFrameReceiver : public webrtc::VCMReceiveCallback {
   virtual ~FileOutputFrameReceiver();

   // VCMReceiveCallback
-  virtual int32_t FrameToRender(webrtc::VideoFrame& video_frame);
+  virtual int32_t FrameToRender(webrtc::VideoFrame& video_frame);  // NOLINT

  private:
   std::string out_filename_;
@@ -8,7 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-
 #include <stdlib.h>
 #include <string.h>

@@ -21,12 +20,15 @@ DEFINE_string(codec, "VP8", "Codec to use (VP8 or I420).");
 DEFINE_int32(width, 352, "Width in pixels of the frames in the input file.");
 DEFINE_int32(height, 288, "Height in pixels of the frames in the input file.");
 DEFINE_int32(rtt, 0, "RTT (round-trip time), in milliseconds.");
-DEFINE_string(input_filename, webrtc::test::ProjectRootPath() +
-              "/resources/foreman_cif.yuv", "Input file.");
-DEFINE_string(output_filename, webrtc::test::OutputPath() +
-              "video_coding_test_output_352x288.yuv", "Output file.");
+DEFINE_string(input_filename,
+              webrtc::test::ProjectRootPath() + "/resources/foreman_cif.yuv",
+              "Input file.");
+DEFINE_string(output_filename,
+              webrtc::test::OutputPath() +
+                  "video_coding_test_output_352x288.yuv",
+              "Output file.");

-using namespace webrtc;
+namespace webrtc {

 /*
  * Build with EVENT_DEBUG defined
@@ -36,36 +38,37 @@ using namespace webrtc;
 int vcmMacrosTests = 0;
 int vcmMacrosErrors = 0;

-int ParseArguments(CmdArgs& args) {
-  args.width = FLAGS_width;
-  args.height = FLAGS_height;
-  if (args.width < 1 || args.height < 1) {
+int ParseArguments(CmdArgs* args) {
+  args->width = FLAGS_width;
+  args->height = FLAGS_height;
+  if (args->width < 1 || args->height < 1) {
     return -1;
   }
-  args.codecName = FLAGS_codec;
-  if (args.codecName == "VP8") {
-    args.codecType = kVideoCodecVP8;
-  } else if (args.codecName == "VP9") {
-    args.codecType = kVideoCodecVP9;
-  } else if (args.codecName == "I420") {
-    args.codecType = kVideoCodecI420;
+  args->codecName = FLAGS_codec;
+  if (args->codecName == "VP8") {
+    args->codecType = kVideoCodecVP8;
+  } else if (args->codecName == "VP9") {
+    args->codecType = kVideoCodecVP9;
+  } else if (args->codecName == "I420") {
+    args->codecType = kVideoCodecI420;
   } else {
-    printf("Invalid codec: %s\n", args.codecName.c_str());
+    printf("Invalid codec: %s\n", args->codecName.c_str());
     return -1;
   }
-  args.inputFile = FLAGS_input_filename;
-  args.outputFile = FLAGS_output_filename;
-  args.rtt = FLAGS_rtt;
+  args->inputFile = FLAGS_input_filename;
+  args->outputFile = FLAGS_output_filename;
+  args->rtt = FLAGS_rtt;
   return 0;
 }
+}  // namespace webrtc

-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
   // Initialize WebRTC fileutils.h so paths to resources can be resolved.
   webrtc::test::SetExecutablePath(argv[0]);
   google::ParseCommandLineFlags(&argc, &argv, true);

   CmdArgs args;
-  if (ParseArguments(args) != 0) {
+  if (webrtc::ParseArguments(&args) != 0) {
     printf("Unable to parse input arguments\n");
     return -1;
   }
@@ -22,8 +22,7 @@
 namespace webrtc {
 namespace rtpplayer {

-class VcmPayloadSinkFactory::VcmPayloadSink
-    : public PayloadSinkInterface,
+class VcmPayloadSinkFactory::VcmPayloadSink : public PayloadSinkInterface,
       public VCMPacketRequestCallback {
  public:
   VcmPayloadSink(VcmPayloadSinkFactory* factory,
@@ -43,9 +42,7 @@ class VcmPayloadSinkFactory::VcmPayloadSink
     vcm_->RegisterReceiveCallback(frame_receiver_.get());
   }

-  virtual ~VcmPayloadSink() {
-    factory_->Remove(this);
-  }
+  virtual ~VcmPayloadSink() { factory_->Remove(this); }

   // PayloadSinkInterface
   int32_t OnReceivedPayloadData(const uint8_t* payload_data,
@@ -136,8 +133,7 @@ PayloadSinkInterface* VcmPayloadSinkFactory::Create(
   }

   const PayloadTypes& plt = stream->payload_types();
-  for (PayloadTypesIterator it = plt.begin(); it != plt.end();
-      ++it) {
+  for (PayloadTypesIterator it = plt.begin(); it != plt.end(); ++it) {
     if (it->codec_type() != kVideoCodecULPFEC &&
         it->codec_type() != kVideoCodecRED) {
       VideoCodec codec;
@@ -29,9 +29,11 @@ namespace rtpplayer {
 class VcmPayloadSinkFactory : public PayloadSinkFactoryInterface {
  public:
   VcmPayloadSinkFactory(const std::string& base_out_filename,
-                        Clock* clock, bool protection_enabled,
+                        Clock* clock,
+                        bool protection_enabled,
                         VCMVideoProtection protection_method,
-                        int64_t rtt_ms, uint32_t render_delay_ms,
+                        int64_t rtt_ms,
+                        uint32_t render_delay_ms,
                         uint32_t min_playout_delay_ms);
   virtual ~VcmPayloadSinkFactory();

@@ -48,9 +48,9 @@ int RtpPlay(const CmdArgs& args) {
     output_file = webrtc::test::OutputPath() + "RtpPlay_decoded.yuv";

   webrtc::SimulatedClock clock(0);
-  webrtc::rtpplayer::VcmPayloadSinkFactory factory(output_file, &clock,
-      kConfigProtectionEnabled, kConfigProtectionMethod, kConfigRttMs,
-      kConfigRenderDelayMs, kConfigMinPlayoutDelayMs);
+  webrtc::rtpplayer::VcmPayloadSinkFactory factory(
+      output_file, &clock, kConfigProtectionEnabled, kConfigProtectionMethod,
+      kConfigRttMs, kConfigRenderDelayMs, kConfigMinPlayoutDelayMs);
   rtc::scoped_ptr<webrtc::rtpplayer::RtpPlayerInterface> rtp_player(
       webrtc::rtpplayer::Create(args.inputFile, &factory, &clock, payload_types,
                                 kConfigLossRate, kConfigRttMs,
@@ -11,13 +11,12 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
 #define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_

+#include <string>
+
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/typedefs.h"

-#include <string>
-
-enum VideoSize
-{
+enum VideoSize {
   kUndefined,
   kSQCIF,  // 128*96 = 12 288
   kQQVGA,  // 160*120 = 19 200
@@ -48,30 +47,34 @@ enum VideoSize
   kWFullHD,  // 1920*1080 = 2 073 600

   kNumberOfVideoSizes
 };

-class VideoSource
-{
- public:
+class VideoSource {
+ public:
   VideoSource();
-  VideoSource(std::string fileName, VideoSize size, float frameRate, webrtc::VideoType type = webrtc::kI420);
-  VideoSource(std::string fileName, uint16_t width, uint16_t height,
-              float frameRate = 30, webrtc::VideoType type = webrtc::kI420);
+  VideoSource(std::string fileName,
+              VideoSize size,
+              float frameRate,
+              webrtc::VideoType type = webrtc::kI420);
+  VideoSource(std::string fileName,
+              uint16_t width,
+              uint16_t height,
+              float frameRate = 30,
+              webrtc::VideoType type = webrtc::kI420);

   std::string GetFileName() const { return _fileName; }
   uint16_t GetWidth() const { return _width; }
   uint16_t GetHeight() const { return _height; }
   webrtc::VideoType GetType() const { return _type; }
   float GetFrameRate() const { return _frameRate; }
-  int GetWidthHeight( VideoSize size);
+  int GetWidthHeight(VideoSize size);

   // Returns the filename with the path (including the leading slash) removed.
   std::string GetName() const;

   size_t GetFrameLength() const;

  private:
   std::string _fileName;
   uint16_t _width;
   uint16_t _height;
@@ -20,11 +20,9 @@ VCMTimestampMap::VCMTimestampMap(size_t capacity)
     : ring_buffer_(new TimestampDataTuple[capacity]),
       capacity_(capacity),
       next_add_idx_(0),
-      next_pop_idx_(0) {
-}
+      next_pop_idx_(0) {}

-VCMTimestampMap::~VCMTimestampMap() {
-}
+VCMTimestampMap::~VCMTimestampMap() {}

 void VCMTimestampMap::Add(uint32_t timestamp, VCMFrameInformation* data) {
   ring_buffer_[next_add_idx_].timestamp = timestamp;
@@ -62,4 +60,4 @@ VCMFrameInformation* VCMTimestampMap::Pop(uint32_t timestamp) {
 bool VCMTimestampMap::IsEmpty() const {
   return (next_add_idx_ == next_pop_idx_);
 }
-}
+}  // namespace webrtc
@@ -10,17 +10,17 @@

 #include "webrtc/modules/video_coding/timing.h"

+#include <algorithm>
+
 #include "webrtc/modules/video_coding/internal_defines.h"
 #include "webrtc/modules/video_coding/jitter_buffer_common.h"
 #include "webrtc/system_wrappers/include/clock.h"
 #include "webrtc/system_wrappers/include/metrics.h"
 #include "webrtc/system_wrappers/include/timestamp_extrapolator.h"

-
 namespace webrtc {

-VCMTiming::VCMTiming(Clock* clock,
-                     VCMTiming* master_timing)
+VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing)
     : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
       clock_(clock),
       master_(false),
@@ -120,8 +120,8 @@ void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
     // Not initialized, set current delay to target.
     current_delay_ms_ = target_delay_ms;
   } else if (target_delay_ms != current_delay_ms_) {
-    int64_t delay_diff_ms = static_cast<int64_t>(target_delay_ms) -
-        current_delay_ms_;
+    int64_t delay_diff_ms =
+        static_cast<int64_t>(target_delay_ms) - current_delay_ms_;
     // Never change the delay with more than 100 ms every second. If we're
     // changing the delay in too large steps we will get noticeable freezes. By
     // limiting the change we can increase the delay in smaller steps, which
@@ -130,8 +130,10 @@ void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
     int64_t max_change_ms = 0;
     if (frame_timestamp < 0x0000ffff && prev_frame_timestamp_ > 0xffff0000) {
       // wrap
-      max_change_ms = kDelayMaxChangeMsPerS * (frame_timestamp +
-          (static_cast<int64_t>(1) << 32) - prev_frame_timestamp_) / 90000;
+      max_change_ms = kDelayMaxChangeMsPerS *
+                      (frame_timestamp + (static_cast<int64_t>(1) << 32) -
+                       prev_frame_timestamp_) /
+                      90000;
     } else {
       max_change_ms = kDelayMaxChangeMsPerS *
                       (frame_timestamp - prev_frame_timestamp_) / 90000;
@@ -193,8 +195,8 @@ void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
   ts_extrapolator_->Update(now_ms, time_stamp);
 }

-int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms)
-    const {
+int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp,
+                                int64_t now_ms) const {
   CriticalSectionScoped cs(crit_sect_);
   const int64_t render_time_ms = RenderTimeMsInternal(frame_timestamp, now_ms);
   return render_time_ms;
@@ -214,19 +216,19 @@ int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
 }

 // Must be called from inside a critical section.
-int32_t VCMTiming::MaxDecodeTimeMs(FrameType frame_type /*= kVideoFrameDelta*/)
-    const {
+int32_t VCMTiming::MaxDecodeTimeMs(
+    FrameType frame_type /*= kVideoFrameDelta*/) const {
   const int32_t decode_time_ms = codec_timer_.RequiredDecodeTimeMs(frame_type);
   assert(decode_time_ms >= 0);
   return decode_time_ms;
 }

-uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms, int64_t now_ms)
-    const {
+uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms,
+                                   int64_t now_ms) const {
   CriticalSectionScoped cs(crit_sect_);

-  const int64_t max_wait_time_ms = render_time_ms - now_ms -
-      MaxDecodeTimeMs() - render_delay_ms_;
+  const int64_t max_wait_time_ms =
+      render_time_ms - now_ms - MaxDecodeTimeMs() - render_delay_ms_;

   if (max_wait_time_ms < 0) {
     return 0;
@@ -234,8 +236,8 @@ uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms, int64_t now_ms)
   return static_cast<uint32_t>(max_wait_time_ms);
 }

-bool VCMTiming::EnoughTimeToDecode(uint32_t available_processing_time_ms)
-    const {
+bool VCMTiming::EnoughTimeToDecode(
+    uint32_t available_processing_time_ms) const {
   CriticalSectionScoped cs(crit_sect_);
   int32_t max_decode_time_ms = MaxDecodeTimeMs();
   if (max_decode_time_ms < 0) {
@@ -248,7 +250,8 @@ bool VCMTiming::EnoughTimeToDecode(uint32_t available_processing_time_ms)
     max_decode_time_ms = 1;
   }
   return static_cast<int32_t>(available_processing_time_ms) -
-      max_decode_time_ms > 0;
+             max_decode_time_ms >
+         0;
 }

 uint32_t VCMTiming::TargetVideoDelay() const {
@@ -25,8 +25,7 @@ class VCMTiming {
  public:
   // The primary timing component should be passed
   // if this is the dual timing component.
-  VCMTiming(Clock* clock,
-            VCMTiming* master_timing = NULL);
+  explicit VCMTiming(Clock* clock, VCMTiming* master_timing = NULL);
   ~VCMTiming();

   // Resets the timing to the initial state.
@@ -55,8 +55,9 @@ TEST(ReceiverTiming, Tests) {
   clock.AdvanceTimeMilliseconds(1000);
   timing.SetJitterDelay(jitterDelayMs);
   timing.UpdateCurrentDelay(timeStamp);
-  waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(
-      timeStamp, clock.TimeInMilliseconds()), clock.TimeInMilliseconds());
+  waitTime = timing.MaxWaitingTime(
+      timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()),
+      clock.TimeInMilliseconds());
   // Since we gradually increase the delay we only get 100 ms every second.
   EXPECT_EQ(jitterDelayMs - 10, waitTime);

@@ -85,11 +86,10 @@ TEST(ReceiverTiming, Tests) {
   for (int i = 0; i < 10; i++) {
     int64_t startTimeMs = clock.TimeInMilliseconds();
     clock.AdvanceTimeMilliseconds(10);
-    timing.StopDecodeTimer(timeStamp,
-                           clock.TimeInMilliseconds() - startTimeMs,
+    timing.StopDecodeTimer(
+        timeStamp, clock.TimeInMilliseconds() - startTimeMs,
         clock.TimeInMilliseconds(),
-                           timing.RenderTimeMs(
-                               timeStamp, clock.TimeInMilliseconds()));
+        timing.RenderTimeMs(timeStamp, clock.TimeInMilliseconds()));
     timeStamp += 90000 / 25;
     clock.AdvanceTimeMilliseconds(1000 / 25 - 10);
     timing.IncomingTimestamp(timeStamp, clock.TimeInMilliseconds());
@@ -107,7 +107,7 @@ TEST(ReceiverTiming, Tests) {
   uint32_t minTotalDelayMs = 200;
   timing.set_min_playout_delay(minTotalDelayMs);
   clock.AdvanceTimeMilliseconds(5000);
-  timeStamp += 5*90000;
+  timeStamp += 5 * 90000;
   timing.UpdateCurrentDelay(timeStamp);
   const int kRenderDelayMs = 10;
   timing.set_render_delay(kRenderDelayMs);
@@ -123,7 +123,7 @@ TEST(ReceiverTiming, Tests) {
   // Reset playout delay.
   timing.set_min_playout_delay(0);
   clock.AdvanceTimeMilliseconds(5000);
-  timeStamp += 5*90000;
+  timeStamp += 5 * 90000;
   timing.UpdateCurrentDelay(timeStamp);
 }

@@ -137,8 +137,8 @@ TEST(ReceiverTiming, WrapAround) {
     timing.IncomingTimestamp(timestamp, clock.TimeInMilliseconds());
     clock.AdvanceTimeMilliseconds(1000 / kFramerate);
     timestamp += 90000 / kFramerate;
-    int64_t render_time = timing.RenderTimeMs(0xFFFFFFFFu,
-                                              clock.TimeInMilliseconds());
+    int64_t render_time =
+        timing.RenderTimeMs(0xFFFFFFFFu, clock.TimeInMilliseconds());
     EXPECT_EQ(3 * 1000 / kFramerate, render_time);
     render_time = timing.RenderTimeMs(89u,  // One second later in 90 kHz.
                                       clock.TimeInMilliseconds());
@@ -12,8 +12,7 @@

 #include "webrtc/system_wrappers/include/trace.h"

-namespace webrtc
-{
+namespace webrtc {

 const float kDefaultKeyFrameSizeAvgKBits = 0.9f;
 const float kDefaultKeyFrameRatio = 0.99f;
@@ -22,32 +21,27 @@ const float kDefaultDropRatioMax = 0.96f;
 const float kDefaultMaxTimeToDropFrames = 4.0f;  // In seconds.

 FrameDropper::FrameDropper()
-    :
-    _keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
-    _keyFrameRatio(kDefaultKeyFrameRatio),
-    _dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
-    _enabled(true),
-    _max_time_drops(kDefaultMaxTimeToDropFrames)
-    {
+    : _keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
+      _keyFrameRatio(kDefaultKeyFrameRatio),
+      _dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
+      _enabled(true),
+      _max_time_drops(kDefaultMaxTimeToDropFrames) {
   Reset();
 }

 FrameDropper::FrameDropper(float max_time_drops)
-    :
-    _keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
-    _keyFrameRatio(kDefaultKeyFrameRatio),
-    _dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
-    _enabled(true),
-    _max_time_drops(max_time_drops)
-    {
+    : _keyFrameSizeAvgKbits(kDefaultKeyFrameSizeAvgKBits),
+      _keyFrameRatio(kDefaultKeyFrameRatio),
+      _dropRatio(kDefaultDropRatioAlpha, kDefaultDropRatioMax),
+      _enabled(true),
+      _max_time_drops(max_time_drops) {
   Reset();
 }

-void
-FrameDropper::Reset()
-{
+void FrameDropper::Reset() {
   _keyFrameRatio.Reset(0.99f);
-  _keyFrameRatio.Apply(1.0f, 1.0f/300.0f); // 1 key frame every 10th second in 30 fps
+  _keyFrameRatio.Apply(
+      1.0f, 1.0f / 300.0f);  // 1 key frame every 10th second in 30 fps
   _keyFrameSizeAvgKbits.Reset(0.9f);
   _keyFrameCount = 0;
   _accumulator = 0.0f;
@@ -68,54 +62,41 @@ FrameDropper::Reset()
   _max_time_drops = 4.0f;
 }

-void
-FrameDropper::Enable(bool enable)
-{
+void FrameDropper::Enable(bool enable) {
   _enabled = enable;
 }

-void
-FrameDropper::Fill(size_t frameSizeBytes, bool deltaFrame)
-{
-  if (!_enabled)
-  {
+void FrameDropper::Fill(size_t frameSizeBytes, bool deltaFrame) {
+  if (!_enabled) {
     return;
   }
   float frameSizeKbits = 8.0f * static_cast<float>(frameSizeBytes) / 1000.0f;
-  if (!deltaFrame && !_fastMode) // fast mode does not treat key-frames any different
-  {
+  if (!deltaFrame &&
+      !_fastMode) {  // fast mode does not treat key-frames any different
     _keyFrameSizeAvgKbits.Apply(1, frameSizeKbits);
     _keyFrameRatio.Apply(1.0, 1.0);
-    if (frameSizeKbits > _keyFrameSizeAvgKbits.filtered())
-    {
+    if (frameSizeKbits > _keyFrameSizeAvgKbits.filtered()) {
       // Remove the average key frame size since we
       // compensate for key frames when adding delta
      // frames.
      frameSizeKbits -= _keyFrameSizeAvgKbits.filtered();
-    }
-    else
-    {
+    } else {
      // Shouldn't be negative, so zero is the lower bound.
      frameSizeKbits = 0;
    }
    if (_keyFrameRatio.filtered() > 1e-5 &&
-        1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames)
-    {
+        1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames) {
      // We are sending key frames more often than our upper bound for
      // how much we allow the key frame compensation to be spread
      // out in time. Therefor we must use the key frame ratio rather
      // than keyFrameSpreadFrames.
      _keyFrameCount =
          static_cast<int32_t>(1 / _keyFrameRatio.filtered() + 0.5);
-    }
-    else
-    {
+    } else {
      // Compensate for the key frame the following frames
      _keyFrameCount = static_cast<int32_t>(_keyFrameSpreadFrames + 0.5);
    }
-  }
-  else
-  {
+  } else {
    // Decrease the keyFrameRatio
    _keyFrameRatio.Apply(1.0, 0.0);
  }
@@ -124,201 +105,151 @@ FrameDropper::Fill(size_t frameSizeBytes, bool deltaFrame)
   CapAccumulator();
 }

-void
-FrameDropper::Leak(uint32_t inputFrameRate)
-{
-  if (!_enabled)
-  {
+void FrameDropper::Leak(uint32_t inputFrameRate) {
+  if (!_enabled) {
     return;
   }
-  if (inputFrameRate < 1)
-  {
+  if (inputFrameRate < 1) {
     return;
   }
-  if (_targetBitRate < 0.0f)
-  {
+  if (_targetBitRate < 0.0f) {
     return;
   }
   _keyFrameSpreadFrames = 0.5f * inputFrameRate;
-  // T is the expected bits per frame (target). If all frames were the same size,
+  // T is the expected bits per frame (target). If all frames were the same
+  // size,
   // we would get T bits per frame. Notice that T is also weighted to be able to
   // force a lower frame rate if wanted.
   float T = _targetBitRate / inputFrameRate;
-  if (_keyFrameCount > 0)
-  {
+  if (_keyFrameCount > 0) {
     // Perform the key frame compensation
     if (_keyFrameRatio.filtered() > 0 &&
-        1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames)
-    {
+        1 / _keyFrameRatio.filtered() < _keyFrameSpreadFrames) {
       T -= _keyFrameSizeAvgKbits.filtered() * _keyFrameRatio.filtered();
-    }
-    else
-    {
+    } else {
       T -= _keyFrameSizeAvgKbits.filtered() / _keyFrameSpreadFrames;
     }
     _keyFrameCount--;
   }
   _accumulator -= T;
-  if (_accumulator < 0.0f)
-  {
+  if (_accumulator < 0.0f) {
     _accumulator = 0.0f;
   }
   UpdateRatio();
 }

-void
-FrameDropper::UpdateNack(uint32_t nackBytes)
-{
-  if (!_enabled)
-  {
+void FrameDropper::UpdateNack(uint32_t nackBytes) {
+  if (!_enabled) {
     return;
   }
   _accumulator += static_cast<float>(nackBytes) * 8.0f / 1000.0f;
 }

-void
-FrameDropper::FillBucket(float inKbits, float outKbits)
-{
+void FrameDropper::FillBucket(float inKbits, float outKbits) {
   _accumulator += (inKbits - outKbits);
 }

-void
-FrameDropper::UpdateRatio()
-{
-  if (_accumulator > 1.3f * _accumulatorMax)
-  {
+void FrameDropper::UpdateRatio() {
+  if (_accumulator > 1.3f * _accumulatorMax) {
     // Too far above accumulator max, react faster
     _dropRatio.UpdateBase(0.8f);
-  }
-  else
-  {
+  } else {
     // Go back to normal reaction
     _dropRatio.UpdateBase(0.9f);
   }
-  if (_accumulator > _accumulatorMax)
-  {
+  if (_accumulator > _accumulatorMax) {
     // We are above accumulator max, and should ideally
     // drop a frame. Increase the dropRatio and drop
     // the frame later.
-    if (_wasBelowMax)
-    {
+    if (_wasBelowMax) {
       _dropNext = true;
     }
-    if (_fastMode)
-    {
+    if (_fastMode) {
      // always drop in aggressive mode
      _dropNext = true;
    }

    _dropRatio.Apply(1.0f, 1.0f);
    _dropRatio.UpdateBase(0.9f);
-  }
-  else
-  {
+  } else {
    _dropRatio.Apply(1.0f, 0.0f);
  }
  _wasBelowMax = _accumulator < _accumulatorMax;
 }

-// This function signals when to drop frames to the caller. It makes use of the dropRatio
+// This function signals when to drop frames to the caller. It makes use of the
+// dropRatio
 // to smooth out the drops over time.
-bool
-FrameDropper::DropFrame()
-{
-  if (!_enabled)
-  {
+bool FrameDropper::DropFrame() {
+  if (!_enabled) {
     return false;
   }
-  if (_dropNext)
-  {
+  if (_dropNext) {
     _dropNext = false;
     _dropCount = 0;
   }

-  if (_dropRatio.filtered() >= 0.5f) // Drops per keep
-  {
+  if (_dropRatio.filtered() >= 0.5f) {  // Drops per keep
     // limit is the number of frames we should drop between each kept frame
     // to keep our drop ratio. limit is positive in this case.
     float denom = 1.0f - _dropRatio.filtered();
-    if (denom < 1e-5)
-    {
-      denom = (float)1e-5;
+    if (denom < 1e-5) {
+      denom = 1e-5f;
     }
     int32_t limit = static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
     // Put a bound on the max amount of dropped frames between each kept
     // frame, in terms of frame rate and window size (secs).
-    int max_limit = static_cast<int>(_incoming_frame_rate *
-                                     _max_time_drops);
+    int max_limit = static_cast<int>(_incoming_frame_rate * _max_time_drops);
     if (limit > max_limit) {
       limit = max_limit;
     }
-    if (_dropCount < 0)
-    {
+    if (_dropCount < 0) {
       // Reset the _dropCount since it was negative and should be positive.
-      if (_dropRatio.filtered() > 0.4f)
-      {
+      if (_dropRatio.filtered() > 0.4f) {
        _dropCount = -_dropCount;
-      }
-      else
-      {
+      } else {
        _dropCount = 0;
      }
    }
-    if (_dropCount < limit)
-    {
+    if (_dropCount < limit) {
      // As long we are below the limit we should drop frames.
      _dropCount++;
      return true;
-    }
-    else
-    {
+    } else {
      // Only when we reset _dropCount a frame should be kept.
      _dropCount = 0;
      return false;
    }
-  }
-  else if (_dropRatio.filtered() > 0.0f &&
-           _dropRatio.filtered() < 0.5f) // Keeps per drop
-  {
+  } else if (_dropRatio.filtered() > 0.0f &&
+             _dropRatio.filtered() < 0.5f) {  // Keeps per drop
    // limit is the number of frames we should keep between each drop
    // in order to keep the drop ratio. limit is negative in this case,
    // and the _dropCount is also negative.
    float denom = _dropRatio.filtered();
-    if (denom < 1e-5)
-    {
-      denom = (float)1e-5;
+    if (denom < 1e-5) {
+      denom = 1e-5f;
    }
    int32_t limit = -static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
-    if (_dropCount > 0)
-    {
+    if (_dropCount > 0) {
      // Reset the _dropCount since we have a positive
      // _dropCount, and it should be negative.
-      if (_dropRatio.filtered() < 0.6f)
-      {
+      if (_dropRatio.filtered() < 0.6f) {
        _dropCount = -_dropCount;
-      }
-      else
-      {
+      } else {
        _dropCount = 0;
      }
    }
-    if (_dropCount > limit)
-    {
-      if (_dropCount == 0)
-      {
+    if (_dropCount > limit) {
+      if (_dropCount == 0) {
        // Drop frames when we reset _dropCount.
        _dropCount--;
        return true;
-      }
-      else
-      {
+      } else {
        // Keep frames as long as we haven't reached limit.
        _dropCount--;
        return false;
      }
-    }
-    else
-    {
+    } else {
      _dropCount = 0;
      return false;
    }
@ -327,18 +258,16 @@ FrameDropper::DropFrame()
|
|||||||
return false;
|
return false;
|
||||||
|
|
||||||
// A simpler version, unfiltered and quicker
|
// A simpler version, unfiltered and quicker
|
||||||
//bool dropNext = _dropNext;
|
// bool dropNext = _dropNext;
|
||||||
//_dropNext = false;
|
// _dropNext = false;
|
||||||
//return dropNext;
|
// return dropNext;
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void FrameDropper::SetRates(float bitRate, float incoming_frame_rate) {
|
||||||
FrameDropper::SetRates(float bitRate, float incoming_frame_rate)
|
|
||||||
{
|
|
||||||
// Bit rate of -1 means infinite bandwidth.
|
// Bit rate of -1 means infinite bandwidth.
|
||||||
_accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
|
_accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
|
||||||
if (_targetBitRate > 0.0f && bitRate < _targetBitRate && _accumulator > _accumulatorMax)
|
if (_targetBitRate > 0.0f && bitRate < _targetBitRate &&
|
||||||
{
|
_accumulator > _accumulatorMax) {
|
||||||
// Rescale the accumulator level if the accumulator max decreases
|
// Rescale the accumulator level if the accumulator max decreases
|
||||||
_accumulator = bitRate / _targetBitRate * _accumulator;
|
_accumulator = bitRate / _targetBitRate * _accumulator;
|
||||||
}
|
}
|
||||||
@ -347,11 +276,8 @@ FrameDropper::SetRates(float bitRate, float incoming_frame_rate)
|
|||||||
_incoming_frame_rate = incoming_frame_rate;
|
_incoming_frame_rate = incoming_frame_rate;
|
||||||
}
|
}
|
||||||
|
|
||||||
float
|
float FrameDropper::ActualFrameRate(uint32_t inputFrameRate) const {
|
||||||
FrameDropper::ActualFrameRate(uint32_t inputFrameRate) const
|
if (!_enabled) {
|
||||||
{
|
|
||||||
if (!_enabled)
|
|
||||||
{
|
|
||||||
return static_cast<float>(inputFrameRate);
|
return static_cast<float>(inputFrameRate);
|
||||||
}
|
}
|
||||||
return inputFrameRate * (1.0f - _dropRatio.filtered());
|
return inputFrameRate * (1.0f - _dropRatio.filtered());
|
||||||
@ -366,5 +292,4 @@ void FrameDropper::CapAccumulator() {
|
|||||||
_accumulator = max_accumulator;
|
_accumulator = max_accumulator;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} // namespace webrtc
|
||||||
}
|
|
||||||
|
|||||||
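Reading note (not part of the CL): in the "keeps per drop" branch above, the limit works out to round(1/ratio - 1), so a filtered drop ratio of 0.25 keeps three frames between consecutive drops. Below is a standalone sketch of just that arithmetic; the helper name and main() are invented for illustration and are not FrameDropper code.

// Standalone illustration of the "keeps per drop" arithmetic shown above for
// drop ratios below 0.5.  FrameDropper itself also handles the mirrored
// "drops per keep" branch; this sketch only covers the visible formula.
#include <cstdint>
#include <cstdio>

int32_t KeepsPerDrop(float drop_ratio) {
  float denom = drop_ratio;
  if (denom < 1e-5)
    denom = 1e-5f;
  // Same rounding as the code above: keep round(1/ratio - 1) frames, then
  // drop one, which yields the requested long-run drop ratio.
  return static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
}

int main() {
  printf("ratio 0.25 -> keep %d frames per drop\n", KeepsPerDrop(0.25f));  // 3
  printf("ratio 0.10 -> keep %d frames per drop\n", KeepsPerDrop(0.10f));  // 9
  return 0;
}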
@@ -20,20 +20,13 @@ namespace webrtc {

 class MockFrameDropper : public FrameDropper {
  public:
-  MOCK_METHOD0(Reset,
-      void());
-  MOCK_METHOD1(Enable,
-      void(bool enable));
-  MOCK_METHOD0(DropFrame,
-      bool());
-  MOCK_METHOD2(Fill,
-      void(size_t frameSizeBytes, bool deltaFrame));
-  MOCK_METHOD1(Leak,
-      void(uint32_t inputFrameRate));
-  MOCK_METHOD2(SetRates,
-      void(float bitRate, float incoming_frame_rate));
-  MOCK_CONST_METHOD1(ActualFrameRate,
-      float(uint32_t inputFrameRate));
+  MOCK_METHOD0(Reset, void());
+  MOCK_METHOD1(Enable, void(bool enable));
+  MOCK_METHOD0(DropFrame, bool());
+  MOCK_METHOD2(Fill, void(size_t frameSizeBytes, bool deltaFrame));
+  MOCK_METHOD1(Leak, void(uint32_t inputFrameRate));
+  MOCK_METHOD2(SetRates, void(float bitRate, float incoming_frame_rate));
+  MOCK_CONST_METHOD1(ActualFrameRate, float(uint32_t inputFrameRate));
 };

 }  // namespace webrtc
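Reading note (not part of the CL): a mock declared with these MOCK_METHODn macros is driven from a test in the usual gmock way. A minimal hypothetical sketch follows; the test name, the expectations, and the generic include paths are assumptions, and the mock_frame_dropper header from this file would also need to be included under its real path.

#include <gmock/gmock.h>  // include paths per the local build
#include <gtest/gtest.h>
// #include the mock_frame_dropper header shown above (path assumed).

using ::testing::Return;

TEST(MockFrameDropperSketch, TestDecidesDropBehaviour) {
  webrtc::MockFrameDropper dropper;
  // The test, not the real leaky-bucket logic, decides what DropFrame() does.
  EXPECT_CALL(dropper, SetRates(300.0f, 30.0f)).Times(1);
  EXPECT_CALL(dropper, DropFrame())
      .WillOnce(Return(true))
      .WillOnce(Return(false));

  dropper.SetRates(300.0f, 30.0f);
  EXPECT_TRUE(dropper.DropFrame());
  EXPECT_FALSE(dropper.DropFrame());
}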
@@ -16,7 +16,7 @@
 #include "webrtc/typedefs.h"

 namespace webrtc {
-template<class T>
+template <class T>
 class MovingAverage {
  public:
   MovingAverage();
@@ -30,17 +30,17 @@ class MovingAverage {
   std::list<T> samples_;
 };

-template<class T>
-MovingAverage<T>::MovingAverage() : sum_(static_cast<T>(0)) {
-}
+template <class T>
+MovingAverage<T>::MovingAverage()
+    : sum_(static_cast<T>(0)) {}

-template<class T>
+template <class T>
 void MovingAverage<T>::AddSample(T sample) {
   samples_.push_back(sample);
   sum_ += sample;
 }

-template<class T>
+template <class T>
 bool MovingAverage<T>::GetAverage(size_t num_samples, T* avg) {
   if (num_samples > samples_.size())
     return false;
@@ -55,13 +55,13 @@ bool MovingAverage<T>::GetAverage(size_t num_samples, T* avg) {
   return true;
 }

-template<class T>
+template <class T>
 void MovingAverage<T>::Reset() {
   sum_ = static_cast<T>(0);
   samples_.clear();
 }

-template<class T>
+template <class T>
 int MovingAverage<T>::size() {
   return samples_.size();
 }
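Reading note (not part of the CL): judging only from the members and methods visible in these hunks, MovingAverage<T> keeps its samples in a list alongside a running sum. A hedged usage sketch follows; the header path and the exact averaging-window semantics of GetAverage() are assumptions.

#include "webrtc/modules/video_coding/utility/moving_average.h"  // path assumed

void MovingAverageSketch() {
  webrtc::MovingAverage<int> qp_average;
  const int kSamples[] = {28, 30, 34, 32};
  for (int qp : kSamples)
    qp_average.AddSample(qp);

  int avg = 0;
  // Returns false if fewer than the requested number of samples are stored.
  if (qp_average.GetAverage(3, &avg)) {
    // avg now holds the integer average over three of the stored samples.
  }
  qp_average.Reset();  // Clears the samples and zeroes the running sum.
}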
@@ -26,8 +26,7 @@ QualityScaler::QualityScaler()
       downscale_shift_(0),
       framerate_down_(false),
       min_width_(kDefaultMinDownscaleDimension),
-      min_height_(kDefaultMinDownscaleDimension) {
-}
+      min_height_(kDefaultMinDownscaleDimension) {}

 void QualityScaler::Init(int low_qp_threshold,
                          int high_qp_threshold,
@@ -124,13 +123,8 @@ const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
   if (res.width == frame.width())
     return frame;

-  scaler_.Set(frame.width(),
-              frame.height(),
-              res.width,
-              res.height,
-              kI420,
-              kI420,
-              kScaleBox);
+  scaler_.Set(frame.width(), frame.height(), res.width, res.height, kI420,
+              kI420, kScaleBox);
   if (scaler_.Scale(frame, &scaled_frame_) != 0)
     return frame;

@@ -33,6 +33,7 @@ class QualityScalerTest : public ::testing::Test {
     int width;
     int height;
   };

  protected:
   enum ScaleDirection {
     kKeepScaleAtHighQp,
@@ -43,8 +44,8 @@ class QualityScalerTest : public ::testing::Test {
   enum BadQualityMetric { kDropFrame, kReportLowQP };

   QualityScalerTest() {
-    input_frame_.CreateEmptyFrame(
-        kWidth, kHeight, kWidth, kHalfWidth, kHalfWidth);
+    input_frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, kHalfWidth,
+                                  kHalfWidth);
     qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false);
     qs_.ReportFramerate(kFramerate);
     qs_.OnEncodeFrame(input_frame_);
@@ -97,7 +98,8 @@ class QualityScalerTest : public ::testing::Test {
                                     int num_second,
                                     int initial_framerate);

-  void VerifyQualityAdaptation(int initial_framerate, int seconds,
+  void VerifyQualityAdaptation(int initial_framerate,
+                               int seconds,
                                bool expect_spatial_resize,
                                bool expect_framerate_reduction);

@@ -229,14 +231,14 @@ TEST_F(QualityScalerTest,
   const int kOddWidth = 517;
   const int kHalfOddWidth = (kOddWidth + 1) / 2;
   const int kOddHeight = 1239;
-  input_frame_.CreateEmptyFrame(
-      kOddWidth, kOddHeight, kOddWidth, kHalfOddWidth, kHalfOddWidth);
+  input_frame_.CreateEmptyFrame(kOddWidth, kOddHeight, kOddWidth, kHalfOddWidth,
+                                kHalfOddWidth);
   ContinuouslyDownscalesByHalfDimensionsAndBackUp();
 }

 void QualityScalerTest::DoesNotDownscaleFrameDimensions(int width, int height) {
-  input_frame_.CreateEmptyFrame(
-      width, height, width, (width + 1) / 2, (width + 1) / 2);
+  input_frame_.CreateEmptyFrame(width, height, width, (width + 1) / 2,
+                                (width + 1) / 2);

   for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
     qs_.ReportDroppedFrame();
@@ -259,7 +261,9 @@ TEST_F(QualityScalerTest, DoesNotDownscaleFrom1Px) {
 }

 QualityScalerTest::Resolution QualityScalerTest::TriggerResolutionChange(
-    BadQualityMetric dropframe_lowqp, int num_second, int initial_framerate) {
+    BadQualityMetric dropframe_lowqp,
+    int num_second,
+    int initial_framerate) {
   QualityScalerTest::Resolution res;
   res.framerate = initial_framerate;
   qs_.OnEncodeFrame(input_frame_);
@@ -288,7 +292,9 @@ QualityScalerTest::Resolution QualityScalerTest::TriggerResolutionChange(
 }

 void QualityScalerTest::VerifyQualityAdaptation(
-    int initial_framerate, int seconds, bool expect_spatial_resize,
+    int initial_framerate,
+    int seconds,
+    bool expect_spatial_resize,
     bool expect_framerate_reduction) {
   const int kDisabledBadQpThreshold = kMaxQp + 1;
   qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator,
@@ -298,8 +304,8 @@ void QualityScalerTest::VerifyQualityAdaptation(
   int init_height = qs_.GetScaledResolution().height;

   // Test reducing framerate by dropping frame continuously.
-  QualityScalerTest::Resolution res = TriggerResolutionChange(
-      kDropFrame, seconds, initial_framerate);
+  QualityScalerTest::Resolution res =
+      TriggerResolutionChange(kDropFrame, seconds, initial_framerate);

   if (expect_framerate_reduction) {
     EXPECT_LT(res.framerate, initial_framerate);
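Reading note (not part of the CL): the test fixture above suggests the typical QualityScaler call sequence, i.e. Init() with the QP thresholds, then per-frame reports that feed the scaling decision. A hedged sketch pieced together only from calls visible in these hunks; the header path, argument values, and the return type of GetScaledResolution() are assumptions.

#include "webrtc/modules/video_coding/utility/quality_scaler.h"  // path assumed

void QualityScalerSketch(webrtc::QualityScaler* qs,
                         const webrtc::VideoFrame& frame) {
  qs->ReportFramerate(30);    // Current input frame rate.
  qs->OnEncodeFrame(frame);   // Lets the scaler track the input resolution.
  qs->ReportDroppedFrame();   // Enough of these eventually trigger downscaling.
  const auto res = qs->GetScaledResolution();
  // res.width / res.height give the target resolution; GetScaledFrame(frame)
  // returns the frame scaled down to it (see the quality_scaler.cc hunk above).
}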
@@ -38,36 +38,24 @@ struct VP8BitReader {
 };

 const uint8_t kVP8Log2Range[128] = {
-  7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,
-  3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
-  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
-  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
-  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-  0
-};
+    7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3,
+    3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1,
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0};

 // range = ((range - 1) << kVP8Log2Range[range]) + 1
 const uint8_t kVP8NewRange[128] = {
-  127, 127, 191, 127, 159, 191, 223, 127,
-  143, 159, 175, 191, 207, 223, 239, 127,
-  135, 143, 151, 159, 167, 175, 183, 191,
-  199, 207, 215, 223, 231, 239, 247, 127,
-  131, 135, 139, 143, 147, 151, 155, 159,
-  163, 167, 171, 175, 179, 183, 187, 191,
-  195, 199, 203, 207, 211, 215, 219, 223,
-  227, 231, 235, 239, 243, 247, 251, 127,
-  129, 131, 133, 135, 137, 139, 141, 143,
-  145, 147, 149, 151, 153, 155, 157, 159,
-  161, 163, 165, 167, 169, 171, 173, 175,
-  177, 179, 181, 183, 185, 187, 189, 191,
-  193, 195, 197, 199, 201, 203, 205, 207,
-  209, 211, 213, 215, 217, 219, 221, 223,
-  225, 227, 229, 231, 233, 235, 237, 239,
-  241, 243, 245, 247, 249, 251, 253, 127
-};
+    127, 127, 191, 127, 159, 191, 223, 127, 143, 159, 175, 191, 207, 223, 239,
+    127, 135, 143, 151, 159, 167, 175, 183, 191, 199, 207, 215, 223, 231, 239,
+    247, 127, 131, 135, 139, 143, 147, 151, 155, 159, 163, 167, 171, 175, 179,
+    183, 187, 191, 195, 199, 203, 207, 211, 215, 219, 223, 227, 231, 235, 239,
+    243, 247, 251, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149,
+    151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179,
+    181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209,
+    211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239,
+    241, 243, 245, 247, 249, 251, 253, 127};

 // Gets the QP, QP range: [0, 127].
 // Returns true on success, false otherwise.
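Reading note (not part of the CL): one way to read the two tables, an inference rather than something stated in the file, is that kVP8Log2Range[r] is the left shift that brings (r + 1) back into [128, 255] and kVP8NewRange[r] equals ((r + 1) << shift) - 1, i.e. a table-driven renormalisation that avoids a shift loop in the bool decoder. The sketch below recomputes both values under that assumption so they can be compared against the hard-coded tables.

// Recomputes the table entries under the stated assumption; compare the
// printed values with kVP8Log2Range[r] and kVP8NewRange[r] if the tables are
// in scope.  This is an illustration, not code from the parser.
#include <cstdint>
#include <cstdio>

int main() {
  for (int r = 0; r < 128; ++r) {
    int shift = 0;
    while (((r + 1) << shift) < 128)
      ++shift;  // Shift until (r + 1) lands in [128, 255].
    const int new_range = ((r + 1) << shift) - 1;
    printf("%3d: shift=%d new_range=%3d\n", r, shift, new_range);
  }
  return 0;
}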
@@ -8,32 +8,32 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

+#include "webrtc/modules/video_coding/video_coding_impl.h"
+
+#include <algorithm>
+
 #include "webrtc/common_types.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/encoded_frame.h"
 #include "webrtc/modules/video_coding/jitter_buffer.h"
 #include "webrtc/modules/video_coding/packet.h"
-#include "webrtc/modules/video_coding/video_coding_impl.h"
 #include "webrtc/system_wrappers/include/clock.h"

 namespace webrtc {
 namespace vcm {

-int64_t
-VCMProcessTimer::Period() const {
+int64_t VCMProcessTimer::Period() const {
   return _periodMs;
 }

-int64_t
-VCMProcessTimer::TimeUntilProcess() const {
+int64_t VCMProcessTimer::TimeUntilProcess() const {
   const int64_t time_since_process = _clock->TimeInMilliseconds() - _latestMs;
   const int64_t time_until_process = _periodMs - time_since_process;
   return std::max<int64_t>(time_until_process, 0);
 }

-void
-VCMProcessTimer::Processed() {
+void VCMProcessTimer::Processed() {
   _latestMs = _clock->TimeInMilliseconds();
 }
 }  // namespace vcm
@@ -59,8 +59,8 @@ class EncodedImageCallbackWrapper : public EncodedImageCallback {
                         const RTPFragmentationHeader* fragmentation) {
     CriticalSectionScoped cs(cs_.get());
     if (callback_)
-      return callback_->Encoded(
-          encoded_image, codec_specific_info, fragmentation);
+      return callback_->Encoded(encoded_image, codec_specific_info,
+                                fragmentation);
     return 0;
   }

@@ -84,9 +84,7 @@ class VideoCodingModuleImpl : public VideoCodingModule {
         receiver_(clock, event_factory),
         own_event_factory_(owns_event_factory ? event_factory : NULL) {}

-  virtual ~VideoCodingModuleImpl() {
-    own_event_factory_.reset();
-  }
+  virtual ~VideoCodingModuleImpl() { own_event_factory_.reset(); }

   int64_t TimeUntilNextProcess() override {
     int64_t sender_time = sender_.TimeUntilNextProcess();
@@ -321,8 +319,7 @@ VideoCodingModule* VideoCodingModule::Create(
                                    encoder_rate_observer, qm_settings_callback);
 }

-VideoCodingModule* VideoCodingModule::Create(
-    Clock* clock,
-    EventFactory* event_factory) {
+VideoCodingModule* VideoCodingModule::Create(Clock* clock,
+                                             EventFactory* event_factory) {
   assert(clock);
   assert(event_factory);
|
|||||||
vcm_->RegisterExternalDecoder(&decoder_, video_codec_.plType);
|
vcm_->RegisterExternalDecoder(&decoder_, video_codec_.plType);
|
||||||
}
|
}
|
||||||
|
|
||||||
virtual void TearDown() {
|
virtual void TearDown() { VideoCodingModule::Destroy(vcm_); }
|
||||||
VideoCodingModule::Destroy(vcm_);
|
|
||||||
}
|
|
||||||
|
|
||||||
void InsertPacket(uint32_t timestamp,
|
void InsertPacket(uint32_t timestamp,
|
||||||
uint16_t seq_no,
|
uint16_t seq_no,
|
||||||
@ -87,19 +85,17 @@ TEST_F(VCMRobustnessTest, TestHardNack) {
|
|||||||
.With(Args<0, 1>(ElementsAre(6, 7)))
|
.With(Args<0, 1>(ElementsAre(6, 7)))
|
||||||
.Times(1);
|
.Times(1);
|
||||||
for (int ts = 0; ts <= 6000; ts += 3000) {
|
for (int ts = 0; ts <= 6000; ts += 3000) {
|
||||||
EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, ts),
|
EXPECT_CALL(decoder_,
|
||||||
Field(&EncodedImage::_length,
|
Decode(AllOf(Field(&EncodedImage::_timeStamp, ts),
|
||||||
kPayloadLen * 3),
|
Field(&EncodedImage::_length, kPayloadLen * 3),
|
||||||
Field(&EncodedImage::_completeFrame,
|
Field(&EncodedImage::_completeFrame, true)),
|
||||||
true)),
|
|
||||||
false, _, _, _))
|
false, _, _, _))
|
||||||
.Times(1)
|
.Times(1)
|
||||||
.InSequence(s);
|
.InSequence(s);
|
||||||
}
|
}
|
||||||
|
|
||||||
ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
|
ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
|
||||||
VideoCodingModule::kHardNack,
|
VideoCodingModule::kHardNack, kNoErrors));
|
||||||
kNoErrors));
|
|
||||||
|
|
||||||
InsertPacket(0, 0, true, false, kVideoFrameKey);
|
InsertPacket(0, 0, true, false, kVideoFrameKey);
|
||||||
InsertPacket(0, 1, false, false, kVideoFrameKey);
|
InsertPacket(0, 1, false, false, kVideoFrameKey);
|
||||||
@ -136,14 +132,11 @@ TEST_F(VCMRobustnessTest, TestHardNack) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(VCMRobustnessTest, TestHardNackNoneDecoded) {
|
TEST_F(VCMRobustnessTest, TestHardNackNoneDecoded) {
|
||||||
EXPECT_CALL(request_callback_, ResendPackets(_, _))
|
EXPECT_CALL(request_callback_, ResendPackets(_, _)).Times(0);
|
||||||
.Times(0);
|
EXPECT_CALL(frame_type_callback_, RequestKeyFrame()).Times(1);
|
||||||
EXPECT_CALL(frame_type_callback_, RequestKeyFrame())
|
|
||||||
.Times(1);
|
|
||||||
|
|
||||||
ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
|
ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
|
||||||
VideoCodingModule::kHardNack,
|
VideoCodingModule::kHardNack, kNoErrors));
|
||||||
kNoErrors));
|
|
||||||
|
|
||||||
InsertPacket(3000, 3, true, false, kVideoFrameDelta);
|
InsertPacket(3000, 3, true, false, kVideoFrameDelta);
|
||||||
InsertPacket(3000, 4, false, false, kVideoFrameDelta);
|
InsertPacket(3000, 4, false, false, kVideoFrameDelta);
|
||||||
@ -166,39 +159,36 @@ TEST_F(VCMRobustnessTest, TestModeNoneWithErrors) {
|
|||||||
.With(Args<0, 1>(ElementsAre(4)))
|
.With(Args<0, 1>(ElementsAre(4)))
|
||||||
.Times(0);
|
.Times(0);
|
||||||
|
|
||||||
EXPECT_CALL(decoder_, Copy())
|
EXPECT_CALL(decoder_, Copy()).Times(0);
|
||||||
.Times(0);
|
EXPECT_CALL(decoderCopy_, Copy()).Times(0);
|
||||||
EXPECT_CALL(decoderCopy_, Copy())
|
|
||||||
.Times(0);
|
|
||||||
|
|
||||||
// Decode operations
|
// Decode operations
|
||||||
EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
|
EXPECT_CALL(decoder_,
|
||||||
Field(&EncodedImage::_completeFrame,
|
Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
|
||||||
true)),
|
Field(&EncodedImage::_completeFrame, true)),
|
||||||
false, _, _, _))
|
false, _, _, _))
|
||||||
.Times(1)
|
.Times(1)
|
||||||
.InSequence(s1);
|
.InSequence(s1);
|
||||||
EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
|
EXPECT_CALL(decoder_,
|
||||||
Field(&EncodedImage::_completeFrame,
|
Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
|
||||||
false)),
|
Field(&EncodedImage::_completeFrame, false)),
|
||||||
false, _, _, _))
|
false, _, _, _))
|
||||||
.Times(1)
|
.Times(1)
|
||||||
.InSequence(s1);
|
.InSequence(s1);
|
||||||
EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
|
EXPECT_CALL(decoder_,
|
||||||
Field(&EncodedImage::_completeFrame,
|
Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
|
||||||
true)),
|
Field(&EncodedImage::_completeFrame, true)),
|
||||||
false, _, _, _))
|
false, _, _, _))
|
||||||
.Times(1)
|
.Times(1)
|
||||||
.InSequence(s1);
|
.InSequence(s1);
|
||||||
EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
|
EXPECT_CALL(decoder_,
|
||||||
Field(&EncodedImage::_completeFrame,
|
Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
|
||||||
true)),
|
Field(&EncodedImage::_completeFrame, true)),
|
||||||
false, _, _, _))
|
false, _, _, _))
|
||||||
.Times(1)
|
.Times(1)
|
||||||
.InSequence(s1);
|
.InSequence(s1);
|
||||||
|
|
||||||
ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
|
ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(VideoCodingModule::kNone,
|
||||||
VideoCodingModule::kNone,
|
|
||||||
kWithErrors));
|
kWithErrors));
|
||||||
|
|
||||||
InsertPacket(0, 0, true, false, kVideoFrameKey);
|
InsertPacket(0, 0, true, false, kVideoFrameKey);
|
||||||
|
|||||||
@@ -8,12 +8,12 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-#include "webrtc/common_types.h"

 #include <algorithm>  // std::max

 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
+#include "webrtc/common_types.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/encoded_frame.h"
@@ -126,14 +126,10 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
   _nextFrameTypes.resize(VCM_MAX(sendCodec->numberOfSimulcastStreams, 1),
                          kVideoFrameDelta);

-  _mediaOpt.SetEncodingData(sendCodec->codecType,
-                            sendCodec->maxBitrate * 1000,
-                            sendCodec->startBitrate * 1000,
-                            sendCodec->width,
-                            sendCodec->height,
-                            sendCodec->maxFramerate,
-                            numLayers,
-                            maxPayloadSize);
+  _mediaOpt.SetEncodingData(sendCodec->codecType, sendCodec->maxBitrate * 1000,
+                            sendCodec->startBitrate * 1000, sendCodec->width,
+                            sendCodec->height, sendCodec->maxFramerate,
+                            numLayers, maxPayloadSize);
   return VCM_OK;
 }

@@ -174,8 +170,8 @@ void VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
     }
     return;
   }
-  _codecDataBase.RegisterExternalEncoder(
-      externalEncoder, payloadType, internalSource);
+  _codecDataBase.RegisterExternalEncoder(externalEncoder, payloadType,
+                                         internalSource);
 }

 // Get encode bitrate
@@ -41,9 +41,7 @@ using webrtc::test::FrameGenerator;
 namespace webrtc {
 namespace vcm {
 namespace {
-enum {
-  kMaxNumberOfTemporalLayers = 3
-};
+enum { kMaxNumberOfTemporalLayers = 3 };

 struct Vp8StreamInfo {
   float framerate_fps[kMaxNumberOfTemporalLayers];
@@ -87,7 +85,7 @@ class EmptyFrameGenerator : public FrameGenerator {

 class PacketizationCallback : public VCMPacketizationCallback {
  public:
-  PacketizationCallback(Clock* clock)
+  explicit PacketizationCallback(Clock* clock)
       : clock_(clock), start_time_ms_(clock_->TimeInMilliseconds()) {}

   virtual ~PacketizationCallback() {}
@@ -211,16 +209,12 @@ class TestVideoSenderWithMockEncoder : public TestVideoSender {
     memset(&settings_, 0, sizeof(settings_));
     EXPECT_EQ(0, VideoCodingModule::Codec(kVideoCodecVP8, &settings_));
     settings_.numberOfSimulcastStreams = kNumberOfStreams;
-    ConfigureStream(kDefaultWidth / 4,
-                    kDefaultHeight / 4,
-                    100,
+    ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, 100,
                     &settings_.simulcastStream[0]);
-    ConfigureStream(kDefaultWidth / 2,
-                    kDefaultHeight / 2,
-                    500,
+    ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, 500,
                     &settings_.simulcastStream[1]);
-    ConfigureStream(
-        kDefaultWidth, kDefaultHeight, 1200, &settings_.simulcastStream[2]);
+    ConfigureStream(kDefaultWidth, kDefaultHeight, 1200,
+                    &settings_.simulcastStream[2]);
     settings_.plType = kUnusedPayloadType;  // Use the mocked encoder.
     generator_.reset(
         new EmptyFrameGenerator(settings_.width, settings_.height));
@@ -244,12 +238,11 @@ class TestVideoSenderWithMockEncoder : public TestVideoSender {
     assert(stream < kNumberOfStreams);
     std::vector<FrameType> frame_types(kNumberOfStreams, kVideoFrameDelta);
     frame_types[stream] = kVideoFrameKey;
-    EXPECT_CALL(
-        encoder_,
-        Encode(_,
-               _,
-               Pointee(ElementsAreArray(&frame_types[0], frame_types.size()))))
-        .Times(1).WillRepeatedly(Return(0));
+    EXPECT_CALL(encoder_,
+                Encode(_, _, Pointee(ElementsAreArray(&frame_types[0],
+                                                      frame_types.size()))))
+        .Times(1)
+        .WillRepeatedly(Return(0));
   }

   static void ConfigureStream(int width,