Revert of Initialize/configure video encoders asynchronously. (patchset #4 id:60001 of https://codereview.webrtc.org/1757313002/)
Reason for revert:
Breaks RTCVideoEncoder, which makes incorrect assumptions about which thread InitEncode() and related calls are made from. Temporarily reverting until RTCVideoEncoder has been updated.
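For context, a minimal illustrative sketch (not the actual RTCVideoEncoder code; the class and member names are made up) of the kind of thread-affinity assumption the asynchronous change violates: an adapter that expects InitEncode() on the thread it was constructed on fails once VideoSendStream starts calling it from its encoder thread.

```cpp
// Hypothetical adapter, for illustration only: it assumes InitEncode() is
// always invoked on the thread that constructed it. Moving initialization to
// VideoSendStream's encoder thread trips this check.
#include "webrtc/base/checks.h"
#include "webrtc/base/thread_checker.h"

class ThreadAffineEncoderAdapter {
 public:
  int32_t InitEncode() {
    // Baked-in assumption: construction thread == initialization thread.
    RTC_DCHECK(init_thread_checker_.CalledOnValidThread());
    // Hand off to the underlying hardware encoder here.
    return 0;  // WEBRTC_VIDEO_CODEC_OK
  }

 private:
  rtc::ThreadChecker init_thread_checker_;
};
```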
Original issue's description:
> Initialize/configure video encoders asynchronously.
>
> Greatly speeds up setRemoteDescription() by moving encoder initialization
> off the main worker thread, which is then free to move on to gathering ICE
> candidates and other tasks while InitEncode() runs. It also unblocks
> PeerConnection GetStats(), which no longer has to wait for encoder
> initialization.
>
> BUG=webrtc:5410
> R=stefan@webrtc.org
>
> Committed: fb647a67be
R=stefan@webrtc.org
# Not skipping CQ checks because original CL landed more than 1 day ago.
BUG=chromium:595274, chromium:595308, webrtc:5410
Review URL: https://codereview.webrtc.org/1821983002
Cr-Commit-Position: refs/heads/master@{#12086}
Parent: 60624cd6bf
Commit: 81cbd92444
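The reverted change followed the hand-off pattern visible in the diff below: ReconfigureVideoEncoder() only records the new settings and wakes the encoder thread, which then performs the expensive SetEncoder()/InitEncode() work. A condensed sketch of that pattern follows; the class and struct names are simplified stand-ins, and only the rtc::Event / rtc::CriticalSection / rtc::Optional usage mirrors the real code.

```cpp
// Condensed sketch of the asynchronous hand-off the reverted CL used,
// mirroring pending_encoder_settings_ / encoder_wakeup_event_ in the diff.
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/event.h"
#include "webrtc/base/optional.h"

struct EncoderSettings {
  int min_transmit_bitrate_bps;
};

class AsyncEncoderConfigurer {
 public:
  AsyncEncoderConfigurer() : wakeup_event_(false, false) {}

  // Called on the worker thread: cheap, never blocks on InitEncode().
  void Reconfigure(const EncoderSettings& settings) {
    {
      rtc::CritScope lock(&crit_);
      pending_settings_ = rtc::Optional<EncoderSettings>(settings);
    }
    wakeup_event_.Set();  // Wake the encoder thread to apply the settings.
  }

  // Encoder-thread loop body: applies pending settings before encoding.
  void ProcessOnEncoderThread() {
    wakeup_event_.Wait(rtc::Event::kForever);
    rtc::Optional<EncoderSettings> settings;
    {
      rtc::CritScope lock(&crit_);
      settings = pending_settings_;
      pending_settings_ = rtc::Optional<EncoderSettings>();
    }
    if (settings) {
      // Expensive work (SetEncoder()/InitEncode()) happens here, off the
      // worker thread, so setRemoteDescription() is not blocked on it.
    }
  }

 private:
  rtc::CriticalSection crit_;
  rtc::Event wakeup_event_;
  rtc::Optional<EncoderSettings> pending_settings_;
};
```

Note how the real EncoderProcess() below additionally signals the wakeup event again after applying settings, so settings that arrived while InitEncode() was running are picked up before the next frame is encoded.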
@@ -39,12 +39,13 @@ static const int kMaxVideoBitrate = 1000;
// renderer for a channel or it is adding a renderer for a capturer.
static const int kViEChannelIdBase = 0;
static const int kViEChannelIdMax = 1000;
static const int kEventTimeoutMs = 10000;

// Fake class for mocking out webrtc::VideoDecoder
class FakeWebRtcVideoDecoder : public webrtc::VideoDecoder {
public:
FakeWebRtcVideoDecoder() : num_frames_received_(0) {}
FakeWebRtcVideoDecoder()
: num_frames_received_(0) {
}

virtual int32_t InitDecode(const webrtc::VideoCodec*, int32_t) {
return WEBRTC_VIDEO_CODEC_OK;
@@ -120,20 +121,16 @@ class FakeWebRtcVideoDecoderFactory : public WebRtcVideoDecoderFactory {
// Fake class for mocking out webrtc::VideoEnoder
class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
public:
FakeWebRtcVideoEncoder()
: init_encode_event_(false, false), num_frames_encoded_(0) {}
FakeWebRtcVideoEncoder() : num_frames_encoded_(0) {}

virtual int32_t InitEncode(const webrtc::VideoCodec* codecSettings,
int32_t numberOfCores,
size_t maxPayloadSize) {
rtc::CritScope lock(&crit_);
codec_settings_ = *codecSettings;
init_encode_event_.Set();
return WEBRTC_VIDEO_CODEC_OK;
}

bool WaitForInitEncode() { return init_encode_event_.Wait(kEventTimeoutMs); }

webrtc::VideoCodec GetCodecSettings() {
rtc::CritScope lock(&crit_);
return codec_settings_;
@@ -144,7 +141,6 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
const std::vector<webrtc::FrameType>* frame_types) {
rtc::CritScope lock(&crit_);
++num_frames_encoded_;
init_encode_event_.Set();
return WEBRTC_VIDEO_CODEC_OK;
}

@@ -170,7 +166,6 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {

private:
rtc::CriticalSection crit_;
rtc::Event init_encode_event_;
int num_frames_encoded_ GUARDED_BY(crit_);
webrtc::VideoCodec codec_settings_ GUARDED_BY(crit_);
};
@@ -179,33 +174,20 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
class FakeWebRtcVideoEncoderFactory : public WebRtcVideoEncoderFactory {
public:
FakeWebRtcVideoEncoderFactory()
: created_video_encoder_event_(false, false),
num_created_encoders_(0),
encoders_have_internal_sources_(false) {}
: num_created_encoders_(0), encoders_have_internal_sources_(false) {}

virtual webrtc::VideoEncoder* CreateVideoEncoder(
webrtc::VideoCodecType type) {
rtc::CritScope lock(&crit_);
if (supported_codec_types_.count(type) == 0) {
return NULL;
}
FakeWebRtcVideoEncoder* encoder = new FakeWebRtcVideoEncoder();
encoders_.push_back(encoder);
num_created_encoders_++;
created_video_encoder_event_.Set();
return encoder;
}

bool WaitForCreatedVideoEncoders(int num_encoders) {
while (created_video_encoder_event_.Wait(kEventTimeoutMs)) {
if (GetNumCreatedEncoders() >= num_encoders)
return true;
}
return false;
}

virtual void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) {
rtc::CritScope lock(&crit_);
encoders_.erase(
std::remove(encoders_.begin(), encoders_.end(), encoder),
encoders_.end());
@@ -234,22 +216,18 @@ class FakeWebRtcVideoEncoderFactory : public WebRtcVideoEncoderFactory {
}

int GetNumCreatedEncoders() {
rtc::CritScope lock(&crit_);
return num_created_encoders_;
}

const std::vector<FakeWebRtcVideoEncoder*> encoders() {
rtc::CritScope lock(&crit_);
const std::vector<FakeWebRtcVideoEncoder*>& encoders() {
return encoders_;
}

private:
rtc::CriticalSection crit_;
rtc::Event created_video_encoder_event_;
std::set<webrtc::VideoCodecType> supported_codec_types_;
std::vector<WebRtcVideoEncoderFactory::VideoCodec> codecs_;
std::vector<FakeWebRtcVideoEncoder*> encoders_ GUARDED_BY(crit_);
int num_created_encoders_ GUARDED_BY(crit_);
std::vector<FakeWebRtcVideoEncoder*> encoders_;
int num_created_encoders_;
bool encoders_have_internal_sources_;
};

@@ -347,7 +347,6 @@ TEST_F(WebRtcVideoEngine2Test, UseExternalFactoryForVp8WhenSupported) {

EXPECT_TRUE(
channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(1));
ASSERT_EQ(1u, encoder_factory.encoders().size());
EXPECT_TRUE(channel->SetSend(true));

@@ -356,13 +355,11 @@ TEST_F(WebRtcVideoEngine2Test, UseExternalFactoryForVp8WhenSupported) {
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
// Sending one frame will have reallocated the encoder since input size
// changes from a small default to the actual frame width/height. Wait for
// that to happen then for the frame to be sent.
ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(2));
EXPECT_TRUE_WAIT(encoder_factory.encoders()[0]->GetNumEncodedFrames() > 0,
kTimeout);

// Sending one frame will have reallocated the encoder since input size
// changes from a small default to the actual frame width/height.
int num_created_encoders = encoder_factory.GetNumCreatedEncoders();
EXPECT_EQ(num_created_encoders, 2);

@@ -593,13 +590,12 @@ TEST_F(WebRtcVideoEngine2Test, UsesSimulcastAdapterForVp8Factories) {
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());

ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(2));
EXPECT_GT(encoder_factory.encoders().size(), 1u);

// Verify that encoders are configured for simulcast through adapter
// (increasing resolution and only configured to send one stream each).
int prev_width = -1;
for (size_t i = 0; i < encoder_factory.encoders().size(); ++i) {
ASSERT_TRUE(encoder_factory.encoders()[i]->WaitForInitEncode());
webrtc::VideoCodec codec_settings =
encoder_factory.encoders()[i]->GetCodecSettings();
EXPECT_EQ(0, codec_settings.numberOfSimulcastStreams);
@@ -675,8 +671,7 @@ TEST_F(WebRtcVideoEngine2Test,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());

ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(2));
ASSERT_TRUE(encoder_factory.encoders()[0]->WaitForInitEncode());
ASSERT_GT(encoder_factory.encoders().size(), 1u);
EXPECT_EQ(webrtc::kVideoCodecVP8,
encoder_factory.encoders()[0]->GetCodecSettings().codecType);

@@ -700,7 +695,6 @@ TEST_F(WebRtcVideoEngine2Test,
EXPECT_TRUE(
channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
ASSERT_EQ(1u, encoder_factory.encoders().size());
ASSERT_TRUE(encoder_factory.encoders()[0]->WaitForInitEncode());
EXPECT_EQ(webrtc::kVideoCodecH264,
encoder_factory.encoders()[0]->GetCodecSettings().codecType);

@@ -732,7 +726,6 @@ TEST_F(WebRtcVideoEngine2Test, SimulcastDisabledForH264) {

ASSERT_EQ(1u, encoder_factory.encoders().size());
FakeWebRtcVideoEncoder* encoder = encoder_factory.encoders()[0];
ASSERT_TRUE(encoder_factory.encoders()[0]->WaitForInitEncode());
EXPECT_EQ(webrtc::kVideoCodecH264, encoder->GetCodecSettings().codecType);
EXPECT_EQ(1u, encoder->GetCodecSettings().numberOfSimulcastStreams);
EXPECT_TRUE(channel->SetCapturer(ssrcs[0], nullptr));
@@ -43,7 +43,6 @@ static const int kPayloadTypeVP9 = 124;

class VideoAnalyzer : public PacketReceiver,
public Transport,
public I420FrameCallback,
public VideoRenderer,
public VideoCaptureInput,
public EncodedFrameObserver {
@@ -69,8 +68,6 @@ class VideoAnalyzer : public PacketReceiver,
frames_recorded_(0),
frames_processed_(0),
dropped_frames_(0),
dropped_frames_before_first_encode_(0),
dropped_frames_before_rendering_(0),
last_render_time_(0),
rtp_timestamp_delta_(0),
avg_psnr_threshold_(avg_psnr_threshold),
@@ -146,26 +143,18 @@ class VideoAnalyzer : public PacketReceiver,
void IncomingCapturedFrame(const VideoFrame& video_frame) override {
VideoFrame copy = video_frame;
copy.set_timestamp(copy.ntp_time_ms() * 90);

{
rtc::CritScope lock(&crit_);
if (first_send_frame_.IsZeroSize() && rtp_timestamp_delta_ == 0)
first_send_frame_ = copy;

frames_.push_back(copy);
}

input_->IncomingCapturedFrame(video_frame);
}

void FrameCallback(VideoFrame* video_frame) {
rtc::CritScope lock(&crit_);
if (first_send_frame_.IsZeroSize() && rtp_timestamp_delta_ == 0) {
while (frames_.front().timestamp() != video_frame->timestamp()) {
++dropped_frames_before_first_encode_;
frames_.pop_front();
RTC_CHECK(!frames_.empty());
}
first_send_frame_ = *video_frame;
}
}

bool SendRtp(const uint8_t* packet,
size_t length,
const PacketOptions& options) override {
@@ -179,7 +168,7 @@ class VideoAnalyzer : public PacketReceiver,
{
rtc::CritScope lock(&crit_);

if (!first_send_frame_.IsZeroSize()) {
if (rtp_timestamp_delta_ == 0) {
rtp_timestamp_delta_ = header.timestamp - first_send_frame_.timestamp();
first_send_frame_.Reset();
}
@@ -214,18 +203,9 @@ class VideoAnalyzer : public PacketReceiver,
wrap_handler_.Unwrap(video_frame.timestamp() - rtp_timestamp_delta_);

while (wrap_handler_.Unwrap(frames_.front().timestamp()) < send_timestamp) {
if (last_rendered_frame_.IsZeroSize()) {
// No previous frame rendered, this one was dropped after sending but
// before rendering.
++dropped_frames_before_rendering_;
frames_.pop_front();
RTC_CHECK(!frames_.empty());
continue;
}
AddFrameComparison(frames_.front(), last_rendered_frame_, true,
render_time_ms);
frames_.pop_front();
RTC_DCHECK(!frames_.empty());
}

VideoFrame reference_frame = frames_.front();
@@ -375,7 +355,6 @@ class VideoAnalyzer : public PacketReceiver,
bool dropped,
int64_t render_time_ms)
EXCLUSIVE_LOCKS_REQUIRED(crit_) {
RTC_DCHECK(!render.IsZeroSize());
int64_t reference_timestamp = wrap_handler_.Unwrap(reference.timestamp());
int64_t send_time_ms = send_times_[reference_timestamp];
send_times_.erase(reference_timestamp);
@@ -508,6 +487,8 @@ class VideoAnalyzer : public PacketReceiver,
PrintResult("psnr", psnr_, " dB");
PrintResult("ssim", ssim_, " score");
PrintResult("sender_time", sender_time_, " ms");
printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
dropped_frames_);
PrintResult("receiver_time", receiver_time_, " ms");
PrintResult("total_delay_incl_network", end_to_end_, " ms");
PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
@@ -517,13 +498,6 @@ class VideoAnalyzer : public PacketReceiver,
PrintResult("encode_usage_percent", encode_usage_percent, " percent");
PrintResult("media_bitrate", media_bitrate_bps, " bps");

printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
dropped_frames_);
printf("RESULT dropped_frames_before_first_encode: %s = %d frames\n",
test_label_.c_str(), dropped_frames_before_first_encode_);
printf("RESULT dropped_frames_before_rendering: %s = %d frames\n",
test_label_.c_str(), dropped_frames_before_rendering_);

EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
}
@@ -638,8 +612,6 @@ class VideoAnalyzer : public PacketReceiver,
int frames_recorded_;
int frames_processed_;
int dropped_frames_;
int dropped_frames_before_first_encode_;
int dropped_frames_before_rendering_;
int64_t last_render_time_;
uint32_t rtp_timestamp_delta_;

@@ -1033,7 +1005,6 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {

SetupCommon(&analyzer, &recv_transport);
video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer;
video_send_config_.pre_encode_callback = &analyzer;
for (auto& config : video_receive_configs_)
config.pre_decode_callback = &analyzer;
RTC_DCHECK(!video_send_config_.post_encode_callback);
@@ -151,149 +151,6 @@ CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) {
}
return options;
}

VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
const std::string& payload_name,
int payload_type) {
const std::vector<VideoStream>& streams = config.streams;
static const int kEncoderMinBitrateKbps = 30;
RTC_DCHECK(!streams.empty());
RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);

VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
video_codec.codecType = PayloadNameToCodecType(payload_name);

switch (config.content_type) {
case VideoEncoderConfig::ContentType::kRealtimeVideo:
video_codec.mode = kRealtimeVideo;
break;
case VideoEncoderConfig::ContentType::kScreen:
video_codec.mode = kScreensharing;
if (config.streams.size() == 1 &&
config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
video_codec.targetBitrate =
config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
}
break;
}

switch (video_codec.codecType) {
case kVideoCodecVP8: {
if (config.encoder_specific_settings) {
video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
config.encoder_specific_settings);
} else {
video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
}
video_codec.codecSpecific.VP8.numberOfTemporalLayers =
static_cast<unsigned char>(
streams.back().temporal_layer_thresholds_bps.size() + 1);
break;
}
case kVideoCodecVP9: {
if (config.encoder_specific_settings) {
video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
config.encoder_specific_settings);
if (video_codec.mode == kScreensharing) {
video_codec.codecSpecific.VP9.flexibleMode = true;
// For now VP9 screensharing use 1 temporal and 2 spatial layers.
RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers,
1);
RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
}
} else {
video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
}
video_codec.codecSpecific.VP9.numberOfTemporalLayers =
static_cast<unsigned char>(
streams.back().temporal_layer_thresholds_bps.size() + 1);
break;
}
case kVideoCodecH264: {
if (config.encoder_specific_settings) {
video_codec.codecSpecific.H264 =
*reinterpret_cast<const VideoCodecH264*>(
config.encoder_specific_settings);
} else {
video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}
break;
}
default:
// TODO(pbos): Support encoder_settings codec-agnostically.
RTC_DCHECK(config.encoder_specific_settings == nullptr)
<< "Encoder-specific settings for codec type not wired up.";
break;
}

strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1);
video_codec.plName[kPayloadNameSize - 1] = '\0';
video_codec.plType = payload_type;
video_codec.numberOfSimulcastStreams =
static_cast<unsigned char>(streams.size());
video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
if (video_codec.minBitrate < kEncoderMinBitrateKbps)
video_codec.minBitrate = kEncoderMinBitrateKbps;
RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
if (video_codec.codecType == kVideoCodecVP9) {
// If the vector is empty, bitrates will be configured automatically.
RTC_DCHECK(config.spatial_layers.empty() ||
config.spatial_layers.size() ==
video_codec.codecSpecific.VP9.numberOfSpatialLayers);
RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
kMaxSimulcastStreams);
for (size_t i = 0; i < config.spatial_layers.size(); ++i)
video_codec.spatialLayers[i] = config.spatial_layers[i];
}
for (size_t i = 0; i < streams.size(); ++i) {
SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
RTC_DCHECK_GT(streams[i].width, 0u);
RTC_DCHECK_GT(streams[i].height, 0u);
RTC_DCHECK_GT(streams[i].max_framerate, 0);
// Different framerates not supported per stream at the moment.
RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0);
RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
RTC_DCHECK_GE(streams[i].max_qp, 0);

sim_stream->width = static_cast<uint16_t>(streams[i].width);
sim_stream->height = static_cast<uint16_t>(streams[i].height);
sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
sim_stream->qpMax = streams[i].max_qp;
sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
streams[i].temporal_layer_thresholds_bps.size() + 1);

video_codec.width = std::max(video_codec.width,
static_cast<uint16_t>(streams[i].width));
video_codec.height = std::max(
video_codec.height, static_cast<uint16_t>(streams[i].height));
video_codec.minBitrate =
std::min(static_cast<uint16_t>(video_codec.minBitrate),
static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
video_codec.qpMax = std::max(video_codec.qpMax,
static_cast<unsigned int>(streams[i].max_qp));
}

if (video_codec.maxBitrate == 0) {
// Unset max bitrate -> cap to one bit per pixel.
video_codec.maxBitrate =
(video_codec.width * video_codec.height * video_codec.maxFramerate) /
1000;
}
if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
video_codec.maxBitrate = kEncoderMinBitrateKbps;

RTC_DCHECK_GT(streams[0].max_framerate, 0);
video_codec.maxFramerate = streams[0].max_framerate;

return video_codec;
}

} // namespace

namespace internal {
@@ -432,6 +289,11 @@ VideoSendStream::VideoSendStream(
RTC_DCHECK(config.encoder_settings.encoder != nullptr);
RTC_DCHECK_GE(config.encoder_settings.payload_type, 0);
RTC_DCHECK_LE(config.encoder_settings.payload_type, 127);
RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder(
config.encoder_settings.encoder,
config.encoder_settings.payload_type,
config.encoder_settings.internal_source));

ReconfigureVideoEncoder(encoder_config);

vie_channel_.RegisterSendSideDelayObserver(&stats_proxy_);
@@ -439,6 +301,11 @@ VideoSendStream::VideoSendStream(
if (config_.post_encode_callback)
vie_encoder_.RegisterPostEncodeImageCallback(&encoded_frame_proxy_);

if (config_.suspend_below_min_bitrate) {
vcm_->SuspendBelowMinBitrate();
bitrate_allocator_->EnforceMinBitrate(false);
}

vie_channel_.RegisterRtcpPacketTypeCounterObserver(&stats_proxy_);
vie_channel_.RegisterSendBitrateObserver(&stats_proxy_);
vie_channel_.RegisterSendFrameCountObserver(&stats_proxy_);
@@ -468,6 +335,8 @@ VideoSendStream::~VideoSendStream() {
vie_channel_.RegisterSendBitrateObserver(nullptr);
vie_channel_.RegisterRtcpPacketTypeCounterObserver(nullptr);

vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type);

call_stats_->DeregisterStatsObserver(vie_channel_.GetStatsObserver());
rtp_rtcp_modules_[0]->SetREMBStatus(false);
remb_->RemoveRembSender(rtp_rtcp_modules_[0]);
@@ -510,63 +379,158 @@ bool VideoSendStream::EncoderThreadFunction(void* obj) {
}

void VideoSendStream::EncoderProcess() {
RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder(
config_.encoder_settings.encoder,
config_.encoder_settings.payload_type,
config_.encoder_settings.internal_source));

while (true) {
encoder_wakeup_event_.Wait(rtc::Event::kForever);
if (rtc::AtomicOps::AcquireLoad(&stop_encoder_thread_))
break;
rtc::Optional<EncoderSettings> encoder_settings;
{
rtc::CritScope lock(&encoder_settings_crit_);
if (pending_encoder_settings_) {
encoder_settings = pending_encoder_settings_;
pending_encoder_settings_ = rtc::Optional<EncoderSettings>();
}
}
if (encoder_settings) {
encoder_settings->video_codec.startBitrate =
bitrate_allocator_->AddObserver(
this, encoder_settings->video_codec.minBitrate * 1000,
encoder_settings->video_codec.maxBitrate * 1000) /
1000;
vie_encoder_.SetEncoder(encoder_settings->video_codec,
encoder_settings->min_transmit_bitrate_bps);
if (config_.suspend_below_min_bitrate) {
vcm_->SuspendBelowMinBitrate();
bitrate_allocator_->EnforceMinBitrate(false);
}
// We might've gotten new settings while configuring the encoder settings,
// restart from the top to see if that's the case before trying to encode
// a frame (which might correspond to the last frame size).
encoder_wakeup_event_.Set();
continue;
}
return;

VideoFrame frame;
if (input_.GetVideoFrame(&frame))
vie_encoder_.EncodeVideoFrame(frame);
}
vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type);
}

void VideoSendStream::ReconfigureVideoEncoder(
const VideoEncoderConfig& config) {
TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder");
LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString();
RTC_DCHECK_GE(config_.rtp.ssrcs.size(), config.streams.size());
VideoCodec video_codec = VideoEncoderConfigToVideoCodec(
config, config_.encoder_settings.payload_name,
config_.encoder_settings.payload_type);
{
rtc::CritScope lock(&encoder_settings_crit_);
pending_encoder_settings_ = rtc::Optional<EncoderSettings>(
{video_codec, config.min_transmit_bitrate_bps});
const std::vector<VideoStream>& streams = config.streams;
static const int kEncoderMinBitrateKbps = 30;
RTC_DCHECK(!streams.empty());
RTC_DCHECK_GE(config_.rtp.ssrcs.size(), streams.size());
RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);

VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec));
video_codec.codecType =
PayloadNameToCodecType(config_.encoder_settings.payload_name);

switch (config.content_type) {
case VideoEncoderConfig::ContentType::kRealtimeVideo:
video_codec.mode = kRealtimeVideo;
break;
case VideoEncoderConfig::ContentType::kScreen:
video_codec.mode = kScreensharing;
if (config.streams.size() == 1 &&
config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
video_codec.targetBitrate =
config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
}
break;
}
encoder_wakeup_event_.Set();

if (video_codec.codecType == kVideoCodecVP8) {
video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
} else if (video_codec.codecType == kVideoCodecVP9) {
video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
} else if (video_codec.codecType == kVideoCodecH264) {
video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}

if (video_codec.codecType == kVideoCodecVP8) {
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
config.encoder_specific_settings);
}
video_codec.codecSpecific.VP8.numberOfTemporalLayers =
static_cast<unsigned char>(
streams.back().temporal_layer_thresholds_bps.size() + 1);
} else if (video_codec.codecType == kVideoCodecVP9) {
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
config.encoder_specific_settings);
if (video_codec.mode == kScreensharing) {
video_codec.codecSpecific.VP9.flexibleMode = true;
// For now VP9 screensharing use 1 temporal and 2 spatial layers.
RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, 1);
RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
}
}
video_codec.codecSpecific.VP9.numberOfTemporalLayers =
static_cast<unsigned char>(
streams.back().temporal_layer_thresholds_bps.size() + 1);
} else if (video_codec.codecType == kVideoCodecH264) {
if (config.encoder_specific_settings != nullptr) {
video_codec.codecSpecific.H264 = *reinterpret_cast<const VideoCodecH264*>(
config.encoder_specific_settings);
}
} else {
// TODO(pbos): Support encoder_settings codec-agnostically.
RTC_DCHECK(config.encoder_specific_settings == nullptr)
<< "Encoder-specific settings for codec type not wired up.";
}

strncpy(video_codec.plName,
config_.encoder_settings.payload_name.c_str(),
kPayloadNameSize - 1);
video_codec.plName[kPayloadNameSize - 1] = '\0';
video_codec.plType = config_.encoder_settings.payload_type;
video_codec.numberOfSimulcastStreams =
static_cast<unsigned char>(streams.size());
video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
if (video_codec.minBitrate < kEncoderMinBitrateKbps)
video_codec.minBitrate = kEncoderMinBitrateKbps;
RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
if (video_codec.codecType == kVideoCodecVP9) {
// If the vector is empty, bitrates will be configured automatically.
RTC_DCHECK(config.spatial_layers.empty() ||
config.spatial_layers.size() ==
video_codec.codecSpecific.VP9.numberOfSpatialLayers);
RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
kMaxSimulcastStreams);
for (size_t i = 0; i < config.spatial_layers.size(); ++i)
video_codec.spatialLayers[i] = config.spatial_layers[i];
}
for (size_t i = 0; i < streams.size(); ++i) {
SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
RTC_DCHECK_GT(streams[i].width, 0u);
RTC_DCHECK_GT(streams[i].height, 0u);
RTC_DCHECK_GT(streams[i].max_framerate, 0);
// Different framerates not supported per stream at the moment.
RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0);
RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
RTC_DCHECK_GE(streams[i].max_qp, 0);

sim_stream->width = static_cast<uint16_t>(streams[i].width);
sim_stream->height = static_cast<uint16_t>(streams[i].height);
sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
sim_stream->qpMax = streams[i].max_qp;
sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
streams[i].temporal_layer_thresholds_bps.size() + 1);

video_codec.width = std::max(video_codec.width,
static_cast<uint16_t>(streams[i].width));
video_codec.height = std::max(
video_codec.height, static_cast<uint16_t>(streams[i].height));
video_codec.minBitrate =
std::min(static_cast<uint16_t>(video_codec.minBitrate),
static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
video_codec.qpMax = std::max(video_codec.qpMax,
static_cast<unsigned int>(streams[i].max_qp));
}

if (video_codec.maxBitrate == 0) {
// Unset max bitrate -> cap to one bit per pixel.
video_codec.maxBitrate =
(video_codec.width * video_codec.height * video_codec.maxFramerate) /
1000;
}
if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
video_codec.maxBitrate = kEncoderMinBitrateKbps;

RTC_DCHECK_GT(streams[0].max_framerate, 0);
video_codec.maxFramerate = streams[0].max_framerate;

video_codec.startBitrate =
bitrate_allocator_->AddObserver(this,
video_codec.minBitrate * 1000,
video_codec.maxBitrate * 1000) / 1000;
vie_encoder_.SetEncoder(video_codec, config.min_transmit_bitrate_bps);
}

bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
@@ -15,7 +15,6 @@
#include <vector>

#include "webrtc/call/bitrate_allocator.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
@@ -83,10 +82,6 @@ class VideoSendStream : public webrtc::VideoSendStream,
int64_t rtt) override;

private:
struct EncoderSettings {
VideoCodec video_codec;
int min_transmit_bitrate_bps;
};
static bool EncoderThreadFunction(void* obj);
void EncoderProcess();

@@ -106,9 +101,6 @@ class VideoSendStream : public webrtc::VideoSendStream,
rtc::PlatformThread encoder_thread_;
rtc::Event encoder_wakeup_event_;
volatile int stop_encoder_thread_;
rtc::CriticalSection encoder_settings_crit_;
rtc::Optional<EncoderSettings> pending_encoder_settings_
GUARDED_BY(encoder_settings_crit_);

OveruseFrameDetector overuse_detector_;
PayloadRouter payload_router_;
@@ -1115,22 +1115,18 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
class StartBitrateObserver : public test::FakeEncoder {
public:
StartBitrateObserver()
: FakeEncoder(Clock::GetRealTimeClock()),
start_bitrate_changed_(false, false),
start_bitrate_kbps_(0) {}
: FakeEncoder(Clock::GetRealTimeClock()), start_bitrate_kbps_(0) {}
int32_t InitEncode(const VideoCodec* config,
int32_t number_of_cores,
size_t max_payload_size) override {
rtc::CritScope lock(&crit_);
start_bitrate_kbps_ = config->startBitrate;
start_bitrate_changed_.Set();
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}

int32_t SetRates(uint32_t new_target_bitrate, uint32_t framerate) override {
rtc::CritScope lock(&crit_);
start_bitrate_kbps_ = new_target_bitrate;
start_bitrate_changed_.Set();
return FakeEncoder::SetRates(new_target_bitrate, framerate);
}

@@ -1139,14 +1135,8 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
return start_bitrate_kbps_;
}

bool WaitForStartBitrate() {
return start_bitrate_changed_.Wait(
VideoSendStreamTest::kDefaultTimeoutMs);
}

private:
rtc::CriticalSection crit_;
rtc::Event start_bitrate_changed_;
int start_bitrate_kbps_ GUARDED_BY(crit_);
};

@@ -1165,7 +1155,6 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {

CreateVideoStreams();

EXPECT_TRUE(encoder.WaitForStartBitrate());
EXPECT_EQ(video_encoder_config_.streams[0].max_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());

@@ -1176,7 +1165,6 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
// New bitrate should be reconfigured above the previous max. As there's no
// network connection this shouldn't be flaky, as no bitrate should've been
// reported in between.
EXPECT_TRUE(encoder.WaitForStartBitrate());
EXPECT_EQ(bitrate_config.start_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());

@@ -1390,6 +1378,9 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
// Encoder initialization should be done in stream construction before
// starting.
EXPECT_TRUE(IsReadyForEncode());
stream_ = send_stream;
}

@@ -1437,7 +1428,6 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
VideoCodecConfigObserver()
: SendTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
init_encode_event_(false, false),
num_initializations_(0) {}

private:
@@ -1466,23 +1456,19 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
EXPECT_EQ(kScreensharing, config->mode);
}
++num_initializations_;
init_encode_event_.Set();
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}

void PerformTest() override {
EXPECT_TRUE(init_encode_event_.Wait(kDefaultTimeoutMs));
EXPECT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";

encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
stream_->ReconfigureVideoEncoder(encoder_config_);
EXPECT_TRUE(init_encode_event_.Wait(kDefaultTimeoutMs));
EXPECT_EQ(2u, num_initializations_)
<< "ReconfigureVideoEncoder did not reinitialize the encoder with "
"new encoder settings.";
}

rtc::Event init_encode_event_;
size_t num_initializations_;
VideoSendStream* stream_;
VideoEncoderConfig encoder_config_;
@@ -1502,7 +1488,6 @@ class VideoCodecConfigObserver : public test::SendTest,
FakeEncoder(Clock::GetRealTimeClock()),
video_codec_type_(video_codec_type),
codec_name_(codec_name),
init_encode_event_(false, false),
num_initializations_(0) {
memset(&encoder_settings_, 0, sizeof(encoder_settings_));
}
@@ -1536,21 +1521,16 @@ class VideoCodecConfigObserver : public test::SendTest,
EXPECT_EQ(video_codec_type_, config->codecType);
VerifyCodecSpecifics(*config);
++num_initializations_;
init_encode_event_.Set();
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}

void VerifyCodecSpecifics(const VideoCodec& config) const;

void PerformTest() override {
EXPECT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
ASSERT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
EXPECT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";

encoder_settings_.frameDroppingOn = true;
stream_->ReconfigureVideoEncoder(encoder_config_);
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(2u, num_initializations_)
<< "ReconfigureVideoEncoder did not reinitialize the encoder with "
"new encoder settings.";
@@ -1566,7 +1546,6 @@ class VideoCodecConfigObserver : public test::SendTest,
T encoder_settings_;
const VideoCodecType video_codec_type_;
const char* const codec_name_;
rtc::Event init_encode_event_;
size_t num_initializations_;
VideoSendStream* stream_;
VideoEncoderConfig encoder_config_;
@@ -1755,7 +1734,6 @@ TEST_F(VideoSendStreamTest,
EncoderBitrateThresholdObserver()
: SendTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
init_encode_event_(false, false),
num_initializations_(0) {}

private:
@@ -1784,7 +1762,6 @@ TEST_F(VideoSendStreamTest,
codecSettings->startBitrate);
}
++num_initializations_;
init_encode_event_.Set();
return FakeEncoder::InitEncode(codecSettings, numberOfCores,
maxPayloadSize);
}
@@ -1820,9 +1797,6 @@ TEST_F(VideoSendStreamTest,
}

void PerformTest() override {
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs))
<< "Timed out while waiting encoder to be configured.";
Call::Config::BitrateConfig bitrate_config;
bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000;
bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
@@ -1832,8 +1806,6 @@ TEST_F(VideoSendStreamTest,
encoder_config_.streams[0].min_bitrate_bps = 0;
encoder_config_.streams[0].max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
send_stream_->ReconfigureVideoEncoder(encoder_config_);
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(2, num_initializations_)
<< "Encoder should have been reconfigured with the new value.";
encoder_config_.streams[0].target_bitrate_bps =
@@ -1841,13 +1813,10 @@ TEST_F(VideoSendStreamTest,
encoder_config_.streams[0].max_bitrate_bps =
kIncreasedMaxBitrateKbps * 1000;
send_stream_->ReconfigureVideoEncoder(encoder_config_);
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(3, num_initializations_)
<< "Encoder should have been reconfigured with the new value.";
}

rtc::Event init_encode_event_;
int num_initializations_;
webrtc::Call* call_;
webrtc::VideoSendStream* send_stream_;