Revert of Let ViEEncoder handle resolution changes. (patchset #17 id:340001 of https://codereview.webrtc.org/2351633002/ )

Reason for revert:
Fails on a content_browsertest (and also webrtc_perf?)

https://build.chromium.org/p/chromium.webrtc.fyi/builders/Mac%20Tester/builds/34336

https://build.chromium.org/p/client.webrtc/builders/Linux64%20Release%20%5Blarge%20tests%5D/builds/9091/steps/webrtc_perf_tests/logs/stdio
[  FAILED  ] FullStackTest.ParisQcifWithoutPacketLoss (59436 ms)

Original issue's description:
> Let ViEEncoder handle resolution changes.
>
> This CL moves codec reconfiguration due to video frame size changes from WebRtcVideoSendStream to ViEEncoder.
>
> With this change, many variables in WebRtcVideoSendStream no longer need to be locked.
>
> BUG=webrtc:5687, webrtc:6371, webrtc:5332
>
> Committed: https://crrev.com/26105b41b4f97642ee30cb067dc786c2737709ad
> Cr-Commit-Position: refs/heads/master@{#14445}

TBR=sprang@webrtc.org,mflodman@webrtc.org,stefan@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5687, webrtc:6371, webrtc:5332

Review-Url: https://codereview.webrtc.org/2383493005
Cr-Commit-Position: refs/heads/master@{#14447}
This commit is contained in:
perkj 2016-09-29 23:25:40 -07:00 committed by Commit bot
parent b73d269707
commit 3b703ede8b
30 changed files with 673 additions and 1133 deletions

View File

@ -184,7 +184,7 @@ TEST_F(VideoCapturerTrackSourceTest, MandatoryConstraintCif5Fps) {
ASSERT_TRUE(format != NULL);
EXPECT_EQ(352, format->width);
EXPECT_EQ(288, format->height);
EXPECT_EQ(5, format->framerate());
EXPECT_EQ(30, format->framerate());
}
// Test that the capture output is 720P if the camera support it and the
@ -401,7 +401,7 @@ TEST_F(VideoCapturerTrackSourceTest, MixedOptionsAndConstraints) {
ASSERT_TRUE(format != NULL);
EXPECT_EQ(352, format->width);
EXPECT_EQ(288, format->height);
EXPECT_EQ(5, format->framerate());
EXPECT_EQ(30, format->framerate());
EXPECT_EQ(rtc::Optional<bool>(false), source_->needs_denoising());
}
@ -492,5 +492,5 @@ TEST_F(VideoCapturerTrackSourceTest, OptionalSubOneFpsConstraints) {
kMaxWaitMs);
const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
ASSERT_TRUE(format != NULL);
EXPECT_EQ(1, format->framerate());
EXPECT_EQ(30, format->framerate());
}

View File

@ -127,7 +127,7 @@ class BitrateEstimatorTest : public test::CallTest {
video_send_config_.encoder_settings.payload_name = "FAKE";
video_send_config_.encoder_settings.payload_type =
kFakeVideoSendPayloadType;
test::FillEncoderConfiguration(1, &video_encoder_config_);
video_encoder_config_.streams = test::CreateVideoStreams(1);
receive_config_ = VideoReceiveStream::Config(receive_transport_.get());
// receive_config_.decoders will be set by every stream separately.
@ -175,9 +175,10 @@ class BitrateEstimatorTest : public test::CallTest {
send_stream_ = test_->sender_call_->CreateVideoSendStream(
test_->video_send_config_.Copy(),
test_->video_encoder_config_.Copy());
RTC_DCHECK_EQ(1u, test_->video_encoder_config_.number_of_streams);
RTC_DCHECK_EQ(1u, test_->video_encoder_config_.streams.size());
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
kDefaultWidth, kDefaultHeight, kDefaultFramerate,
test_->video_encoder_config_.streams[0].width,
test_->video_encoder_config_.streams[0].height, 30,
Clock::GetRealTimeClock()));
send_stream_->SetSource(frame_generator_capturer_.get());
send_stream_->Start();

View File

@ -264,9 +264,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec,
EXPECT_EQ(1u, video_receive_streams_.size());
observer.set_receive_stream(video_receive_streams_[0]);
DriftingClock drifting_clock(clock_, video_ntp_speed);
CreateFrameGeneratorCapturerWithDrift(&drifting_clock, video_rtp_speed,
kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
CreateFrameGeneratorCapturerWithDrift(&drifting_clock, video_rtp_speed);
Start();
@ -620,24 +618,6 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
static const uint32_t kReconfigureThresholdKbps = 600;
static const uint32_t kPermittedReconfiguredBitrateDiffKbps = 100;
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
streams[0].min_bitrate_bps = 50000;
streams[0].target_bitrate_bps = streams[0].max_bitrate_bps = 2000000;
return streams;
}
};
class BitrateObserver : public test::EndToEndTest, public test::FakeEncoder {
public:
BitrateObserver()
@ -651,18 +631,12 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
int32_t InitEncode(const VideoCodec* config,
int32_t number_of_cores,
size_t max_payload_size) override {
++encoder_inits_;
if (encoder_inits_ == 1) {
// First time initialization. Frame size is not known.
if (encoder_inits_ == 0) {
EXPECT_EQ(kInitialBitrateKbps, config->startBitrate)
<< "Encoder not initialized at expected bitrate.";
} else if (encoder_inits_ == 2) {
// First time initialization. Frame size is known.
EXPECT_EQ(kDefaultWidth, config->width);
EXPECT_EQ(kDefaultHeight, config->height);
} else if (encoder_inits_ == 3) {
EXPECT_EQ(2 * kDefaultWidth, config->width);
EXPECT_EQ(2 * kDefaultHeight, config->height);
}
++encoder_inits_;
if (encoder_inits_ == 2) {
EXPECT_GE(last_set_bitrate_, kReconfigureThresholdKbps);
EXPECT_NEAR(config->startBitrate,
last_set_bitrate_,
@ -676,7 +650,7 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
int32_t SetRates(uint32_t new_target_bitrate_kbps,
uint32_t framerate) override {
last_set_bitrate_ = new_target_bitrate_kbps;
if (encoder_inits_ == 2 &&
if (encoder_inits_ == 1 &&
new_target_bitrate_kbps > kReconfigureThresholdKbps) {
time_to_reconfigure_.Set();
}
@ -694,8 +668,9 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>();
encoder_config->streams[0].min_bitrate_bps = 50000;
encoder_config->streams[0].target_bitrate_bps =
encoder_config->streams[0].max_bitrate_bps = 2000000;
encoder_config_ = encoder_config->Copy();
}
@ -706,15 +681,11 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
send_stream_ = send_stream;
}
void OnFrameGeneratorCapturerCreated(
test::FrameGeneratorCapturer* frame_generator_capturer) override {
frame_generator_ = frame_generator_capturer;
}
void PerformTest() override {
ASSERT_TRUE(time_to_reconfigure_.Wait(kDefaultTimeoutMs))
<< "Timed out before receiving an initial high bitrate.";
frame_generator_->ChangeResolution(kDefaultWidth * 2, kDefaultHeight * 2);
encoder_config_.streams[0].width *= 2;
encoder_config_.streams[0].height *= 2;
send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
EXPECT_TRUE(Wait())
<< "Timed out while waiting for a couple of high bitrate estimates "
@ -726,7 +697,6 @@ TEST_F(CallPerfTest, KeepsHighBitrateWhenReconfiguringSender) {
int encoder_inits_;
uint32_t last_set_bitrate_;
VideoSendStream* send_stream_;
test::FrameGeneratorCapturer* frame_generator_;
VideoEncoderConfig encoder_config_;
} test;

View File

@ -13,7 +13,6 @@
#include "webrtc/test/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/platform_thread.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/testsupport/perf_test.h"
namespace webrtc {
@ -97,47 +96,24 @@ size_t RampUpTester::GetNumAudioStreams() const {
return num_audio_streams_;
}
class RampUpTester::VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
if (encoder_config.number_of_streams == 1) {
streams[0].target_bitrate_bps = streams[0].max_bitrate_bps = 2000000;
}
return streams;
}
};
void RampUpTester::ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) {
send_config->suspend_below_min_bitrate = true;
encoder_config->number_of_streams = num_video_streams_;
encoder_config->max_bitrate_bps = 2000000;
encoder_config->video_stream_factory =
new rtc::RefCountedObject<RampUpTester::VideoStreamFactory>();
if (num_video_streams_ == 1) {
encoder_config->streams[0].target_bitrate_bps =
encoder_config->streams[0].max_bitrate_bps = 2000000;
// For single stream rampup until 1mbps
expected_bitrate_bps_ = kSingleStreamTargetBps;
} else {
// For multi stream rampup until all streams are being sent. That means
// enough bitrate to send all the target streams plus the min bitrate of
// enough birate to send all the target streams plus the min bitrate of
// the last one.
std::vector<VideoStream> streams = test::CreateVideoStreams(
test::CallTest::kDefaultWidth, test::CallTest::kDefaultHeight,
*encoder_config);
expected_bitrate_bps_ = streams.back().min_bitrate_bps;
for (size_t i = 0; i < streams.size() - 1; ++i) {
expected_bitrate_bps_ += streams[i].target_bitrate_bps;
expected_bitrate_bps_ = encoder_config->streams.back().min_bitrate_bps;
for (size_t i = 0; i < encoder_config->streams.size() - 1; ++i) {
expected_bitrate_bps_ += encoder_config->streams[i].target_bitrate_bps;
}
}

View File

@ -70,7 +70,6 @@ class RampUpTester : public test::EndToEndTest {
private:
typedef std::map<uint32_t, uint32_t> SsrcMap;
class VideoStreamFactory;
Call::Config GetSenderCallConfig() override;
void OnVideoStreamsCreated(

View File

@ -117,14 +117,21 @@ VideoEncoderConfig::VideoEncoderConfig()
: content_type(ContentType::kRealtimeVideo),
encoder_specific_settings(nullptr),
min_transmit_bitrate_bps(0),
max_bitrate_bps(0),
number_of_streams(0) {}
expect_encode_from_texture(false) {}
VideoEncoderConfig::~VideoEncoderConfig() = default;
std::string VideoEncoderConfig::ToString() const {
std::stringstream ss;
ss << "{content_type: ";
ss << "{streams: [";
for (size_t i = 0; i < streams.size(); ++i) {
ss << streams[i].ToString();
if (i != streams.size() - 1)
ss << ", ";
}
ss << ']';
ss << ", content_type: ";
switch (content_type) {
case ContentType::kRealtimeVideo:
ss << "kRealtimeVideo";

View File

@ -125,7 +125,7 @@ struct VideoStream {
std::vector<int> temporal_layer_thresholds_bps;
};
class VideoEncoderConfig {
struct VideoEncoderConfig {
public:
// These are reference counted to permit copying VideoEncoderConfig and be
// kept alive until all encoder_specific_settings go out of scope.
@ -143,13 +143,14 @@ class VideoEncoderConfig {
virtual void FillVideoCodecH264(VideoCodecH264* h264_settings) const;
private:
virtual ~EncoderSpecificSettings() {}
friend class VideoEncoderConfig;
friend struct VideoEncoderConfig;
};
class H264EncoderSpecificSettings : public EncoderSpecificSettings {
public:
explicit H264EncoderSpecificSettings(const VideoCodecH264& specifics);
void FillVideoCodecH264(VideoCodecH264* h264_settings) const override;
virtual void FillVideoCodecH264(
VideoCodecH264* h264_settings) const override;
private:
VideoCodecH264 specifics_;
@ -158,7 +159,7 @@ class VideoEncoderConfig {
class Vp8EncoderSpecificSettings : public EncoderSpecificSettings {
public:
explicit Vp8EncoderSpecificSettings(const VideoCodecVP8& specifics);
void FillVideoCodecVp8(VideoCodecVP8* vp8_settings) const override;
virtual void FillVideoCodecVp8(VideoCodecVP8* vp8_settings) const override;
private:
VideoCodecVP8 specifics_;
@ -167,7 +168,7 @@ class VideoEncoderConfig {
class Vp9EncoderSpecificSettings : public EncoderSpecificSettings {
public:
explicit Vp9EncoderSpecificSettings(const VideoCodecVP9& specifics);
void FillVideoCodecVp9(VideoCodecVP9* vp9_settings) const override;
virtual void FillVideoCodecVp9(VideoCodecVP9* vp9_settings) const override;
private:
VideoCodecVP9 specifics_;
@ -178,21 +179,6 @@ class VideoEncoderConfig {
kScreen,
};
class VideoStreamFactoryInterface : public rtc::RefCountInterface {
public:
// An implementation should return a std::vector<VideoStream> with the
// wanted VideoStream settings for the given video resolution.
// The size of the vector may not be larger than
// |encoder_config.number_of_streams|.
virtual std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) = 0;
protected:
virtual ~VideoStreamFactoryInterface() {}
};
VideoEncoderConfig& operator=(VideoEncoderConfig&&) = default;
VideoEncoderConfig& operator=(const VideoEncoderConfig&) = delete;
@ -204,7 +190,7 @@ class VideoEncoderConfig {
~VideoEncoderConfig();
std::string ToString() const;
rtc::scoped_refptr<VideoStreamFactoryInterface> video_stream_factory;
std::vector<VideoStream> streams;
std::vector<SpatialLayer> spatial_layers;
ContentType content_type;
rtc::scoped_refptr<const EncoderSpecificSettings> encoder_specific_settings;
@ -214,10 +200,7 @@ class VideoEncoderConfig {
// maintaining a higher bitrate estimate. Padding will however not be sent
// unless the estimated bandwidth indicates that the link can handle it.
int min_transmit_bitrate_bps;
int max_bitrate_bps;
// Max number of encoded VideoStreams to produce.
size_t number_of_streams;
bool expect_encode_from_texture;
private:
// Access to the copy constructor is private to force use of the Copy()

View File

@ -29,7 +29,7 @@ namespace cricket {
// Fake video capturer that allows the test to manually pump in frames.
class FakeVideoCapturer : public cricket::VideoCapturer {
public:
explicit FakeVideoCapturer(bool is_screencast)
FakeVideoCapturer(bool is_screencast)
: running_(false),
initial_timestamp_(rtc::TimeNanos()),
next_timestamp_(rtc::kNumNanosecsPerMillisec),
@ -124,7 +124,10 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
sigslot::signal1<FakeVideoCapturer*> SignalDestroyed;
cricket::CaptureState Start(const cricket::VideoFormat& format) override {
SetCaptureFormat(&format);
cricket::VideoFormat supported;
if (GetBestCaptureFormat(format, &supported)) {
SetCaptureFormat(&supported);
}
running_ = true;
SetCaptureState(cricket::CS_RUNNING);
return cricket::CS_RUNNING;

View File

@ -18,7 +18,9 @@ extern const int kVideoRtpBufferSize;
extern const char kH264CodecName[];
extern const int kMinVideoBitrateKbps;
extern const int kMinVideoBitrate;
extern const int kStartVideoBitrate;
extern const int kMaxVideoBitrate;
} // namespace cricket

View File

@ -124,9 +124,8 @@ const webrtc::VideoEncoderConfig& FakeVideoSendStream::GetEncoderConfig()
return encoder_config_;
}
const std::vector<webrtc::VideoStream>& FakeVideoSendStream::GetVideoStreams()
const {
return video_streams_;
std::vector<webrtc::VideoStream> FakeVideoSendStream::GetVideoStreams() {
return encoder_config_.streams;
}
bool FakeVideoSendStream::IsSending() const {
@ -172,12 +171,6 @@ int64_t FakeVideoSendStream::GetLastTimestamp() const {
void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) {
++num_swapped_frames_;
if (frame.width() != last_frame_.width() ||
frame.height() != last_frame_.height() ||
frame.rotation() != last_frame_.rotation()) {
video_streams_ = encoder_config_.video_stream_factory->CreateEncoderStreams(
frame.width(), frame.height(), encoder_config_);
}
last_frame_.ShallowCopy(frame);
}
@ -199,20 +192,18 @@ void FakeVideoSendStream::EnableEncodedFrameRecording(
void FakeVideoSendStream::ReconfigureVideoEncoder(
webrtc::VideoEncoderConfig config) {
video_streams_ = config.video_stream_factory->CreateEncoderStreams(
last_frame_.width(), last_frame_.height(), config);
if (config.encoder_specific_settings != NULL) {
if (config_.encoder_settings.payload_name == "VP8") {
config.encoder_specific_settings->FillVideoCodecVp8(&vpx_settings_.vp8);
if (!video_streams_.empty()) {
if (!config.streams.empty()) {
vpx_settings_.vp8.numberOfTemporalLayers = static_cast<unsigned char>(
video_streams_.back().temporal_layer_thresholds_bps.size() + 1);
config.streams.back().temporal_layer_thresholds_bps.size() + 1);
}
} else if (config_.encoder_settings.payload_name == "VP9") {
config.encoder_specific_settings->FillVideoCodecVp9(&vpx_settings_.vp9);
if (!video_streams_.empty()) {
if (!config.streams.empty()) {
vpx_settings_.vp9.numberOfTemporalLayers = static_cast<unsigned char>(
video_streams_.back().temporal_layer_thresholds_bps.size() + 1);
config.streams.back().temporal_layer_thresholds_bps.size() + 1);
}
} else {
ADD_FAILURE() << "Unsupported encoder payload: "

View File

@ -109,7 +109,7 @@ class FakeVideoSendStream final
~FakeVideoSendStream() override;
const webrtc::VideoSendStream::Config& GetConfig() const;
const webrtc::VideoEncoderConfig& GetEncoderConfig() const;
const std::vector<webrtc::VideoStream>& GetVideoStreams() const;
std::vector<webrtc::VideoStream> GetVideoStreams();
bool IsSending() const;
bool GetVp8Settings(webrtc::VideoCodecVP8* settings) const;
@ -142,7 +142,6 @@ class FakeVideoSendStream final
bool sending_;
webrtc::VideoSendStream::Config config_;
webrtc::VideoEncoderConfig encoder_config_;
std::vector<webrtc::VideoStream> video_streams_;
bool codec_settings_set_;
union VpxSettings {
webrtc::VideoCodecVP8 vp8;

View File

@ -14,7 +14,6 @@
#include <algorithm>
#include <set>
#include <string>
#include <utility>
#include "webrtc/base/copyonwritebuffer.h"
#include "webrtc/base/logging.h"
@ -323,83 +322,12 @@ int GetDefaultVp9TemporalLayers() {
}
return 1;
}
class EncoderStreamFactory
: public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface {
public:
EncoderStreamFactory(std::string codec_name,
int max_qp,
int max_framerate,
bool is_screencast,
bool conference_mode)
: codec_name_(codec_name),
max_qp_(max_qp),
max_framerate_(max_framerate),
is_screencast_(is_screencast),
conference_mode_(conference_mode) {}
private:
std::vector<webrtc::VideoStream> CreateEncoderStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config) override {
RTC_DCHECK(encoder_config.number_of_streams > 1 ? !is_screencast_ : true);
if (encoder_config.number_of_streams > 1) {
return GetSimulcastConfig(encoder_config.number_of_streams, width, height,
encoder_config.max_bitrate_bps, max_qp_,
max_framerate_);
}
// For unset max bitrates set default bitrate for non-simulcast.
int max_bitrate_bps =
(encoder_config.max_bitrate_bps > 0)
? encoder_config.max_bitrate_bps
: GetMaxDefaultVideoBitrateKbps(width, height) * 1000;
webrtc::VideoStream stream;
stream.width = width;
stream.height = height;
stream.max_framerate = max_framerate_;
stream.min_bitrate_bps = kMinVideoBitrateKbps * 1000;
stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate_bps;
stream.max_qp = max_qp_;
// Conference mode screencast uses 2 temporal layers split at 100kbit.
if (conference_mode_ && is_screencast_) {
ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
// For screenshare in conference mode, tl0 and tl1 bitrates are
// piggybacked
// on the VideoCodec struct as target and max bitrates, respectively.
// See eg. webrtc::VP8EncoderImpl::SetRates().
stream.target_bitrate_bps = config.tl0_bitrate_kbps * 1000;
stream.max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
stream.temporal_layer_thresholds_bps.clear();
stream.temporal_layer_thresholds_bps.push_back(config.tl0_bitrate_kbps *
1000);
}
if (CodecNamesEq(codec_name_, kVp9CodecName) && !is_screencast_) {
stream.temporal_layer_thresholds_bps.resize(
GetDefaultVp9TemporalLayers() - 1);
}
std::vector<webrtc::VideoStream> streams;
streams.push_back(stream);
return streams;
}
const std::string codec_name_;
const int max_qp_;
const int max_framerate_;
const bool is_screencast_;
const bool conference_mode_;
};
} // namespace
// Constants defined in webrtc/media/engine/constants.h
// TODO(pbos): Move these to a separate constants.cc file.
const int kMinVideoBitrateKbps = 30;
const int kMinVideoBitrate = 30;
const int kStartVideoBitrate = 300;
const int kVideoMtu = 1200;
const int kVideoRtpBufferSize = 65536;
@ -470,10 +398,61 @@ std::vector<VideoCodec> DefaultVideoCodecList() {
return codecs;
}
std::vector<webrtc::VideoStream>
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateSimulcastVideoStreams(
const VideoCodec& codec,
const VideoOptions& options,
int max_bitrate_bps,
size_t num_streams) {
int max_qp = kDefaultQpMax;
codec.GetParam(kCodecParamMaxQuantization, &max_qp);
return GetSimulcastConfig(
num_streams, codec.width, codec.height, max_bitrate_bps, max_qp,
codec.framerate != 0 ? codec.framerate : kDefaultVideoMaxFramerate);
}
std::vector<webrtc::VideoStream>
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoStreams(
const VideoCodec& codec,
const VideoOptions& options,
int max_bitrate_bps,
size_t num_streams) {
int codec_max_bitrate_kbps;
if (codec.GetParam(kCodecParamMaxBitrate, &codec_max_bitrate_kbps)) {
max_bitrate_bps = codec_max_bitrate_kbps * 1000;
}
if (num_streams != 1) {
return CreateSimulcastVideoStreams(codec, options, max_bitrate_bps,
num_streams);
}
// For unset max bitrates set default bitrate for non-simulcast.
if (max_bitrate_bps <= 0) {
max_bitrate_bps =
GetMaxDefaultVideoBitrateKbps(codec.width, codec.height) * 1000;
}
webrtc::VideoStream stream;
stream.width = codec.width;
stream.height = codec.height;
stream.max_framerate =
codec.framerate != 0 ? codec.framerate : kDefaultVideoMaxFramerate;
stream.min_bitrate_bps = kMinVideoBitrate * 1000;
stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate_bps;
int max_qp = kDefaultQpMax;
codec.GetParam(kCodecParamMaxQuantization, &max_qp);
stream.max_qp = max_qp;
std::vector<webrtc::VideoStream> streams;
streams.push_back(stream);
return streams;
}
rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
WebRtcVideoChannel2::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
const VideoCodec& codec) {
RTC_DCHECK_RUN_ON(&thread_checker_);
bool is_screencast = parameters_.options.is_screencast.value_or(false);
// No automatic resizing when using simulcast or screencast.
bool automatic_resize =
@ -1564,7 +1543,6 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::VideoSendStreamParameters::
: config(std::move(config)),
options(options),
max_bitrate_bps(max_bitrate_bps),
conference_mode(false),
codec_settings(codec_settings) {}
WebRtcVideoChannel2::WebRtcVideoSendStream::AllocatedEncoder::AllocatedEncoder(
@ -1609,6 +1587,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
encoder_sink_(nullptr),
parameters_(std::move(config), options, max_bitrate_bps, codec_settings),
rtp_parameters_(CreateRtpParametersWithOneEncoding()),
pending_encoder_reconfiguration_(false),
allocated_encoder_(nullptr, webrtc::kVideoCodecUnknown, false),
sending_(false),
last_frame_timestamp_us_(0) {
@ -1676,6 +1655,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
last_frame_info_.height = video_frame.height();
last_frame_info_.rotation = video_frame.rotation();
last_frame_info_.is_texture = video_frame.is_texture();
pending_encoder_reconfiguration_ = true;
LOG(LS_INFO) << "Video frame parameters changed: dimensions="
<< last_frame_info_.width << "x" << last_frame_info_.height
@ -1690,13 +1670,22 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
last_frame_timestamp_us_ = video_frame.timestamp_us();
if (pending_encoder_reconfiguration_) {
ReconfigureEncoder();
pending_encoder_reconfiguration_ = false;
}
// Not sending, abort after reconfiguration. Reconfiguration should still
// occur to permit sending this input as quickly as possible once we start
// sending (without having to reconfigure then).
if (!sending_) {
return;
}
++frame_count_;
if (cpu_restricted_counter_ > 0)
++cpu_restricted_frame_count_;
// Forward frame to the encoder regardless if we are sending or not. This is
// to ensure that the encoder can be reconfigured with the correct frame size
// as quickly as possible.
encoder_sink_->OnFrame(video_frame);
}
@ -1705,7 +1694,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetVideoSend");
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
// Ignore |options| pointer if |enable| is false.
bool options_present = enable && options;
@ -1714,47 +1703,50 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
DisconnectSource();
}
if (options_present) {
VideoOptions old_options = parameters_.options;
parameters_.options.SetAll(*options);
// If options has changed and SetCodec has been called.
if (parameters_.options != old_options && stream_) {
ReconfigureEncoder();
}
}
if (source_changing) {
if (options_present || source_changing) {
rtc::CritScope cs(&lock_);
if (source == nullptr && encoder_sink_ != nullptr &&
last_frame_info_.width > 0) {
LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
// Force this black frame not to be dropped due to timestamp order
// check. As IncomingCapturedFrame will drop the frame if this frame's
// timestamp is less than or equal to last frame's timestamp, it is
// necessary to give this black frame a larger timestamp than the
// previous one.
last_frame_timestamp_us_ += rtc::kNumMicrosecsPerMillisec;
rtc::scoped_refptr<webrtc::I420Buffer> black_buffer(
webrtc::I420Buffer::Create(last_frame_info_.width,
last_frame_info_.height));
black_buffer->SetToBlack();
encoder_sink_->OnFrame(webrtc::VideoFrame(
black_buffer, last_frame_info_.rotation, last_frame_timestamp_us_));
if (options_present) {
VideoOptions old_options = parameters_.options;
parameters_.options.SetAll(*options);
// Reconfigure encoder settings on the next frame or stream
// recreation if the options changed.
if (parameters_.options != old_options) {
pending_encoder_reconfiguration_ = true;
}
}
if (source_changing) {
if (source == nullptr && encoder_sink_ != nullptr) {
LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
// Force this black frame not to be dropped due to timestamp order
// check. As IncomingCapturedFrame will drop the frame if this frame's
// timestamp is less than or equal to last frame's timestamp, it is
// necessary to give this black frame a larger timestamp than the
// previous one.
last_frame_timestamp_us_ += rtc::kNumMicrosecsPerMillisec;
rtc::scoped_refptr<webrtc::I420Buffer> black_buffer(
webrtc::I420Buffer::Create(last_frame_info_.width,
last_frame_info_.height));
black_buffer->SetToBlack();
encoder_sink_->OnFrame(webrtc::VideoFrame(
black_buffer, last_frame_info_.rotation, last_frame_timestamp_us_));
}
source_ = source;
}
source_ = source;
}
// |source_->AddOrUpdateSink| may not be called while holding |lock_| since
// that might cause a lock order inversion.
if (source_changing && source_) {
// |source_->AddOrUpdateSink| may not be called while holding |lock_| since
// that might cause a lock order inversion.
source_->AddOrUpdateSink(this, sink_wants_);
}
return true;
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectSource() {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (source_ == nullptr) {
return;
}
@ -1789,7 +1781,6 @@ webrtc::VideoCodecType CodecTypeFromName(const std::string& name) {
WebRtcVideoChannel2::WebRtcVideoSendStream::AllocatedEncoder
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoder(
const VideoCodec& codec) {
RTC_DCHECK_RUN_ON(&thread_checker_);
webrtc::VideoCodecType type = CodecTypeFromName(codec.name);
// Do not re-create encoders of the same type.
@ -1824,7 +1815,6 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoder(
void WebRtcVideoChannel2::WebRtcVideoSendStream::DestroyVideoEncoder(
AllocatedEncoder* encoder) {
RTC_DCHECK_RUN_ON(&thread_checker_);
if (encoder->external) {
external_encoder_factory_->DestroyVideoEncoder(encoder->external_encoder);
}
@ -1833,9 +1823,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::DestroyVideoEncoder(
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetCodec(
const VideoCodecSettings& codec_settings) {
RTC_DCHECK_RUN_ON(&thread_checker_);
parameters_.encoder_config = CreateVideoEncoderConfig(codec_settings.codec);
RTC_DCHECK_GT(parameters_.encoder_config.number_of_streams, 0u);
RTC_DCHECK(!parameters_.encoder_config.streams.empty());
AllocatedEncoder new_encoder = CreateVideoEncoder(codec_settings.codec);
parameters_.config.encoder_settings.encoder = new_encoder.encoder;
@ -1876,38 +1865,41 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetCodec(
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters(
const ChangedSendParameters& params) {
RTC_DCHECK_RUN_ON(&thread_checker_);
// |recreate_stream| means construction-time parameters have changed and the
// sending stream needs to be reset with the new config.
bool recreate_stream = false;
if (params.rtcp_mode) {
parameters_.config.rtp.rtcp_mode = *params.rtcp_mode;
recreate_stream = true;
}
if (params.rtp_header_extensions) {
parameters_.config.rtp.extensions = *params.rtp_header_extensions;
recreate_stream = true;
}
if (params.max_bandwidth_bps) {
parameters_.max_bitrate_bps = *params.max_bandwidth_bps;
ReconfigureEncoder();
}
if (params.conference_mode) {
parameters_.conference_mode = *params.conference_mode;
}
{
rtc::CritScope cs(&lock_);
// |recreate_stream| means construction-time parameters have changed and the
// sending stream needs to be reset with the new config.
bool recreate_stream = false;
if (params.rtcp_mode) {
parameters_.config.rtp.rtcp_mode = *params.rtcp_mode;
recreate_stream = true;
}
if (params.rtp_header_extensions) {
parameters_.config.rtp.extensions = *params.rtp_header_extensions;
recreate_stream = true;
}
if (params.max_bandwidth_bps) {
parameters_.max_bitrate_bps = *params.max_bandwidth_bps;
pending_encoder_reconfiguration_ = true;
}
if (params.conference_mode) {
parameters_.conference_mode = *params.conference_mode;
}
// Set codecs and options.
if (params.codec) {
SetCodec(*params.codec);
recreate_stream = false; // SetCodec has already recreated the stream.
} else if (params.conference_mode && parameters_.codec_settings) {
SetCodec(*parameters_.codec_settings);
recreate_stream = false; // SetCodec has already recreated the stream.
}
if (recreate_stream) {
LOG(LS_INFO) << "RecreateWebRtcStream (send) because of SetSendParameters";
RecreateWebRtcStream();
}
// Set codecs and options.
if (params.codec) {
SetCodec(*params.codec);
recreate_stream = false; // SetCodec has already recreated the stream.
} else if (params.conference_mode && parameters_.codec_settings) {
SetCodec(*parameters_.codec_settings);
recreate_stream = false; // SetCodec has already recreated the stream.
}
if (recreate_stream) {
LOG(LS_INFO)
<< "RecreateWebRtcStream (send) because of SetSendParameters";
RecreateWebRtcStream();
}
} // release |lock_|
// |source_->AddOrUpdateSink| may not be called while holding |lock_| since
// that might cause a lock order inversion.
@ -1922,19 +1914,18 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters(
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetRtpParameters(
const webrtc::RtpParameters& new_parameters) {
RTC_DCHECK_RUN_ON(&thread_checker_);
if (!ValidateRtpParameters(new_parameters)) {
return false;
}
bool reconfigure_encoder = new_parameters.encodings[0].max_bitrate_bps !=
rtp_parameters_.encodings[0].max_bitrate_bps;
rtc::CritScope cs(&lock_);
if (new_parameters.encodings[0].max_bitrate_bps !=
rtp_parameters_.encodings[0].max_bitrate_bps) {
pending_encoder_reconfiguration_ = true;
}
rtp_parameters_ = new_parameters;
// Codecs are currently handled at the WebRtcVideoChannel2 level.
rtp_parameters_.codecs.clear();
if (reconfigure_encoder) {
ReconfigureEncoder();
}
// Encoding may have been activated/deactivated.
UpdateSendState();
return true;
@ -1942,7 +1933,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetRtpParameters(
webrtc::RtpParameters
WebRtcVideoChannel2::WebRtcVideoSendStream::GetRtpParameters() const {
RTC_DCHECK_RUN_ON(&thread_checker_);
rtc::CritScope cs(&lock_);
return rtp_parameters_;
}
@ -1957,7 +1948,6 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::ValidateRtpParameters(
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::UpdateSendState() {
RTC_DCHECK_RUN_ON(&thread_checker_);
// TODO(deadbeef): Need to handle more than one encoding in the future.
RTC_DCHECK(rtp_parameters_.encodings.size() == 1u);
if (sending_ && rtp_parameters_.encodings[0].active) {
@ -1973,7 +1963,6 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::UpdateSendState() {
webrtc::VideoEncoderConfig
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoderConfig(
const VideoCodec& codec) const {
RTC_DCHECK_RUN_ON(&thread_checker_);
webrtc::VideoEncoderConfig encoder_config;
bool is_screencast = parameters_.options.is_screencast.value_or(false);
if (is_screencast) {
@ -1987,39 +1976,60 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoderConfig(
webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo;
}
// Restrict dimensions according to codec max.
int width = last_frame_info_.width;
int height = last_frame_info_.height;
if (!is_screencast) {
if (codec.width < width)
width = codec.width;
if (codec.height < height)
height = codec.height;
}
VideoCodec clamped_codec = codec;
clamped_codec.width = width;
clamped_codec.height = height;
// By default, the stream count for the codec configuration should match the
// number of negotiated ssrcs. But if the codec is blacklisted for simulcast
// or a screencast, only configure a single stream.
encoder_config.number_of_streams = parameters_.config.rtp.ssrcs.size();
size_t stream_count = parameters_.config.rtp.ssrcs.size();
if (IsCodecBlacklistedForSimulcast(codec.name) || is_screencast) {
encoder_config.number_of_streams = 1;
stream_count = 1;
}
int stream_max_bitrate =
MinPositive(rtp_parameters_.encodings[0].max_bitrate_bps,
parameters_.max_bitrate_bps);
encoder_config.streams = CreateVideoStreams(
clamped_codec, parameters_.options, stream_max_bitrate, stream_count);
encoder_config.expect_encode_from_texture = last_frame_info_.is_texture;
int codec_max_bitrate_kbps;
if (codec.GetParam(kCodecParamMaxBitrate, &codec_max_bitrate_kbps)) {
stream_max_bitrate = codec_max_bitrate_kbps * 1000;
// Conference mode screencast uses 2 temporal layers split at 100kbit.
if (parameters_.conference_mode && is_screencast &&
encoder_config.streams.size() == 1) {
ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();
// For screenshare in conference mode, tl0 and tl1 bitrates are piggybacked
// on the VideoCodec struct as target and max bitrates, respectively.
// See eg. webrtc::VP8EncoderImpl::SetRates().
encoder_config.streams[0].target_bitrate_bps =
config.tl0_bitrate_kbps * 1000;
encoder_config.streams[0].max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
encoder_config.streams[0].temporal_layer_thresholds_bps.clear();
encoder_config.streams[0].temporal_layer_thresholds_bps.push_back(
config.tl0_bitrate_kbps * 1000);
}
if (CodecNamesEq(codec.name, kVp9CodecName) && !is_screencast &&
encoder_config.streams.size() == 1) {
encoder_config.streams[0].temporal_layer_thresholds_bps.resize(
GetDefaultVp9TemporalLayers() - 1);
}
encoder_config.max_bitrate_bps = stream_max_bitrate;
int max_qp = kDefaultQpMax;
codec.GetParam(kCodecParamMaxQuantization, &max_qp);
int max_framerate =
codec.framerate != 0 ? codec.framerate : kDefaultVideoMaxFramerate;
encoder_config.video_stream_factory =
new rtc::RefCountedObject<EncoderStreamFactory>(
codec.name, max_qp, max_framerate, is_screencast,
parameters_.conference_mode);
return encoder_config;
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::ReconfigureEncoder() {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK_GT(parameters_.encoder_config.number_of_streams, 0u);
RTC_DCHECK(!parameters_.encoder_config.streams.empty());
RTC_CHECK(parameters_.codec_settings);
VideoCodecSettings codec_settings = *parameters_.codec_settings;
@ -2038,7 +2048,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::ReconfigureEncoder() {
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSend(bool send) {
RTC_DCHECK_RUN_ON(&thread_checker_);
rtc::CritScope cs(&lock_);
sending_ = send;
UpdateSendState();
}
@ -2068,62 +2078,63 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
this, load));
return;
}
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (!source_) {
return;
}
LOG(LS_INFO) << "OnLoadUpdate " << load << ", is_screencast: "
<< (parameters_.options.is_screencast
? (*parameters_.options.is_screencast ? "true" : "false")
: "unset");
// Do not adapt resolution for screen content as this will likely result in
// blurry and unreadable text.
if (parameters_.options.is_screencast.value_or(false))
return;
rtc::Optional<int> max_pixel_count;
rtc::Optional<int> max_pixel_count_step_up;
if (load == kOveruse) {
{
rtc::CritScope cs(&lock_);
if (cpu_restricted_counter_ >= kMaxCpuDowngrades) {
LOG(LS_INFO) << "OnLoadUpdate " << load << ", is_screencast: "
<< (parameters_.options.is_screencast
? (*parameters_.options.is_screencast ? "true"
: "false")
: "unset");
// Do not adapt resolution for screen content as this will likely result in
// blurry and unreadable text.
if (parameters_.options.is_screencast.value_or(false))
return;
rtc::Optional<int> max_pixel_count;
rtc::Optional<int> max_pixel_count_step_up;
if (load == kOveruse) {
if (cpu_restricted_counter_ >= kMaxCpuDowngrades) {
return;
}
// The input video frame size will have a resolution with less than or
// equal to |max_pixel_count| depending on how the source can scale the
// input frame size.
max_pixel_count = rtc::Optional<int>(
(last_frame_info_.height * last_frame_info_.width * 3) / 5);
// Increase |number_of_cpu_adapt_changes_| if
// sink_wants_.max_pixel_count will be changed since
// last time |source_->AddOrUpdateSink| was called. That is, this will
// result in a new request for the source to change resolution.
if (!sink_wants_.max_pixel_count ||
*sink_wants_.max_pixel_count > *max_pixel_count) {
++number_of_cpu_adapt_changes_;
++cpu_restricted_counter_;
}
} else {
RTC_DCHECK(load == kUnderuse);
// The input video frame size will have a resolution with "one step up"
// pixels than |max_pixel_count_step_up| where "one step up" depends on
// how the source can scale the input frame size.
max_pixel_count_step_up =
rtc::Optional<int>(last_frame_info_.height * last_frame_info_.width);
// Increase |number_of_cpu_adapt_changes_| if
// sink_wants_.max_pixel_count_step_up will be changed since
// last time |source_->AddOrUpdateSink| was called. That is, this will
// result in a new request for the source to change resolution.
if (sink_wants_.max_pixel_count ||
(sink_wants_.max_pixel_count_step_up &&
*sink_wants_.max_pixel_count_step_up < *max_pixel_count_step_up)) {
++number_of_cpu_adapt_changes_;
--cpu_restricted_counter_;
}
}
// The input video frame size will have a resolution with less than or
// equal to |max_pixel_count| depending on how the source can scale the
// input frame size.
max_pixel_count = rtc::Optional<int>(
(last_frame_info_.height * last_frame_info_.width * 3) / 5);
// Increase |number_of_cpu_adapt_changes_| if
// sink_wants_.max_pixel_count will be changed since
// last time |source_->AddOrUpdateSink| was called. That is, this will
// result in a new request for the source to change resolution.
if (!sink_wants_.max_pixel_count ||
*sink_wants_.max_pixel_count > *max_pixel_count) {
++number_of_cpu_adapt_changes_;
++cpu_restricted_counter_;
}
} else {
RTC_DCHECK(load == kUnderuse);
rtc::CritScope cs(&lock_);
// The input video frame size will have a resolution with "one step up"
// pixels than |max_pixel_count_step_up| where "one step up" depends on
// how the source can scale the input frame size.
max_pixel_count_step_up =
rtc::Optional<int>(last_frame_info_.height * last_frame_info_.width);
// Increase |number_of_cpu_adapt_changes_| if
// sink_wants_.max_pixel_count_step_up will be changed since
// last time |source_->AddOrUpdateSink| was called. That is, this will
// result in a new request for the source to change resolution.
if (sink_wants_.max_pixel_count ||
(sink_wants_.max_pixel_count_step_up &&
*sink_wants_.max_pixel_count_step_up < *max_pixel_count_step_up)) {
++number_of_cpu_adapt_changes_;
--cpu_restricted_counter_;
}
sink_wants_.max_pixel_count = max_pixel_count;
sink_wants_.max_pixel_count_step_up = max_pixel_count_step_up;
}
sink_wants_.max_pixel_count = max_pixel_count;
sink_wants_.max_pixel_count_step_up = max_pixel_count_step_up;
// |source_->AddOrUpdateSink| may not be called while holding |lock_| since
// that might cause a lock order inversion.
source_->AddOrUpdateSink(this, sink_wants_);
@ -2132,17 +2143,21 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
VideoSenderInfo WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo(
bool log_stats) {
VideoSenderInfo info;
RTC_DCHECK_RUN_ON(&thread_checker_);
for (uint32_t ssrc : parameters_.config.rtp.ssrcs)
info.add_ssrc(ssrc);
webrtc::VideoSendStream::Stats stats;
RTC_DCHECK(thread_checker_.CalledOnValidThread());
{
rtc::CritScope cs(&lock_);
for (uint32_t ssrc : parameters_.config.rtp.ssrcs)
info.add_ssrc(ssrc);
if (parameters_.codec_settings)
info.codec_name = parameters_.codec_settings->codec.name;
if (parameters_.codec_settings)
info.codec_name = parameters_.codec_settings->codec.name;
if (stream_ == NULL)
return info;
if (stream_ == NULL)
return info;
webrtc::VideoSendStream::Stats stats = stream_->GetStats();
stats = stream_->GetStats();
}
if (log_stats)
LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());
@ -2203,7 +2218,7 @@ VideoSenderInfo WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo(
void WebRtcVideoChannel2::WebRtcVideoSendStream::FillBandwidthEstimationInfo(
BandwidthEstimationInfo* bwe_info) {
RTC_DCHECK_RUN_ON(&thread_checker_);
rtc::CritScope cs(&lock_);
if (stream_ == NULL) {
return;
}
@ -2219,7 +2234,6 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::FillBandwidthEstimationInfo(
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
RTC_DCHECK_RUN_ON(&thread_checker_);
if (stream_ != NULL) {
call_->DestroyVideoSendStream(stream_);
}
@ -2243,6 +2257,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
stream_->SetSource(this);
parameters_.encoder_config.encoder_specific_settings = NULL;
pending_encoder_reconfiguration_ = false;
// Call stream_->Start() if necessary conditions are met.
UpdateSendState();

View File

@ -320,13 +320,17 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
bool external;
};
// TODO(perkj): VideoFrameInfo is currently used for CPU adaptation since
// we currently do not express CPU overuse using SinkWants in lower
// layers. This will be fixed in an upcoming cl.
struct VideoFrameInfo {
// Initial encoder configuration (QCIF, 176x144) frame (to ensure that
// hardware encoders can be initialized). This gives us low memory usage
// but also makes it so configuration errors are discovered at the time we
// apply the settings rather than when we get the first frame (waiting for
// the first frame to know that you gave a bad codec parameter could make
// debugging hard).
// TODO(pbos): Consider setting up encoders lazily.
VideoFrameInfo()
: width(0),
height(0),
: width(176),
height(144),
rotation(webrtc::kVideoRotation_0),
is_texture(false) {}
int width;
@ -335,63 +339,79 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
bool is_texture;
};
static std::vector<webrtc::VideoStream> CreateVideoStreams(
const VideoCodec& codec,
const VideoOptions& options,
int max_bitrate_bps,
size_t num_streams);
static std::vector<webrtc::VideoStream> CreateSimulcastVideoStreams(
const VideoCodec& codec,
const VideoOptions& options,
int max_bitrate_bps,
size_t num_streams);
rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
ConfigureVideoEncoderSettings(const VideoCodec& codec);
AllocatedEncoder CreateVideoEncoder(const VideoCodec& codec);
void DestroyVideoEncoder(AllocatedEncoder* encoder);
void SetCodec(const VideoCodecSettings& codec);
void RecreateWebRtcStream();
ConfigureVideoEncoderSettings(const VideoCodec& codec)
EXCLUSIVE_LOCKS_REQUIRED(lock_);
AllocatedEncoder CreateVideoEncoder(const VideoCodec& codec)
EXCLUSIVE_LOCKS_REQUIRED(lock_);
void DestroyVideoEncoder(AllocatedEncoder* encoder)
EXCLUSIVE_LOCKS_REQUIRED(lock_);
void SetCodec(const VideoCodecSettings& codec)
EXCLUSIVE_LOCKS_REQUIRED(lock_);
void RecreateWebRtcStream() EXCLUSIVE_LOCKS_REQUIRED(lock_);
webrtc::VideoEncoderConfig CreateVideoEncoderConfig(
const VideoCodec& codec) const;
void ReconfigureEncoder();
const VideoCodec& codec) const EXCLUSIVE_LOCKS_REQUIRED(lock_);
void ReconfigureEncoder() EXCLUSIVE_LOCKS_REQUIRED(lock_);
bool ValidateRtpParameters(const webrtc::RtpParameters& parameters);
// Calls Start or Stop according to whether or not |sending_| is true,
// and whether or not the encoding in |rtp_parameters_| is active.
void UpdateSendState();
void UpdateSendState() EXCLUSIVE_LOCKS_REQUIRED(lock_);
void UpdateHistograms() const EXCLUSIVE_LOCKS_REQUIRED(lock_);
rtc::ThreadChecker thread_checker_;
rtc::AsyncInvoker invoker_;
rtc::Thread* worker_thread_;
const std::vector<uint32_t> ssrcs_ ACCESS_ON(&thread_checker_);
const std::vector<SsrcGroup> ssrc_groups_ ACCESS_ON(&thread_checker_);
const std::vector<uint32_t> ssrcs_;
const std::vector<SsrcGroup> ssrc_groups_;
webrtc::Call* const call_;
rtc::VideoSinkWants sink_wants_ ACCESS_ON(&thread_checker_);
rtc::VideoSinkWants sink_wants_;
// Counter used for deciding if the video resolution is currently
// restricted by CPU usage. It is reset if |source_| is changed.
int cpu_restricted_counter_;
// Total number of times resolution as been requested to be changed due to
// CPU adaptation.
int number_of_cpu_adapt_changes_ ACCESS_ON(&thread_checker_);
int number_of_cpu_adapt_changes_;
// Total number of frames sent to |stream_|.
int frame_count_ GUARDED_BY(lock_);
// Total number of cpu restricted frames sent to |stream_|.
int cpu_restricted_frame_count_ GUARDED_BY(lock_);
rtc::VideoSourceInterface<cricket::VideoFrame>* source_
ACCESS_ON(&thread_checker_);
rtc::VideoSourceInterface<cricket::VideoFrame>* source_;
WebRtcVideoEncoderFactory* const external_encoder_factory_
ACCESS_ON(&thread_checker_);
GUARDED_BY(lock_);
rtc::CriticalSection lock_;
webrtc::VideoSendStream* stream_ ACCESS_ON(&thread_checker_);
webrtc::VideoSendStream* stream_ GUARDED_BY(lock_);
rtc::VideoSinkInterface<webrtc::VideoFrame>* encoder_sink_
GUARDED_BY(lock_);
// Contains settings that are the same for all streams in the MediaChannel,
// such as codecs, header extensions, and the global bitrate limit for the
// entire channel.
VideoSendStreamParameters parameters_ ACCESS_ON(&thread_checker_);
VideoSendStreamParameters parameters_ GUARDED_BY(lock_);
// Contains settings that are unique for each stream, such as max_bitrate.
// Does *not* contain codecs, however.
// TODO(skvlad): Move ssrcs_ and ssrc_groups_ into rtp_parameters_.
// TODO(skvlad): Combine parameters_ and rtp_parameters_ once we have only
// one stream per MediaChannel.
webrtc::RtpParameters rtp_parameters_ ACCESS_ON(&thread_checker_);
AllocatedEncoder allocated_encoder_ ACCESS_ON(&thread_checker_);
webrtc::RtpParameters rtp_parameters_ GUARDED_BY(lock_);
bool pending_encoder_reconfiguration_ GUARDED_BY(lock_);
AllocatedEncoder allocated_encoder_ GUARDED_BY(lock_);
VideoFrameInfo last_frame_info_ GUARDED_BY(lock_);
bool sending_ ACCESS_ON(&thread_checker_);
bool sending_ GUARDED_BY(lock_);
// The timestamp of the last frame received
// Used to generate timestamp for the black frame when source is removed

View File

@ -1077,16 +1077,22 @@ class WebRtcVideoChannel2Test : public WebRtcVideoEngine2Test {
return AddSendStream(CreateSimStreamParams("cname", ssrcs));
}
int GetMaxEncoderBitrate() {
int GetMaxEncoderBitrate(cricket::FakeVideoCapturer& capturer) {
EXPECT_TRUE(capturer.CaptureFrame());
std::vector<FakeVideoSendStream*> streams =
fake_call_->GetVideoSendStreams();
EXPECT_EQ(1u, streams.size());
EXPECT_TRUE(streams.size() > 0);
FakeVideoSendStream* stream = streams[streams.size() - 1];
EXPECT_EQ(1, stream->GetEncoderConfig().number_of_streams);
return stream->GetVideoStreams()[0].max_bitrate_bps;
webrtc::VideoEncoderConfig encoder_config =
stream->GetEncoderConfig().Copy();
EXPECT_EQ(1, encoder_config.streams.size());
return encoder_config.streams[0].max_bitrate_bps;
}
void SetAndExpectMaxBitrate(int global_max,
void SetAndExpectMaxBitrate(cricket::FakeVideoCapturer& capturer,
int global_max,
int stream_max,
int expected_encoder_bitrate) {
VideoSendParameters limited_send_params = send_parameters_;
@ -1102,7 +1108,7 @@ class WebRtcVideoChannel2Test : public WebRtcVideoEngine2Test {
EXPECT_EQ(1UL, parameters.encodings.size());
EXPECT_EQ(stream_max, parameters.encodings[0].max_bitrate_bps);
// Verify that the new value propagated down to the encoder
EXPECT_EQ(expected_encoder_bitrate, GetMaxEncoderBitrate());
EXPECT_EQ(expected_encoder_bitrate, GetMaxEncoderBitrate(capturer));
}
std::unique_ptr<FakeCall> fake_call_;
@ -1514,10 +1520,10 @@ TEST_F(WebRtcVideoChannel2Test, ReconfiguresEncodersWhenNotSending) {
FakeVideoSendStream* stream = AddSendStream();
// No frames entered.
// No frames entered, using default dimensions.
std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
EXPECT_EQ(0u, streams[0].width);
EXPECT_EQ(0u, streams[0].height);
EXPECT_EQ(176u, streams[0].width);
EXPECT_EQ(144u, streams[0].height);
cricket::FakeVideoCapturer capturer;
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
@ -1529,6 +1535,8 @@ TEST_F(WebRtcVideoChannel2Test, ReconfiguresEncodersWhenNotSending) {
streams = stream->GetVideoStreams();
EXPECT_EQ(kVp8Codec720p.width, streams[0].width);
EXPECT_EQ(kVp8Codec720p.height, streams[0].height);
// No frames should have been actually put in there though.
EXPECT_EQ(0, stream->GetNumberOfSwappedFrames());
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
}
@ -1566,9 +1574,8 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
send_stream->GetEncoderConfig().Copy();
EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
encoder_config.content_type);
std::vector<webrtc::VideoStream> streams = send_stream->GetVideoStreams();
EXPECT_EQ(capture_format_hd.width, streams.front().width);
EXPECT_EQ(capture_format_hd.height, streams.front().height);
EXPECT_EQ(codec.width, encoder_config.streams.front().width);
EXPECT_EQ(codec.height, encoder_config.streams.front().height);
EXPECT_EQ(0, encoder_config.min_transmit_bitrate_bps)
<< "Non-screenshare shouldn't use min-transmit bitrate.";
@ -1591,10 +1598,10 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
EXPECT_EQ(kScreenshareMinBitrateKbps * 1000,
encoder_config.min_transmit_bitrate_bps);
streams = send_stream->GetVideoStreams();
EXPECT_EQ(capture_format_hd.width, streams.front().width);
EXPECT_EQ(capture_format_hd.height, streams.front().height);
EXPECT_TRUE(streams[0].temporal_layer_thresholds_bps.empty());
EXPECT_EQ(capture_format_hd.width, encoder_config.streams.front().width);
EXPECT_EQ(capture_format_hd.height, encoder_config.streams.front().height);
EXPECT_TRUE(encoder_config.streams[0].temporal_layer_thresholds_bps.empty());
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
}
@ -1678,12 +1685,10 @@ TEST_F(WebRtcVideoChannel2Test,
encoder_config = send_stream->GetEncoderConfig().Copy();
EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen,
encoder_config.content_type);
std::vector<webrtc::VideoStream> streams = send_stream->GetVideoStreams();
ASSERT_EQ(1u, streams.size());
ASSERT_EQ(1u, streams[0].temporal_layer_thresholds_bps.size());
ASSERT_EQ(1u, encoder_config.streams.size());
ASSERT_EQ(1u, encoder_config.streams[0].temporal_layer_thresholds_bps.size());
EXPECT_EQ(kConferenceScreencastTemporalBitrateBps,
streams[0].temporal_layer_thresholds_bps[0]);
encoder_config.streams[0].temporal_layer_thresholds_bps[0]);
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
}
@ -1801,27 +1806,18 @@ TEST_F(WebRtcVideoChannel2Test, SetIdenticalOptionsDoesntReconfigureEncoder) {
VideoOptions options;
cricket::FakeVideoCapturer capturer;
AddSendStream();
FakeVideoSendStream* send_stream = AddSendStream();
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(kVp8Codec720p);
ASSERT_TRUE(channel_->SetSendParameters(parameters));
FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
EXPECT_TRUE(capturer.CaptureFrame());
// Expect 1 reconfigurations at this point from the initial configuration.
EXPECT_EQ(1, send_stream->num_encoder_reconfigurations());
// Expect 2 reconfigurations at this point, from the initial configuration
// and from the dimensions of the first frame.
EXPECT_EQ(2, send_stream->num_encoder_reconfigurations());
// Set the options one more time and expect no additional reconfigurations.
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
EXPECT_EQ(1, send_stream->num_encoder_reconfigurations());
// Change |options| and expect 2 reconfigurations.
options.is_screencast = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options, &capturer));
EXPECT_TRUE(capturer.CaptureFrame());
EXPECT_EQ(2, send_stream->num_encoder_reconfigurations());
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
@ -2314,26 +2310,28 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithoutFecDisablesFec) {
TEST_F(WebRtcVideoChannel2Test, SetSendCodecsChangesExistingStreams) {
cricket::VideoSendParameters parameters;
cricket::VideoCodec codec720p(100, "VP8", 1280, 720, 30);
codec720p.SetParam(kCodecParamMaxQuantization, kDefaultQpMax);
parameters.codecs.push_back(codec720p);
parameters.codecs.push_back(kVp8Codec720p);
ASSERT_TRUE(channel_->SetSendParameters(parameters));
channel_->SetSend(true);
FakeVideoSendStream* stream = AddSendStream();
cricket::FakeVideoCapturer capturer;
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, &capturer));
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
EXPECT_EQ(kDefaultQpMax, streams[0].max_qp);
EXPECT_EQ(kVp8Codec720p.width, streams[0].width);
EXPECT_EQ(kVp8Codec720p.height, streams[0].height);
parameters.codecs.clear();
codec720p.SetParam(kCodecParamMaxQuantization, kDefaultQpMax + 1);
parameters.codecs.push_back(codec720p);
parameters.codecs.push_back(kVp8Codec360p);
ASSERT_TRUE(channel_->SetSendParameters(parameters));
streams = fake_call_->GetVideoSendStreams()[0]->GetVideoStreams();
EXPECT_EQ(kDefaultQpMax + 1, streams[0].max_qp);
EXPECT_EQ(kVp8Codec360p.width, streams[0].width);
EXPECT_EQ(kVp8Codec360p.height, streams[0].height);
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
}
@ -3418,9 +3416,8 @@ TEST_F(WebRtcVideoChannel2Test, CanSentMaxBitrateForExistingStream) {
EXPECT_EQ(720, capture_format_hd.height);
EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format_hd));
EXPECT_TRUE(channel_->SetSend(true));
capturer.CaptureFrame();
int default_encoder_bitrate = GetMaxEncoderBitrate();
int default_encoder_bitrate = GetMaxEncoderBitrate(capturer);
EXPECT_TRUE(default_encoder_bitrate > 1000);
// TODO(skvlad): Resolve the inconsistency between the interpretation
@ -3431,11 +3428,11 @@ TEST_F(WebRtcVideoChannel2Test, CanSentMaxBitrateForExistingStream) {
// max_bandwidth_bps = -1 - do not change the previously set
// limit.
SetAndExpectMaxBitrate(1000, 0, 1000);
SetAndExpectMaxBitrate(1000, 800, 800);
SetAndExpectMaxBitrate(600, 800, 600);
SetAndExpectMaxBitrate(0, 800, 800);
SetAndExpectMaxBitrate(0, 0, default_encoder_bitrate);
SetAndExpectMaxBitrate(capturer, 1000, 0, 1000);
SetAndExpectMaxBitrate(capturer, 1000, 800, 800);
SetAndExpectMaxBitrate(capturer, 600, 800, 600);
SetAndExpectMaxBitrate(capturer, 0, 800, 800);
SetAndExpectMaxBitrate(capturer, 0, 0, default_encoder_bitrate);
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, nullptr, nullptr));
}

View File

@ -11,6 +11,7 @@
#include "webrtc/config.h"
#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
#include "webrtc/test/call_test.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
@ -94,11 +95,7 @@ void CallTest::RunBaseTest(BaseTest* test) {
}
if (num_video_streams_ > 0) {
int width = kDefaultWidth;
int height = kDefaultHeight;
int frame_rate = kDefaultFramerate;
test->ModifyVideoCaptureStartResolution(&width, &height, &frame_rate);
CreateFrameGeneratorCapturer(frame_rate, width, height);
CreateFrameGeneratorCapturer();
test->OnFrameGeneratorCapturerCreated(frame_generator_capturer_.get());
}
@ -189,8 +186,7 @@ void CallTest::CreateSendConfig(size_t num_video_streams,
kFakeVideoSendPayloadType;
video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
FillEncoderConfiguration(num_video_streams, &video_encoder_config_);
video_encoder_config_.streams = test::CreateVideoStreams(num_video_streams);
for (size_t i = 0; i < num_video_streams; ++i)
video_send_config_.rtp.ssrcs.push_back(kVideoSendSsrcs[i]);
video_send_config_.rtp.extensions.push_back(RtpExtension(
@ -240,20 +236,17 @@ void CallTest::CreateMatchingReceiveConfigs(Transport* rtcp_send_transport) {
}
void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock,
float speed,
int framerate,
int width,
int height) {
float speed) {
VideoStream stream = video_encoder_config_.streams.back();
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
width, height, framerate * speed, clock));
stream.width, stream.height, stream.max_framerate * speed, clock));
video_send_stream_->SetSource(frame_generator_capturer_.get());
}
void CallTest::CreateFrameGeneratorCapturer(int framerate,
int width,
int height) {
frame_generator_capturer_.reset(
test::FrameGeneratorCapturer::Create(width, height, framerate, clock_));
void CallTest::CreateFrameGeneratorCapturer() {
VideoStream stream = video_encoder_config_.streams.back();
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
stream.width, stream.height, stream.max_framerate, clock_));
video_send_stream_->SetSource(frame_generator_capturer_.get());
}
@ -353,9 +346,6 @@ void CallTest::DestroyVoiceEngines() {
voe_recv_.voice_engine = nullptr;
}
const int CallTest::kDefaultWidth;
const int CallTest::kDefaultHeight;
const int CallTest::kDefaultFramerate;
const int CallTest::kDefaultTimeoutMs = 30 * 1000;
const int CallTest::kLongTimeoutMs = 120 * 1000;
const uint8_t CallTest::kVideoSendPayloadType = 100;
@ -414,10 +404,6 @@ void BaseTest::ModifyVideoConfigs(
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) {}
void BaseTest::ModifyVideoCaptureStartResolution(int* width,
int* heigt,
int* frame_rate) {}
void BaseTest::OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) {}

View File

@ -14,7 +14,6 @@
#include <vector>
#include "webrtc/call.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
@ -36,9 +35,7 @@ class CallTest : public ::testing::Test {
virtual ~CallTest();
static const size_t kNumSsrcs = 3;
static const int kDefaultWidth = 320;
static const int kDefaultHeight = 180;
static const int kDefaultFramerate = 30;
static const int kDefaultTimeoutMs;
static const int kLongTimeoutMs;
static const uint8_t kVideoSendPayloadType;
@ -72,12 +69,8 @@ class CallTest : public ::testing::Test {
Transport* send_transport);
void CreateMatchingReceiveConfigs(Transport* rtcp_send_transport);
void CreateFrameGeneratorCapturerWithDrift(Clock* drift_clock,
float speed,
int framerate,
int width,
int height);
void CreateFrameGeneratorCapturer(int framerate, int width, int height);
void CreateFrameGeneratorCapturerWithDrift(Clock* drift_clock, float speed);
void CreateFrameGeneratorCapturer();
void CreateFakeAudioDevices();
void CreateVideoStreams();
@ -161,9 +154,6 @@ class BaseTest : public RtpRtcpObserver {
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config);
virtual void ModifyVideoCaptureStartResolution(int* width,
int* heigt,
int* frame_rate);
virtual void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams);

View File

@ -9,76 +9,50 @@
*/
#include "webrtc/test/encoder_settings.h"
#include <algorithm>
#include <string>
#include <assert.h>
#include <string.h>
#include "webrtc/test/fake_decoder.h"
#include "webrtc/video_decoder.h"
namespace webrtc {
namespace test {
std::vector<VideoStream> CreateVideoStreams(size_t num_streams) {
assert(num_streams > 0);
const size_t DefaultVideoStreamFactory::kMaxNumberOfStreams;
const int DefaultVideoStreamFactory::kMaxBitratePerStream[] = {150000, 450000,
1500000};
const int DefaultVideoStreamFactory::kDefaultMinBitratePerStream[] = {
50000, 200000, 700000};
// Add more streams to the settings above with reasonable values if required.
static const size_t kNumSettings = 3;
assert(num_streams <= kNumSettings);
// static
std::vector<VideoStream> CreateVideoStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config) {
RTC_DCHECK(encoder_config.number_of_streams <=
DefaultVideoStreamFactory::kMaxNumberOfStreams);
std::vector<VideoStream> stream_settings(kNumSettings);
std::vector<VideoStream> stream_settings(encoder_config.number_of_streams);
int bitrate_left_bps = encoder_config.max_bitrate_bps;
stream_settings[0].width = 320;
stream_settings[0].height = 180;
stream_settings[0].max_framerate = 30;
stream_settings[0].min_bitrate_bps = 50000;
stream_settings[0].target_bitrate_bps = stream_settings[0].max_bitrate_bps =
150000;
stream_settings[0].max_qp = 56;
for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
stream_settings[i].width =
(i + 1) * width / encoder_config.number_of_streams;
stream_settings[i].height =
(i + 1) * height / encoder_config.number_of_streams;
stream_settings[i].max_framerate = 30;
stream_settings[i].min_bitrate_bps =
DefaultVideoStreamFactory::kDefaultMinBitratePerStream[i];
stream_settings[i].target_bitrate_bps = stream_settings[i].max_bitrate_bps =
std::min(bitrate_left_bps,
DefaultVideoStreamFactory::kMaxBitratePerStream[i]);
stream_settings[i].max_qp = 56;
bitrate_left_bps -= stream_settings[i].target_bitrate_bps;
}
stream_settings[encoder_config.number_of_streams - 1].max_bitrate_bps +=
bitrate_left_bps;
stream_settings[1].width = 640;
stream_settings[1].height = 360;
stream_settings[1].max_framerate = 30;
stream_settings[1].min_bitrate_bps = 200000;
stream_settings[1].target_bitrate_bps = stream_settings[1].max_bitrate_bps =
450000;
stream_settings[1].max_qp = 56;
stream_settings[2].width = 1280;
stream_settings[2].height = 720;
stream_settings[2].max_framerate = 30;
stream_settings[2].min_bitrate_bps = 700000;
stream_settings[2].target_bitrate_bps = stream_settings[2].max_bitrate_bps =
1500000;
stream_settings[2].max_qp = 56;
stream_settings.resize(num_streams);
return stream_settings;
}
DefaultVideoStreamFactory::DefaultVideoStreamFactory() {}
std::vector<VideoStream> DefaultVideoStreamFactory::CreateEncoderStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config) {
return CreateVideoStreams(width, height, encoder_config);
}
void FillEncoderConfiguration(size_t num_streams,
VideoEncoderConfig* configuration) {
RTC_DCHECK_LE(num_streams, DefaultVideoStreamFactory::kMaxNumberOfStreams);
configuration->number_of_streams = num_streams;
configuration->video_stream_factory =
new rtc::RefCountedObject<DefaultVideoStreamFactory>();
configuration->max_bitrate_bps = 0;
for (size_t i = 0; i < num_streams; ++i) {
configuration->max_bitrate_bps +=
DefaultVideoStreamFactory::kMaxBitratePerStream[i];
}
}
VideoReceiveStream::Decoder CreateMatchingDecoder(
const VideoSendStream::Config::EncoderSettings& encoder_settings) {
VideoReceiveStream::Decoder decoder;

View File

@ -10,43 +10,12 @@
#ifndef WEBRTC_TEST_ENCODER_SETTINGS_H_
#define WEBRTC_TEST_ENCODER_SETTINGS_H_
#include <vector>
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
namespace test {
class DefaultVideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
DefaultVideoStreamFactory();
static const size_t kMaxNumberOfStreams = 3;
// Defined as {150000, 450000, 1500000};
static const int kMaxBitratePerStream[];
// Defined as {50000, 200000, 700000};
static const int kDefaultMinBitratePerStream[];
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override;
};
// Creates |encoder_config.number_of_streams| VideoStreams where index
// |encoder_config.number_of_streams -1| have width = |width|, height =
// |height|. The total max bitrate of all VideoStreams is
// |encoder_config.max_bitrate_bps|.
std::vector<VideoStream> CreateVideoStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config);
void FillEncoderConfiguration(size_t num_streams,
VideoEncoderConfig* configuration);
std::vector<VideoStream> CreateVideoStreams(size_t num_streams);
VideoReceiveStream::Decoder CreateMatchingDecoder(
const VideoSendStream::Config::EncoderSettings& encoder_settings);

View File

@ -26,14 +26,8 @@ namespace {
class ChromaGenerator : public FrameGenerator {
public:
ChromaGenerator(size_t width, size_t height) : angle_(0.0) {
ChangeResolution(width, height);
}
void ChangeResolution(size_t width, size_t height) override {
rtc::CritScope lock(&crit_);
width_ = width;
height_ = height;
ChromaGenerator(size_t width, size_t height)
: angle_(0.0), width_(width), height_(height) {
RTC_CHECK(width_ > 0);
RTC_CHECK(height_ > 0);
half_width_ = (width_ + 1) / 2;
@ -42,7 +36,6 @@ class ChromaGenerator : public FrameGenerator {
}
VideoFrame* NextFrame() override {
rtc::CritScope lock(&crit_);
angle_ += 30.0;
uint8_t u = fabs(sin(angle_)) * 0xFF;
uint8_t v = fabs(cos(angle_)) * 0xFF;
@ -62,14 +55,13 @@ class ChromaGenerator : public FrameGenerator {
}
private:
rtc::CriticalSection crit_;
double angle_ GUARDED_BY(&crit_);
size_t width_ GUARDED_BY(&crit_);
size_t height_ GUARDED_BY(&crit_);
size_t half_width_ GUARDED_BY(&crit_);
size_t y_size_ GUARDED_BY(&crit_);
size_t uv_size_ GUARDED_BY(&crit_);
std::unique_ptr<VideoFrame> frame_ GUARDED_BY(&crit_);
double angle_;
size_t width_;
size_t height_;
size_t half_width_;
size_t y_size_;
size_t uv_size_;
std::unique_ptr<VideoFrame> frame_;
};
class YuvFileGenerator : public FrameGenerator {

View File

@ -49,11 +49,6 @@ class FrameGenerator {
// Returns video frame that remains valid until next call.
virtual VideoFrame* NextFrame() = 0;
// Change the capture resolution.
virtual void ChangeResolution(size_t width, size_t height) {
RTC_NOTREACHED();
}
// Creates a test frame generator that creates fully saturated frames with
// varying U, V values over time.
static FrameGenerator* CreateChromaGenerator(size_t width, size_t height);

View File

@ -124,11 +124,6 @@ void FrameGeneratorCapturer::Stop() {
sending_ = false;
}
void FrameGeneratorCapturer::ChangeResolution(size_t width, size_t height) {
rtc::CritScope cs(&lock_);
frame_generator_->ChangeResolution(width, height);
}
void FrameGeneratorCapturer::AddOrUpdateSink(
rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {

View File

@ -45,7 +45,6 @@ class FrameGeneratorCapturer : public VideoCapturer {
void Start() override;
void Stop() override;
void ChangeResolution(size_t width, size_t height);
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;

View File

@ -259,8 +259,9 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
Start();
std::unique_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(kDefaultWidth,
kDefaultHeight));
test::FrameGenerator::CreateChromaGenerator(
video_encoder_config_.streams[0].width,
video_encoder_config_.streams[0].height));
test::FrameForwarder frame_forwarder;
video_send_stream_->SetSource(&frame_forwarder);
frame_forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
@ -304,6 +305,9 @@ class CodecObserver : public test::EndToEndTest,
send_config->encoder_settings.encoder = encoder_.get();
send_config->encoder_settings.payload_name = payload_name_;
send_config->encoder_settings.payload_type = 126;
encoder_config->streams[0].min_bitrate_bps = 50000;
encoder_config->streams[0].target_bitrate_bps =
encoder_config->streams[0].max_bitrate_bps = 2000000;
(*receive_configs)[0].renderer = this;
(*receive_configs)[0].decoders.resize(1);
@ -789,6 +793,9 @@ TEST_F(EndToEndTest, ReceivedFecPacketsNotNacked) {
send_config->encoder_settings.encoder = encoder_.get();
send_config->encoder_settings.payload_name = "VP8";
send_config->encoder_settings.payload_type = kFakeVideoSendPayloadType;
encoder_config->streams[0].min_bitrate_bps = 50000;
encoder_config->streams[0].max_bitrate_bps =
encoder_config->streams[0].target_bitrate_bps = 2000000;
(*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
(*receive_configs)[0].rtp.fec.red_payload_type = kRedPayloadType;
@ -1114,8 +1121,7 @@ TEST_F(EndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) {
CreateMatchingReceiveConfigs(&receive_transport);
CreateVideoStreams();
CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
CreateFrameGeneratorCapturer();
Start();
receiver_call_->DestroyVideoReceiveStream(video_receive_streams_[0]);
@ -1272,8 +1278,13 @@ class MultiStreamTest {
send_config.encoder_settings.payload_name = "VP8";
send_config.encoder_settings.payload_type = 124;
VideoEncoderConfig encoder_config;
test::FillEncoderConfiguration(1, &encoder_config);
encoder_config.max_bitrate_bps = 100000;
encoder_config.streams = test::CreateVideoStreams(1);
VideoStream* stream = &encoder_config.streams[0];
stream->width = width;
stream->height = height;
stream->max_framerate = 5;
stream->min_bitrate_bps = stream->target_bitrate_bps =
stream->max_bitrate_bps = 100000;
UpdateSendConfig(i, &send_config, &encoder_config, &frame_generators[i]);
@ -1551,9 +1562,11 @@ TEST_F(EndToEndTest, AssignsTransportSequenceNumbers) {
// Force some padding to be sent.
const int kPaddingBitrateBps = 50000;
encoder_config->max_bitrate_bps = 1000000;
int total_target_bitrate = 0;
for (const VideoStream& stream : encoder_config->streams)
total_target_bitrate += stream.target_bitrate_bps;
encoder_config->min_transmit_bitrate_bps =
encoder_config->max_bitrate_bps + kPaddingBitrateBps;
total_target_bitrate + kPaddingBitrateBps;
// Configure RTX for redundant payload padding.
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
@ -1753,8 +1766,9 @@ TEST_F(EndToEndTest, ObserversEncodedFrames) {
Start();
std::unique_ptr<test::FrameGenerator> frame_generator(
test::FrameGenerator::CreateChromaGenerator(kDefaultWidth,
kDefaultHeight));
test::FrameGenerator::CreateChromaGenerator(
video_encoder_config_.streams[0].width,
video_encoder_config_.streams[0].height));
test::FrameForwarder forwarder;
video_send_stream_->SetSource(&forwarder);
forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
@ -1905,7 +1919,8 @@ TEST_F(EndToEndTest, RembWithSendSideBwe) {
test::kTransportSequenceNumberExtensionId));
sender_ssrc_ = send_config->rtp.ssrcs[0];
encoder_config->max_bitrate_bps = 2000000;
encoder_config->streams[0].max_bitrate_bps =
encoder_config->streams[0].target_bitrate_bps = 2000000;
ASSERT_EQ(1u, receive_configs->size());
(*receive_configs)[0].rtp.remb = false;
@ -2246,18 +2261,24 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels"));
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.ReceivedHeightInPixels"));
EXPECT_EQ(1, metrics::NumEvents(video_prefix + "InputWidthInPixels",
kDefaultWidth));
EXPECT_EQ(1, metrics::NumEvents(video_prefix + "InputHeightInPixels",
kDefaultHeight));
EXPECT_EQ(
1, metrics::NumEvents(video_prefix + "SentWidthInPixels", kDefaultWidth));
EXPECT_EQ(1, metrics::NumEvents(video_prefix + "SentHeightInPixels",
kDefaultHeight));
EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.ReceivedWidthInPixels",
kDefaultWidth));
EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.ReceivedHeightInPixels",
kDefaultHeight));
EXPECT_EQ(1, metrics::NumEvents(
video_prefix + "InputWidthInPixels",
static_cast<int>(video_encoder_config_.streams[0].width)));
EXPECT_EQ(1, metrics::NumEvents(
video_prefix + "InputHeightInPixels",
static_cast<int>(video_encoder_config_.streams[0].height)));
EXPECT_EQ(1, metrics::NumEvents(
video_prefix + "SentWidthInPixels",
static_cast<int>(video_encoder_config_.streams[0].width)));
EXPECT_EQ(1, metrics::NumEvents(
video_prefix + "SentHeightInPixels",
static_cast<int>(video_encoder_config_.streams[0].height)));
EXPECT_EQ(1, metrics::NumEvents(
"WebRTC.Video.ReceivedWidthInPixels",
static_cast<int>(video_encoder_config_.streams[0].width)));
EXPECT_EQ(1, metrics::NumEvents(
"WebRTC.Video.ReceivedHeightInPixels",
static_cast<int>(video_encoder_config_.streams[0].height)));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InputFramesPerSecond"));
EXPECT_EQ(1, metrics::NumSamples(video_prefix + "SentFramesPerSecond"));
@ -2463,41 +2484,22 @@ void EndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
size_t GetNumVideoStreams() const override { return num_ssrcs_; }
// This test use other VideoStream settings than the the default settings
// implemented in DefaultVideoStreamFactory. Therefore this test implement
// its own VideoEncoderConfig::VideoStreamFactoryInterface which is created
// in ModifyVideoConfigs.
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
streams[i].min_bitrate_bps = 10000;
streams[i].target_bitrate_bps = 15000;
streams[i].max_bitrate_bps = 20000;
}
return streams;
}
};
void ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>();
if (num_ssrcs_ > 1) {
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
encoder_config->streams[i].min_bitrate_bps = 10000;
encoder_config->streams[i].target_bitrate_bps = 15000;
encoder_config->streams[i].max_bitrate_bps = 20000;
}
}
video_encoder_config_all_streams_ = encoder_config->Copy();
if (send_single_ssrc_first_)
encoder_config->number_of_streams = 1;
encoder_config->streams.resize(1);
}
void OnVideoStreamsCreated(
@ -2557,7 +2559,7 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
RTC_DCHECK_EQ(1u, encoder_config->number_of_streams);
RTC_DCHECK_EQ(1u, encoder_config->streams.size());
}
int32_t SetRates(uint32_t new_target_bitrate, uint32_t framerate) override {
@ -2829,38 +2831,16 @@ TEST_F(EndToEndTest, GetStats) {
return config;
}
// This test use other VideoStream settings than the the default settings
// implemented in DefaultVideoStreamFactory. Therefore this test implement
// its own VideoEncoderConfig::VideoStreamFactoryInterface which is created
// in ModifyVideoConfigs.
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
streams[i].min_bitrate_bps = 10000;
streams[i].target_bitrate_bps = 15000;
streams[i].max_bitrate_bps = 20000;
}
return streams;
}
};
void ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>();
// Set low rates to avoid waiting for rampup.
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
encoder_config->streams[i].min_bitrate_bps = 10000;
encoder_config->streams[i].target_bitrate_bps = 15000;
encoder_config->streams[i].max_bitrate_bps = 20000;
}
send_config->pre_encode_callback = this; // Used to inject delay.
expected_cname_ = send_config->rtp.c_name = "SomeCName";
@ -3053,39 +3033,17 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
size_t GetNumVideoStreams() const override { return kNumSsrcs; }
// This test use other VideoStream settings than the the default settings
// implemented in DefaultVideoStreamFactory. Therefore this test implement
// its own VideoEncoderConfig::VideoStreamFactoryInterface which is created
// in ModifyVideoConfigs.
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
streams[i].min_bitrate_bps = 10000;
streams[i].target_bitrate_bps = 15000;
streams[i].max_bitrate_bps = 20000;
}
return streams;
}
};
void ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
// Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>();
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
encoder_config->streams[i].min_bitrate_bps = 10000;
encoder_config->streams[i].target_bitrate_bps = 15000;
encoder_config->streams[i].max_bitrate_bps = 20000;
}
send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
for (size_t i = 0; i < kNumSsrcs; ++i)
@ -3112,43 +3070,6 @@ TEST_F(EndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
void EndToEndTest::TestRtpStatePreservation(bool use_rtx,
bool provoke_rtcpsr_before_rtp) {
// This test use other VideoStream settings than the the default settings
// implemented in DefaultVideoStreamFactory. Therefore this test implement
// its own VideoEncoderConfig::VideoStreamFactoryInterface which is created
// in ModifyVideoConfigs.
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
if (encoder_config.number_of_streams > 1) {
// Lower bitrates so that all streams send initially.
RTC_DCHECK_EQ(3u, encoder_config.number_of_streams);
for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
streams[i].min_bitrate_bps = 10000;
streams[i].target_bitrate_bps = 15000;
streams[i].max_bitrate_bps = 20000;
}
} else {
// Use the same total bitrates when sending a single stream to avoid
// lowering
// the bitrate estimate and requiring a subsequent rampup.
streams[0].min_bitrate_bps = 3 * 10000;
streams[0].target_bitrate_bps = 3 * 15000;
streams[0].max_bitrate_bps = 3 * 20000;
}
return streams;
}
};
class RtpSequenceObserver : public test::RtpRtcpObserver {
public:
explicit RtpSequenceObserver(bool use_rtx)
@ -3284,17 +3205,30 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx,
video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
}
video_encoder_config_.video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>();
// Lower bitrates so that all streams send initially.
for (size_t i = 0; i < video_encoder_config_.streams.size(); ++i) {
video_encoder_config_.streams[i].min_bitrate_bps = 10000;
video_encoder_config_.streams[i].target_bitrate_bps = 15000;
video_encoder_config_.streams[i].max_bitrate_bps = 20000;
}
// Use the same total bitrates when sending a single stream to avoid lowering
// the bitrate estimate and requiring a subsequent rampup.
VideoEncoderConfig one_stream = video_encoder_config_.Copy();
// one_stream.streams.resize(1);
one_stream.number_of_streams = 1;
one_stream.streams.resize(1);
for (size_t i = 1; i < video_encoder_config_.streams.size(); ++i) {
one_stream.streams.front().min_bitrate_bps +=
video_encoder_config_.streams[i].min_bitrate_bps;
one_stream.streams.front().target_bitrate_bps +=
video_encoder_config_.streams[i].target_bitrate_bps;
one_stream.streams.front().max_bitrate_bps +=
video_encoder_config_.streams[i].max_bitrate_bps;
}
CreateMatchingReceiveConfigs(&receive_transport);
CreateVideoStreams();
CreateFrameGeneratorCapturer(30, 1280, 720);
CreateFrameGeneratorCapturer();
Start();
EXPECT_TRUE(observer.Wait())
@ -3319,7 +3253,7 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx,
static_cast<webrtc::test::DirectTransport&>(receive_transport)
.SendRtcp(packet.data(), packet.size());
}
CreateFrameGeneratorCapturer(30, 1280, 720);
CreateFrameGeneratorCapturer();
frame_generator_capturer_->Start();
observer.ResetExpectedSsrcs(1);
@ -3574,8 +3508,7 @@ TEST_F(EndToEndTest, CallReportsRttForSender) {
CreateMatchingReceiveConfigs(&receiver_transport);
CreateVideoStreams();
CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
CreateFrameGeneratorCapturer();
Start();
int64_t start_time_ms = clock_->TimeInMilliseconds();
@ -3608,8 +3541,7 @@ void EndToEndTest::VerifyNewVideoSendStreamsRespectNetworkState(
CreateSendConfig(1, 0, transport);
video_send_config_.encoder_settings.encoder = encoder;
CreateVideoStreams();
CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
CreateFrameGeneratorCapturer();
Start();
SleepMs(kSilenceTimeoutMs);
@ -3630,8 +3562,7 @@ void EndToEndTest::VerifyNewVideoReceiveStreamsRespectNetworkState(
CreateSendConfig(1, 0, &sender_transport);
CreateMatchingReceiveConfigs(transport);
CreateVideoStreams();
CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
CreateFrameGeneratorCapturer();
Start();
SleepMs(kSilenceTimeoutMs);

View File

@ -29,7 +29,6 @@
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/include/cpu_info.h"
#include "webrtc/test/encoder_settings.h"
#include "webrtc/test/layer_filtering_transport.h"
#include "webrtc/test/run_loop.h"
#include "webrtc/test/statistics.h"
@ -1015,8 +1014,7 @@ void VideoQualityTest::SetupCommon(Transport* send_transport,
video_encoder_config_.min_transmit_bitrate_bps =
params_.common.min_transmit_bps;
test::FillEncoderConfiguration(params_.ss.streams.size(),
&video_encoder_config_);
video_encoder_config_.streams = params_.ss.streams;
video_encoder_config_.spatial_layers = params_.ss.spatial_layers;
CreateMatchingReceiveConfigs(recv_transport);

View File

@ -521,11 +521,18 @@ VideoSendStream::VideoSendStream(
config_.pre_encode_callback, config_.overuse_callback,
config_.post_encode_callback));
// TODO(perkj): Remove vector<VideoStreams> from VideoEncoderConfig and
// replace with max_bitrate. The VideoStream should be created by ViEEncoder
// when the video resolution is known.
int initial_max_encoder_bitrate = 0;
for (const auto& stream : encoder_config.streams)
initial_max_encoder_bitrate += stream.max_bitrate_bps;
worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(new ConstructionTask(
&send_stream_, &thread_sync_event_, &stats_proxy_, vie_encoder_.get(),
module_process_thread, call_stats, congestion_controller,
bitrate_allocator, send_delay_stats, remb, event_log, &config_,
encoder_config.max_bitrate_bps, suspended_ssrcs)));
initial_max_encoder_bitrate, suspended_ssrcs)));
// Wait for ConstructionTask to complete so that |send_stream_| can be used.
// |module_process_thread| must be registered and deregistered on the thread
@ -572,9 +579,12 @@ void VideoSendStream::SetSource(
}
void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
// TODO(perkj): Some test cases in VideoSendStreamTest call
// ReconfigureVideoEncoder from the network thread.
// RTC_DCHECK_RUN_ON(&thread_checker_);
// ReconfigureVideoEncoder will be called on the thread that deliverers video
// frames. We must change the encoder settings immediately so that
// the codec settings matches the next frame.
// TODO(perkj): Move logic for reconfiguration the encoder due to frame size
// change from WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame to
// be internally handled by ViEEncoder.
vie_encoder_->ConfigureEncoder(std::move(config),
config_.rtp.max_packet_size);
}

View File

@ -37,7 +37,6 @@
#include "webrtc/test/frame_utils.h"
#include "webrtc/test/null_transport.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video_frame.h"
#include "webrtc/video_send_stream.h"
@ -909,18 +908,16 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
RTC_DCHECK_EQ(1u, encoder_config->number_of_streams);
transport_adapter_.reset(
new internal::TransportAdapter(send_config->send_transport));
transport_adapter_->Enable();
send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
send_config->pre_encode_callback = this;
send_config->suspend_below_min_bitrate = true;
int min_bitrate_bps =
test::DefaultVideoStreamFactory::kDefaultMinBitratePerStream[0];
int min_bitrate_bps = encoder_config->streams[0].min_bitrate_bps;
set_low_remb_bps(min_bitrate_bps - 10000);
int threshold_window = std::max(min_bitrate_bps / 10, 20000);
ASSERT_GT(encoder_config->max_bitrate_bps,
ASSERT_GT(encoder_config->streams[0].max_bitrate_bps,
min_bitrate_bps + threshold_window + 5000);
set_high_remb_bps(min_bitrate_bps + threshold_window + 5000);
}
@ -1209,7 +1206,7 @@ class MaxPaddingSetTest : public test::SendTest {
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
RTC_DCHECK_EQ(1u, encoder_config->number_of_streams);
RTC_DCHECK_EQ(1u, encoder_config->streams.size());
if (running_without_padding_) {
encoder_config->min_transmit_bitrate_bps = 0;
encoder_config->content_type =
@ -1277,84 +1274,6 @@ TEST_F(VideoSendStreamTest, RespectsMinTransmitBitrateAfterContentSwitch) {
RunBaseTest(&test);
}
// This test verifies that new frame sizes reconfigures encoders even though not
// (yet) sending. The purpose of this is to permit encoding as quickly as
// possible once we start sending. Likely the frames being input are from the
// same source that will be sent later, which just means that we're ready
// earlier.
TEST_F(VideoSendStreamTest,
EncoderReconfigureOnResolutionChangeWhenNotSending) {
class EncoderObserver : public test::FakeEncoder {
public:
EncoderObserver()
: FakeEncoder(Clock::GetRealTimeClock()),
init_encode_called_(false, false),
number_of_initializations_(0),
last_initialized_frame_width_(0),
last_initialized_frame_height_(0) {}
void WaitForResolution(int width, int height) {
{
rtc::CritScope lock(&crit_);
if (last_initialized_frame_width_ == width &&
last_initialized_frame_height_ == height) {
return;
}
}
init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeoutMs);
{
rtc::CritScope lock(&crit_);
EXPECT_EQ(width, last_initialized_frame_width_);
EXPECT_EQ(height, last_initialized_frame_height_);
}
}
private:
int32_t InitEncode(const VideoCodec* config,
int32_t number_of_cores,
size_t max_payload_size) override {
rtc::CritScope lock(&crit_);
last_initialized_frame_width_ = config->width;
last_initialized_frame_height_ = config->height;
++number_of_initializations_;
// First time InitEncode is called, the frame size is unknown.
if (number_of_initializations_ > 1)
init_encode_called_.Set();
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}
int32_t Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<FrameType>* frame_types) override {
ADD_FAILURE()
<< "Unexpected Encode call since the send stream is not started";
return 0;
}
rtc::CriticalSection crit_;
rtc::Event init_encode_called_;
size_t number_of_initializations_ GUARDED_BY(&crit_);
int last_initialized_frame_width_ GUARDED_BY(&crit_);
int last_initialized_frame_height_ GUARDED_BY(&crit_);
};
CreateSenderCall(Call::Config());
test::NullTransport transport;
CreateSendConfig(1, 0, &transport);
EncoderObserver encoder;
video_send_config_.encoder_settings.encoder = &encoder;
CreateVideoStreams();
CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
kDefaultHeight);
frame_generator_capturer_->Start();
encoder.WaitForResolution(kDefaultWidth, kDefaultHeight);
frame_generator_capturer_->ChangeResolution(kDefaultWidth * 2,
kDefaultHeight * 2);
encoder.WaitForResolution(kDefaultWidth * 2, kDefaultHeight * 2);
DestroyStreams();
}
TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
class StartBitrateObserver : public test::FakeEncoder {
public:
@ -1400,22 +1319,21 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
CreateSendConfig(1, 0, &transport);
Call::Config::BitrateConfig bitrate_config;
bitrate_config.start_bitrate_bps = 2 * video_encoder_config_.max_bitrate_bps;
bitrate_config.start_bitrate_bps =
2 * video_encoder_config_.streams[0].max_bitrate_bps;
sender_call_->SetBitrateConfig(bitrate_config);
StartBitrateObserver encoder;
video_send_config_.encoder_settings.encoder = &encoder;
// Since this test does not use a capturer, set |internal_source| = true.
// Encoder configuration is otherwise updated on the next video frame.
video_send_config_.encoder_settings.internal_source = true;
CreateVideoStreams();
EXPECT_TRUE(encoder.WaitForStartBitrate());
EXPECT_EQ(video_encoder_config_.max_bitrate_bps / 1000,
EXPECT_EQ(video_encoder_config_.streams[0].max_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());
video_encoder_config_.max_bitrate_bps = 2 * bitrate_config.start_bitrate_bps;
video_encoder_config_.streams[0].max_bitrate_bps =
2 * bitrate_config.start_bitrate_bps;
video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy());
// New bitrate should be reconfigured above the previous max. As there's no
@ -1541,9 +1459,8 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
// Prepare five input frames. Send ordinary VideoFrame and texture frames
// alternatively.
std::vector<VideoFrame> input_frames;
int width = 168;
int height = 132;
int width = static_cast<int>(video_encoder_config_.streams[0].width);
int height = static_cast<int>(video_encoder_config_.streams[0].height);
test::FakeNativeHandle* handle1 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle2 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle3 = new test::FakeNativeHandle();
@ -1561,6 +1478,9 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
video_send_stream_->SetSource(&forwarder);
for (size_t i = 0; i < input_frames.size(); i++) {
forwarder.IncomingCapturedFrame(input_frames[i]);
// Do not send the next frame too fast, so the frame dropper won't drop it.
if (i < input_frames.size() - 1)
SleepMs(1000 / video_encoder_config_.streams[0].max_framerate);
// Wait until the output frame is received before sending the next input
// frame. Or the previous input frame may be replaced without delivering.
observer.WaitOutputFrame();
@ -1687,12 +1607,9 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
void PerformTest() override {
EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
// Expect |num_releases| == 1 since the encoder has been reconfigured
// once when the first frame is encoded. Not until at that point is the
// frame size known and the encoder can be properly initialized.
EXPECT_EQ(1u, num_releases());
EXPECT_EQ(0u, num_releases());
stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
EXPECT_EQ(1u, num_releases());
EXPECT_EQ(0u, num_releases());
stream_->Stop();
// Encoder should not be released before destroying the VideoSendStream.
EXPECT_FALSE(IsReleased());
@ -1714,7 +1631,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
RunBaseTest(&test_encoder);
EXPECT_TRUE(test_encoder.IsReleased());
EXPECT_EQ(2u, test_encoder.num_releases());
EXPECT_EQ(1u, test_encoder.num_releases());
}
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
@ -1797,26 +1714,6 @@ class VideoCodecConfigObserver : public test::SendTest,
}
private:
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
for (size_t i = 0; i < streams.size(); ++i) {
streams[i].temporal_layer_thresholds_bps.resize(
kVideoCodecConfigObserverNumberOfTemporalLayers - 1);
}
return streams;
}
};
void ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
@ -1824,9 +1721,12 @@ class VideoCodecConfigObserver : public test::SendTest,
send_config->encoder_settings.encoder = this;
send_config->encoder_settings.payload_name = codec_name_;
for (size_t i = 0; i < encoder_config->streams.size(); ++i) {
encoder_config->streams[i].temporal_layer_thresholds_bps.resize(
kVideoCodecConfigObserverNumberOfTemporalLayers - 1);
}
encoder_config->encoder_specific_settings = GetEncoderSpecificSettings();
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>();
encoder_config_ = encoder_config->Copy();
}
@ -2022,26 +1922,6 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
static const int kScreencastTargetBitrateKbps = 200;
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
VideoStreamFactory() {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
EXPECT_TRUE(streams[0].temporal_layer_thresholds_bps.empty());
streams[0].temporal_layer_thresholds_bps.push_back(
kScreencastTargetBitrateKbps * 1000);
return streams;
}
};
class ScreencastTargetBitrateTest : public test::SendTest,
public test::FakeEncoder {
public:
@ -2064,9 +1944,11 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
EXPECT_EQ(1u, encoder_config->number_of_streams);
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>();
EXPECT_EQ(1u, encoder_config->streams.size());
EXPECT_TRUE(
encoder_config->streams[0].temporal_layer_thresholds_bps.empty());
encoder_config->streams[0].temporal_layer_thresholds_bps.push_back(
kScreencastTargetBitrateKbps * 1000);
encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
}
@ -2107,8 +1989,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
size_t maxPayloadSize) override {
EXPECT_GE(codecSettings->startBitrate, codecSettings->minBitrate);
EXPECT_LE(codecSettings->startBitrate, codecSettings->maxBitrate);
// First reinitialization happens due to that the frame size is updated.
if (num_initializations_ == 0 || num_initializations_ == 1) {
if (num_initializations_ == 0) {
EXPECT_EQ(static_cast<unsigned int>(kMinBitrateKbps),
codecSettings->minBitrate);
EXPECT_EQ(static_cast<unsigned int>(kStartBitrateKbps),
@ -2116,14 +1997,14 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
EXPECT_EQ(static_cast<unsigned int>(kMaxBitrateKbps),
codecSettings->maxBitrate);
observation_complete_.Set();
} else if (num_initializations_ == 2) {
} else if (num_initializations_ == 1) {
EXPECT_EQ(static_cast<unsigned int>(kLowerMaxBitrateKbps),
codecSettings->maxBitrate);
// The start bitrate should be kept (-1) and capped to the max bitrate.
// Since this is not an end-to-end call no receiver should have been
// returning a REMB that could lower this estimate.
EXPECT_EQ(codecSettings->startBitrate, codecSettings->maxBitrate);
} else if (num_initializations_ == 3) {
} else if (num_initializations_ == 2) {
EXPECT_EQ(static_cast<unsigned int>(kIncreasedMaxBitrateKbps),
codecSettings->maxBitrate);
// The start bitrate will be whatever the rate BitRateController
@ -2131,9 +2012,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
// bitrate.
}
++num_initializations_;
if (num_initializations_ > 1) {
init_encode_event_.Set();
}
init_encode_event_.Set();
return FakeEncoder::InitEncode(codecSettings, numberOfCores,
maxPayloadSize);
}
@ -2141,9 +2020,6 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override {
{
rtc::CritScope lock(&crit_);
if (target_bitrate_ == newBitRate) {
return FakeEncoder::SetRates(newBitRate, frameRate);
}
target_bitrate_ = newBitRate;
}
bitrate_changed_event_.Set();
@ -2166,26 +2042,6 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
return config;
}
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
explicit VideoStreamFactory(int min_bitrate_bps)
: min_bitrate_bps_(min_bitrate_bps) {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
streams[0].min_bitrate_bps = min_bitrate_bps_;
return streams;
}
const int min_bitrate_bps_;
};
void ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
@ -2193,11 +2049,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
send_config->encoder_settings.encoder = this;
// Set bitrates lower/higher than min/max to make sure they are properly
// capped.
encoder_config->max_bitrate_bps = kMaxBitrateKbps * 1000;
// Create a new StreamFactory to be able to set
// |VideoStream.min_bitrate_bps|.
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>(kMinBitrateKbps * 1000);
encoder_config->streams.front().min_bitrate_bps = kMinBitrateKbps * 1000;
encoder_config->streams.front().max_bitrate_bps = kMaxBitrateKbps * 1000;
encoder_config_ = encoder_config->Copy();
}
@ -2222,20 +2075,25 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
call_->SetBitrateConfig(bitrate_config);
// Encoder rate is capped by EncoderConfig max_bitrate_bps.
WaitForSetRates(kMaxBitrateKbps);
encoder_config_.max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
encoder_config_.streams[0].min_bitrate_bps = 0;
encoder_config_.streams[0].max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(2, num_initializations_)
<< "Encoder should have been reconfigured with the new value.";
WaitForSetRates(kLowerMaxBitrateKbps);
encoder_config_.streams[0].target_bitrate_bps =
encoder_config_.streams[0].min_bitrate_bps;
encoder_config_.streams[0].max_bitrate_bps =
kIncreasedMaxBitrateKbps * 1000;
send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(3, num_initializations_)
<< "Encoder should have been reconfigured with the new value.";
WaitForSetRates(kLowerMaxBitrateKbps);
encoder_config_.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(4, num_initializations_)
<< "Encoder should have been reconfigured with the new value.";
// Expected target bitrate is the start bitrate set in the call to
// call_->SetBitrateConfig.
WaitForSetRates(kIncreasedStartBitrateKbps);
@ -2245,7 +2103,6 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
rtc::Event bitrate_changed_event_;
rtc::CriticalSection crit_;
uint32_t target_bitrate_ GUARDED_BY(&crit_);
int num_initializations_;
webrtc::Call* call_;
webrtc::VideoSendStream* send_stream_;
@ -2301,7 +2158,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
send_config->encoder_settings.encoder = this;
EXPECT_EQ(kNumStreams, encoder_config->number_of_streams);
EXPECT_EQ(kNumStreams, encoder_config->streams.size());
}
size_t GetNumVideoStreams() const override { return kNumStreams; }
@ -2343,9 +2200,7 @@ class Vp9HeaderObserver : public test::SendTest {
vp9_encoder_(VP9Encoder::Create()),
vp9_settings_(VideoEncoder::GetDefaultVp9Settings()),
packets_sent_(0),
frames_sent_(0),
expected_width_(0),
expected_height_(0) {}
frames_sent_(0) {}
virtual void ModifyVideoConfigsHook(
VideoSendStream::Config* send_config,
@ -2357,27 +2212,6 @@ class Vp9HeaderObserver : public test::SendTest {
private:
const int kVp9PayloadType = 105;
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
explicit VideoStreamFactory(size_t number_of_temporal_layers)
: number_of_temporal_layers_(number_of_temporal_layers) {}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width, height, encoder_config);
streams[0].temporal_layer_thresholds_bps.resize(
number_of_temporal_layers_ - 1);
return streams;
}
const size_t number_of_temporal_layers_;
};
void ModifyVideoConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
@ -2388,20 +2222,12 @@ class Vp9HeaderObserver : public test::SendTest {
ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_);
EXPECT_EQ(1u, encoder_config->number_of_streams);
encoder_config->video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>(
vp9_settings_.numberOfTemporalLayers);
EXPECT_EQ(1u, encoder_config->streams.size());
encoder_config->streams[0].temporal_layer_thresholds_bps.resize(
vp9_settings_.numberOfTemporalLayers - 1);
encoder_config_ = encoder_config->Copy();
}
void ModifyVideoCaptureStartResolution(int* width,
int* height,
int* frame_rate) override {
expected_width_ = *width;
expected_height_ = *height;
}
void PerformTest() override {
EXPECT_TRUE(Wait()) << "Test timed out waiting for VP9 packet, num frames "
<< frames_sent_;
@ -2591,8 +2417,8 @@ class Vp9HeaderObserver : public test::SendTest {
EXPECT_EQ(vp9_settings_.numberOfSpatialLayers, // N_S + 1
vp9.num_spatial_layers);
EXPECT_TRUE(vp9.spatial_layer_resolution_present); // Y:1
int expected_width = expected_width_;
int expected_height = expected_height_;
size_t expected_width = encoder_config_.streams[0].width;
size_t expected_height = encoder_config_.streams[0].height;
for (int i = static_cast<int>(vp9.num_spatial_layers) - 1; i >= 0; --i) {
EXPECT_EQ(expected_width, vp9.width[i]); // WIDTH
EXPECT_EQ(expected_height, vp9.height[i]); // HEIGHT
@ -2636,8 +2462,6 @@ class Vp9HeaderObserver : public test::SendTest {
RTPVideoHeaderVP9 last_vp9_;
size_t packets_sent_;
size_t frames_sent_;
int expected_width_;
int expected_height_;
};
TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl1SLayers) {
@ -2739,22 +2563,15 @@ TEST_F(VideoSendStreamTest, Vp9NonFlexModeSmallResolution) {
vp9_settings_.numberOfTemporalLayers = 1;
vp9_settings_.numberOfSpatialLayers = 1;
EXPECT_EQ(1u, encoder_config->number_of_streams);
EXPECT_EQ(1u, encoder_config->streams.size());
encoder_config->streams[0].width = kWidth;
encoder_config->streams[0].height = kHeight;
}
void InspectHeader(const RTPVideoHeaderVP9& vp9_header) override {
if (frames_sent_ > kNumFramesToSend)
observation_complete_.Set();
}
void ModifyVideoCaptureStartResolution(int* width,
int* height,
int* frame_rate) override {
expected_width_ = kWidth;
expected_height_ = kHeight;
*width = kWidth;
*height = kHeight;
}
} test;
RunBaseTest(&test);

View File

@ -41,11 +41,10 @@ VideoCodecType PayloadNameToCodecType(const std::string& payload_name) {
return kVideoCodecGeneric;
}
VideoCodec VideoEncoderConfigToVideoCodec(
const VideoEncoderConfig& config,
const std::vector<VideoStream>& streams,
const std::string& payload_name,
int payload_type) {
VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
const std::string& payload_name,
int payload_type) {
const std::vector<VideoStream>& streams = config.streams;
static const int kEncoderMinBitrateKbps = 30;
RTC_DCHECK(!streams.empty());
RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
@ -60,10 +59,10 @@ VideoCodec VideoEncoderConfigToVideoCodec(
break;
case VideoEncoderConfig::ContentType::kScreen:
video_codec.mode = kScreensharing;
if (streams.size() == 1 &&
streams[0].temporal_layer_thresholds_bps.size() == 1) {
if (config.streams.size() == 1 &&
config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
video_codec.targetBitrate =
streams[0].temporal_layer_thresholds_bps[0] / 1000;
config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
}
break;
}
@ -170,6 +169,8 @@ VideoCodec VideoEncoderConfigToVideoCodec(
RTC_DCHECK_GT(streams[0].max_framerate, 0);
video_codec.maxFramerate = streams[0].max_framerate;
video_codec.expect_encode_from_texture = config.expect_encode_from_texture;
return video_codec;
}
@ -305,6 +306,7 @@ ViEEncoder::ViEEncoder(uint32_t number_of_cores,
sink_(nullptr),
settings_(settings),
codec_type_(PayloadNameToCodecType(settings.payload_name)),
vp_(VideoProcessing::Create()),
video_sender_(Clock::GetRealTimeClock(), this, this),
overuse_detector_(Clock::GetRealTimeClock(),
GetCpuOveruseOptions(settings.full_overuse_time),
@ -315,7 +317,7 @@ ViEEncoder::ViEEncoder(uint32_t number_of_cores,
stats_proxy_(stats_proxy),
pre_encode_callback_(pre_encode_callback),
module_process_thread_(nullptr),
pending_encoder_reconfiguration_(false),
encoder_config_(),
encoder_start_bitrate_bps_(0),
max_data_payload_length_(0),
last_observed_bitrate_bps_(0),
@ -332,6 +334,8 @@ ViEEncoder::ViEEncoder(uint32_t number_of_cores,
captured_frame_count_(0),
dropped_frame_count_(0),
encoder_queue_("EncoderQueue") {
vp_->EnableTemporalDecimation(false);
encoder_queue_.PostTask([this, encoder_timing] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
video_sender_.RegisterExternalEncoder(
@ -403,62 +407,41 @@ void ViEEncoder::ConfigureEncoderOnTaskQueue(VideoEncoderConfig config,
size_t max_data_payload_length) {
RTC_DCHECK_RUN_ON(&encoder_queue_);
RTC_DCHECK(sink_);
LOG(LS_INFO) << "ConfigureEncoder requested.";
LOG(LS_INFO) << "ConfigureEncoderOnTaskQueue";
max_data_payload_length_ = max_data_payload_length;
encoder_config_ = std::move(config);
pending_encoder_reconfiguration_ = true;
// Reconfigure the encoder now if the encoder has an internal source or
// if this is the first time the encoder is configured.
// Otherwise, the reconfiguration is deferred until the next frame to minimize
// the number of reconfigurations. The codec configuration depends on incoming
// video frame size.
if (!last_frame_info_ || settings_.internal_source) {
if (!last_frame_info_) {
last_frame_info_ = rtc::Optional<VideoFrameInfo>(
VideoFrameInfo(176, 144, kVideoRotation_0, false));
}
ReconfigureEncoder();
}
}
VideoCodec video_codec = VideoEncoderConfigToVideoCodec(
encoder_config_, settings_.payload_name, settings_.payload_type);
void ViEEncoder::ReconfigureEncoder() {
RTC_DCHECK_RUN_ON(&encoder_queue_);
RTC_DCHECK(pending_encoder_reconfiguration_);
std::vector<VideoStream> streams =
encoder_config_.video_stream_factory->CreateEncoderStreams(
last_frame_info_->width, last_frame_info_->height, encoder_config_);
// Setting target width and height for VPM.
RTC_CHECK_EQ(VPM_OK,
vp_->SetTargetResolution(video_codec.width, video_codec.height,
video_codec.maxFramerate));
VideoCodec codec = VideoEncoderConfigToVideoCodec(
encoder_config_, streams, settings_.payload_name, settings_.payload_type);
codec.startBitrate =
std::max(encoder_start_bitrate_bps_ / 1000, codec.minBitrate);
codec.startBitrate = std::min(codec.startBitrate, codec.maxBitrate);
codec.expect_encode_from_texture = last_frame_info_->is_texture;
video_codec.startBitrate =
std::max(encoder_start_bitrate_bps_ / 1000, video_codec.minBitrate);
video_codec.startBitrate =
std::min(video_codec.startBitrate, video_codec.maxBitrate);
bool success = video_sender_.RegisterSendCodec(
&codec, number_of_cores_,
static_cast<uint32_t>(max_data_payload_length_)) == VCM_OK;
&video_codec, number_of_cores_,
static_cast<uint32_t>(max_data_payload_length)) == VCM_OK;
if (!success) {
LOG(LS_ERROR) << "Failed to configure encoder.";
RTC_DCHECK(success);
}
rate_allocator_.reset(new SimulcastRateAllocator(codec));
rate_allocator_.reset(new SimulcastRateAllocator(video_codec));
if (stats_proxy_) {
stats_proxy_->OnEncoderReconfigured(encoder_config_,
rate_allocator_->GetPreferedBitrate());
}
pending_encoder_reconfiguration_ = false;
if (stats_proxy_) {
stats_proxy_->OnEncoderReconfigured(encoder_config_,
rate_allocator_->GetPreferedBitrate());
}
sink_->OnEncoderConfigurationChanged(
std::move(streams), encoder_config_.min_transmit_bitrate_bps);
encoder_config_.streams, encoder_config_.min_transmit_bitrate_bps);
}
void ViEEncoder::OnFrame(const VideoFrame& video_frame) {
@ -541,24 +524,6 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
if (pre_encode_callback_)
pre_encode_callback_->OnFrame(video_frame);
if (video_frame.width() != last_frame_info_->width ||
video_frame.height() != last_frame_info_->height ||
video_frame.rotation() != last_frame_info_->rotation ||
video_frame.is_texture() != last_frame_info_->is_texture) {
pending_encoder_reconfiguration_ = true;
last_frame_info_ = rtc::Optional<VideoFrameInfo>(
VideoFrameInfo(video_frame.width(), video_frame.height(),
video_frame.rotation(), video_frame.is_texture()));
LOG(LS_INFO) << "Video frame parameters changed: dimensions="
<< last_frame_info_->width << "x" << last_frame_info_->height
<< ", rotation=" << last_frame_info_->rotation
<< ", texture=" << last_frame_info_->is_texture;
}
if (pending_encoder_reconfiguration_) {
ReconfigureEncoder();
}
if (EncoderPaused()) {
TraceFrameDropStart();
return;
@ -567,6 +532,16 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
"Encode");
const VideoFrame* frame_to_send = &video_frame;
// TODO(wuchengli): support texture frames.
if (!video_frame.video_frame_buffer()->native_handle()) {
// Pass frame via preprocessor.
frame_to_send = vp_->PreprocessFrame(video_frame);
if (!frame_to_send) {
// Drop this frame, or there was an error processing it.
return;
}
}
overuse_detector_.FrameCaptured(video_frame, time_when_posted_in_ms);
@ -585,10 +560,10 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
has_received_sli_ = false;
has_received_rpsi_ = false;
video_sender_.AddVideoFrame(video_frame, &codec_specific_info);
return;
video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info);
return;
}
video_sender_.AddVideoFrame(video_frame, nullptr);
video_sender_.AddVideoFrame(*frame_to_send, nullptr);
}
void ViEEncoder::SendKeyFrame() {

View File

@ -21,7 +21,6 @@
#include "webrtc/base/task_queue.h"
#include "webrtc/call.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h"
@ -103,24 +102,8 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
class EncodeTask;
class VideoSourceProxy;
struct VideoFrameInfo {
VideoFrameInfo(int width,
int height,
VideoRotation rotation,
bool is_texture)
: width(width),
height(height),
rotation(rotation),
is_texture(is_texture) {}
int width;
int height;
webrtc::VideoRotation rotation;
bool is_texture;
};
void ConfigureEncoderOnTaskQueue(VideoEncoderConfig config,
size_t max_data_payload_length);
void ReconfigureEncoder();
// Implements VideoSinkInterface.
void OnFrame(const VideoFrame& video_frame) override;
@ -155,6 +138,7 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
const VideoSendStream::Config::EncoderSettings settings_;
const VideoCodecType codec_type_;
const std::unique_ptr<VideoProcessing> vp_;
vcm::VideoSender video_sender_ ACCESS_ON(&encoder_queue_);
OveruseFrameDetector overuse_detector_ ACCESS_ON(&encoder_queue_);
@ -175,10 +159,6 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
std::unique_ptr<SimulcastRateAllocator> rate_allocator_
ACCESS_ON(&encoder_queue_);
// Set when ConfigureEncoder has been called in order to lazy reconfigure the
// encoder on the next frame.
bool pending_encoder_reconfiguration_ ACCESS_ON(&encoder_queue_);
rtc::Optional<VideoFrameInfo> last_frame_info_ ACCESS_ON(&encoder_queue_);
uint32_t encoder_start_bitrate_bps_ ACCESS_ON(&encoder_queue_);
size_t max_data_payload_length_ ACCESS_ON(&encoder_queue_);
uint32_t last_observed_bitrate_bps_ ACCESS_ON(&encoder_queue_);

View File

@ -26,8 +26,6 @@ class ViEEncoderTest : public ::testing::Test {
ViEEncoderTest()
: video_send_config_(VideoSendStream::Config(nullptr)),
codec_width_(320),
codec_height_(240),
fake_encoder_(),
stats_proxy_(Clock::GetRealTimeClock(),
video_send_config_,
@ -41,7 +39,10 @@ class ViEEncoderTest : public ::testing::Test {
video_send_config_.encoder_settings.payload_type = 125;
VideoEncoderConfig video_encoder_config;
test::FillEncoderConfiguration(1, &video_encoder_config);
video_encoder_config.streams = test::CreateVideoStreams(1);
codec_width_ = static_cast<int>(video_encoder_config.streams[0].width);
codec_height_ = static_cast<int>(video_encoder_config.streams[0].height);
vie_encoder_.reset(new ViEEncoder(
1 /* number_of_cores */, &stats_proxy_,
video_send_config_.encoder_settings, nullptr /* pre_encode_callback */,
@ -80,26 +81,6 @@ class ViEEncoderTest : public ::testing::Test {
: FakeEncoder(Clock::GetRealTimeClock()),
continue_encode_event_(false, false) {}
VideoCodec codec_config() {
rtc::CritScope lock(&crit_);
return config_;
}
void BlockNextEncode() {
rtc::CritScope lock(&crit_);
block_next_encode_ = true;
}
void ContinueEncode() { continue_encode_event_.Set(); }
void CheckLastTimeStampsMatch(int64_t ntp_time_ms,
uint32_t timestamp) const {
rtc::CritScope lock(&crit_);
EXPECT_EQ(timestamp_, timestamp);
EXPECT_EQ(ntp_time_ms_, ntp_time_ms);
}
private:
int32_t Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<FrameType>* frame_types) override {
@ -122,8 +103,21 @@ class ViEEncoderTest : public ::testing::Test {
return result;
}
void BlockNextEncode() {
rtc::CritScope lock(&crit_);
block_next_encode_ = true;
}
void ContinueEncode() { continue_encode_event_.Set(); }
void CheckLastTimeStampsMatch(int64_t ntp_time_ms,
uint32_t timestamp) const {
rtc::CritScope lock(&crit_);
EXPECT_EQ(timestamp_, timestamp);
EXPECT_EQ(ntp_time_ms_, ntp_time_ms);
}
private:
rtc::CriticalSection crit_;
bool block_next_encode_ = false;
rtc::Event continue_encode_event_;
@ -290,48 +284,20 @@ TEST_F(ViEEncoderTest, ConfigureEncoderTriggersOnEncoderConfigurationChanged) {
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
sink_.WaitForEncodedFrame(1);
// The encoder will have been configured twice. First time before the first
// frame has been received. Then a second time when the resolution is known.
EXPECT_EQ(2, sink_.number_of_reconfigurations());
EXPECT_EQ(1, sink_.number_of_reconfigurations());
VideoEncoderConfig video_encoder_config;
test::FillEncoderConfiguration(1, &video_encoder_config);
video_encoder_config.streams = test::CreateVideoStreams(1);
video_encoder_config.min_transmit_bitrate_bps = 9999;
vie_encoder_->ConfigureEncoder(std::move(video_encoder_config), 1440);
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
sink_.WaitForEncodedFrame(2);
EXPECT_EQ(3, sink_.number_of_reconfigurations());
EXPECT_EQ(2, sink_.number_of_reconfigurations());
EXPECT_EQ(9999, sink_.last_min_transmit_bitrate());
vie_encoder_->Stop();
}
TEST_F(ViEEncoderTest, FrameResolutionChangeReconfigureEncoder) {
const int kTargetBitrateBps = 100000;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Capture a frame and wait for it to synchronize with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
sink_.WaitForEncodedFrame(1);
// The encoder will have been configured twice. First time before the first
// frame has been received. Then a second time when the resolution is known.
EXPECT_EQ(2, sink_.number_of_reconfigurations());
EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width);
EXPECT_EQ(codec_height_, fake_encoder_.codec_config().height);
codec_width_ *= 2;
codec_height_ *= 2;
// Capture a frame with a higher resolution and wait for it to synchronize
// with the encoder thread.
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
sink_.WaitForEncodedFrame(2);
EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width);
EXPECT_EQ(codec_height_, fake_encoder_.codec_config().height);
EXPECT_EQ(3, sink_.number_of_reconfigurations());
vie_encoder_->Stop();
}
} // namespace webrtc

View File

@ -172,7 +172,7 @@ class VideoFrame {
const;
// Return true if the frame is stored in a texture.
bool is_texture() const {
bool is_texture() {
return video_frame_buffer() &&
video_frame_buffer()->native_handle() != nullptr;
}